code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class GlasgowConfig: <NEW_LINE> <INDENT> size = 64 <NEW_LINE> _encoding = "<1s16sI16s2H" <NEW_LINE> def __init__(self, revision, serial, bitstream_size=0, bitstream_id=b"\x00"*16, voltage_limit=None): <NEW_LINE> <INDENT> self.revision = revision <NEW_LINE> self.serial = serial <NEW_LINE> self.bitstream_size = bitstream_size <NEW_LINE> self.bitstream_id = bitstream_id <NEW_LINE> self.voltage_limit = [5500, 5500] if voltage_limit is None else voltage_limit <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> data = struct.pack(self._encoding, self.revision.encode("ascii"), self.serial.encode("ascii"), self.bitstream_size, self.bitstream_id, self.voltage_limit[0], self.voltage_limit[1]) <NEW_LINE> return data.ljust(self.size, b"\x00") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def decode(cls, data): <NEW_LINE> <INDENT> if len(data) != cls.size: <NEW_LINE> <INDENT> raise ValueError("Incorrect configuration length") <NEW_LINE> <DEDENT> voltage_limit = [0, 0] <NEW_LINE> revision, serial, bitstream_size, bitstream_id, voltage_limit[0], voltage_limit[1] = struct.unpack_from(cls._encoding, data, 0) <NEW_LINE> return cls(revision.decode("ascii"), serial.decode("ascii"), bitstream_size, bitstream_id, voltage_limit) | Glasgow EEPROM configuration data.
:ivar int size:
Total size of configuration block (currently 64).
:ivar str[1] revision:
Revision letter, ``A``-``Z``.
:ivar str[16] serial:
Serial number, in ISO 8601 format.
:ivar int bitstream_size:
Size of bitstream flashed to ICE_MEM, or 0 if there isn't one.
:ivar bytes[16] bitstream_id:
Opaque string that uniquely identifies bitstream functionality,
but not necessarily any particular routing and placement.
Only meaningful if ``bitstream_size`` is set.
:ivar int[2] voltage_limit:
Maximum allowed I/O port voltage, in millivolts. | 62598fc17cff6e4e811b5c89 |
class Process(object): <NEW_LINE> <INDENT> def __init__(self, name, is_signal): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.is_signal = is_signal <NEW_LINE> self.systematics = {} <NEW_LINE> <DEDENT> def apply_systematic(self, other): <NEW_LINE> <INDENT> syst_name, syst_value = other.systematic, other.value <NEW_LINE> if syst_name in self.systematics: <NEW_LINE> <INDENT> raise KeyError("Systematic %s already entered into process %s" % (syst_name, self.name)) <NEW_LINE> <DEDENT> self.systematics[syst_name] = syst_value <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self is other | Represents a specific process (Zjets) in a category
One can inplace-multiply a nuisance tuple to register
that nuisance affects this process.
>>> proc = Process('a_process', False)
>>> proc.is_signal
False
>>> proc.name
'a_process'
>>> my_nuisance = Nuisance('a_syst', 'lnN')
>>> proc.apply_systematic(my_nuisance(1.05))
>>> proc.systematics[my_nuisance]
1.05 | 62598fc19f288636728189ae |
class ArchiveCoverage(ShellCommand): <NEW_LINE> <INDENT> flunkOnFailure = True <NEW_LINE> description = ["arch cov"] <NEW_LINE> name = "arch cov" <NEW_LINE> COMMAND_TEMPL = 'rm cov-*.tar.bz2 ; VER=`python setup.py --name`-`python setup.py --version` ; export VER ; coverage html ; mv .coverage "coverage-${VER}" && mv .coverage-results "coverage-results-${VER}" && mv htmlcov "htmlcov-${VER}" && %s cjvf "cov-${VER}.tar.bz2" "coverage-${VER}" "coverage-results-${VER}" "htmlcov-${VER}"' <NEW_LINE> def __init__(self, TAR, *args, **kwargs): <NEW_LINE> <INDENT> ShellCommand.__init__(self, *args, **kwargs) <NEW_LINE> self.addFactoryArguments(TAR=TAR) <NEW_LINE> self.command = self.COMMAND_TEMPL % TAR | Put coverage results into an archive for transport. | 62598fc13346ee7daa33777b |
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError("User must have email address") <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_staff = True <NEW_LINE> user.is_superuser = True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user | Custom Manager for user profiles | 62598fc13d592f4c4edbb123 |
class Weapon(Item): <NEW_LINE> <INDENT> def __init__(self, damage=10, damage_type='smashing', name='WEAPON', description='THIS IS A WEAPON', weight=1.0, value=10, slot='hand', reqs={'level':0,'class':None}): <NEW_LINE> <INDENT> super().__init__(name=name, description=description, weight=weight, value=value, slot=slot, reqs=reqs) <NEW_LINE> self.damage = damage <NEW_LINE> self.damage_type = damage_type <NEW_LINE> <DEDENT> def equip(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def unequip(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def use(self): <NEW_LINE> <INDENT> return self.damage, self.damage_type | Base class for weapons. Inherits from 'Item'
Attributes:
damage {int} -- Amount of damage this weapon does. (default: {10})
damage_type {str} -- Type of damage this weapon does. (default: {'smashing'})
Returns:
[type] -- [description] | 62598fc1377c676e912f6ea6 |
@LinkState.register(_type=1116) <NEW_LINE> class UnidirectDelayVar(TLV): <NEW_LINE> <INDENT> TYPE_STR = 'unidirect_delay_var' <NEW_LINE> @classmethod <NEW_LINE> def unpack(cls, data): <NEW_LINE> <INDENT> value = int(binascii.b2a_hex(data), 16) <NEW_LINE> return cls(value=value) | Unidirectional Delay Variation | 62598fc17d847024c075c622 |
class NoStartTeacher(Convai2Teacher): <NEW_LINE> <INDENT> def __init__(self, opt, shared=None): <NEW_LINE> <INDENT> super().__init__(opt, shared) <NEW_LINE> self.num_exs = sum(len(d['dialogue']) - 1 for d in self.data) <NEW_LINE> self.all_eps = self.data + [d for d in self.data if len(d['dialogue']) > 2] <NEW_LINE> self.num_eps = len(self.all_eps) <NEW_LINE> <DEDENT> def get(self, episode_idx, entry_idx=0): <NEW_LINE> <INDENT> full_eps = self.all_eps[episode_idx] <NEW_LINE> entries = full_eps['dialogue'] <NEW_LINE> speaker_id = int(episode_idx >= len(self.data)) <NEW_LINE> their_turn = entries[speaker_id + 2 * entry_idx] <NEW_LINE> my_turn = entries[1 + speaker_id + 2 * entry_idx] <NEW_LINE> episode_done = 2 * entry_idx + speaker_id + 1 >= len(entries) - 2 <NEW_LINE> action = { 'topic': full_eps['topic'], 'text': their_turn['text'], 'emotion': their_turn['emotion'], 'act_type': their_turn['act'], 'labels': [my_turn['text']], 'episode_done': episode_done, } <NEW_LINE> return action | Same as default teacher, but it doesn't contain __SILENCE__ entries.
If we are the first speaker, then the first utterance is skipped. | 62598fc13317a56b869be683 |
class Tag(object): <NEW_LINE> <INDENT> def __init__(self, tag_name, commit): <NEW_LINE> <INDENT> self.tag_name = tag_name <NEW_LINE> self.commit = commit <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> git('tag', '-d', self.tag_name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.tag_name | Static label for a commit.
Attributes:
tag_name: the name of this tag.
commit: the commit this tag labels. | 62598fc17c178a314d78d705 |
class ServerFunCateg(models.Model): <NEW_LINE> <INDENT> server_categ_name = models.CharField(max_length = 60) <NEW_LINE> delmark = models.CharField(max_length = 10, default = False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.server_categ_name | docstring for ServerFunCateg | 62598fc1be7bc26dc9251f8f |
class File(system.Item): <NEW_LINE> <INDENT> __image__ = "desktop/images/document.gif" <NEW_LINE> __props__ = system.Item.__props__ + ('file',) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> system.Item.__init__(self) <NEW_LINE> self.file = datatypes.RequiredFile() <NEW_LINE> <DEDENT> def get_size(self): <NEW_LINE> <INDENT> return len(self.file) <NEW_LINE> <DEDENT> size = property(get_size, None, None, "The file's size") | Simple file object
@ivar file: The file data type
@type file: L{RequiredFile<porcupine.datatypes.RequiredFile>} | 62598fc157b8e32f52508251 |
class OnAccessMutant(metaclass=_MetaImmutableMutant): <NEW_LINE> <INDENT> __slots__ = ('__wrapped_object__', '__wrapped_mutator__') | A class that proxies everything to another object.
The quirk that it can change the proxied object on every access
witha mutator function.
But this object doesn't have to be constant:
it's reevaluated on every access by calling the provided callable.
Usage: OnAccessMutant(initial_object, callable_mutator)
where: callable_mutator(wrapped_object) -> new_wrapped_object | 62598fc18a349b6b436864a4 |
class WorkflowTemplate(_messages.Message): <NEW_LINE> <INDENT> @encoding.MapUnrecognizedFields('additionalProperties') <NEW_LINE> class LabelsValue(_messages.Message): <NEW_LINE> <INDENT> class AdditionalProperty(_messages.Message): <NEW_LINE> <INDENT> key = _messages.StringField(1) <NEW_LINE> value = _messages.StringField(2) <NEW_LINE> <DEDENT> additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) <NEW_LINE> <DEDENT> createTime = _messages.StringField(1) <NEW_LINE> id = _messages.StringField(2) <NEW_LINE> jobs = _messages.MessageField('OrderedJob', 3, repeated=True) <NEW_LINE> labels = _messages.MessageField('LabelsValue', 4) <NEW_LINE> name = _messages.StringField(5) <NEW_LINE> parameters = _messages.MessageField('TemplateParameter', 6, repeated=True) <NEW_LINE> placement = _messages.MessageField('WorkflowTemplatePlacement', 7) <NEW_LINE> updateTime = _messages.StringField(8) <NEW_LINE> version = _messages.IntegerField(9, variant=_messages.Variant.INT32) | A Cloud Dataproc workflow template resource.
Messages:
LabelsValue: Optional. The labels to associate with this template. These
labels will be propagated to all jobs and clusters created by the
workflow instance.Label keys must contain 1 to 63 characters, and must
conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt).Label values
may be empty, but, if present, must contain 1 to 63 characters, and must
conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt).No more than
32 labels can be associated with a template.
Fields:
createTime: Output only. The time template was created.
id: Required. The template id.The id must contain only letters (a-z, A-Z),
numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end
with underscore or hyphen. Must consist of between 3 and 50 characters.
jobs: Required. The Directed Acyclic Graph of Jobs to submit.
labels: Optional. The labels to associate with this template. These labels
will be propagated to all jobs and clusters created by the workflow
instance.Label keys must contain 1 to 63 characters, and must conform to
RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt).Label values may be
empty, but, if present, must contain 1 to 63 characters, and must
conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt).No more than
32 labels can be associated with a template.
name: Output only. The "resource name" of the template, as described in
https://cloud.google.com/apis/design/resource_names of the form
projects/{project_id}/regions/{region}/workflowTemplates/{template_id}
parameters: Optional. Template parameters whose values are substituted
into the template. Values for parameters must be provided when the
template is instantiated.
placement: Required. WorkflowTemplate scheduling information.
updateTime: Output only. The time template was last updated.
version: Optional. Used to perform a consistent read-modify-write.This
field should be left blank for a CreateWorkflowTemplate request. It is
required for an UpdateWorkflowTemplate request, and must match the
current server version. A typical update template flow would fetch the
current template with a GetWorkflowTemplate request, which will return
the current template with the version field filled in with the current
server version. The user updates other fields in the template, then
returns it as part of the UpdateWorkflowTemplate request. | 62598fc121bff66bcd722ed1 |
class _MyFormatter(logging.Formatter): <NEW_LINE> <INDENT> def format(self, record): <NEW_LINE> <INDENT> format_orig = self._fmt <NEW_LINE> if record.levelno == logging.DEBUG: <NEW_LINE> <INDENT> self._fmt = " %(msg)s" <NEW_LINE> <DEDENT> elif record.levelno == logging.INFO: <NEW_LINE> <INDENT> self._fmt = "%(msg)s" <NEW_LINE> <DEDENT> elif record.levelno == logging.WARNING: <NEW_LINE> <INDENT> self._fmt = _yellow("[W] %(msg)s") <NEW_LINE> <DEDENT> elif record.levelno == logging.ERROR: <NEW_LINE> <INDENT> self._fmt = _red("[E] %(msg)s") <NEW_LINE> <DEDENT> result = logging.Formatter.format(self, record) <NEW_LINE> self._fmt = format_orig <NEW_LINE> return result | Logging Formatter | 62598fc17d43ff2487427538 |
class Meta: <NEW_LINE> <INDENT> app_label = "toolbox" <NEW_LINE> verbose_name = "Licentie" <NEW_LINE> verbose_name_plural = "Licenties" | Change display of model in Django admin | 62598fc126068e7796d4cbc2 |
class ResetPasswordEmail(BaseModel): <NEW_LINE> <INDENT> email_code: str <NEW_LINE> user_phone: str <NEW_LINE> new_password: str <NEW_LINE> confirm_password: str | 通过邮箱重置密码 | 62598fc17cff6e4e811b5c8b |
class Marker(BaseElement, ViewBox, Presentation): <NEW_LINE> <INDENT> elementname = 'marker' <NEW_LINE> def __init__(self, insert=None, size=None, orient=None, **extra): <NEW_LINE> <INDENT> super(Marker, self).__init__(**extra) <NEW_LINE> if insert: <NEW_LINE> <INDENT> self['refX'] = insert[0] <NEW_LINE> self['refY'] = insert[1] <NEW_LINE> <DEDENT> if size: <NEW_LINE> <INDENT> self['markerWidth'] = size[0] <NEW_LINE> self['markerHeight'] = size[1] <NEW_LINE> <DEDENT> if orient is not None: <NEW_LINE> <INDENT> self['orient'] = orient <NEW_LINE> <DEDENT> if 'id' not in self.attribs: <NEW_LINE> <INDENT> self['id'] = self.next_id() | The **marker** element defines the graphics that is to be used for
drawing arrowheads or polymarkers on a given **path**, **line**, **polyline**
or **polygon** element.
Add Marker definitions to a **defs** section, preferred to the **defs** section
of the **main drawing**. | 62598fc12c8b7c6e89bd3a29 |
class APIgetStatusCodeDeckCard(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.get_method_endpoints = [ reverse('flashcards:flashcards_api:deck_list'), reverse('flashcards:flashcards_api:card_list'), ] <NEW_LINE> <DEDENT> def test_flashcards_endpoint_get_method(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> for endpoint in self.get_method_endpoints: <NEW_LINE> <INDENT> response = c.get(endpoint) <NEW_LINE> self.assertEqual(response.status_code, 200) | Tests API endpoint response status codes | 62598fc197e22403b383b171 |
class GPXException(Exception): <NEW_LINE> <INDENT> pass | Exception used for invalid GPX files. Is is used when the XML file is
valid but something is wrong with the GPX data. | 62598fc1851cf427c66b851d |
class ProbablyAlive(Rule): <NEW_LINE> <INDENT> labels = [_("On date:")] <NEW_LINE> name = _('People probably alive') <NEW_LINE> description = _("Matches people without indications of death that are not too old") <NEW_LINE> category = _('General filters') <NEW_LINE> def prepare(self,db): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.current_date = parser.parse(conv_to_unicode_direct(self.list[0])) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.current_date = None <NEW_LINE> <DEDENT> <DEDENT> def apply(self,db,person): <NEW_LINE> <INDENT> return probably_alive(person,db,self.current_date) | People probably alive | 62598fc155399d3f0562677e |
class Clipboard(common.AbstractWindowsCommand, sessions.SessionsMixin): <NEW_LINE> <INDENT> def calculate(self): <NEW_LINE> <INDENT> kernel_space = utils.load_as(self._config) <NEW_LINE> sesses = dict((int(session.SessionId), session) for session in self.session_spaces(kernel_space) ) <NEW_LINE> session_handles = {} <NEW_LINE> e0 = obj.NoneObject("Unknown tagCLIPDATA") <NEW_LINE> e1 = obj.NoneObject("Unknown tagWINDOWSTATION") <NEW_LINE> e2 = obj.NoneObject("Unknown tagCLIP") <NEW_LINE> filters = [lambda x : str(x.bType) == "TYPE_CLIPDATA"] <NEW_LINE> for sid, session in list(sesses.items()): <NEW_LINE> <INDENT> handles = {} <NEW_LINE> shared_info = session.find_shared_info() <NEW_LINE> if not shared_info: <NEW_LINE> <INDENT> debug.debug("No shared info for session {0}".format(sid)) <NEW_LINE> continue <NEW_LINE> <DEDENT> for handle in shared_info.handles(filters): <NEW_LINE> <INDENT> handles[int(handle.phead.h)] = handle <NEW_LINE> <DEDENT> session_handles[sid] = handles <NEW_LINE> <DEDENT> for wndsta in windowstations.WndScan(self._config).calculate(): <NEW_LINE> <INDENT> session = sesses.get(int(wndsta.dwSessionId), None) <NEW_LINE> if not session: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> handles = session_handles.get(int(session.SessionId), None) <NEW_LINE> if not handles: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> clip_array = wndsta.pClipBase.dereference() <NEW_LINE> if not clip_array: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for clip in clip_array: <NEW_LINE> <INDENT> handle = handles.get(int(clip.hData), e0) <NEW_LINE> if handle: <NEW_LINE> <INDENT> handles.pop(int(clip.hData)) <NEW_LINE> <DEDENT> yield session, wndsta, clip, handle <NEW_LINE> <DEDENT> <DEDENT> for sid in list(sesses.keys()): <NEW_LINE> <INDENT> handles = session_handles.get(sid, None) <NEW_LINE> if not handles: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for handle in list(handles.values()): <NEW_LINE> <INDENT> yield sesses[sid], e1, e2, handle <NEW_LINE> 
<DEDENT> <DEDENT> <DEDENT> def render_text(self, outfd, data): <NEW_LINE> <INDENT> self.table_header(outfd, [("Session", "10"), ("WindowStation", "12"), ("Format", "18"), ("Handle", "[addr]"), ("Object", "[addrpad]"), ("Data", "50"), ]) <NEW_LINE> for session, wndsta, clip, handle in data: <NEW_LINE> <INDENT> if not clip: <NEW_LINE> <INDENT> fmt = obj.NoneObject("Format unknown") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if clip.fmt.v() in consts.CLIPBOARD_FORMAT_ENUM: <NEW_LINE> <INDENT> fmt = str(clip.fmt) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fmt = hex(clip.fmt.v()) <NEW_LINE> <DEDENT> <DEDENT> if clip: <NEW_LINE> <INDENT> handle_value = clip.hData <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> handle_value = handle.phead.h <NEW_LINE> <DEDENT> clip_data = "" <NEW_LINE> if handle and "TEXT" in fmt: <NEW_LINE> <INDENT> clip_data = handle.reference_object().as_string(fmt) <NEW_LINE> <DEDENT> self.table_row(outfd, session.SessionId, wndsta.Name, fmt, handle_value, handle.phead.v(), clip_data) <NEW_LINE> if self._config.VERBOSE and handle: <NEW_LINE> <INDENT> hex_dump = handle.reference_object().as_hex() <NEW_LINE> outfd.write("{0}".format(hex_dump)) | Extract the contents of the windows clipboard | 62598fc192d797404e388c96 |
class ProductIdentifier (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ProductIdentifier') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/toivotuo/Dropbox/Personal/Studies/UoH/tlbop/tapestry/tapestry/router/xsd/rocs.001.001.06.xsd', 146, 2) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __ProductName = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ProductName'), 'ProductName', '__urnrocs_001_001_06_ProductIdentifier_urnrocs_001_001_06ProductName', False, pyxb.utils.utility.Location('/home/toivotuo/Dropbox/Personal/Studies/UoH/tlbop/tapestry/tapestry/router/xsd/rocs.001.001.06.xsd', 148, 6), ) <NEW_LINE> ProductName = property(__ProductName.value, __ProductName.set, None, None) <NEW_LINE> _ElementMap.update({ __ProductName.name() : __ProductName }) <NEW_LINE> _AttributeMap.update({ }) | Complex type {urn:rocs.001.001.06}ProductIdentifier with content type ELEMENT_ONLY | 62598fc17c178a314d78d707 |
class UseOldRepo(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request, repo_name, *args, **kwargs): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> repo_name = self.kwargs['repo_name'] <NEW_LINE> old_repo_obj = OldRepoSetUp(user, repo_name) <NEW_LINE> return_dict = old_repo_obj.use_old_repo() <NEW_LINE> if return_dict['message_type'] is 'error': <NEW_LINE> <INDENT> messages.error(request, return_dict['message']) <NEW_LINE> return HttpResponseRedirect(reverse('old-repo')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.success(request, return_dict['message']) <NEW_LINE> return HttpResponseRedirect(reverse('home')) | UseOldRepo to register a repo on already created Repository.
Example:
Triggers when:
User clicks on one of the already created repository
clamining that the repo contains the required files.
Tasks:
* View for logged in users only.
* Select any repo from the repo list
* Make some earlier checks
* Clone the repo
* Check if the required contents are present or not
* If yes do the required operations:
* Fill repo table
* Select Main Site
* Else give an error message
* Integrate celery to show the amount of task completed | 62598fc1ff9c53063f51a8b6 |
class Paraboloid(QuadricGM): <NEW_LINE> <INDENT> def __init__(self, a=1., b=None): <NEW_LINE> <INDENT> if b is None: <NEW_LINE> <INDENT> b = a <NEW_LINE> <DEDENT> QuadricGM.__init__(self) <NEW_LINE> self.a = 1./(a**2) <NEW_LINE> self.b = 1./(b**2) <NEW_LINE> <DEDENT> def _normals(self, hits, directs): <NEW_LINE> <INDENT> hit = N.dot(N.linalg.inv(self._working_frame), N.vstack((hits.T, N.ones(hits.shape[0])))) <NEW_LINE> dir_loc = N.dot(self._working_frame[:3,:3].T, directs.T) <NEW_LINE> partial_x = 2*hit[0]*self.a <NEW_LINE> partial_y = 2*hit[1]*self.b <NEW_LINE> partial_z = -1*N.ones(N.shape(hits)[0]) <NEW_LINE> local_normal = N.vstack((partial_x, partial_y, partial_z)) <NEW_LINE> local_unit = local_normal/N.sqrt(N.sum(local_normal**2, axis=0)) <NEW_LINE> down = N.sum(dir_loc * local_unit, axis=0) > 0. <NEW_LINE> local_unit[:,down] *= -1 <NEW_LINE> normals = N.dot(self._working_frame[:3,:3], local_unit) <NEW_LINE> return normals <NEW_LINE> <DEDENT> def get_ABC(self, ray_bundle): <NEW_LINE> <INDENT> d = N.dot(self._working_frame[:3,:3].T, ray_bundle.get_directions()) <NEW_LINE> v = N.dot(N.linalg.inv(self._working_frame), N.vstack((ray_bundle.get_vertices(), N.ones(d.shape[1]))))[:3] <NEW_LINE> A = self.a*d[0]**2 + self.b*d[1]**2 <NEW_LINE> B = 2*self.a*d[0]*v[0] + 2*self.b*d[1]*v[1] - d[2] <NEW_LINE> C = self.a*v[0]**2 + self.b*v[1]**2 - v[2] <NEW_LINE> return A, B, C | Implements the geometry of a circular paraboloid surface | 62598fc1ad47b63b2c5a7abf |
class BankSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Bank <NEW_LINE> fields = ('id', 'name', 'rank') | @class BankSerializer
@brief
Serializer for Bank | 62598fc1796e427e5384e9fe |
class Emptyfy(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.emptyfy" <NEW_LINE> bl_label = "Emptify" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object is not None <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> main(context) <NEW_LINE> return {'FINISHED'} | Tooltip | 62598fc126068e7796d4cbc4 |
class TestServicesVrrp(BaseActionTestCase): <NEW_LINE> <INDENT> action_cls = services_vrrp <NEW_LINE> def test_action(self): <NEW_LINE> <INDENT> action = self.get_action_instance() <NEW_LINE> mock_callback = MockCallback() <NEW_LINE> kwargs = { 'ip_version': '4', 'ip': '', 'username': '', 'password': '', 'port': '22', 'test': True, 'callback': mock_callback.callback } <NEW_LINE> action.run(**kwargs) <NEW_LINE> expected_xml = ( '<config><rbridge-id xmlns="urn:brocade.com:mgmt:brocade-rbridge">' '<rbridge-id>1</rbridge-id><protocol xmlns="urn:brocade.com:mgmt:b' 'rocade-interface"><hide-vrrp-holder xmlns="urn:brocade.com:mgmt:b' 'rocade-vrrp"><vrrp /></hide-vrrp-holder></protocol></rbridge-id><' '/config>' ) <NEW_LINE> self.assertTrue(expected_xml, mock_callback.returned_data) | Test holder class
| 62598fc176e4537e8c3ef80f |
class class_hierarchy(root): <NEW_LINE> <INDENT> def __init__(self, hierarchy): <NEW_LINE> <INDENT> super(class_hierarchy, self).__init__("class", hierarchy) | Represent a name scope hierarchy.
The class hierarchy represents things that in C++ would equate to using a ``::`` to
gain access to. This includes:
- Classes and structs (:class:`hierarchies.clike <testing.hierarchies.clike>`).
- Enums (:class:`hierarchies.enum <testing.hierarchies.enum>`).
- Namespaces (:class:`hierarchies.namespace <testing.hierarchies.namespace>`).
- Unions (:class:`hierarchies.union <testing.hierarchies.union>`).
Consider the following C++ code:
.. code-block:: cpp
// in file: include/main.h
#pragma once
namespace detail {
struct SomeStruct { /* ... */ };
}
struct SomeStruct {
struct View { /* ... */ };
};
Then the testing code may look like:
.. code-block:: py
from testing.base import ExhaleTestCase
from testing.hierarchies import class_hierarchy, \
clike, \
compare_class_hierarchy, \
namespace
class SomeTest(ExhaleTestCase):
test_project = "..." # specify the correct name...
def test_class_hierarchy(self):
class_hierarchy_dict = {
clike("struct", "SomeStruct"): {
clike("struct", "View"): {}
},
namespace("detail"): {
clike("struct", "SomeStruct"): {}
}
}
compare_class_hierarchy(self, class_hierarchy(class_hierarchy_dict))
**Parameters**
``hierarchy`` (:class:`python:dict`)
The hierarchy associated with the name scopes for the test project. | 62598fc1fff4ab517ebcda4f |
class TestUpload(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> filename = "sample_file.txt" <NEW_LINE> test_dir = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> filepath = os.path.join(test_dir, filename) <NEW_LINE> warnings.simplefilter("ignore", ResourceWarning) <NEW_LINE> u = Uploader(profile_name='wmorgan85') <NEW_LINE> u.upload_file(filepath, "wmorgan85-iot-dashboard", filename) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> u = Uploader(profile_name='wmorgan85') <NEW_LINE> u.delete_file("sample_file.txt", "wmorgan85-iot-dashboard") <NEW_LINE> <DEDENT> def test_can_create_s3_client_with_profile(self): <NEW_LINE> <INDENT> u = Uploader() <NEW_LINE> s = u.create_s3_client(profile_name='wmorgan85') <NEW_LINE> self.assertIsNotNone(s) <NEW_LINE> <DEDENT> def test_can_list_s3_buckets(self): <NEW_LINE> <INDENT> u = Uploader() <NEW_LINE> bucket_list = u.list_buckets() <NEW_LINE> is_list = isinstance(bucket_list, list) <NEW_LINE> self.assertEqual(is_list, True) <NEW_LINE> <DEDENT> def test_can_list_bucket_items(self): <NEW_LINE> <INDENT> u = Uploader() <NEW_LINE> items = u.list_items("wmorgan85-iot-dashboard") <NEW_LINE> validation_list = [ "README", "index.html", "moviedata.json", "refresh.js", "requirements.txt", "sample_file.txt" ] <NEW_LINE> self.assertEqual(items, validation_list) <NEW_LINE> <DEDENT> def test_can_upload_file(self): <NEW_LINE> <INDENT> bucket = "wmorgan85-iot-dashboard" <NEW_LINE> filename = "sample_file.txt" <NEW_LINE> test_dir = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> filepath = os.path.join(test_dir, filename) <NEW_LINE> u = Uploader(profile_name='wmorgan85') <NEW_LINE> self.assertEqual(u.upload_file(filepath, bucket, filename), True) <NEW_LINE> <DEDENT> def test_can_download_file(self): <NEW_LINE> <INDENT> bucket = "wmorgan85-iot-dashboard" <NEW_LINE> filename = "sample_file.txt" <NEW_LINE> test_dir = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> filepath = 
os.path.join(test_dir, filename) <NEW_LINE> u = Uploader(profile_name='wmorgan85') <NEW_LINE> u.download_file(filename, filepath+".new", bucket) <NEW_LINE> self.assertEqual(filecmp.cmp(filepath, filepath+".new"), True) <NEW_LINE> <DEDENT> def test_can_delete_file(self): <NEW_LINE> <INDENT> bucket = "wmorgan85-iot-dashboard" <NEW_LINE> filename = "sample_file.txt" <NEW_LINE> expected_response = [{'Key': filename}] <NEW_LINE> u = Uploader(profile_name='wmorgan85') <NEW_LINE> response = u.delete_file(filename, bucket) <NEW_LINE> self.assertEqual(response, expected_response) <NEW_LINE> <DEDENT> def test_can_create_s3_client_with_secrets(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_can_close_s3_session(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_can_send_file_with_permissions(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_can_send_file_with_progress(self): <NEW_LINE> <INDENT> pass | Test cases for AWS connectivity | 62598fc12c8b7c6e89bd3a2b |
class Post: <NEW_LINE> <INDENT> class Body(RepoCommitBody): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class Header(Schema): <NEW_LINE> <INDENT> X_GitHub_Media_Type = fields.String(data_key='X-GitHub-Media-Type', description='You can check the current version of media type in responses.\n') <NEW_LINE> Accept = fields.String(description='Is used to set specified media type.') <NEW_LINE> X_RateLimit_Limit = fields.Integer(data_key='X-RateLimit-Limit') <NEW_LINE> X_RateLimit_Remaining = fields.Integer(data_key='X-RateLimit-Remaining') <NEW_LINE> X_RateLimit_Reset = fields.Integer(data_key='X-RateLimit-Reset') <NEW_LINE> X_GitHub_Request_Id = fields.Integer(data_key='X-GitHub-Request-Id') <NEW_LINE> <DEDENT> class Path(Schema): <NEW_LINE> <INDENT> owner = fields.String(required=True, description='Name of repository owner.') <NEW_LINE> repo = fields.String(required=True, description='Name of repository.') | Create a Commit. | 62598fc13d592f4c4edbb126 |
class Affine1d(Module): <NEW_LINE> <INDENT> def __init__(self, num_features: int, bias: bool = True, device=None, dtype=None) -> None: <NEW_LINE> <INDENT> factory_kwargs = {'device': device, 'dtype': dtype} <NEW_LINE> super(Affine1d, self).__init__() <NEW_LINE> self.num_features = num_features <NEW_LINE> self.weight = Parameter(torch.empty(num_features, **factory_kwargs)) <NEW_LINE> if bias: <NEW_LINE> <INDENT> self.bias = Parameter(torch.empty(num_features, **factory_kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.register_parameter('bias', None) <NEW_LINE> <DEDENT> self.reset_parameters() <NEW_LINE> <DEDENT> def reset_parameters(self) -> None: <NEW_LINE> <INDENT> init.ones_(self.weight) <NEW_LINE> if self.bias is not None: <NEW_LINE> <INDENT> init.zeros_(self.bias) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, input: Tensor) -> Tensor: <NEW_LINE> <INDENT> if self.bias is not None: <NEW_LINE> <INDENT> return input * self.weight.unsqueeze(0) + self.bias <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return input * self.weight.unsqueeze(0) <NEW_LINE> <DEDENT> <DEDENT> def extra_repr(self) -> str: <NEW_LINE> <INDENT> return 'num_features={}, bias={}'.format( self.num_features, self.bias is not None ) | Computes the transformation out = weight * input + bias
where * is the elementwise multiplication. This is similar to the
scaling and translation given by parameters gamma and beta in batch norm | 62598fc1aad79263cf42ea3f |
class EvalResult(): <NEW_LINE> <INDENT> def __init__(self, e_name, e_result): <NEW_LINE> <INDENT> self.e_name = e_name <NEW_LINE> self.e_result = e_result <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> ret = "EVAL ALG NAME: " <NEW_LINE> ret += self.e_name + "\n" <NEW_LINE> ret += "EVAL RESULT:\n\n" <NEW_LINE> ret += self.e_result.__str__() <NEW_LINE> return ret | Stores and displays a single evaluation algorithm result. | 62598fc1f9cc0f698b1c5405 |
class CustomRequest(WSGIRequest): <NEW_LINE> <INDENT> node = None | Class to maintain an active Client/Server instance. | 62598fc13617ad0b5ee063b1 |
class LoginWithTenant(Login): <NEW_LINE> <INDENT> if _regions_supported(): <NEW_LINE> <INDENT> region = region_field <NEW_LINE> <DEDENT> username = forms.CharField(max_length="20", widget=forms.TextInput(attrs={'readonly': 'readonly'})) <NEW_LINE> tenant = forms.CharField(widget=forms.HiddenInput()) | Exactly like :class:`.Login` but includes the tenant id as a field
so that the process of choosing a default tenant is bypassed. | 62598fc123849d37ff85131d |
class InvalidLogFormatException(DebugError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> super(InvalidLogFormatException, self).__init__(message) | A log format expression was invalid. | 62598fc1956e5f7376df57b3 |
class BaseNEncoding(Encoding): <NEW_LINE> <INDENT> def __init__(self, categorical_columns = None, base = 2, return_df = False, delete_original_columns=True): <NEW_LINE> <INDENT> if base<1 or base>10: <NEW_LINE> <INDENT> raise ValueError("Either base is less than 1 or greater than 10 or n is less than 0") <NEW_LINE> <DEDENT> self.base = base <NEW_LINE> self.categorical_columns = categorical_columns <NEW_LINE> self.return_df = return_df <NEW_LINE> self.delete_original_columns = delete_original_columns <NEW_LINE> <DEDENT> def create_encoding_dict(self, X, y): <NEW_LINE> <INDENT> encoding_dict = {} <NEW_LINE> self.categorical_columns = self.get_categorical_columns(X, self.categorical_columns) <NEW_LINE> for col in self.categorical_columns: <NEW_LINE> <INDENT> encoding_dict.update({col:{x :i+1 for i, x in enumerate(pd.unique(X[col]))}}) <NEW_LINE> <DEDENT> return encoding_dict <NEW_LINE> <DEDENT> def toStrOfBase(self, n, base): <NEW_LINE> <INDENT> convertString = "0123456789" <NEW_LINE> if n == 0: return '0' <NEW_LINE> if base == 1: return ''.join(['1' if i == 0 else '0' for i in range(n)]) <NEW_LINE> if n < base: <NEW_LINE> <INDENT> return str(n) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.toStrOfBase(n//base,base) + convertString[n%base] <NEW_LINE> <DEDENT> <DEDENT> def apply_encoding(self, X_in, encoding_dict): <NEW_LINE> <INDENT> X = self.convert_input(X_in) <NEW_LINE> for col in self.categorical_columns: <NEW_LINE> <INDENT> freq_dict = encoding_dict[col] <NEW_LINE> _max = max(freq_dict.values()) <NEW_LINE> _max_base_len = len(self.toStrOfBase(_max, self.base)) <NEW_LINE> X['tmp_'+str(col)] = X[col].apply(lambda x: self.toStrOfBase(freq_dict.get(x, 0), self.base).zfill(_max_base_len)) <NEW_LINE> for i in range(_max_base_len): <NEW_LINE> <INDENT> X['col_%s_base%s_%d'%(str(col), str(self.base), i)] = X['tmp_'+str(col)].str[_max_base_len-i-1] <NEW_LINE> <DEDENT> del X['tmp_'+str(col)] <NEW_LINE> if self.delete_original_columns: <NEW_LINE> <INDENT> del 
X[col] <NEW_LINE> <DEDENT> <DEDENT> return X | class to perform BaseNEncoding on Categorical Variables
Initialization Variabes:
categorical_columns: list of categorical columns from the dataframe
or list of indexes of caategorical columns for numpy ndarray
base: base number
return_df: boolean
if True: returns pandas dataframe on transformation
else: return numpy ndarray | 62598fc1283ffb24f3cf3aef |
class ChainLightning(Spell): <NEW_LINE> <INDENT> name = "Chain Lightning" <NEW_LINE> level = 6 <NEW_LINE> casting_time = "1 action" <NEW_LINE> components = ('V', 'S', 'M') <NEW_LINE> materials = "a bit of fur; a piece of amber, glass, or a crystal rod; and three silver pins" <NEW_LINE> duration = "Instantaneous" <NEW_LINE> magic_school = "Evocation" <NEW_LINE> classes = () | You create a bolt of lightning that arcs toward a target of your
choice that you can see within range. Three bolts then leap from
that target to as many as three other targets, each of which must
be within 30 feet of the first target. A target can be a creature
or an object and can be targeted by only one of the bolts. A
target must make a Dexterity saving throw. The target takes 10d8
lightning damage on a failed save, or half as much damage on a
successful one. At Higher Levels. When you cast this spell using a
spell slot of 7th level or higher, one additional bolt leaps from
the first target to another target for each slot level above
6th. | 62598fc17cff6e4e811b5c8f |
class CurrentProfileSync_Enum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CurrentProfileSync.Enum') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/afcastel/hmc_rest_api/code/pmc.schema.pcm-8.8.5.0/schema/common/inc/CommonEnumerations.xsd', 902, 3) <NEW_LINE> _Documentation = None | An atomic simple type. | 62598fc1a05bb46b3848aad7 |
class SpecArithmetic(object): <NEW_LINE> <INDENT> def search_peak(self, xdata, ydata): <NEW_LINE> <INDENT> ydata = numpy.array(ydata, copy=False) <NEW_LINE> ymax = ydata[numpy.isfinite(ydata)].max() <NEW_LINE> idx = self.__give_index(ymax, ydata) <NEW_LINE> return xdata[idx], ymax, idx <NEW_LINE> <DEDENT> def search_com(self, xdata,ydata): <NEW_LINE> <INDENT> num = numpy.sum(xdata * ydata) <NEW_LINE> denom = numpy.sum(ydata) <NEW_LINE> if abs(denom) > 0: <NEW_LINE> <INDENT> result = num / denom <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = 0 <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def search_fwhm(self, xdata, ydata, peak=None, index=None): <NEW_LINE> <INDENT> if peak is None or index is None: <NEW_LINE> <INDENT> x, mypeak, index_peak = self.search_peak(xdata, ydata) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mypeak = peak <NEW_LINE> index_peak = index <NEW_LINE> <DEDENT> hm = mypeak / 2 <NEW_LINE> idx = index_peak <NEW_LINE> try: <NEW_LINE> <INDENT> while ydata[idx] >= hm: <NEW_LINE> <INDENT> idx -= 1 <NEW_LINE> <DEDENT> x0 = float(xdata[idx]) <NEW_LINE> x1 = float(xdata[idx + 1]) <NEW_LINE> y0 = float(ydata[idx]) <NEW_LINE> y1 = float(ydata[idx + 1]) <NEW_LINE> lhmx = (hm * (x1 - x0) - (y0 * x1) + (y1 * x0)) / (y1 - y0) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> lhmx = 0 <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> lhmx = xdata[0] <NEW_LINE> <DEDENT> idx = index_peak <NEW_LINE> try: <NEW_LINE> <INDENT> while ydata[idx] >= hm: <NEW_LINE> <INDENT> idx += 1 <NEW_LINE> <DEDENT> x0 = float(xdata[idx - 1]) <NEW_LINE> x1 = float(xdata[idx]) <NEW_LINE> y0 = float(ydata[idx - 1]) <NEW_LINE> y1 = float(ydata[idx]) <NEW_LINE> uhmx = (hm * (x1 - x0) - (y0 * x1) + (y1 * x0)) / (y1 - y0) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> uhmx = 0 <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> uhmx = xdata[-1] <NEW_LINE> <DEDENT> fwhm = uhmx - lhmx <NEW_LINE> cfwhm = (uhmx + lhmx) / 2 
<NEW_LINE> return fwhm, cfwhm <NEW_LINE> <DEDENT> def __give_index(self, elem,array): <NEW_LINE> <INDENT> mylist = array.tolist() <NEW_LINE> return mylist.index(elem) | This class tries to mimic SPEC operations.
Correct peak positions and fwhm information
have to be made via a fit. | 62598fc1dc8b845886d53826 |
class ApplicationGatewayRewriteRuleSet(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'rewrite_rules': {'key': 'properties.rewriteRules', 'type': '[ApplicationGatewayRewriteRule]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, rewrite_rules: Optional[List["ApplicationGatewayRewriteRule"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationGatewayRewriteRuleSet, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.rewrite_rules = rewrite_rules <NEW_LINE> self.provisioning_state = None | Rewrite rule set of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the rewrite rule set that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param rewrite_rules: Rewrite rules in the rewrite rule set.
:type rewrite_rules: list[~azure.mgmt.network.v2020_03_01.models.ApplicationGatewayRewriteRule]
:ivar provisioning_state: The provisioning state of the rewrite rule set resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2020_03_01.models.ProvisioningState | 62598fc13d592f4c4edbb128 |
class BotCommand(BaseType): <NEW_LINE> <INDENT> def __init__(self, command, description): <NEW_LINE> <INDENT> self.command = command <NEW_LINE> self.description = description | This object represents a bot command.
Parameters
----------
command : String
Text of the command, 1-32 characters. Can contain only lowercase English letters, digits and underscores.
description : String
Description of the command, 3-256 characters. | 62598fc1f9cc0f698b1c5406 |
class SaveTool(Tool): <NEW_LINE> <INDENT> pass | *toolbar icon*: |save_icon|
The save tool is an action. When activated, the tool opens a download dialog
which allows to save an image reproduction of the plot in PNG format. If
automatic download is not support by a web browser, the tool falls back to
opening the generated image in a new tab or window. User then can manually
save it by right clicking on the image and choosing "Save As" (or similar)
menu item.
.. |save_icon| image:: /_images/icons/Save.png
:height: 18pt | 62598fc10fa83653e46f5151 |
class EQLFastRCNNOutputLayers(FastRCNNOutputLayers): <NEW_LINE> <INDENT> def __init__(self, input_size, num_classes, cls_agnostic_bbox_reg, box_dim=4, prior_prob=0.001): <NEW_LINE> <INDENT> super(FastRCNNOutputLayers, self).__init__() <NEW_LINE> if not isinstance(input_size, int): <NEW_LINE> <INDENT> input_size = np.prod(input_size) <NEW_LINE> <DEDENT> self.cls_score = nn.Linear(input_size, num_classes) <NEW_LINE> num_bbox_reg_classes = 1 if cls_agnostic_bbox_reg else num_classes <NEW_LINE> self.bbox_pred = nn.Linear(input_size, num_bbox_reg_classes * box_dim) <NEW_LINE> nn.init.normal_(self.cls_score.weight, std=0.01) <NEW_LINE> nn.init.normal_(self.bbox_pred.weight, std=0.001) <NEW_LINE> nn.init.constant_(self.bbox_pred.bias, 0) <NEW_LINE> bias_value = -math.log((1 - prior_prob) / prior_prob) <NEW_LINE> nn.init.constant_(self.cls_score.bias, bias_value) | Two linear layers for predicting Fast R-CNN outputs:
(1) proposal-to-detection box regression deltas
(2) classification scores | 62598fc18a349b6b436864aa |
class Vote(models.Model): <NEW_LINE> <INDENT> YES = 1 <NEW_LINE> NO = 2 <NEW_LINE> ABSTAIN = 3 <NEW_LINE> ABSENT = 4 <NEW_LINE> VOTES = ( (YES, 'Yes'), (NO, 'No'), (ABSTAIN, 'Abstain'), (ABSENT, 'Absent') ) <NEW_LINE> msp = models.ForeignKey(MSP) <NEW_LINE> division = models.ForeignKey(Division) <NEW_LINE> vote = models.CharField(max_length=1, choices=VOTES, null=True) <NEW_LINE> rebellious = models.BooleanField() <NEW_LINE> party_vote = models.CharField(max_length=1, choices=VOTES, null=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.vote | represents an msp's vote for a division | 62598fc1ad47b63b2c5a7ac3 |
class CreateDatabaseFromBackupRequest(_messages.Message): <NEW_LINE> <INDENT> backup = _messages.StringField(1) <NEW_LINE> name = _messages.StringField(2) | A CreateDatabaseFromBackupRequest object.
Fields:
backup: Required. Name of the backup from which to restore. Values are of
the form `projects/<project>/instances/<instance>/backups/<backup>`.
name: Required. Name of the database to create and restore to. This
database must not already exist. The instance must be consistent with
destination_parent. Values are of the form
`projects/<project>/instances/<instance>/databases/<database>`. | 62598fc1adb09d7d5dc0a7ea |
class AbsorbMulIntoMultiThreshold(Transformation): <NEW_LINE> <INDENT> def apply(self, model): <NEW_LINE> <INDENT> graph = model.graph <NEW_LINE> node_ind = 0 <NEW_LINE> graph_modified = False <NEW_LINE> for n in graph.node: <NEW_LINE> <INDENT> node_ind += 1 <NEW_LINE> if ( n.op_type == "Mul" and not model.is_fork_node(n) and not model.is_join_node(n) ): <NEW_LINE> <INDENT> mul_weight_name = n.input[1] <NEW_LINE> A = model.get_initializer(mul_weight_name) <NEW_LINE> assert A is not None, "Initializer for mul weights is not set." <NEW_LINE> is_signed = (A < 0).any() <NEW_LINE> is_scalar = A.ndim == 0 or all(x == 1 for x in A.shape) <NEW_LINE> actual_ndims = len(tuple(filter(lambda x: x > 1, A.shape))) <NEW_LINE> is_1d = actual_ndims == 1 <NEW_LINE> consumer = model.find_consumer(n.output[0]) <NEW_LINE> if consumer is not None and consumer.op_type == "MultiThreshold": <NEW_LINE> <INDENT> if not is_signed and (is_1d or is_scalar): <NEW_LINE> <INDENT> threshold_name = consumer.input[1] <NEW_LINE> T = model.get_initializer(threshold_name) <NEW_LINE> assert T is not None, "Initializer for thresholds is not set." <NEW_LINE> start_name = n.input[0] <NEW_LINE> Tnew = T / A.reshape(-1, 1) <NEW_LINE> model.set_initializer(threshold_name, Tnew) <NEW_LINE> consumer.input[0] = start_name <NEW_LINE> graph.node.remove(n) <NEW_LINE> graph_modified = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return (model, graph_modified) | Absorb preceding Mul ops into MultiThreshold by updating the threshold
values. Only *positive* scalar/1D mul vectors can be absorbed. | 62598fc12c8b7c6e89bd3a2f |
class ConfigureDialog(QtWidgets.QDialog): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> QtWidgets.QDialog.__init__(self, parent) <NEW_LINE> self._ui = Ui_Dialog() <NEW_LINE> self._ui.setupUi(self) <NEW_LINE> self._previousIdentifier = '' <NEW_LINE> self.identifierOccursCount = None <NEW_LINE> self._makeConnections() <NEW_LINE> <DEDENT> def _makeConnections(self): <NEW_LINE> <INDENT> self._ui.lineEdit0.textChanged.connect(self.validate) <NEW_LINE> <DEDENT> def accept(self): <NEW_LINE> <INDENT> result = QtWidgets.QMessageBox.Yes <NEW_LINE> if not self.validate(): <NEW_LINE> <INDENT> result = QtWidgets.QMessageBox.warning(self, 'Invalid Configuration', 'This configuration is invalid. Unpredictable behaviour may result if you choose \'Yes\', are you sure you want to save this configuration?)', QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No, QtWidgets.QMessageBox.No) <NEW_LINE> <DEDENT> if result == QtWidgets.QMessageBox.Yes: <NEW_LINE> <INDENT> QtWidgets.QDialog.accept(self) <NEW_LINE> <DEDENT> <DEDENT> def validate(self): <NEW_LINE> <INDENT> value = self.identifierOccursCount(self._ui.lineEdit0.text()) <NEW_LINE> valid = (value == 0) or (value == 1 and self._previousIdentifier == self._ui.lineEdit0.text()) <NEW_LINE> if valid: <NEW_LINE> <INDENT> self._ui.lineEdit0.setStyleSheet(DEFAULT_STYLE_SHEET) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._ui.lineEdit0.setStyleSheet(INVALID_STYLE_SHEET) <NEW_LINE> <DEDENT> return valid <NEW_LINE> <DEDENT> def getConfig(self): <NEW_LINE> <INDENT> self._previousIdentifier = self._ui.lineEdit0.text() <NEW_LINE> config = {} <NEW_LINE> config['identifier'] = self._ui.lineEdit0.text() <NEW_LINE> config['discretisation'] = self._ui.lineEdit1.text() <NEW_LINE> config['node coordinates'] = self._ui.lineEdit2.text() == 'True' <NEW_LINE> config['elements'] = self._ui.lineEdit3.text() <NEW_LINE> return config <NEW_LINE> <DEDENT> def setConfig(self, config): <NEW_LINE> <INDENT> self._previousIdentifier = 
config['identifier'] <NEW_LINE> self._ui.lineEdit0.setText(config['identifier']) <NEW_LINE> self._ui.lineEdit1.setText(config['discretisation']) <NEW_LINE> self._ui.lineEdit2.setText(str(config['node coordinates'])) <NEW_LINE> self._ui.lineEdit3.setText(config['elements']) | Configure dialog to present the user with the options to configure this step. | 62598fc14c3428357761a529 |
class SimpleFormatExportOptionsForm(SimpleFormatForm): <NEW_LINE> <INDENT> TERMINATOR_DEFAULT = "LF" <NEW_LINE> TERMINATOR_CHOICES = [("LF", "Linux (LF)"), ("CRLF", "Windows (CRLF)"), ("CR", "Mac (CR)"), ("", "Other:")] <NEW_LINE> TERMINATOR_CHARACTER_MAP = { "LF": "\n", "CRLF": "\r\n", "CR": "\r" } <NEW_LINE> line_terminator = CharField(widget=ComboTextInput(choices=TERMINATOR_CHOICES, default=TERMINATOR_DEFAULT), help_text="Used to separate records.") <NEW_LINE> @property <NEW_LINE> def cleaned_metadata(self): <NEW_LINE> <INDENT> metadata = super(SimpleFormatExportOptionsForm, self).cleaned_metadata <NEW_LINE> if metadata["line_terminator"] in self.TERMINATOR_CHARACTER_MAP: <NEW_LINE> <INDENT> metadata["line_terminator"] = self.TERMINATOR_CHARACTER_MAP[metadata["line_terminator"]] <NEW_LINE> <DEDENT> return metadata <NEW_LINE> <DEDENT> class Meta(SimpleFormatForm.Meta): <NEW_LINE> <INDENT> exclude = ["name"] | Presents the user with common options used to export simple formatted data. | 62598fc13346ee7daa33777f |
class BusinessCardOCRRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageBase64 = None <NEW_LINE> self.ImageUrl = None <NEW_LINE> self.Config = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageBase64 = params.get("ImageBase64") <NEW_LINE> self.ImageUrl = params.get("ImageUrl") <NEW_LINE> self.Config = params.get("Config") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | BusinessCardOCR请求参数结构体
| 62598fc155399d3f05626784 |
class AdminCodeDatabase(Database): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Database.__init__(self) <NEW_LINE> self.connect('regionDB', 'AdminCode') <NEW_LINE> <DEDENT> def find(self, **conds): <NEW_LINE> <INDENT> projection = conds.get('projection') <NEW_LINE> if projection is None: <NEW_LINE> <INDENT> projection = {'region': 1, 'version': 1, 'adminlevel': 1, 'acode': 1, '_id': 1, 'parent': 1} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conds.pop('projection') <NEW_LINE> <DEDENT> sorts = conds.get('sorts') <NEW_LINE> if sorts is None: <NEW_LINE> <INDENT> sorts = [('year', ASCENDING), ('acode', ASCENDING)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conds.pop('sorts') <NEW_LINE> <DEDENT> condition = dict() <NEW_LINE> for key in conds: <NEW_LINE> <INDENT> if isinstance(conds[key], list): <NEW_LINE> <INDENT> condition[key] = {'$in': conds[key]} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> condition[key] = conds[key] <NEW_LINE> <DEDENT> <DEDENT> return self.collection.find(condition, projection).sort(sorts) <NEW_LINE> <DEDENT> @property <NEW_LINE> def period(self): <NEW_LINE> <INDENT> return sorted(self.find().distinct('year')) <NEW_LINE> <DEDENT> def version(self, year=None): <NEW_LINE> <INDENT> if year is None: <NEW_LINE> <INDENT> return sorted(self.find().distinct('version')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return sorted(self.find(year=str(year)).distinct('version')) | AdminDatabase类用来处理区域行政区划
| 62598fc1377c676e912f6eaa |
class CombinedCriteria(Criterion): <NEW_LINE> <INDENT> def __init__(self, *criteria): <NEW_LINE> <INDENT> super(CombinedCriteria, self).__init__() <NEW_LINE> self._criteria = criteria <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> names = (criterion.name() for criterion in self._criteria) <NEW_LINE> return '__'.join(sorted(names)) <NEW_LINE> <DEDENT> def is_adversarial(self, predictions, label): <NEW_LINE> <INDENT> for criterion in self._criteria: <NEW_LINE> <INDENT> if not criterion.is_adversarial(predictions, label): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Meta criterion that combines several criteria into a new one.
Considers images as adversarial that are considered adversarial
by all sub-criteria that are combined by this criterion.
Instead of using this class directly, it is possible to combine
criteria like this: criteria1 & criteria2
Parameters
----------
*criteria : variable length list of :class:`Criterion` instances
List of sub-criteria that will be combined.
Notes
-----
This class uses lazy evaluation of the criteria in the order they
are passed to the constructor. | 62598fc171ff763f4b5e79ea |
class ContentFileField(CharField): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.filename = kwargs.pop('filename', 'file.txt') <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def to_internal_value(self, data): <NEW_LINE> <INDENT> return ContentFile(data.encode('utf-8'), name=self.filename) <NEW_LINE> <DEDENT> def to_representation(self, value): <NEW_LINE> <INDENT> value.open() <NEW_LINE> with closing(value) as value: <NEW_LINE> <INDENT> return value.read().decode('utf-8') | Serializer field that deserializes text into a ContentFile. | 62598fc14a966d76dd5ef143 |
class AltitudeDuringCabinAltitudeWarningMax(KeyPointValueNode): <NEW_LINE> <INDENT> units = ut.FT <NEW_LINE> def derive(self, cab_warn=M('Cabin Altitude Warning'), airborne=S('Airborne'), alt=P('Altitude STD Smoothed')): <NEW_LINE> <INDENT> warns = np.ma.clump_unmasked(np.ma.masked_equal(cab_warn.array, 0)) <NEW_LINE> air_warns = slices_and(warns, airborne.get_slices()) <NEW_LINE> self.create_kpvs_within_slices(alt.array, air_warns, max_value) | The maximum aircraft altitude when the Cabin Altitude Warning was sounding. | 62598fc17047854f4633f641 |
class NestedForeignKeySourceModelViewSet(NestedResourceMixin, viewsets.ModelViewSet): <NEW_LINE> <INDENT> parent_model = TargetModel <NEW_LINE> model = ForeignKeySourceModel | /targets/<target_pk>/sources/ | 62598fc1956e5f7376df57b5 |
class ListProposals(ListView): <NEW_LINE> <INDENT> paginate_by = 50 <NEW_LINE> context_object_name = 'proposal' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> place = get_object_or_404(Space, url=self.kwargs['space_name']) <NEW_LINE> objects = Proposal.objects.all().filter(space=place.id).order_by('pub_date') <NEW_LINE> return objects <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(ListProposals, self).get_context_data(**kwargs) <NEW_LINE> context['get_place'] = get_object_or_404(Space, url=self.kwargs['space_name']) <NEW_LINE> return context | List all proposals stored whithin a space. Inherits from django :class:`ListView`
generic view.
:rtype: Object list
:context: proposal | 62598fc15fc7496912d483b2 |
class Message(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'messages' <NEW_LINE> id = db.Column( db.Integer, primary_key=True, ) <NEW_LINE> text = db.Column( db.String(140), nullable=False, ) <NEW_LINE> timestamp = db.Column( db.DateTime, nullable=False, default=datetime.utcnow(), ) <NEW_LINE> user_id = db.Column( db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'), nullable=False, ) <NEW_LINE> user = db.relationship('User') <NEW_LINE> def to_json(self): <NEW_LINE> <INDENT> return { 'text': self.text, 'id': self.id, 'timestamp': self.timestamp.strftime('%d %B %Y') } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_message_from_id(cls, message_id): <NEW_LINE> <INDENT> return Message.query.get_or_404(message_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def append_message_to_user(cls, message, user): <NEW_LINE> <INDENT> user.messages.append(message) <NEW_LINE> db.session.commit() | An individual message ("warble"). | 62598fc14f88993c371f0641 |
class NoteList(NoteResource, ModelListResource): <NEW_LINE> <INDENT> ARGS = NOTE_ARGS <NEW_LINE> def post(self): <NEW_LINE> <INDENT> args = self._parse_request() <NEW_LINE> book = Book.api_get_or_404(args['book_id']) <NEW_LINE> new_note = Note.create(text=args['text'], book=book) <NEW_LINE> result = { 'result': self._serialize(new_note) } <NEW_LINE> return result, http.CREATED | Generic set of views for the note list endpoint.
| 62598fc1a8370b77170f0650 |
class Cluster(): <NEW_LINE> <INDENT> def __init__(self, coords, count, triangles): <NEW_LINE> <INDENT> self.coords = coords <NEW_LINE> self.count = count <NEW_LINE> self.triangles = triangles <NEW_LINE> self.color=False | A class for point clusters. | 62598fc1099cdd3c63675519 |
class GFFParser(_AbstractMapReduceGFF): <NEW_LINE> <INDENT> def __init__(self, line_adjust_fn=None, create_missing=True): <NEW_LINE> <INDENT> _AbstractMapReduceGFF.__init__(self, create_missing=create_missing) <NEW_LINE> self._line_adjust_fn = line_adjust_fn <NEW_LINE> <DEDENT> def _gff_process(self, gff_files, limit_info, target_lines): <NEW_LINE> <INDENT> line_gen = self._file_line_generator(gff_files) <NEW_LINE> for out in self._lines_to_out_info(line_gen, limit_info, target_lines): <NEW_LINE> <INDENT> yield out <NEW_LINE> <DEDENT> <DEDENT> def _file_line_generator(self, gff_files): <NEW_LINE> <INDENT> for gff_file in gff_files: <NEW_LINE> <INDENT> if hasattr(gff_file, "read"): <NEW_LINE> <INDENT> need_close = False <NEW_LINE> in_handle = gff_file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> need_close = True <NEW_LINE> in_handle = open(gff_file) <NEW_LINE> <DEDENT> found_seqs = False <NEW_LINE> while 1: <NEW_LINE> <INDENT> line = in_handle.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> yield line <NEW_LINE> <DEDENT> if need_close: <NEW_LINE> <INDENT> in_handle.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _lines_to_out_info(self, line_iter, limit_info=None, target_lines=None): <NEW_LINE> <INDENT> params = self._examiner._get_local_params(limit_info) <NEW_LINE> out_info = _GFFParserLocalOut((target_lines is not None and target_lines > 1)) <NEW_LINE> found_seqs = False <NEW_LINE> for line in line_iter: <NEW_LINE> <INDENT> results = self._map_fn(line, params) <NEW_LINE> if self._line_adjust_fn and results: <NEW_LINE> <INDENT> if results[0][0] not in ['directive']: <NEW_LINE> <INDENT> results = [(results[0][0], self._line_adjust_fn(results[0][1]))] <NEW_LINE> <DEDENT> <DEDENT> self._reduce_fn(results, out_info, params) <NEW_LINE> if (target_lines and out_info.num_lines >= target_lines and out_info.can_break): <NEW_LINE> <INDENT> yield out_info.get_results() <NEW_LINE> out_info = _GFFParserLocalOut((target_lines is not None and 
target_lines > 1)) <NEW_LINE> <DEDENT> if (results and results[0][0] == 'directive' and results[0][1] == 'FASTA'): <NEW_LINE> <INDENT> found_seqs = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> class FakeHandle: <NEW_LINE> <INDENT> def __init__(self, line_iter): <NEW_LINE> <INDENT> self._iter = line_iter <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> return "".join(l for l in self._iter) <NEW_LINE> <DEDENT> def readline(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._iter.next() <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if found_seqs: <NEW_LINE> <INDENT> fasta_recs = self._parse_fasta(FakeHandle(line_iter)) <NEW_LINE> out_info.add('fasta', fasta_recs) <NEW_LINE> <DEDENT> if out_info.has_items(): <NEW_LINE> <INDENT> yield out_info.get_results() | Local GFF parser providing standardized parsing of GFF3 and GFF2 files.
| 62598fc121bff66bcd722ed9 |
class alias(Command): <NEW_LINE> <INDENT> context = 'browser' <NEW_LINE> resolve_macros = False <NEW_LINE> def execute(self): <NEW_LINE> <INDENT> if not self.arg(1) or not self.arg(2): <NEW_LINE> <INDENT> self.fm.notify('Syntax: alias <newcommand> <oldcommand>', bad=True) <NEW_LINE> return <NEW_LINE> <DEDENT> self.fm.commands.alias(self.arg(1), self.rest(2)) | :alias <newcommand> <oldcommand>
Copies the oldcommand as newcommand. | 62598fc13d592f4c4edbb12d |
@TestDataGenerator.RegisterClass <NEW_LINE> class IdentityTestDataGenerator(TestDataGenerator): <NEW_LINE> <INDENT> NAME = 'identity' <NEW_LINE> def __init__(self, output_directory_prefix, copy_with_identity): <NEW_LINE> <INDENT> TestDataGenerator.__init__(self, output_directory_prefix) <NEW_LINE> self._copy_with_identity = copy_with_identity <NEW_LINE> <DEDENT> @property <NEW_LINE> def copy_with_identity(self): <NEW_LINE> <INDENT> return self._copy_with_identity <NEW_LINE> <DEDENT> def _Generate( self, input_signal_filepath, test_data_cache_path, base_output_path): <NEW_LINE> <INDENT> config_name = 'default' <NEW_LINE> output_path = self._MakeDir(base_output_path, config_name) <NEW_LINE> if self._copy_with_identity: <NEW_LINE> <INDENT> input_signal_filepath_new = os.path.join( test_data_cache_path, os.path.split(input_signal_filepath)[1]) <NEW_LINE> logging.info('copying ' + input_signal_filepath + ' to ' + ( input_signal_filepath_new)) <NEW_LINE> shutil.copy(input_signal_filepath, input_signal_filepath_new) <NEW_LINE> input_signal_filepath = input_signal_filepath_new <NEW_LINE> <DEDENT> self._AddNoiseReferenceFilesPair( config_name=config_name, noisy_signal_filepath=input_signal_filepath, reference_signal_filepath=input_signal_filepath, output_path=output_path) | Generator that adds no noise.
Both the noisy and the reference signals are the input signal. | 62598fc192d797404e388c9a |
class _FactoryWrapper: <NEW_LINE> <INDENT> def __init__(self, factory_or_path): <NEW_LINE> <INDENT> self.factory = None <NEW_LINE> self.module = self.name = '' <NEW_LINE> if isinstance(factory_or_path, type): <NEW_LINE> <INDENT> self.factory = factory_or_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not (isinstance(factory_or_path, str) and '.' in factory_or_path): <NEW_LINE> <INDENT> raise ValueError( "A factory= argument must receive either a class " "or the fully qualified path to a Factory subclass; got " "%r instead." % factory_or_path) <NEW_LINE> <DEDENT> self.module, self.name = factory_or_path.rsplit('.', 1) <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> if self.factory is None: <NEW_LINE> <INDENT> self.factory = utils.import_object( self.module, self.name, ) <NEW_LINE> <DEDENT> return self.factory <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self.factory is None: <NEW_LINE> <INDENT> return f'<_FactoryImport: {self.module}.{self.name}>' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f'<_FactoryImport: {self.factory.__class__}>' | Handle a 'factory' arg.
Such args can be either a Factory subclass, or a fully qualified import
path for that subclass (e.g 'myapp.factories.MyFactory'). | 62598fc17d847024c075c62c |
class NativeWindow(MarshalByRefObject,IWin32Window): <NEW_LINE> <INDENT> def AssignHandle(self,handle): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def CreateHandle(self,cp): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def DefWndProc(self,m): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def DestroyHandle(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def FromHandle(handle): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def MemberwiseClone(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def OnHandleChange(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def OnThreadException(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseHandle(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def WndProc(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Handle=property(lambda self: object(),lambda self,v: None,lambda self: None) | Provides a low-level encapsulation of a window handle and a window procedure.
NativeWindow() | 62598fc1956e5f7376df57b6 |
class Decorator(object): <NEW_LINE> <INDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.required()(*args, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def required(cls): <NEW_LINE> <INDENT> def decorator(func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> authorization = self.request_state.headers.get("AUTHORIZATION", "") <NEW_LINE> self.session = Oauth2.get(authorization.replace("Bearer ", "")) <NEW_LINE> return func(self, *args, **kwargs) <NEW_LINE> <DEDENT> wrapper.oauth2_required = True <NEW_LINE> return wrapper <NEW_LINE> <DEDENT> return decorator <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def optional(cls): <NEW_LINE> <INDENT> def decorator(func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> self.session = None <NEW_LINE> authorization = self.request_state.headers.get("AUTHORIZATION", "") <NEW_LINE> try: <NEW_LINE> <INDENT> self.session = Oauth2.get(authorization.replace("Bearer ", "")) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return func(self, *args, **kwargs) <NEW_LINE> <DEDENT> wrapper.oauth2_optional = True <NEW_LINE> return wrapper <NEW_LINE> <DEDENT> return decorator | Protorpc method decorators. Reads the Authorization header, or authorization
query paramater, and exposes self.session. | 62598fc15fdd1c0f98e5e203 |
class RESTBaseAPI(RESTApi): <NEW_LINE> <INDENT> def __init__(self, app, config, mount): <NEW_LINE> <INDENT> RESTApi.__init__(self, app, config, mount) <NEW_LINE> self.formats = [ ('application/json', JSONFormat()) ] <NEW_LINE> if not os.path.exists(config.cachedir) or not os.path.isdir(config.cachedir): <NEW_LINE> <INDENT> raise Exception("Failing to start because of wrong cache directory '%s'" % config.cachedir) <NEW_LINE> <DEDENT> if hasattr(config, 'powerusers'): <NEW_LINE> <INDENT> UserFileCache.RESTExtensions.POWER_USERS_LIST = config.powerusers <NEW_LINE> <DEDENT> if hasattr(config, 'quota_user_limit'): <NEW_LINE> <INDENT> UserFileCache.RESTExtensions.QUOTA_USER_LIMIT = config.quota_user_limit * 1024 * 1024 <NEW_LINE> <DEDENT> self._add( {'logfile': RESTLogFile(app, self, config, mount), 'file': RESTFile(app, self, config, mount), 'info': RESTInfo(app, self, config, mount)} ) | The UserFileCache REST API module | 62598fc1bf627c535bcb1717 |
class Operation(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'origin': {'key': 'origin', 'type': 'str'}, 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'ServiceSpecification'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(Operation, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.display = kwargs.get('display', None) <NEW_LINE> self.origin = kwargs.get('origin', None) <NEW_LINE> self.is_data_action = kwargs.get('is_data_action', None) <NEW_LINE> self.service_specification = kwargs.get('service_specification', None) | Key Vault REST API operation definition.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: Display metadata associated with the operation.
:type display: ~azure.mgmt.keyvault.v2019_09_01.models.OperationDisplay
:param origin: The origin of operations.
:type origin: str
:param is_data_action: Property to specify whether the action is a data action.
:type is_data_action: bool
:param service_specification: One property of operation, include metric specifications.
:type service_specification: ~azure.mgmt.keyvault.v2019_09_01.models.ServiceSpecification | 62598fc1a219f33f346c6a78 |
class GooglePlacesSearchResult(object): <NEW_LINE> <INDENT> def __init__(self, query_instance, response): <NEW_LINE> <INDENT> self._places = [] <NEW_LINE> for place in response['results']: <NEW_LINE> <INDENT> self._places.append(Place(query_instance, place)) <NEW_LINE> <DEDENT> self._html_attributions = response['html_attributions'] <NEW_LINE> self._next_page_token = response.get("next_page_token") <NEW_LINE> <DEDENT> @property <NEW_LINE> def places(self): <NEW_LINE> <INDENT> return self._places <NEW_LINE> <DEDENT> @property <NEW_LINE> def html_attributions(self): <NEW_LINE> <INDENT> return self._html_attributions <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_attributions(self): <NEW_LINE> <INDENT> return len(self.html_attributions) > 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def next_page_token(self): <NEW_LINE> <INDENT> return self._next_page_token | Wrapper around the Google Places API query JSON response. | 62598fc199fddb7c1ca62f25 |
class InventoryDict(dict): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> self._iter_next_list = sorted(self.keys()) <NEW_LINE> self._iter_next_list.reverse() <NEW_LINE> return(iter(self._iter_next, None)) <NEW_LINE> <DEDENT> def _iter_next(self): <NEW_LINE> <INDENT> if (len(self._iter_next_list)>0): <NEW_LINE> <INDENT> return(self[self._iter_next_list.pop()]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return(None) <NEW_LINE> <DEDENT> <DEDENT> def add(self, resource, replace=False): <NEW_LINE> <INDENT> uri = resource.uri <NEW_LINE> if (uri in self and not replace): <NEW_LINE> <INDENT> raise InventoryDupeError("Attempt to add resource already in inventory") <NEW_LINE> <DEDENT> self[uri]=resource | Default implementation of class to store resources in Inventory
Key properties of this class are:
- has add(resource) method
- is iterable and results given in alphanumeric order by resource.uri | 62598fc2d486a94d0ba2c242 |
class _IndexMaps(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.keypath_value_id = defaultdict(lambda: defaultdict(list)) <NEW_LINE> self.value_id_keypath = defaultdict(lambda: defaultdict(list)) <NEW_LINE> self.values = {} | Helper to hold the index dictionaries.
| 62598fc2a05bb46b3848aadd |
class Ward(BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, n_clusters=2, memory=Memory(cachedir=None, verbose=0), connectivity=None, copy=True, n_components=None): <NEW_LINE> <INDENT> self.n_clusters = n_clusters <NEW_LINE> self.memory = memory <NEW_LINE> self.copy = copy <NEW_LINE> self.n_components = n_components <NEW_LINE> self.connectivity = connectivity <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> memory = self.memory <NEW_LINE> if isinstance(memory, basestring): <NEW_LINE> <INDENT> memory = Memory(cachedir=memory) <NEW_LINE> <DEDENT> self.children_, self.n_components, self.n_leaves_ = memory.cache(ward_tree)(X, self.connectivity, n_components=self.n_components, copy=self.copy) <NEW_LINE> self.labels_ = _hc_cut(self.n_clusters, self.children_, self.n_leaves_) <NEW_LINE> return self | Ward hierarchical clustering: constructs a tree and cuts it.
Parameters
----------
n_clusters : int or ndarray
The number of clusters.
connectivity : sparse matrix.
connectivity matrix. Defines for each sample the neigbhoring
samples following a given structure of the data.
Defaut is None, i.e, the hiearchical clustering algorithm is
unstructured.
memory : Instance of joblib.Memory or string
Used to cache the output of the computation of the tree.
By default, no caching is done. If a string is given, it is the
path to the caching directory.
copy : bool
Copy the connectivity matrix or work inplace.
n_components : int (optional)
The number of connected components in the graph defined by the
connectivity matrix. If not set, it is estimated.
Methods
-------
fit:
Compute the clustering
Attributes
----------
children_ : array-like, shape = [n_nodes, 2]
List of the children of each nodes.
Leaves of the tree do not appear.
labels_ : array [n_points]
cluster labels for each point
n_leaves_ : int
Number of leaves in the hiearchical tree. | 62598fc24527f215b58ea140 |
class StatsClientBase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._prefix = None <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def _send(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def pipeline(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def timer(self, stat, rate=1): <NEW_LINE> <INDENT> return StatsdTimer(self, stat, rate) <NEW_LINE> <DEDENT> def timing(self, stat, delta, rate=1): <NEW_LINE> <INDENT> if isinstance(delta, timedelta): <NEW_LINE> <INDENT> delta = delta.total_seconds() * 1000.0 <NEW_LINE> <DEDENT> self._send_stat(stat, "%0.6f|ms" % delta, rate) <NEW_LINE> <DEDENT> def incr(self, stat, count=1, rate=1): <NEW_LINE> <INDENT> self._send_stat(stat, "%s|c" % count, rate) <NEW_LINE> <DEDENT> def decr(self, stat, count=1, rate=1): <NEW_LINE> <INDENT> self.incr(stat, -count, rate) <NEW_LINE> <DEDENT> def gauge(self, stat, value, rate=1, delta=False): <NEW_LINE> <INDENT> if value < 0 and not delta: <NEW_LINE> <INDENT> if rate < 1: <NEW_LINE> <INDENT> if random.random() > rate: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> with self.pipeline() as pipe: <NEW_LINE> <INDENT> pipe._send_stat(stat, "0|g", 1) <NEW_LINE> pipe._send_stat(stat, "%s|g" % value, 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> prefix = "+" if delta and value >= 0 else "" <NEW_LINE> self._send_stat(stat, "%s%s|g" % (prefix, value), rate) <NEW_LINE> <DEDENT> <DEDENT> def set(self, stat, value, rate=1): <NEW_LINE> <INDENT> self._send_stat(stat, "%s|s" % value, rate) <NEW_LINE> <DEDENT> def _send_stat(self, stat, value, rate): <NEW_LINE> <INDENT> self._after(self._prepare(stat, value, rate)) <NEW_LINE> <DEDENT> def _prepare(self, stat, value, rate): <NEW_LINE> <INDENT> if rate < 1: <NEW_LINE> <INDENT> if random.random() > rate: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> value = "%s|@%s" % (value, rate) 
<NEW_LINE> <DEDENT> if self._prefix: <NEW_LINE> <INDENT> stat = "%s.%s" % (self._prefix, stat) <NEW_LINE> <DEDENT> return "%s:%s" % (stat, value) <NEW_LINE> <DEDENT> def _after(self, data): <NEW_LINE> <INDENT> if data: <NEW_LINE> <INDENT> self._send(data) | A Base class for various statsd clients. | 62598fc24c3428357761a52d |
class CLHEP2110(clhep.Clhep): <NEW_LINE> <INDENT> def __init__(self, system): <NEW_LINE> <INDENT> super(CLHEP2110, self).__init__("clhep-2.1.1.0", system, "clhep-2.1.1.0.tgz") | Clhep 2.1.1.0, install package. | 62598fc23d592f4c4edbb12e |
class FlatList(list): <NEW_LINE> <INDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return list(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{}: {}>'.format(self.__class__.__name__, list(self)) | This class inherits from list and has the same interface as a list-type.
However, there is a 'data'-attribute introduced, that is required for the encoding of the list!
The fields of the encoding-Schema must match the fields of the Object to be encoded! | 62598fc255399d3f05626788 |
class GroupNorm1d(_GroupNorm): <NEW_LINE> <INDENT> pass | Dragon does not use separate backend functions. | 62598fc2e1aae11d1e7ce95e |
class Sources(object): <NEW_LINE> <INDENT> openapi_types = { 'links': 'ResourceMembersLinks', 'sources': 'list[Source]' } <NEW_LINE> attribute_map = { 'links': 'links', 'sources': 'sources' } <NEW_LINE> def __init__(self, links=None, sources=None): <NEW_LINE> <INDENT> self._links = None <NEW_LINE> self._sources = None <NEW_LINE> self.discriminator = None <NEW_LINE> if links is not None: <NEW_LINE> <INDENT> self.links = links <NEW_LINE> <DEDENT> if sources is not None: <NEW_LINE> <INDENT> self.sources = sources <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self._links <NEW_LINE> <DEDENT> @links.setter <NEW_LINE> def links(self, links): <NEW_LINE> <INDENT> self._links = links <NEW_LINE> <DEDENT> @property <NEW_LINE> def sources(self): <NEW_LINE> <INDENT> return self._sources <NEW_LINE> <DEDENT> @sources.setter <NEW_LINE> def sources(self, sources): <NEW_LINE> <INDENT> self._sources = sources <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Sources): <NEW_LINE> <INDENT> return False 
<NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fc23317a56b869be689 |
class PylonsLoader(BaseLoader): <NEW_LINE> <INDENT> def read_configuration(self): <NEW_LINE> <INDENT> self.configured = True <NEW_LINE> return PylonsSettingsProxy() <NEW_LINE> <DEDENT> def on_worker_init(self): <NEW_LINE> <INDENT> self.import_default_modules() | Pylons celery loader
Maps the celery config onto pylons.config | 62598fc27c178a314d78d711 |
class ContentHandler(ResponseHandler): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def accepts(content_type): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def replacer(cls, response_data, path): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def dumps(data, pretty=False, test=None): <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def loads(data): <NEW_LINE> <INDENT> return data | A subclass of ResponseHandlers that adds content handling. | 62598fc24f88993c371f0643 |
class HeatClient(base.DriverBase): <NEW_LINE> <INDENT> def __init__(self, params): <NEW_LINE> <INDENT> super(HeatClient, self).__init__(params) <NEW_LINE> self.fake_stack_create = { "id": "3095aefc-09fb-4bc7-b1f0-f21a304e864c", "links": [ { "href": " ", "rel": "self" } ] } <NEW_LINE> self.fake_stack_get = { "capabilities": [], "creation_time": "2014-06-03T20:59:46Z", "description": "sample stack", "disable_rollback": True, "id": "3095aefc-09fb-4bc7-b1f0-f21a304e864c", "links": [ { "href": " ", "rel": "self" } ], "notification_topics": [], "outputs": [], "parameters": { "OS::project_id": "3ab5b02f-a01f-4f95-afa1-e254afc4a435", "OS::stack_id": "3095aefc-09fb-4bc7-b1f0-f21a304e864c", "OS::stack_name": "simple_stack" }, "stack_name": "simple_stack", "stack_owner": "simple_username", "stack_status": "CREATE_COMPLETE", "stack_status_reason": "Stack CREATE completed successfully", "template_description": "sample stack", "stack_user_project_id": "65728b74-cfe7-4f17-9c15-11d4f686e591", "timeout_mins": "", "updated_time": "", "parent": "", "tags": "", "status": "CREATE_COMPLETE" } <NEW_LINE> <DEDENT> def stack_create(self, **params): <NEW_LINE> <INDENT> return sdk.FakeResourceObject(self.fake_stack_create) <NEW_LINE> <DEDENT> def stack_get(self, stack_id): <NEW_LINE> <INDENT> return sdk.FakeResourceObject(self.fake_stack_get) <NEW_LINE> <DEDENT> def stack_find(self, name_or_id): <NEW_LINE> <INDENT> return sdk.FakeResourceObject(self.fake_stack_get) <NEW_LINE> <DEDENT> def stack_update(self, stack_id, **params): <NEW_LINE> <INDENT> self.fake_stack_get["status"] = "UPDATE_COMPLETE" <NEW_LINE> return sdk.FakeResourceObject(self.fake_stack_get) <NEW_LINE> <DEDENT> def stack_delete(self, stack_id, ignore_missing=True): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def wait_for_stack(self, stack_id, status, failures=None, interval=2, timeout=None): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def wait_for_stack_delete(self, stack_id, timeout=None): <NEW_LINE> <INDENT> return | 
Heat V1 driver. | 62598fc2ec188e330fdf8b06 |
class FanBase(device_base.DeviceBase): <NEW_LINE> <INDENT> DEVICE_TYPE = "fan" <NEW_LINE> FAN_DIRECTION_INTAKE = "intake" <NEW_LINE> FAN_DIRECTION_EXHAUST = "exhaust" <NEW_LINE> STATUS_LED_COLOR_GREEN = "green" <NEW_LINE> STATUS_LED_COLOR_AMBER = "amber" <NEW_LINE> STATUS_LED_COLOR_RED = "red" <NEW_LINE> STATUS_LED_COLOR_OFF = "off" <NEW_LINE> def get_direction(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_speed(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_target_speed(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_speed_tolerance(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def set_speed(self, speed): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def set_status_led(self, color): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_status_led(self): <NEW_LINE> <INDENT> raise NotImplementedError | Abstract base class for interfacing with a fan module | 62598fc2bf627c535bcb1719 |
class AjaxListView(AjaxMultipleObjectTemplateResponseMixin, BaseListView): <NEW_LINE> <INDENT> pass | Allows Ajax pagination of a list of objects.
You can use this class-based view in place of *ListView* in order to
recreate the behaviour of the *page_template* decorator.
For instance, assume you have this code (taken from Django docs)::
from django.conf.urls.defaults import *
from django.views.generic import ListView
from books.models import Publisher
urlpatterns = patterns('',
(r'^publishers/$', ListView.as_view(model=Publisher)),
)
You want to Ajax paginate publishers, so, as seen, you need to switch
the template if the request is Ajax and put the page template
into the context as a variable named *page_template*.
This is straightforward, you only need to replace the view class, e.g.::
from django.conf.urls.defaults import *
from books.models import Publisher
from el_pagination.views import AjaxListView
urlpatterns = patterns('',
(r'^publishers/$', AjaxListView.as_view(model=Publisher)),
)
NOTE: Django >= 1.3 is required to use this view. | 62598fc221bff66bcd722edd |
class AppPatch(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'application_id': {'readonly': True}, 'state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'AppSkuInfo'}, 'identity': {'key': 'identity', 'type': 'SystemAssignedServiceIdentity'}, 'application_id': {'key': 'properties.applicationId', 'type': 'str'}, 'display_name': {'key': 'properties.displayName', 'type': 'str'}, 'subdomain': {'key': 'properties.subdomain', 'type': 'str'}, 'template': {'key': 'properties.template', 'type': 'str'}, 'state': {'key': 'properties.state', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AppPatch, self).__init__(**kwargs) <NEW_LINE> self.tags = kwargs.get('tags', None) <NEW_LINE> self.sku = kwargs.get('sku', None) <NEW_LINE> self.identity = kwargs.get('identity', None) <NEW_LINE> self.application_id = None <NEW_LINE> self.display_name = kwargs.get('display_name', None) <NEW_LINE> self.subdomain = kwargs.get('subdomain', None) <NEW_LINE> self.template = kwargs.get('template', None) <NEW_LINE> self.state = None | The description of the IoT Central application.
Variables are only populated by the server, and will be ignored when sending a request.
:param tags: A set of tags. Instance tags.
:type tags: dict[str, str]
:param sku: A valid instance SKU.
:type sku: ~azure.mgmt.iotcentral.models.AppSkuInfo
:param identity: The managed identities for the IoT Central application.
:type identity: ~azure.mgmt.iotcentral.models.SystemAssignedServiceIdentity
:ivar application_id: The ID of the application.
:vartype application_id: str
:param display_name: The display name of the application.
:type display_name: str
:param subdomain: The subdomain of the application.
:type subdomain: str
:param template: The ID of the application template, which is a blueprint that defines the
characteristics and behaviors of an application. Optional; if not specified, defaults to a
blank blueprint and allows the application to be defined from scratch.
:type template: str
:ivar state: The current state of the application. Possible values include: "created",
"suspended".
:vartype state: str or ~azure.mgmt.iotcentral.models.AppState | 62598fc24527f215b58ea142 |
class UserSave(models.Model): <NEW_LINE> <INDENT> lab_proxy = models.ForeignKey(LabProxy) <NEW_LINE> user = models.ForeignKey(User) <NEW_LINE> save_file = models.FileField(blank=True, null=True, upload_to='edx/labster/lab/save') <NEW_LINE> created_at = models.DateTimeField(default=timezone.now) <NEW_LINE> modified_at = models.DateTimeField(default=timezone.now) <NEW_LINE> play_count = models.IntegerField(default=0) <NEW_LINE> is_finished = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('lab_proxy', 'user') <NEW_LINE> <DEDENT> def get_new_save_file_name(self): <NEW_LINE> <INDENT> timestamp = calendar.timegm(datetime.utcnow().utctimetuple()) <NEW_LINE> file_name = "{}_{}_{}.zip".format(timestamp, self.lab_proxy_id, self.user_id) <NEW_LINE> return file_name | SavePoint need to be linked to LabProxy instead of Lab
The way we designed the system, many courses could use same lab,
with different set of questions. | 62598fc2dc8b845886d5382e |
class TestFileItem(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> logging.basicConfig() <NEW_LINE> logging.getLogger("wetransfer-python-sdk").setLevel(logging.DEBUG) <NEW_LINE> self.mock_handler = MockLoggingHandler() <NEW_LINE> LOGGER.addHandler(self.mock_handler) <NEW_LINE> self.temp_file = tempfile.NamedTemporaryFile() <NEW_LINE> with open(self.temp_file.name, 'w') as f: <NEW_LINE> <INDENT> f.write("123456") <NEW_LINE> <DEDENT> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> item = File(self.temp_file.name) <NEW_LINE> self.assertEqual(item.filename, os.path.split(self.temp_file.name)[1]) <NEW_LINE> self.assertEqual(item.filesize, 6) <NEW_LINE> self.assertEqual(item.content_identifier, "file") <NEW_LINE> self.assertEqual(item.local_identifier, self.temp_file.name) <NEW_LINE> <DEDENT> def test_serialize(self): <NEW_LINE> <INDENT> item = File(self.temp_file.name) <NEW_LINE> expected_value = { "filename": item.filename, "filesize": 6, "content_identifier": item.content_identifier, "local_identifier": item.local_identifier[-34:] } <NEW_LINE> self.assertEqual(item.serialize(), expected_value) <NEW_LINE> <DEDENT> def test_load_info(self): <NEW_LINE> <INDENT> item = File(self.temp_file.name) <NEW_LINE> kwargs = { "id": 1, "transfer_id": 1, "client_options": {}, "multipart_parts": [], "multipart_upload_id": [], } <NEW_LINE> item.load_info(**kwargs) <NEW_LINE> self.assertEqual(item.id, 1) <NEW_LINE> self.assertEqual(item.transfer_id, 1) <NEW_LINE> self.assertEqual(item.client_options, {}) <NEW_LINE> self.assertEqual(item.multipart_parts, []) <NEW_LINE> self.assertEqual(item.multipart_upload_id, []) <NEW_LINE> <DEDENT> def test_str(self): <NEW_LINE> <INDENT> item = File(self.temp_file.name) <NEW_LINE> regexp = ( r"Transfer item, file type, with size 6, name \S+, and local path" r" \S+, has None multi parts" ) <NEW_LINE> pattern = re.compile(regexp) <NEW_LINE> is_match = pattern.match(str(item)) <NEW_LINE> self.assertTrue(is_match) | Test class to 
host main tests for File class in items package. | 62598fc260cbc95b063645b0 |
class BitStore(object): <NEW_LINE> <INDENT> def __init__(self, shape, device, store=None): <NEW_LINE> <INDENT> if store is not None: <NEW_LINE> <INDENT> self.store = store <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.store = torch.zeros(shape, dtype=torch.long).to(device) <NEW_LINE> <DEDENT> <DEDENT> def push(self, N, M): <NEW_LINE> <INDENT> self.store *= M <NEW_LINE> self.store += N <NEW_LINE> <DEDENT> def pop(self, M): <NEW_LINE> <INDENT> N = self.store % M <NEW_LINE> self.store = torch.div(self.store, M, rounding_mode="trunc") <NEW_LINE> return N <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return repr(self.store) | Efficiently stores information with non-integer number of bits (up to 16). | 62598fc24428ac0f6e658798 |
class ManagerNotExecutingError(ManagerError): <NEW_LINE> <INDENT> pass | Base class for Yarely Manager thread execution errors. | 62598fc266656f66f7d5a666 |
class Adapter(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> @abc.abstractproperty <NEW_LINE> def name(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def start_scan(self, timeout_sec): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def stop_scan(self, timeout_sec): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def is_scanning(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def power_on(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def power_off(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def is_powered(self): <NEW_LINE> <INDENT> raise NotImplementedError | Base class for a BLE network adapter. | 62598fc223849d37ff851327 |
class DataManager(list): <NEW_LINE> <INDENT> def __init__(self, backend): <NEW_LINE> <INDENT> self.backend = backend <NEW_LINE> self.convert = BFConvertWrapper(self.backend) <NEW_LINE> <DEDENT> def load(self, fpath): <NEW_LINE> <INDENT> def is_microscopy_item(fpath): <NEW_LINE> <INDENT> l = fpath.split('.') <NEW_LINE> ext = l[-1] <NEW_LINE> pre_ext = l[-2] <NEW_LINE> if ( (ext == 'tif' or ext == 'tiff') and pre_ext != 'ome' ): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> if not self.convert.already_converted(fpath): <NEW_LINE> <INDENT> path_to_manifest = self.convert(fpath) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path_to_manifest = os.path.join(self.backend.directory, os.path.basename(fpath), 'manifest.json') <NEW_LINE> <DEDENT> collection = None <NEW_LINE> if is_microscopy_item(fpath): <NEW_LINE> <INDENT> collection = MicroscopyCollection() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> collection = ImageCollection() <NEW_LINE> <DEDENT> collection.parse_manifest(path_to_manifest) <NEW_LINE> self.append(collection) <NEW_LINE> return collection | Class for managing :class:`jicimagelib.image.ImageCollection` instances. | 62598fc27047854f4633f647 |
class not_in_offical(Exception): <NEW_LINE> <INDENT> pass | This package is not in official repoisitories | 62598fc2be7bc26dc9251f96 |
class Partner(osv.Model): <NEW_LINE> <INDENT> _inherit = 'res.partner' <NEW_LINE> _columns = { 'instructor' : fields.boolean(string="Instructor"), } <NEW_LINE> _defaults = { 'instructor' : False, } | Heredado de res.partner | 62598fc2adb09d7d5dc0a7f2 |
class Top25SmoothPageSet(page_set_module.PageSet): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Top25SmoothPageSet, self).__init__( user_agent_type='desktop', archive_data_file='data/top_25_smooth.json', bucket=page_set_module.PARTNER_BUCKET) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.GoogleWebSearchPage)(self)) <NEW_LINE> self.AddUserStory(GmailSmoothPage(self)) <NEW_LINE> self.AddUserStory(GoogleCalendarSmoothPage(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.GoogleImageSearchPage)(self)) <NEW_LINE> self.AddUserStory(GoogleDocSmoothPage(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.GooglePlusPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.YoutubePage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.BlogspotPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.WordpressPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.FacebookPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.LinkedinPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.WikipediaPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.TwitterPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.PinterestPage)(self)) <NEW_LINE> self.AddUserStory(ESPNSmoothPage(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.WeatherPage)(self)) <NEW_LINE> self.AddUserStory(_CreatePageClassWithSmoothInteractions( top_pages.YahooGamesPage)(self)) <NEW_LINE> other_urls = [ 'http://news.yahoo.com', 'http://www.cnn.com', 'http://www.amazon.com', 'http://www.ebay.com', 'http://booking.com', 'http://answers.yahoo.com', 
'http://sports.yahoo.com/', 'http://techcrunch.com' ] <NEW_LINE> for url in other_urls: <NEW_LINE> <INDENT> self.AddUserStory(TopSmoothPage(url, self)) | Pages hand-picked for 2012 CrOS scrolling tuning efforts. | 62598fc2bf627c535bcb171b |
class SQLAlchemyBinding(object): <NEW_LINE> <INDENT> def __init__(self, provider, session, user=None, client=None, token=None, grant=None, current_user=None): <NEW_LINE> <INDENT> if user: <NEW_LINE> <INDENT> user_binding = UserBinding(user, session) <NEW_LINE> provider.usergetter(user_binding.get) <NEW_LINE> <DEDENT> if client: <NEW_LINE> <INDENT> client_binding = ClientBinding(client, session) <NEW_LINE> provider.clientgetter(client_binding.get) <NEW_LINE> <DEDENT> if token: <NEW_LINE> <INDENT> token_binding = TokenBinding(token, session) <NEW_LINE> provider.tokengetter(token_binding.get) <NEW_LINE> provider.tokensetter(token_binding.set) <NEW_LINE> <DEDENT> if grant: <NEW_LINE> <INDENT> if not current_user: <NEW_LINE> <INDENT> raise ValueError(('`current_user` is required' 'for Grant Binding')) <NEW_LINE> <DEDENT> grant_binding = GrantBinding(grant, session, current_user) <NEW_LINE> provider.grantgetter(grant_binding.get) <NEW_LINE> provider.grantsetter(grant_binding.set) | Configures the given :class:`OAuth2Provider` instance with the
required getters and setters for persistence with SQLAlchemy.
An example of using all models::
oauth = OAuth2Provider(app)
SQLAlchemyBinding(oauth, session, user=User, client=Client,
token=Token, grant=Grant, current_user=current_user)
You can omit any model if you wish to register the functions yourself.
It is also possible to override the functions by registering them
afterwards::
oauth = OAuth2Provider(app)
SQLAlchemyBinding(oauth, session, user=User, client=Client,
token=Token)
@oauth.grantgetter
def get_grant(client_id, code):
pass
@oauth.grantsetter
def set_grant(client_id, code, request, *args, **kwargs):
pass
# register tokensetter with oauth but keeping the tokengetter
# registered by `SQLAlchemyBinding`
# You would only do this for the token and grant since user and client
# only have getters
@oauth.tokensetter
def set_token(token, request, *args, **kwargs):
pass
Note that current_user is only required if you're using SQLAlchemy
for grant caching. If you're using another caching system with
GrantCacheBinding instead, omit current_user.
:param provider: :class:`OAuth2Provider` instance
:param session: A :class:`Session` object
:param user: :class:`User` model
:param client: :class:`Client` model
:param token: :class:`Token` model
:param grant: :class:`Grant` model
:param current_user: function that returns a :class:`User` object | 62598fc2ec188e330fdf8b08 |
class ShowdownPokemon(object):
    """Build dict representations of a pokemon scraped from the
    Pokemon Showdown battle UI.

    The classmethods parse Selenium WebElements from different UI
    locations and return a dict shaped like ``schema``.
    """

    # Template describing the dict shape produced by the factory methods.
    schema = {
        'name': '',
        'hp': 0,
        'maxhp': 0,
        'ability': '',
        'item': '',
        'stats': [0],
        'state': '',
        'status': ['']
    }

    STATES = ['active', 'fainted']
    STATUSES = []

    @classmethod
    @schema_validated
    def from_team_icon(cls, poke_elem):
        """Parse a pokemon from a team-icon element's ``title`` attribute.

        Returns None when the title does not end with a known state suffix.
        """
        root = {}
        name_string = poke_elem.get_attribute('title')
        for state in cls.STATES:
            if name_string.endswith(" ({})".format(state)):
                root['name'] = name_string.split(' ')[0]
                # BUG FIX: 'status' was never initialized before .append(),
                # which raised KeyError; create the list on first use.
                root.setdefault('status', []).append(state)
                return root

    @classmethod
    @schema_validated
    def from_pop_up(cls, button_object, pop_up):
        """Parse a pokemon from the switch-button pop-up tooltip."""
        root = {}
        data = button_object.get_attribute('value').split(',')
        root['name'] = data[0]
        if len(data) > 1:
            root['state'] = data[1]

        sections = pop_up.find_elements_by_tag_name('p')

        # BUG FIX: the status badges were appended to 'stats' and then
        # immediately clobbered by the numeric stats below; they belong
        # under the schema's 'status' key.
        root['status'] = [
            span.text
            for span in sections[0].find_elements_by_tag_name('span')
        ]

        hp_string = sections[0].text
        # NOTE(review): the pattern accepts "12/undefined", in which case
        # int(hp_data[1]) would raise ValueError -- confirm upstream
        # guarantees a numeric max HP here.
        hp_data = re.findall(r'\d+/(?:\d+|undefined)', hp_string)[0].split('/')
        root['hp'] = int(hp_data[0])
        # BUG FIX: key was 'max_hp' but the schema defines 'maxhp'.
        root['maxhp'] = int(hp_data[1])

        ability_string = sections[1].text
        root['ability'] = ability_string.split(': ')[-1]

        item_string = sections[2].text
        root['item'] = item_string.split(': ')[-1]

        stats_string = sections[3].text
        # BUG FIX: re.findall takes (pattern, string); the arguments were
        # swapped, so the stat numbers were never extracted.
        root['stats'] = [int(x) for x in re.findall(r'\d+', stats_string)]
        return root
class AnyValue:
    """Wildcard value that compares equal to every object.

    Useful for storing parameters in resultsets: if an experiment's
    result does not depend on some parameter, storing ``AnyValue()`` for
    that parameter lets the result match any filter applied on it. For
    example, when plotting sensitivity to a parameter across strategies,
    a strategy that ignores the parameter will still be selected from
    the resultset no matter which value is filtered on.
    """

    def __eq__(self, other):
        # Intentionally match everything.
        return True

    def __ne__(self, other):
        # Mirror __eq__: never report inequality.
        return False
class USCTListFilterTestCase(EmployeeAdminFilterTestCase):
    """Test list filter for membership in a USCT regiment."""

    def _get_filtered_queryset(self, params):
        """Build an admin changelist for a GET with ``params`` and return
        its queryset. Extracted to remove four copies of the same stanza.
        """
        request = self.request_factory.get('/', params)
        request.user = self.user
        changelist = self.modeladmin.get_changelist_instance(request)
        return changelist.get_queryset(request)

    def test_lookups(self):
        # One employee in a USCT regiment, one in a VRC regiment.
        usct_regiment = RegimentFactory(usct=True)
        usct_employee = EmployeeFactory(last_name='Dodge')
        usct_employee.regiments.add(usct_regiment)

        vrc_regiment = RegimentFactory(vrc=True)
        vrc_employee = EmployeeFactory(last_name='MacNulty')
        vrc_employee.regiments.add(vrc_regiment)

        request = self.request_factory.get('/')
        request.user = self.user
        # Renamed from 'filter' to avoid shadowing the builtin.
        list_filter = USCTListFilter(request, params='', model=Employee,
                                     model_admin=EmployeeAdmin)
        self.assertEqual(sorted(list_filter.lookup_choices),
                         sorted(YES_NO_LOOKUPS))

        # No filter parameter: both employees are returned.
        self.assertSetEqual(set(self._get_filtered_queryset({})),
                            {usct_employee, vrc_employee})

        # usct=Yes: only the USCT member.
        self.assertSetEqual(set(self._get_filtered_queryset({'usct': 'Yes'})),
                            {usct_employee})

        # usct=No: only the non-USCT member.
        self.assertSetEqual(set(self._get_filtered_queryset({'usct': 'No'})),
                            {vrc_employee})

        # Unrecognized value: the filter is a no-op.
        self.assertSetEqual(set(self._get_filtered_queryset({'usct': 'Maybe'})),
                            set(Employee.objects.all()))
class Address(BaseModel):
    """Address model. It can store only cities and countries or whole
    addresses with geo spatial information.

    Attributes:
        id: Primary key.
        street: CharField(50). Street address (optional).
        city: CharField(20). City.
        region: CharField(20). Region (optional).
        zipCode: CharField(20). Zip code (optional).
        country: CharField(2). Country code from Choices.COUNTRY.
        long: DecimalField(18, 12). Longitude for the address (geo data).
        lat: DecimalField(18, 12). Latitude for the address (geo data).
    """

    id = models.AutoField(primary_key=True, db_column='ID_SITE_ADDRESS')
    street = models.CharField(max_length=50, null=True, blank=True,
                              db_column='STREET',
                              verbose_name=_('Street'), help_text=_('Street'))
    city = models.CharField(max_length=20, db_column='CITY',
                            verbose_name=_('City'), help_text=_('City'))
    region = models.CharField(max_length=20, null=True, blank=True,
                              db_column='REGION',
                              verbose_name=_('Region'), help_text=_('Region'))
    zipCode = models.CharField(max_length=20, null=True, blank=True,
                               db_column='ZIP_CODE',
                               verbose_name=_('Zip Code'),
                               help_text=_('Zip Code'))
    country = models.CharField(max_length=2, choices=Choices.COUNTRY,
                               db_column='COUNTRY',
                               verbose_name=_('Country'),
                               help_text=_('Country'))
    # NOTE(review): 'long' shadows the Python 2 builtin name; left as-is
    # because renaming would change the database column and the model API.
    long = models.DecimalField(max_digits=18, decimal_places=12, null=True,
                               blank=True, verbose_name=_('Geo Longitude'),
                               help_text=_('Geo longitude'))
    lat = models.DecimalField(max_digits=18, decimal_places=12, null=True,
                              blank=True, verbose_name=_('Geo Latitude'),
                              help_text=_('Geo latitude'))

    def __unicode__(self):
        # Python 2 string representation: "<street> <city>".
        return '%s %s' % (self.street, self.city)

    class Meta:
        db_table = 'SITE_ADDRESS'
        verbose_name = _('Address')
        verbose_name_plural = _('Addresses')
class TidePoolAlreadyBoundError(TidePoolException):
    """Attempted to bind a TidePool that is already bound."""
class MaterialForm(ModelForm):
    """Form for creating/editing a Material.

    When ``contenedor`` is checked, both capacity fields
    (``capacidad_peso`` in kg and ``capacidad_volumen`` in m3) must be
    strictly positive.
    """

    def __init__(self, *args, **kwargs):
        super(MaterialForm, self).__init__(*args, **kwargs)
        self.fields['unidad'].empty_label = "Seleccione la unidad"

    class Meta:
        model = Material
        fields = [
            'material', 'unidad', 'ancho', 'largo', 'alto', 'peso',
            'precio', 'contenedor', 'capacidad_peso', 'capacidad_volumen',
            'recuperable'
        ]
        labels = {
            'material': ('Nombre del material'),
            'precio': ('Precio unitario del material'),
            'peso': ('Peso unitario del material'),
            'recuperable': ('Marcar si el material es recuperable luego de su uso'),
            'ancho': ('Ancho del material (cms)'),
            'largo': ('Largo del material (cms)'),
            'alto': ('Alto del material (cms)'),
            'capacidad_peso': ('Capacidad de contener objetos (kg)'),
            'capacidad_volumen': ('Capacidad de contener objetos (m3)'),
            'contenedor': ('Marcar si el material es contenedor'),
            'unidad': ('Unidad de medida'),
        }

    def clean_capacidad_peso(self):
        """Validate the weight capacity: must be > 0 for containers."""
        diccionario_limpio = self.cleaned_data
        capacidad_peso = diccionario_limpio.get('capacidad_peso')
        contenedor = diccionario_limpio.get('contenedor')
        if contenedor:
            # NOTE(review): a missing/None value would raise TypeError on
            # the comparison below -- confirm the field is required.
            if capacidad_peso <= 0:
                # BUG FIX: message said "(m3)" but this field is the weight
                # capacity in kg (see the field's label above).
                raise forms.ValidationError(
                    "La capacidad de contener objetos (kg) debe ser mayor a cero")
        return capacidad_peso

    def clean_capacidad_volumen(self):
        """Validate the volume capacity: must be > 0 for containers."""
        diccionario_limpio = self.cleaned_data
        capacidad_volumen = diccionario_limpio.get('capacidad_volumen')
        contenedor = diccionario_limpio.get('contenedor')
        if contenedor:
            if capacidad_volumen <= 0:
                # BUG FIX: message said "(kg)" but this field is the volume
                # capacity in m3 (see the field's label above).
                raise forms.ValidationError(
                    "La capacidad de contener objetos (m3) debe ser mayor a cero")
        return capacidad_volumen
class Resource(Model):
    """An azure resource object.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Azure resource Id
    :vartype id: str
    :ivar name: Azure resource name
    :vartype name: str
    :ivar type: Azure resource type
    :vartype type: str
    :param location: Resource location
    :type location: str
    :param tags: Resource tags
    :type tags: dict
    """

    # Server-populated fields are marked read-only; 'location' is the
    # only attribute callers must supply.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }

    # Maps attribute names to their wire-format keys and msrest types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, location, tags=None):
        # id/name/type are populated by the server; left as None here.
        self.id = None
        self.name = None
        self.type = None
        self.location = location
        self.tags = tags
class WebAPIScopeDictionary(object):
    """A Web API scope dictionary.

    This class knows how to build a list of available scopes from the
    WebAPI resource tree at runtime.

    By default, it will only have to walk the API tree once, after which
    the value can be cached.
    """

    def __init__(self, root_resource):
        self.resource_trees = {root_resource}

        # Guards lazy population of _scope_dict across threads.
        self._update_lock = threading.Lock()
        self._scope_dict = {}

    @property
    def scope_dict(self):
        """Mapping of scope names to human-readable descriptions.

        Built lazily on first access with double-checked locking so the
        resource tree is only walked once.
        """
        if not self._scope_dict:
            with self._update_lock:
                if not self._scope_dict:
                    self._walk_resources(self.resource_trees)
                    assert self._scope_dict

        return self._scope_dict

    def iterkeys(self):
        """Iterate over all scope names."""
        # BUG FIX: plain dicts have no .iterkeys() on Python 3; use the
        # six helper, as keys() below already does.
        return six.iterkeys(self.scope_dict)

    def keys(self):
        """Return an iterator over all scope names."""
        return six.iterkeys(self.scope_dict)

    def clear(self):
        """Clear the cached scopes so they are rebuilt on next access."""
        self._scope_dict.clear()

    def _walk_resources(self, resources):
        """Recursively walk resources, adding one scope per HTTP-method
        group of each OAuth2-enabled resource.
        """
        for resource in resources:
            self._walk_resources(resource.list_child_resources)
            self._walk_resources(resource.item_child_resources)

            scope_to_methods = defaultdict(list)

            if not isinstance(resource, ResourceOAuth2TokenMixin):
                # Consistency fix: log through the module logger (as the
                # error path below does), not the root logger.
                logger.warning(
                    'Resource %r does not inherit from '
                    'djblets.webapi.resources.mixins.oauth2_tokens.'
                    'ResourceOAuth2TokenMixin: it will not be accessible '
                    'with OAuth2 tokens. It is recommended that all your '
                    'resources inherit from a base class that includes '
                    'this mixin.',
                    type(resource),
                )
                continue

            if not resource.oauth2_token_access_allowed:
                continue

            for method in resource.allowed_methods:
                try:
                    suffix = resource.HTTP_SCOPE_METHOD_MAP[method]
                except KeyError:
                    logger.error('Unknown HTTP method %s not present in '
                                 'HTTP_SCOPE_METHOD_MAP: a scope will not '
                                 'be generated for this method.',
                                 method)
                    continue

                scope_to_methods[suffix].append(method)

            for suffix, methods in six.iteritems(scope_to_methods):
                scope_name = '%s:%s' % (resource.scope_name, suffix)

                self._scope_dict[scope_name] = (
                    _('Ability to perform HTTP %(methods)s on the '
                      '%(name)s resource')
                    % {
                        'methods': ', '.join(sorted(methods)),
                        'name': resource.name,
                    }
                )

    def __getitem__(self, key):
        """Return the description for the given scope name."""
        return self.scope_dict[key]

    def __contains__(self, key):
        """Return whether the given scope name exists."""
        return key in self.scope_dict

    def __repr__(self):
        return '%s(%r)' % (type(self).__name__,
                           list(six.iterkeys(self.scope_dict)))
class BaseConfig(object):
    """Base configuration class."""

    # Debug mode is off by default; subclasses may override.
    DEBUG = False
    # Enable cross-site request forgery protection.
    CSRF_ENABLED = True
    # Secret key and database URL are read from the environment;
    # both are None if the variables are unset.
    SECRET_KEY = os.getenv('SECRET')
    SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class Solution4:
    """Reverse the bits of a 32-bit unsigned integer, bit by bit.

    The bit at index i of the input ends up at index 31 - i of the
    output. The right-most bit is peeled off with ``n & 1``, placed at
    its mirrored position, and the input is shifted right; iteration
    stops early once no set bits remain.
    """

    def reverseBits(self, n: int) -> int:
        reversed_value = 0
        target_position = 31
        while n:
            lowest_bit = n & 1
            # Bits land at distinct positions, so OR-ing is equivalent
            # to summing the shifted contributions.
            reversed_value |= lowest_bit << target_position
            target_position -= 1
            n >>= 1
        return reversed_value
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.