code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class OpXORTarget(OpRegTarget): <NEW_LINE> <INDENT> def execute(self, slot, subindex, roi, result): <NEW_LINE> <INDENT> if slot is self.Valid: <NEW_LINE> <INDENT> result[:] = 1 <NEW_LINE> return <NEW_LINE> <DEDENT> data = self.Input[roi.start[0]:roi.stop[0], :].wait() <NEW_LINE> result[:, 0] = 1 - np.square(1 - data.sum(axis=1)) | The result of (kinda) XORing channel 0 and 1
xor_cont(a, b) := 1 - (1 - a - b)^2 | 62598fc3be7bc26dc9251fb0 |
class TestingConfig(DevelopmentConfig): <NEW_LINE> <INDENT> TESTING = True <NEW_LINE> WTF_CSRF_ENABLED = False | Used when running tests. | 62598fc32c8b7c6e89bd3a6c |
class Environment: <NEW_LINE> <INDENT> UAT = 1 <NEW_LINE> PROD = 9 | Application Environment Value | 62598fc37b180e01f3e491a4 |
class Glyph: <NEW_LINE> <INDENT> def __init__(self,glyphname = '',contours = []): <NEW_LINE> <INDENT> self.glyphname = glyphname <NEW_LINE> self.contours = contours <NEW_LINE> <DEDENT> def charger(self,data): <NEW_LINE> <INDENT> self.glyphname = data['glyphname'] <NEW_LINE> self.contours = [] <NEW_LINE> for donnees_contour in data['contours']: <NEW_LINE> <INDENT> contour = None <NEW_LINE> contour = Contour() <NEW_LINE> contour.points = [] <NEW_LINE> contour.charger(donnees_contour) <NEW_LINE> self.contours.append(contour) <NEW_LINE> <DEDENT> <DEDENT> def traiter(self,resolution): <NEW_LINE> <INDENT> resolution = resolution + 2 <NEW_LINE> self.copie = Glyph(self.glyphname) <NEW_LINE> self.copie.contours = [] <NEW_LINE> for contour in self.contours: <NEW_LINE> <INDENT> self.copie.contours.append(contour.traiter(resolution)) <NEW_LINE> <DEDENT> <DEDENT> def export(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.copie <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> return self.glyphname <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{\"glyphname\":\"%s\",\"contours\":%s}" % (self.glyphname, str(self.contours)) | Classe permettant le représentatation simple
d'un objet Glyphe calqué sur FontForge | 62598fc360cbc95b063645e6 |
class PlotRaster(QtWidgets.QDialog): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.indata = {} <NEW_LINE> self.parent = parent <NEW_LINE> self.setAttribute(QtCore.Qt.WA_DeleteOnClose) <NEW_LINE> self.setWindowTitle('Graph Window') <NEW_LINE> vbl = QtWidgets.QVBoxLayout(self) <NEW_LINE> hbl = QtWidgets.QHBoxLayout() <NEW_LINE> self.mmc = MyMplCanvas(self) <NEW_LINE> mpl_toolbar = NavigationToolbar2QT(self.mmc, self.parent) <NEW_LINE> self.combobox1 = QtWidgets.QComboBox() <NEW_LINE> label1 = QtWidgets.QLabel('Bands:') <NEW_LINE> hbl.addWidget(label1) <NEW_LINE> hbl.addWidget(self.combobox1) <NEW_LINE> self.combobox2 = QtWidgets.QComboBox() <NEW_LINE> label2 = QtWidgets.QLabel('Colormap:') <NEW_LINE> hbl.addWidget(label2) <NEW_LINE> hbl.addWidget(self.combobox2) <NEW_LINE> self.combobox2.addItems(['viridis', 'jet', 'gray', 'terrain']) <NEW_LINE> vbl.addWidget(self.mmc) <NEW_LINE> vbl.addWidget(mpl_toolbar) <NEW_LINE> vbl.addLayout(hbl) <NEW_LINE> self.setFocus() <NEW_LINE> self.combobox1.currentIndexChanged.connect(self.change_band) <NEW_LINE> self.combobox2.currentIndexChanged.connect(self.change_band) <NEW_LINE> <DEDENT> def change_band(self): <NEW_LINE> <INDENT> i = self.combobox1.currentIndex() <NEW_LINE> cmap = self.combobox2.currentText() <NEW_LINE> if 'Raster' in self.indata: <NEW_LINE> <INDENT> data = self.indata['Raster'] <NEW_LINE> self.mmc.update_raster(data[i], cmap) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.show() <NEW_LINE> if 'Raster' in self.indata: <NEW_LINE> <INDENT> data = self.indata['Raster'] <NEW_LINE> <DEDENT> elif 'Cluster' in self.indata: <NEW_LINE> <INDENT> data = self.indata['Cluster'] <NEW_LINE> <DEDENT> for i in data: <NEW_LINE> <INDENT> self.combobox1.addItem(i.dataid) | Plot Raster Class.
Attributes
----------
parent : parent
reference to the parent routine
indata : dictionary
dictionary of input datasets | 62598fc3f9cc0f698b1c5426 |
class ShowIPsecPolicy(neutronv20.ShowCommand): <NEW_LINE> <INDENT> resource = 'ipsecpolicy' <NEW_LINE> log = logging.getLogger(__name__ + '.ShowIPsecPolicy') | Show information of a given IPsec policy. | 62598fc355399d3f056267c2 |
class FuseSegmentations(FSCommand): <NEW_LINE> <INDENT> _cmd = 'mri_fuse_segmentations' <NEW_LINE> input_spec = FuseSegmentationsInputSpec <NEW_LINE> output_spec = FuseSegmentationsOutputSpec <NEW_LINE> def _format_arg(self, name, spec, value): <NEW_LINE> <INDENT> if name in ('in_segmentations', 'in_segmentations_noCC', 'in_norms'): <NEW_LINE> <INDENT> return spec.argstr % os.path.basename(value[0]) <NEW_LINE> <DEDENT> return super(FuseSegmentations, self)._format_arg(name, spec, value) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self.output_spec().get() <NEW_LINE> outputs['out_file'] = os.path.abspath( self.inputs.out_file) <NEW_LINE> return outputs | fuse segmentations together from multiple timepoints
Examples
--------
>>> from nipype.interfaces.freesurfer import FuseSegmentations
>>> fuse = FuseSegmentations()
>>> fuse.inputs.subject_id = 'tp.long.A.template'
>>> fuse.inputs.timepoints = ['tp1', 'tp2']
>>> fuse.inputs.out_file = 'aseg.fused.mgz'
>>> fuse.inputs.in_segmentations = ['aseg.mgz', 'aseg.mgz']
>>> fuse.inputs.in_segmentations_noCC = ['aseg.mgz', 'aseg.mgz']
>>> fuse.inputs.in_norms = ['norm.mgz', 'norm.mgz', 'norm.mgz']
>>> fuse.cmdline # doctest: +IGNORE_UNICODE
'mri_fuse_segmentations -n norm.mgz -a aseg.mgz -c aseg.mgz tp.long.A.template tp1 tp2' | 62598fc392d797404e388cb7 |
class OperationConsumer(Process): <NEW_LINE> <INDENT> def __init__(self, pipe): <NEW_LINE> <INDENT> Process.__init__(self) <NEW_LINE> self._pipe = pipe <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._logger = logging.getLogger( '{}:{}'.format(__name__, current_process().name)) <NEW_LINE> try: <NEW_LINE> <INDENT> self._running = True <NEW_LINE> while self._running: <NEW_LINE> <INDENT> if self._pipe.poll(): <NEW_LINE> <INDENT> msg = self._pipe.recv() <NEW_LINE> if msg == 'quit': <NEW_LINE> <INDENT> self._running = False <NEW_LINE> self._pipe.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ops = msg <NEW_LINE> results = [] <NEW_LINE> for op in ops: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r = compute(op) <NEW_LINE> results.append(str(r)) <NEW_LINE> <DEDENT> except SyntaxError: <NEW_LINE> <INDENT> self._logger.warning( 'The operation "{}" cannot be computed, ' 'it will be skipped'.format(op)) <NEW_LINE> <DEDENT> <DEDENT> self._pipe.send(results) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except (SystemExit, KeyboardInterrupt): <NEW_LINE> <INDENT> self._running = False <NEW_LINE> self._pipe.close() <NEW_LINE> sys.exit(1) | Process derived class.
Consumes lists of operations in raw strings through its pipe,
computates results and sends back another list containing them. | 62598fc33d592f4c4edbb162 |
class CrawlerBankier(object): <NEW_LINE> <INDENT> def __init__(self, databaseHelper = AbstractDatabaseHelper("")): <NEW_LINE> <INDENT> self.databaseHelper = databaseHelper <NEW_LINE> <DEDENT> def collectData(self): <NEW_LINE> <INDENT> pass; | description of class | 62598fc33346ee7daa33779e |
class ServantYuHun(PassiveManage): <NEW_LINE> <INDENT> pass | 式神御魂 | 62598fc38a349b6b436864ea |
class HttpNotFoundException(HttpException): <NEW_LINE> <INDENT> def __init__(self, reason = None): <NEW_LINE> <INDENT> self.statuscode = 404 <NEW_LINE> if not reason: <NEW_LINE> <INDENT> reason = "" <NEW_LINE> <DEDENT> self.reason = reason | Exception thrown when a 404 status code has to be returned
| 62598fc366673b3332c3067f |
class HostDeleteTestCase(CLITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(HostDeleteTestCase, self).setUp() <NEW_LINE> result = Proxy.list() <NEW_LINE> self.assertGreater(len(result), 0) <NEW_LINE> self.puppet_proxy = result[0] <NEW_LINE> self.host = entities.Host() <NEW_LINE> self.host.create_missing() <NEW_LINE> self.host = Host.create({ u'architecture-id': self.host.architecture.id, u'domain-id': self.host.domain.id, u'environment-id': self.host.environment.id, u'location-id': self.host.location.id, u'mac': self.host.mac, u'medium-id': self.host.medium.id, u'name': gen_string('alphanumeric'), u'operatingsystem-id': self.host.operatingsystem.id, u'organization-id': self.host.organization.id, u'partition-table-id': self.host.ptable.id, u'puppet-proxy-id': self.puppet_proxy['id'], u'root-pass': self.host.root_pass, }) <NEW_LINE> <DEDENT> @tier1 <NEW_LINE> def test_positive_delete_by_id(self): <NEW_LINE> <INDENT> Host.delete({'id': self.host['id']}) <NEW_LINE> with self.assertRaises(CLIReturnCodeError): <NEW_LINE> <INDENT> Host.info({'id': self.host['id']}) <NEW_LINE> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_positive_delete_by_name(self): <NEW_LINE> <INDENT> Host.delete({'name': self.host['name']}) <NEW_LINE> with self.assertRaises(CLIReturnCodeError): <NEW_LINE> <INDENT> Host.info({'name': self.host['name']}) | Tests for deleting the hosts via CLI. | 62598fc33d592f4c4edbb163 |
class TriangularRandom(Generator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Generator.__init__(self) <NEW_LINE> self.rng = UniformRNG() <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> a = self.rng.random() <NEW_LINE> b = self.rng.random() / 2 <NEW_LINE> if (a < 0.5 and a > b) or (a >= 0.5 and (a - 0.5) < b): <NEW_LINE> <INDENT> return a | Random generator with triangular distribution. | 62598fc3ad47b63b2c5a7b03 |
class ClusterNodeVMDeploymentConfig(object): <NEW_LINE> <INDENT> swagger_types = { 'placement_type': 'str' } <NEW_LINE> attribute_map = { 'placement_type': 'placement_type' } <NEW_LINE> discriminator_value_class_map = { 'VsphereClusterNodeVMDeploymentConfig': 'VsphereClusterNodeVMDeploymentConfig' } <NEW_LINE> def __init__(self, placement_type=None): <NEW_LINE> <INDENT> self._placement_type = None <NEW_LINE> self.discriminator = 'placement_type' <NEW_LINE> self.placement_type = placement_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def placement_type(self): <NEW_LINE> <INDENT> return self._placement_type <NEW_LINE> <DEDENT> @placement_type.setter <NEW_LINE> def placement_type(self, placement_type): <NEW_LINE> <INDENT> if placement_type is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `placement_type`, must not be `None`") <NEW_LINE> <DEDENT> allowed_values = ["VsphereClusterNodeVMDeploymentConfig"] <NEW_LINE> if placement_type not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `placement_type` ({0}), must be one of {1}" .format(placement_type, allowed_values) ) <NEW_LINE> <DEDENT> self._placement_type = placement_type <NEW_LINE> <DEDENT> def get_real_child_model(self, data): <NEW_LINE> <INDENT> discriminator_value = data[self.discriminator].lower() <NEW_LINE> return self.discriminator_value_class_map.get(discriminator_value) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else 
item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ClusterNodeVMDeploymentConfig, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ClusterNodeVMDeploymentConfig): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fc3283ffb24f3cf3b30 |
class AnalyticalSoln(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def displacement(self, locs): <NEW_LINE> <INDENT> (npts, dim) = locs.shape <NEW_LINE> disp = numpy.zeros( (1, npts, 2), dtype=numpy.float64) <NEW_LINE> disp[0,:,0] = exx*locs[:,0] + exy*locs[:,1] <NEW_LINE> disp[0,:,1] = eyy*locs[:,1] + exy*locs[:,0] <NEW_LINE> return disp <NEW_LINE> <DEDENT> def strain(self, locs): <NEW_LINE> <INDENT> (npts, dim) = locs.shape <NEW_LINE> strain = numpy.zeros( (1, npts, 3), dtype=numpy.float64) <NEW_LINE> strain[0,:,0] = exx <NEW_LINE> strain[0,:,1] = eyy <NEW_LINE> strain[0,:,2] = exy <NEW_LINE> return strain <NEW_LINE> <DEDENT> def stress(self, locs): <NEW_LINE> <INDENT> (npts, dim) = locs.shape <NEW_LINE> stress = numpy.zeros( (1, npts, 3), dtype=numpy.float64) <NEW_LINE> stress[0,:,0] = sxx <NEW_LINE> stress[0,:,1] = syy <NEW_LINE> stress[0,:,2] = sxy <NEW_LINE> return stress | Analytical solution to axial/shear displacement problem. | 62598fc350812a4eaa620d3b |
class QWinJumpList(__PyQt5_QtCore.QObject): <NEW_LINE> <INDENT> def addCategory(self, *__args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def categories(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def childEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def connectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def customEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def disconnectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def frequent(self): <NEW_LINE> <INDENT> return QWinJumpListCategory <NEW_LINE> <DEDENT> def identifier(self): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def isSignalConnected(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def receivers(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def recent(self): <NEW_LINE> <INDENT> return QWinJumpListCategory <NEW_LINE> <DEDENT> def sender(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def senderSignalIndex(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setIdentifier(self, p_str): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tasks(self): <NEW_LINE> <INDENT> return QWinJumpListCategory <NEW_LINE> <DEDENT> def timerEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> pass | QWinJumpList(parent: QObject = None) | 62598fc3d8ef3951e32c7fb2 |
class BaseElement: <NEW_LINE> <INDENT> def __init__(self,basefile): <NEW_LINE> <INDENT> self.authors = basefile["author"] <NEW_LINE> if isinstance(self.authors,str): <NEW_LINE> <INDENT> self.authors = [self.authors] <NEW_LINE> <DEDENT> self.author_names = [] <NEW_LINE> self.author_mails = [] <NEW_LINE> for author in self.authors: <NEW_LINE> <INDENT> match = parse_angled(author) <NEW_LINE> self.author_names.append(match[0]) <NEW_LINE> self.author_mails.append(match[1]) <NEW_LINE> <DEDENT> self.license = basefile["license"] <NEW_LINE> match = parse_angled(self.license) <NEW_LINE> self.license_name = match[0] <NEW_LINE> self.license_url = match[1] <NEW_LINE> self.type = basefile["type"] <NEW_LINE> self.source = basefile.get("source","") | Base class for representing BaseElements, yaml structures that describe
the contents of file | 62598fc35fcc89381b2662a3 |
class IMMediate(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "IMMediate" <NEW_LINE> args = [] <NEW_LINE> class AMPLitude(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "AMPLitude" <NEW_LINE> args = ["1"] <NEW_LINE> class BACKup(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "BACKup" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> BACKup = BACKup() <NEW_LINE> class STEP(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STEP" <NEW_LINE> args = [] <NEW_LINE> class INCRement(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "INCRement" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> INCRement = INCRement() <NEW_LINE> <DEDENT> STEP = STEP() <NEW_LINE> <DEDENT> AMPLitude = AMPLitude() <NEW_LINE> class OFFSet(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "OFFSet" <NEW_LINE> args = ["1"] <NEW_LINE> class LINear(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "LINear" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> LINear = LINear() <NEW_LINE> class LOGarithmic(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "LOGarithmic" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> LOGarithmic = LOGarithmic() <NEW_LINE> <DEDENT> OFFSet = OFFSet() | SOURce:POWer:LEVel:IMMediate
Arguments: | 62598fc3fff4ab517ebcda93 |
class APIKeyMissing(DelightedError): <NEW_LINE> <INDENT> pass | Without an API key this library cannot connect to Delighted. | 62598fc3be7bc26dc9251fb2 |
class IDC(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=32, verbose_name=u'机房名称') <NEW_LINE> linkman = models.CharField(max_length=16, blank=True, null=True, default='', verbose_name=u'联系人') <NEW_LINE> phone = models.CharField(max_length=32, blank=True, null=True, default='', verbose_name=u'联系电话') <NEW_LINE> address = models.CharField(max_length=128, blank=True, null=True, default='', verbose_name=u"机房地址") <NEW_LINE> network = models.TextField(blank=True, null=True, default='', verbose_name=u"IP地址段") <NEW_LINE> operator = models.CharField(max_length=32, blank=True, default='', null=True, verbose_name=u"运营商") <NEW_LINE> comment = models.CharField(max_length=128, blank=True, default='', null=True, verbose_name=u"备注") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = u'IDC' <NEW_LINE> verbose_name_plural = verbose_name | 机房信息 | 62598fc35fdd1c0f98e5e240 |
class _LineOverflow(Exception): <NEW_LINE> <INDENT> pass | Used internally in `ConstructorStr`. | 62598fc32c8b7c6e89bd3a70 |
class Update: <NEW_LINE> <INDENT> def __init__(self, transport: Transport, collection: str, db_type: str, operation: str = 'all'): <NEW_LINE> <INDENT> self.transport = transport <NEW_LINE> self.collection = collection <NEW_LINE> self.db_type = db_type <NEW_LINE> self.operation = operation <NEW_LINE> self.params = {'find': {}, 'update': {}} <NEW_LINE> <DEDENT> def where(self, *conditions) -> 'Update': <NEW_LINE> <INDENT> self.params['find'] = generate_find(AND(*conditions)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def set(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$set'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def push(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$push'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def remove(self, *fields) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$unset'] = {x: '' for x in fields} <NEW_LINE> return self <NEW_LINE> <DEDENT> def rename(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$rename'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def inc(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$inc'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def mul(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$mul'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def max(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$max'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def min(self, obj) -> 'Update': <NEW_LINE> <INDENT> self.params['update']['$min'] = obj <NEW_LINE> return self <NEW_LINE> <DEDENT> def current_timestamp(self, *values) -> 'Update': <NEW_LINE> <INDENT> if self.params['update'].get('$currentDate') is None: <NEW_LINE> <INDENT> self.params['update']['$currentDate'] = {} <NEW_LINE> <DEDENT> self.params['update']['$currentDate'].update({x: {'$type': 'timestamp'} for x in values}) <NEW_LINE> return self <NEW_LINE> <DEDENT> def current_date(self, *values) -> 'Update': 
<NEW_LINE> <INDENT> if self.params['update'].get('$currentDate') is None: <NEW_LINE> <INDENT> self.params['update']['$currentDate'] = {} <NEW_LINE> <DEDENT> self.params['update']['$currentDate'].update({x: {'$type': 'date'} for x in values}) <NEW_LINE> return self <NEW_LINE> <DEDENT> def apply(self) -> Response: <NEW_LINE> <INDENT> return self.transport.update(self.params['find'], self.operation, self.params['update'], self.db_type, self.collection) | The DB Update Class
::
from space_api import API, AND, OR, COND
api = API("My-Project", "localhost:4124")
db = api.mongo() # For a MongoDB interface
response = db.update('posts').where(AND(COND('title', '==', 'Title1'))).set({'title':'Title2'}).apply()
:param transport: (Transport) The API's transport instance
:param collection: (str) The collection name
:param db_type: (str) The database type
:param operation: (str) The (optional) operation (one/all/upsert) (Defaults to 'all') | 62598fc399fddb7c1ca62f43 |
class CreateDynamicOctrees(QueenbeeTask): <NEW_LINE> <INDENT> _input_params = luigi.DictParameter() <NEW_LINE> @property <NEW_LINE> def model(self): <NEW_LINE> <INDENT> value = pathlib.Path(self.input()['CreateRadFolder']['model_folder'].path) <NEW_LINE> return value.as_posix() if value.is_absolute() else pathlib.Path(self.initiation_folder, value).resolve().as_posix() <NEW_LINE> <DEDENT> @property <NEW_LINE> def sunpath(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pathlib.Path(self.input()['GenerateSunpath']['sunpath'].path) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> value = pathlib.Path(self.input()['GenerateSunpath']['sunpath'].path) <NEW_LINE> return value.as_posix() if value.is_absolute() else pathlib.Path(self.initiation_folder, value).resolve().as_posix() <NEW_LINE> <DEDENT> @property <NEW_LINE> def execution_folder(self): <NEW_LINE> <INDENT> return pathlib.Path(self._input_params['simulation_folder']).as_posix() <NEW_LINE> <DEDENT> @property <NEW_LINE> def initiation_folder(self): <NEW_LINE> <INDENT> return pathlib.Path(self._input_params['simulation_folder']).as_posix() <NEW_LINE> <DEDENT> @property <NEW_LINE> def params_folder(self): <NEW_LINE> <INDENT> return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix() <NEW_LINE> <DEDENT> def command(self): <NEW_LINE> <INDENT> return 'honeybee-radiance octree from-abstracted-groups model --sun-path sunpath.mtx --output-folder octree' <NEW_LINE> <DEDENT> def requires(self): <NEW_LINE> <INDENT> return {'GenerateSunpath': GenerateSunpath(_input_params=self._input_params), 'CreateRadFolder': CreateRadFolder(_input_params=self._input_params)} <NEW_LINE> <DEDENT> def output(self): <NEW_LINE> <INDENT> return { 'scene_folder': luigi.LocalTarget( pathlib.Path(self.execution_folder, 'radiance/shortwave/resources/dynamic_groups').resolve().as_posix() ), 'scene_info': luigi.LocalTarget( pathlib.Path( self.params_folder, 
'octree/group_info.json').resolve().as_posix() ) } <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_artifacts(self): <NEW_LINE> <INDENT> return [ {'name': 'model', 'to': 'model', 'from': self.model, 'optional': False}, {'name': 'sunpath', 'to': 'sunpath.mtx', 'from': self.sunpath, 'optional': True}] <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_artifacts(self): <NEW_LINE> <INDENT> return [ { 'name': 'scene-folder', 'from': 'octree', 'to': pathlib.Path(self.execution_folder, 'radiance/shortwave/resources/dynamic_groups').resolve().as_posix(), 'optional': False, 'type': 'folder' }] <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_parameters(self): <NEW_LINE> <INDENT> return [{'name': 'scene-info', 'from': 'octree/group_info.json', 'to': pathlib.Path(self.params_folder, 'octree/group_info.json').resolve().as_posix()}] | Generate a set of octrees from a folder containing abstracted aperture groups. | 62598fc371ff763f4b5e7a2a |
class AddAccountView(PassportView): <NEW_LINE> <INDENT> parameters = {"common": CommonParameters} <NEW_LINE> payload_cls = ManageAccountPayloadSchema <NEW_LINE> responses = { HTTPStatus.CREATED: AccountResponseSchema, } <NEW_LINE> async def process_request( self, request: web.Request, payload: Optional[Payload] = None, **kwargs ) -> Union[web.Response, Tuple[Any, HTTPStatus]]: <NEW_LINE> <INDENT> storage = DBStorage(request.app["db"]) <NEW_LINE> try: <NEW_LINE> <INDENT> add_account = AddUseCase(storage, logger=request.app["logger"]) <NEW_LINE> account = await add_account.execute(payload=payload) <NEW_LINE> return {"account": account}, HTTPStatus.CREATED <NEW_LINE> <DEDENT> except AccountAlreadyExist: <NEW_LINE> <INDENT> return json_response({"errors": {"name": "Already exist"}}, status=422) | Add new account. | 62598fc3167d2b6e312b7224 |
class Web_Services_Securities_s(Collection): <NEW_LINE> <INDENT> def __init__(self, blocking_settings): <NEW_LINE> <INDENT> super(Web_Services_Securities_s, self).__init__(blocking_settings) <NEW_LINE> self._meta_data['object_has_stats'] = False <NEW_LINE> self._meta_data['allowed_lazy_attributes'] = [Web_Services_Security] <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:asm:policies:blocking-settings:web-services-securities:web-services-securitycollectionstate' <NEW_LINE> self._meta_data['attribute_registry'] = { 'tm:asm:policies:blocking-settings:web-services-securities:web-services-securitystate': Web_Services_Security } | BIG-IP® ASM Web-Services-Securities sub-collection | 62598fc34a966d76dd5ef182 |
class SingleLinkedList(object): <NEW_LINE> <INDENT> def __init__(self, node=None): <NEW_LINE> <INDENT> self.__head = node <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self.__head is None <NEW_LINE> <DEDENT> def length(self): <NEW_LINE> <INDENT> cur = self.__head <NEW_LINE> count = 0 <NEW_LINE> while cur is not None: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> cur = cur.next <NEW_LINE> <DEDENT> return count <NEW_LINE> <DEDENT> def append(self, item): <NEW_LINE> <INDENT> node = Node(item) <NEW_LINE> if self.__head is None: <NEW_LINE> <INDENT> self.__head = node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node.next = self.__head <NEW_LINE> self.__head = node <NEW_LINE> <DEDENT> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> node = Node(item) <NEW_LINE> cur = self.__head <NEW_LINE> while cur.next is not None: <NEW_LINE> <INDENT> cur = cur.next <NEW_LINE> <DEDENT> cur.next = node <NEW_LINE> <DEDENT> def travel(self): <NEW_LINE> <INDENT> cur = self.__head <NEW_LINE> while cur is not None: <NEW_LINE> <INDENT> print(cur.element, end=" ") <NEW_LINE> cur = cur.next <NEW_LINE> <DEDENT> print("") <NEW_LINE> <DEDENT> def insert(self, pos, item): <NEW_LINE> <INDENT> node = Node(item) <NEW_LINE> if pos <= 0: <NEW_LINE> <INDENT> self.append(item) <NEW_LINE> <DEDENT> elif pos > self.length(): <NEW_LINE> <INDENT> self.add(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur = self.__head <NEW_LINE> count = 0 <NEW_LINE> while count < pos - 1: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> cur = cur.next <NEW_LINE> <DEDENT> node.next = cur.next <NEW_LINE> cur.next = node <NEW_LINE> <DEDENT> <DEDENT> def remove(self, item): <NEW_LINE> <INDENT> cur = self.__head <NEW_LINE> pre = None <NEW_LINE> while cur is not None: <NEW_LINE> <INDENT> if cur.element == item: <NEW_LINE> <INDENT> if pre is not None: <NEW_LINE> <INDENT> pre.next = cur.next <NEW_LINE> return "remove %s" % item <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pre = cur <NEW_LINE> cur = cur.next 
<NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def search(self, item): <NEW_LINE> <INDENT> cur = self.__head <NEW_LINE> count = 0 <NEW_LINE> while cur is not None: <NEW_LINE> <INDENT> if cur.element == item: <NEW_LINE> <INDENT> return count <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur = cur.next <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> <DEDENT> return None | 单项列表 | 62598fc3bf627c535bcb1755 |
class SipUserIdSerializer(serializers.Serializer): <NEW_LINE> <INDENT> sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) | Base serializer for the sip_user_id field. | 62598fc3442bda511e95c70e |
class DatabaseJSON(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dir_path = os.path.dirname( os.path.realpath(__file__)) <NEW_LINE> self.json_path = os.path.join(self.dir_path, 'database.json') <NEW_LINE> self.last_update = time.time() <NEW_LINE> <DEDENT> def _get_database_json(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(self.json_path, 'r') as file: <NEW_LINE> <INDENT> return json.load(file) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> print('File is still open.') <NEW_LINE> time.sleep(1) <NEW_LINE> self._get_database_json() <NEW_LINE> <DEDENT> <DEDENT> def _initialize_database(self): <NEW_LINE> <INDENT> print('Database initialized here:') <NEW_LINE> print(self.json_path) <NEW_LINE> with open(self.json_path, 'w+') as file: <NEW_LINE> <INDENT> json.dump({ 'lastEntry': { 'analog': {}, 'digital': { 'led': 0 } }, 'entries': {} }, file, indent=4, sort_keys=True) <NEW_LINE> <DEDENT> return self._get_database_json() <NEW_LINE> <DEDENT> def update_database(self, dictionary): <NEW_LINE> <INDENT> data = self._get_database_json() <NEW_LINE> if (time.time() - self.last_update) >= 10: <NEW_LINE> <INDENT> print('Time of logged value') <NEW_LINE> timestamp = datetime.datetime.utcnow().strftime('%y-%m-%d-%H-%M-%S.%f') <NEW_LINE> print(timestamp + ' - data stored') <NEW_LINE> data['entries'].update({timestamp: dictionary['lastEntry']}) <NEW_LINE> print(str(data)) <NEW_LINE> self.last_update = time.time() <NEW_LINE> <DEDENT> data['lastEntry'].update(dictionary['lastEntry']) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(self.json_path, 'w') as data_file: <NEW_LINE> <INDENT> json.dump(data, data_file, indent=4, sort_keys=True) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> print('File is still open.') <NEW_LINE> time.sleep(1) <NEW_LINE> self._get_database_json() <NEW_LINE> <DEDENT> <DEDENT> def get_database_data(self): <NEW_LINE> <INDENT> return self._get_database_json() if os.path.isfile( self.json_path) 
else self._initialize_database() | Creates and manages database updated | 62598fc3dc8b845886d5386a |
class BaseConfig: <NEW_LINE> <INDENT> SECRET_KEY = 'my_precious' <NEW_LINE> DEBUG = False <NEW_LINE> BCRYPT_LOG_ROUNDS = 13 <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> SECRET_KEY = os.getenv('SECRET_KEY', 'my_precious') <NEW_LINE> SECURITY_PASSWORD_SALT = os.getenv('SECRET_KEY_SALT','my_precious_two') <NEW_LINE> MAIL_SERVER = 'smtp.googlemail.com' <NEW_LINE> MAIL_PORT = 465 <NEW_LINE> MAIL_USE_TLS = False <NEW_LINE> MAIL_USE_SSL = True <NEW_LINE> MAIL_USERNAME = os.getenv('APP_MAIL_USERNAME','scloud.service.mail@gmail.com') <NEW_LINE> MAIL_PASSWORD = os.getenv('APP_MAIL_PASSWORD','thesis2017') <NEW_LINE> MAIL_DEFAULT_SENDER = MAIL_USERNAME | Base configuration. | 62598fc34f88993c371f0662 |
class FlavorCoin(Bitcoin):
    """FlavorCoin network parameters.

    Based on
    https://github.com/flavorcoin/FlavorCoin-V2/blob/master/src/net.cpp
    (date of access: 02/15/2018).
    """

    name = 'flavorcoin'
    symbols = ('FLVR', )
    seeds = ("2flav.nodes.altcoinsteps.com", )
    port = 17771
    # First bytes of every network message.
    message_start = b'\xa4\xd2\xf8\xa6'
    # Base58 version bytes for addresses and secret keys.
    base58_prefixes = {
        'PUBKEY_ADDR': 3,
        'SCRIPT_ADDR': 85,
        'SECRET_KEY': 131,
    }
class Optimizer(object):
    """Base optimizer class.

    Constructors of subclasses must take ``learning_rate`` as an argument.
    """

    def apply_grads(self, grads, variables):
        """Apply each gradient to its variable and combine the assignments."""
        assignments = []
        for gradient, variable in zip(grads, variables):
            assignments.extend(self.apply_grad(gradient, variable))
        if not assignments:
            return assignments
        # All variables share a graph; combine on the first one's graph.
        return variables[0].graph.combine_assignments(assignments)

    def apply_grad(self, grad, var):
        """Apply one gradient to one variable; subclasses must override."""
        raise ValueError("apply_grad not implemented %s %s" % (grad, var))
class IscsiInitiatorTargetBaseParameters(object):
    """Swagger model describing iSCSI initiator target portals.

    NOTE: originally auto-generated by the swagger code generator program.
    """

    def __init__(self):
        # Maps attribute name -> swagger type string.
        self.swagger_types = {
            'portals': 'list[TargetPortal]'
        }
        # Maps python attribute -> JSON key.
        self.attribute_map = {
            'portals': 'portals'
        }
        self._portals = None

    @property
    def portals(self):
        """The list of target portals."""
        return self._portals

    @portals.setter
    def portals(self, portals):
        self._portals = portals

    def to_dict(self):
        """Return the model's properties as a dict, recursing into members
        that expose ``to_dict`` themselves."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        # Bug fix: the generated code contained an unreachable
        # `if self is None` branch; `self` can never be None here.
        return self.to_str()

    def __eq__(self, other):
        # Bug fix: the generated code returned None when comparing against
        # None (instead of a bool) and raised AttributeError for objects
        # without a __dict__. Both old falsy results stay falsy here.
        if not isinstance(other, IscsiInitiatorTargetBaseParameters):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
class ContainerTaskInstanceCreateTaskTypeMenu( BaseTaskInstanceCreateTaskTypeMenu ): <NEW_LINE> <INDENT> form_class = ContainerTaskTypeSelectForm <NEW_LINE> create_urlname = "containertaskinstance-create" | View for container task instance creation submenu. | 62598fc33d592f4c4edbb167 |
class Notifier(common.loggable):
    """Send a message to destinations (followers) with the twitter API.

    :param api: An API instance (for now, we are using Tweepy).
    """

    def __init__(self, api):
        self._api = api
        # Follower list is fetched lazily and cached for the object lifetime.
        self._followers = None

    def send(self, message):
        """Send ``message`` as a direct message to every follower.

        Empty (whitespace-only) messages are skipped with a warning.
        """
        if isinstance(message, bytes):
            # Decode bytes leniently, as before.
            message = str(message, errors="ignore")
        elif not isinstance(message, str):
            # Bug fix: `str(message, errors="ignore")` is only valid for
            # bytes-like input and raised TypeError for any other non-str
            # type; stringify everything else normally.
            message = str(message)
        self.logger.debug("Message to send: \"{}\"".format(message))
        if not message.strip():
            self.logger.warn("Empty message")
            return
        for follower in self._get_followers():
            self.logger.info("Sending message to \"{}\": \"{}\"".format(
                follower.screen_name, message))
            self._api.send_direct_message(user_id=follower.id, text=message)

    def _get_followers(self):
        """Return the cached follower list, fetching it on first use."""
        if self._followers is None:
            self._followers = self._api.followers()
        return self._followers
@python_2_unicode_compatible
class SecretRole(ChangeLoggedModel):
    """An arbitrary functional classification of Secrets.

    For example, a user might define roles such as "Login Credentials" or
    "SNMP Communities". By default, only superusers will have access to
    decrypt Secrets; to allow other users to decrypt Secrets, grant them
    access to the appropriate SecretRoles either individually or by group.
    """
    name = models.CharField(
        max_length=50,
        unique=True
    )
    slug = models.SlugField(
        unique=True
    )
    users = models.ManyToManyField(
        to=User,
        related_name='secretroles',
        blank=True
    )
    groups = models.ManyToManyField(
        to=Group,
        related_name='secretroles',
        blank=True
    )

    # Column order used when exporting to CSV.
    csv_headers = ['name', 'slug']

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return "{}?role={}".format(reverse('secrets:secret_list'), self.slug)

    def to_csv(self):
        return (
            self.name,
            self.slug,
        )

    def has_member(self, user):
        """Return True if ``user`` may decrypt Secrets assigned this role."""
        if user.is_superuser:
            return True
        # Direct membership, or membership through any associated group.
        return (
            user in self.users.all()
            or user.groups.filter(pk__in=self.groups.all()).exists()
        )
class PascalCaseJSONRenderer(JSONRenderer): <NEW_LINE> <INDENT> def render(self, data, accepted_media_type=None, renderer_context=None): <NEW_LINE> <INDENT> data = transformations.keys_to_pascalcase(data) <NEW_LINE> return super(PascalCaseJSONRenderer, self).render(data, accepted_media_type=accepted_media_type, renderer_context=renderer_context) | Renderer which serializes to JSON using PascalCase keys. | 62598fc3956e5f7376df57d6 |
class Fresh(Goal):
    """Goal used to bring new logic variables into an assertion.

    ``function`` takes the fresh variables as parameters and returns a
    goal; one variable is created per parameter, carrying its name.
    """

    def __init__(self, function):
        super().__init__()
        self.function = function
        self.function_vars = None
        self.goal = None

    def __repr__(self):
        goal = self.getFunctionGoal()
        return "Fresh %s: %s" % (str(self.function_vars), str(type(goal)))

    def getFunctionGoal(self):
        """Build (once) and return the goal produced by ``function``."""
        if self.goal:
            return self.goal
        params = signature(self.function).parameters
        # One fresh variable per parameter, named after it.
        self.function_vars = lvars(len(params))
        for var, name in zip(self.function_vars, params):
            var.name = name
        self.goal = self.function(*self.function_vars)
        return self.goal

    def __run__(self, state):
        yield from self.getFunctionGoal().run(state)
class Fifo(list): <NEW_LINE> <INDENT> def write(self, data): <NEW_LINE> <INDENT> self.__iadd__(data) <NEW_LINE> return len(data) <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> return self.pop(0) | Basic first in first out (FIFO) buffer implementation | 62598fc35fc7496912d483d3 |
class TrackException(Exception): <NEW_LINE> <INDENT> pass | TrackException class. | 62598fc3a8370b77170f068c |
class AudioInfoViewSet(viewsets.ModelViewSet):
    """Automatically provides `list`, `create`, `retrieve`, `update` and
    `destroy` actions for AudioInfo objects.
    """
    queryset = AudioInfo.objects.all()
    serializer_class = AudioInfoSerializer
class CGLSPlugin(astra.plugin.base): <NEW_LINE> <INDENT> astra_name = "CGLS-PLUGIN" <NEW_LINE> def initialize(self,cfg): <NEW_LINE> <INDENT> self.W = astra.OpTomo(cfg['ProjectorId']) <NEW_LINE> self.vid = cfg['ReconstructionDataId'] <NEW_LINE> self.sid = cfg['ProjectionDataId'] <NEW_LINE> try: <NEW_LINE> <INDENT> v = astra.data2d.get_shared(self.vid) <NEW_LINE> s = astra.data2d.get_shared(self.sid) <NEW_LINE> self.data_mod = astra.data2d <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> v = astra.data3d.get_shared(self.vid) <NEW_LINE> s = astra.data3d.get_shared(self.sid) <NEW_LINE> self.data_mod = astra.data3d <NEW_LINE> <DEDENT> <DEDENT> def run(self, its): <NEW_LINE> <INDENT> v = self.data_mod.get_shared(self.vid) <NEW_LINE> s = self.data_mod.get_shared(self.sid) <NEW_LINE> z = np.zeros(v.shape, dtype=np.float32) <NEW_LINE> p = np.zeros(v.shape, dtype=np.float32) <NEW_LINE> r = np.zeros(s.shape, dtype=np.float32) <NEW_LINE> w = np.zeros(s.shape, dtype=np.float32) <NEW_LINE> W = self.W <NEW_LINE> W.FP(v, out=w) <NEW_LINE> r[:] = s <NEW_LINE> r -= w <NEW_LINE> W.BP(r, out=p) <NEW_LINE> gamma = np.dot(p.ravel(), p.ravel()) <NEW_LINE> for i in range(its): <NEW_LINE> <INDENT> W.FP(p, out=w) <NEW_LINE> alpha = gamma / np.dot(w.ravel(), w.ravel()) <NEW_LINE> z[:] = p <NEW_LINE> z *= alpha <NEW_LINE> v += z <NEW_LINE> w *= -alpha; <NEW_LINE> r += w <NEW_LINE> W.BP(r, out=z) <NEW_LINE> newgamma = np.dot(z.ravel(), z.ravel()) <NEW_LINE> beta = newgamma / gamma <NEW_LINE> gamma = newgamma <NEW_LINE> p *= beta <NEW_LINE> p += z | CGLS. | 62598fc3d8ef3951e32c7fb4 |
class Base(Default, HasVolume, Cancellable):
    """Base class for market and limit orders.

    Responsible for:
    - tracking order's volume
    - keeping order cancellation flag (does it needed for market orders???)
    - notifying order listeners about order matching
    TBD: split into Cancelable, HavingVolume base classes
    """

    def __init__(self, volume, owner=None, volumeFilled=0):
        # Initialise each mixin explicitly (no cooperative super chain here).
        HasVolume.__init__(self, volume, volumeFilled)
        Cancellable.__init__(self)
        Default.__init__(self, owner)

    def copyTo(self, dst):
        """Copy volume and cancellation state onto ``dst``."""
        HasVolume.copyTo(self, dst)
        Cancellable.copyTo(self, dst)

    def __str__(self):
        return "%s_%s[%s]" % (type(self).__name__, str(self.side),
                              HasVolume.__str__(self))
class CDNJsObject(object):
    """A CDNJs library: name, version, default file and hosted files."""

    def __init__(self, name, version, default=None, files=None, keywords=None):
        self.name = name
        self.version = version
        # Bug fix: `default` defaults to None, but the original
        # unconditionally called `default.split(...)`, raising
        # AttributeError; only derive the basename when a path was given.
        self.default = default.split('/')[-1] if default else default
        self.files = files or {}
        self.keywords = keywords or []

    def __str__(self):
        return '<{}/{}>'.format(self.name, self.version)

    def __unicode__(self):
        return str(self)

    def __getitem__(self, item):
        """Return the local or CDN URI for the file whose name ends with
        ``item``; raise FileNotFoundException when absent."""
        for name, obj in self.files.items():
            if name.endswith(item):
                return obj['uri' if Settings.get('USE_LOCAL') else 'cdn']
        raise FileNotFoundException(
            'File {} was not found at {}'.format(item, self.name))

    def __setitem__(self, key, value):
        # A file record must carry both a local and a CDN location.
        if 'uri' not in value or 'cdn' not in value:
            raise InvalidFileException(
                'File {} that is trying to add is invalid'.format(key))
        self.files[key] = value

    def __contains__(self, item):
        # True when any stored file name contains `item` as a substring.
        return any(item in f for f in self.files)

    @property
    def dict(self):
        return {
            'default': self.default,
            'files': self.files
        }

    @property
    def is_valid(self):
        """An object is valid once it holds at least one file."""
        return len(self.files) > 0

    def matches(self, name, version=None):
        """Return whether this object matches ``name`` (and ``version``)."""
        if not any(name in kw for kw in self.keywords) and name not in self.name:
            return False
        if version is not None and self.version != version:
            return False
        return True

    def download(self):
        """Download each file below STATIC_ROOT, skipping existing files,
        and record the local URI for every file."""
        storage_path = os.path.join(
            Settings.get('STATIC_ROOT'), self.name, self.version)
        if not os.path.exists(storage_path):
            os.makedirs(storage_path)
        for name, path_data in self.files.items():
            subdir = CDNJs.get_dir(path_data['cdn'], self.version)
            dir_path = os.path.join(storage_path, subdir)
            file_path = os.path.join(dir_path, name)
            file_uri = '{root}{name}/{version}/{subdir}{file}'.format(
                root=Settings.get('STATIC_URL'),
                name=self.name,
                version=self.version,
                subdir=subdir + '/' if subdir else '',
                file=name
            )
            if not os.path.exists(file_path):
                if not os.path.exists(dir_path):
                    os.makedirs(dir_path)
                # Bug fix: the response was iterated directly and its bytes
                # chunks written to a text-mode file, which fails at runtime;
                # stream binary chunks into a binary file instead.
                response = requests.get(path_data['cdn'], stream=True)
                with open(file_path, 'wb') as f:
                    for chunk in response.iter_content(chunk_size=8192):
                        f.write(chunk)
            self[name] = {
                'cdn': path_data['cdn'],
                'uri': file_uri
            }
class UserAttributeSimilarityValidator:
    """Validate that the password is sufficiently different from the user's
    attributes.

    If no specific attributes are provided, look at a sensible list of
    defaults. Attributes that don't exist are ignored. Comparison is made to
    not only the full attribute value, but also its components, so that, for
    example, a password is validated against either part of an email address,
    as well as the full address.
    """
    DEFAULT_USER_ATTRIBUTES = ('username', 'first_name', 'last_name', 'email')

    def __init__(self, user_attributes=DEFAULT_USER_ATTRIBUTES, max_similarity=0.7):
        self.user_attributes = user_attributes
        if max_similarity < 0.1:
            raise ValueError('max_similarity must be at least 0.1')
        self.max_similarity = max_similarity

    def validate(self, password, user=None):
        if not user:
            return
        password = password.lower()
        for attribute_name in self.user_attributes:
            value = getattr(user, attribute_name, None)
            if not value or not isinstance(value, str):
                continue
            value_lower = value.lower()
            # Check the whole value plus each \W-delimited component.
            parts = re.split(r'\W+', value_lower) + [value_lower]
            for part in parts:
                # Skip parts too short to ever reach the threshold.
                if exceeds_maximum_length_ratio(password, self.max_similarity, part):
                    continue
                similarity = SequenceMatcher(a=password, b=part).quick_ratio()
                if similarity < self.max_similarity:
                    continue
                try:
                    verbose_name = str(
                        user._meta.get_field(attribute_name).verbose_name)
                except FieldDoesNotExist:
                    verbose_name = attribute_name
                raise ValidationError(
                    _("The password is too similar to the %(verbose_name)s."),
                    code='password_too_similar',
                    params={'verbose_name': verbose_name},
                )

    def get_help_text(self):
        return _('Your password can’t be too similar to your other personal information.')
class ElectricAppliances(Inventory): <NEW_LINE> <INDENT> def __init__(self, info): <NEW_LINE> <INDENT> Inventory.__init__(self, info) <NEW_LINE> self.brand = info['brand'] <NEW_LINE> self.voltage = info['voltage'] <NEW_LINE> <DEDENT> def return_as_dictionary(self): <NEW_LINE> <INDENT> output_dict = Inventory.return_as_dictionary(self) <NEW_LINE> output_dict['brand'] = self.brand <NEW_LINE> output_dict['voltage'] = self.voltage <NEW_LINE> return output_dict | ElectricAppliances class is a subclass of Inventory | 62598fc363b5f9789fe85425 |
class GenerateIncrementalDiffJob(BranchMergeProposalJobDerived):
    """A job to generate an incremental diff for a branch merge proposal.

    Provides class methods to create and retrieve such jobs.
    """

    implements(IGenerateIncrementalDiffJob)
    classProvides(IGenerateIncrementalDiffJobSource)

    class_job_type = BranchMergeProposalJobType.GENERATE_INCREMENTAL_DIFF

    task_queue = 'bzrsyncd_job'

    config = config.IBranchMergeProposalJobSource

    def acquireLease(self, duration=600):
        return self.job.acquireLease(duration)

    def run(self):
        revision_set = getUtility(IRevisionSet)
        old_revision = revision_set.getByRevisionId(self.old_revision_id)
        new_revision = revision_set.getByRevisionId(self.new_revision_id)
        # Generate the diff against a read-only branch server.
        with server(get_ro_server(), no_replace=True):
            self.branch_merge_proposal.generateIncrementalDiff(
                old_revision, new_revision)

    @classmethod
    def create(cls, merge_proposal, old_revision_id, new_revision_id):
        metadata = cls.getMetadata(old_revision_id, new_revision_id)
        return cls._create(merge_proposal, metadata)

    @staticmethod
    def getMetadata(old_revision_id, new_revision_id):
        return {
            'old_revision_id': old_revision_id,
            'new_revision_id': new_revision_id,
        }

    @property
    def old_revision_id(self):
        return self.metadata['old_revision_id']

    @property
    def new_revision_id(self):
        return self.metadata['new_revision_id']

    def getOopsVars(self):
        # Local renamed from `vars` to avoid shadowing the builtin.
        oops_vars = BranchMergeProposalJobDerived.getOopsVars(self)
        oops_vars.extend([
            ('old_revision_id', self.metadata['old_revision_id']),
            ('new_revision_id', self.metadata['new_revision_id']),
        ])
        return oops_vars

    def getOperationDescription(self):
        # NOTE(review): the extracted source showed this literal split by a
        # stray line break; reconstructed as a single-line description.
        return 'generating an incremental diff for a merge proposal'

    def getErrorRecipients(self):
        registrant = self.branch_merge_proposal.registrant
        return format_address_for_person(registrant)
class Cmd:
    """'command' class. A command has a name, the action (act) it performs
    on an object (obj), and a dictionary type metadata."""

    def __init__(self, name, obj, act, metadata):
        self.name, self.obj, self.act, self.metadata = name, obj, act, metadata
class SR560(Instrument):
    """Virtual qcodes driver for the SR 560 voltage preamplifier.

    This is a virtual driver only and will not talk to your instrument.

    Note:
        - The ``cutoff_lo`` and ``cutoff_hi`` parameters will interact with
          each other on the instrument (hi cannot be <= lo) but this is not
          managed here; you must ensure yourself that both are correct
          whenever you change one of them.
        - ``gain`` has a vernier setting, which does not yield a
          well-defined output. We restrict this driver to only the
          predefined gain values.
    """

    def __init__(self, name, **kwargs):
        super().__init__(name, **kwargs)

        # Allowed 1-3-10 sequences for the filter corners and gain steps.
        cutoffs = [0.03, 0.1, 0.3, 1, 3, 10, 30, 100, 300, 1000, 3000,
                   10000, 30000, 100000, 300000, 1000000]
        gains = [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000,
                 10000, 20000, 50000]

        self.add_parameter('cutoff_lo',
                           parameter_class=ManualParameter,
                           initial_value='DC',
                           label='High pass',
                           units='Hz',
                           vals=Enum(*cutoffs))
        self.add_parameter('cutoff_hi',
                           parameter_class=ManualParameter,
                           initial_value='1e6',
                           label='Low pass',
                           units='Hz',
                           vals=Enum(*cutoffs))
        self.add_parameter('invert',
                           parameter_class=ManualParameter,
                           initial_value=True,
                           label='Inverted output',
                           vals=Bool())
        self.add_parameter('gain',
                           parameter_class=ManualParameter,
                           initial_value=10,
                           label='Gain',
                           units=None,
                           vals=Enum(*gains))

    def get_idn(self):
        """Return a static identification dict for this virtual instrument."""
        return {'vendor': 'Stanford Research Systems',
                'model': 'SR560',
                'serial': None,
                'firmware': None}
class SellerParty(Node):
    """Defines the SellerParty involved with the invoice. Differs from the
    buyer party by the mandatory register code.

    name: Name of the party of the invoice.
    reg_number: Registration number of the party.
    vat_reg_number: VAT registration number of the party.
    contact_data: Contact information of the party (phone number, e-mail,
        address).
    account_info: Describes the accounts of the party.
    """

    tag = "SellerParty"
    validation_schema = SELLER_PARTY_SCHEMA

    def __init__(
        self,
        name: str,
        reg_number: str,
        vat_reg_number: Optional[str] = None,
        contact_data: Optional[ContactData] = None,
        account_info: Optional[AccountInfo] = None,
    ) -> None:
        payload = {
            "Name": name,
            "RegNumber": reg_number,
            "VATRegNumber": vat_reg_number,
            "ContactData": contact_data,
            "AccountInfo": account_info,
        }
        self.elements = self.validate(payload)
class IllegalMinuteWarning(Warning):
    """Raised when a minute value is 60.

    Parameters
    ----------
    minute : int, float
    """

    def __init__(self, minute, alternativeactionstr=None):
        self.minute = minute
        # Optional text describing what was done instead (appended to str()).
        self.alternativeactionstr = alternativeactionstr

    def __str__(self):
        parts = ["'minute' was found to be '{0}', which is not in "
                 "range [0,60).".format(self.minute)]
        if self.alternativeactionstr is not None:
            parts.append(self.alternativeactionstr)
        return ' '.join(parts)
class RoleExclusion(Exclusion):
    """Exclude a ticket because of the role a person plays in an event.

    This is largely because we know the person will not be able to
    participate in an event they are contributing to - for example a
    performer in a show. If no event is set, the implication is that being
    this role for ANY event means the exclusion takes effect.
    """
    role = models.CharField(max_length=25, choices=role_options)
    event = models.ForeignKey('gbe.Event', blank=True, null=True)

    def __unicode__(self):
        describe = self.role
        if self.event:
            describe += ", " + str(self.event)
        return unicode(describe)

    def is_excluded(self, profile, conference):
        """Return True if ``profile`` is excluded for this role/event."""
        if self.event:
            # Role applies to one specific event only.
            return profile.has_role_in_event(self.role, self.event)
        # No event: exclude if the profile holds the role anywhere in the
        # conference.
        return self.role in profile.get_roles(conference)
class Solution:
    """LeetCode 649 "Dota2 Senate".

    @param senate: a string of 'R' (Radiant) and 'D' (Dire) senators
    @return: the name of the winning party as a string
    """

    def predictPartyVictory(self, senate):
        queue = collections.deque()
        remaining = [0, 0]     # active senators: [Dire, Radiant]
        pending_bans = [0, 0]  # bans queued against each party
        for ch in senate:
            party = 1 if ch == 'R' else 0
            remaining[party] += 1
            queue.append(party)
        # Round-robin: each senator either absorbs a pending ban (and is
        # eliminated) or bans the next senator of the opposing party.
        while remaining[0] and remaining[1]:
            party = queue.popleft()
            if pending_bans[party]:
                pending_bans[party] -= 1
                remaining[party] -= 1
            else:
                pending_bans[party ^ 1] += 1
                queue.append(party)
        return "Radiant" if remaining[1] else "Dire"
class SampleData(object):
    """Data from a sample-set (one sampled line at one time step)."""

    def __init__(self, fName, name, line, index, coord, data, note="",
                 scale=(1, 1), offset=(0, 0)):
        self.file = fName
        self.coord = coord
        self.data = data
        self.name = name
        self.__line = line
        self.index = index
        self.note = note
        self.scale = scale
        self.offset = offset

    def __repr__(self):
        vect = " (vector)" if self.isVector() else ""
        return "SampleData of %s%s on %s at t=%s " % (
            self.name, vect, self.line(), self.time())

    def line(self):
        """Name of the sampled line."""
        return self.__line

    def time(self):
        """Time of sampling, taken from the parent directory name."""
        return path.basename(path.dirname(self.file))

    def isVector(self):
        """Whether the values are vectors (stored as tuples)."""
        return type(self.data[0]) == tuple

    def range(self, component=None):
        """(min, max) of the data (or of one vector component)."""
        values = self.component(component)
        return (min(values), max(values))

    def domain(self):
        """(min, max) of the sampling coordinate."""
        return (min(self.coord), max(self.coord))

    def component(self, component=None):
        """Return scalar data, one vector component, or (for vector data
        with ``component=None``) the vector magnitudes."""
        if not self.isVector():
            return self.data
        if component is None:
            return [math.sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2])
                    for d in self.data]
        if component < 0 or component >= len(self.data[0]):
            error("Requested component", component,
                  "does not fit the size of the data", len(self.data[0]))
        return [d[component] for d in self.data]

    def __call__(self, scaleX=1., scaleData=1, offsetData=0, offsetX=0):
        """Return the (optionally scaled/offset) data as SpreadsheetData."""
        rows = []
        if self.isVector():
            for i, c in enumerate(self.coord):
                rows.append([scaleX * c + offsetX] +
                            [scaleData * v + offsetData for v in self.data[i]])
            names = ["coord", self.name + "_x", self.name + "_y",
                     self.name + "_z"]
        else:
            for i, c in enumerate(self.coord):
                rows.append([scaleX * c + offsetX,
                             scaleData * self.data[i] + offsetData])
            names = ["coord", self.name]
        return SpreadsheetData(data=rows,
                               names=names,
                               title="%s_t=%s" % (self.line(), self.time()))
class TestSpecificScenarios(unittest.TestCase):
    """Additional tests for specific scenarios noticed during manual
    testing."""

    def test_four_move_check_mate(self):
        """The scholar's-mate sequence leaves white the winner."""
        board = Board()
        board.move_piece('E2', 'E4')
        board.move_piece('E7', 'E5')
        board.move_piece('F1', 'C4')
        board.move_piece('D7', 'D6')
        board.move_piece('D1', 'F3')
        board.move_piece('B8', 'C6')
        board.move_piece('F3', 'F7')
        assert_that(board.winner, is_(Winner.WHITE))

    def test_knight_can_capture_attacker(self):
        """The knight's only legal move is capturing the attacker."""
        board = Board(u"_m♖___♗♘♖-♙__m♔___♙-______♘_-________-___♝____-___♟♟♙__-____m♚__♟-♜♕_♛_♝♞♕", Color.BLACK)
        knight_moves = board.get_moves('G8')
        assert_that(knight_moves, is_(set(['F6'])))

    def test_knight_and_bishop_can_block_attacker(self):
        """Either the knight or the bishop may interpose; the king cannot
        move."""
        board = Board(u"♖♘♗_♔_♘♖-♙♙♙__♙♙♙-________-__♛_____-________-_____♞_♝-♟♟♟__♕__-♜♞_♖___m♚", Color.BLACK)
        king_moves = board.get_moves('H8')
        assert_that(king_moves, is_(set([])))
        bishop_moves = board.get_moves('H6')
        assert_that(bishop_moves, is_(set(['F8'])))
        knight_moves = board.get_moves('F6')
        assert_that(knight_moves, is_(set(['E8', 'G8'])))

    def test_checkmate_stops_movement(self):
        """A checkmated position is reported as checkmate."""
        board = Board(u"____♔_♘♖-♙♙♙___♙♙-__♘_____-__♟_____-_♟____♗_-________-♟___m♖___-___m♖__m♚_", Color.BLACK)
        assert_that(board.is_checkmate(Color.BLACK), is_(True))

    def test_blackbishop_can_move(self):
        """The black bishop has its full diagonal available."""
        board = Board(u"♖♘♗_♔♗♘♖-♙♙♙_♙♙♙♙-________-___♙____-___♟___♟-_♟___♞__-__♟_♟♟♟_-_♕♝♛♚♝_♜", Color.BLACK)
        moves = board.get_moves('C8')
        assert_that(moves, is_(set(['A6', 'B7', 'D7', 'E6', 'F5', 'G4', 'H3'])))

    def test_en_passant_4(self):
        """An en-passant capture is accepted after a two-square pawn push."""
        board = Board(u"♖♘♗♕♔♗♘♖-♙♙♙_♙__♙-________-___♙♟_♙_-__♟__♙__-________-♟♟_♟_♟♟♟-♜♞♝♛♚♝♞♜", Color.BLACK)
        board.previous_move = Move('WP', 'D2', 'D4', True)
        board.move_piece('E4', 'D3')
class FilterRoutingRegion(Region):
    """Region of memory which maps routing entries to filter indices.

    Attributes
    ----------
    keyspace_routes : [(BitField, int), ...]
        Pairs of BitFields (keyspaces) to the index of the filter that
        packets matching the entry should be routed.
    """

    def __init__(self, keyspace_routes, filter_routing_tag="filter_routing",
                 index_field="index"):
        self.keyspace_routes = keyspace_routes
        self.filter_routing_tag = filter_routing_tag
        self.index_field = index_field

    def sizeof(self, *args):
        # One 4-byte entry count plus four 4-byte words per route.
        return 4 * (1 + 4 * len(self.keyspace_routes))

    def write_subregion_to_file(self, fp, *args, **kwargs):
        """Serialize the routing table into ``fp``."""
        buf = bytearray(self.sizeof())
        struct.pack_into("<I", buf, 0, len(self.keyspace_routes))
        for i, (keyspace, index) in enumerate(self.keyspace_routes):
            struct.pack_into(
                "<4I", buf, 4 + 16 * i,
                keyspace.get_value(tag=self.filter_routing_tag),
                keyspace.get_mask(tag=self.filter_routing_tag),
                keyspace.get_mask(field=self.index_field),
                index)
        fp.write(buf)
class Zidan(object): <NEW_LINE> <INDENT> def __init__(self,sha_shang_li): <NEW_LINE> <INDENT> super(Zidan,self).__init__() <NEW_LINE> self.sha_shang_li = sha_shang_li <NEW_LINE> <DEDENT> def dazhong(self,diren): <NEW_LINE> <INDENT> diren.diao_xue(self.sha_shang_li) | 子弹 | 62598fc44c3428357761a570 |
class CGGenericGetter(CGAbstractBindingMethod): <NEW_LINE> <INDENT> def __init__(self, descriptor, lenientThis=False): <NEW_LINE> <INDENT> args = [Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'), Argument('*mut JSVal', 'vp')] <NEW_LINE> if lenientThis: <NEW_LINE> <INDENT> name = "genericLenientGetter" <NEW_LINE> unwrapFailureCode = ( "MOZ_ASSERT(!JS_IsExceptionPending(cx));\n" "JS_SET_RVAL(cx, vp, JS::UndefinedValue());\n" "return true;") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = "genericGetter" <NEW_LINE> unwrapFailureCode = None <NEW_LINE> <DEDENT> CGAbstractBindingMethod.__init__(self, descriptor, name, args, unwrapFailureCode) <NEW_LINE> <DEDENT> def generate_code(self): <NEW_LINE> <INDENT> return CGGeneric( "let info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n" "return CallJitPropertyOp(info, cx, obj, this.unsafe_get() as *mut libc::c_void, vp);\n") | A class for generating the C++ code for an IDL attribute getter. | 62598fc4f9cc0f698b1c542b |
class Jump(): <NEW_LINE> <INDENT> def __init__(self, y, x): <NEW_LINE> <INDENT> self.string = '\033[%i;%iH' % (y, x) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.string <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> print(self.string, end = '') | Create a cursor jump that can either be included in a print statement
as a string or invoked
@param y:int The row, 1 based
@param x:int The column, 1 based
@string :str|()→void Functor that can be treated as a string for jumping | 62598fc4956e5f7376df57d8 |
class TestCollectdPluginRead(BaseTestCollectdPlugin): <NEW_LINE> <INDENT> @patch.object(collectd_plugin.rabbit.RabbitMQStats, 'get_vhosts') <NEW_LINE> def test_read(self, mock_vhosts): <NEW_LINE> <INDENT> mock_vhosts.return_value = [dict(name='test_vhost')] <NEW_LINE> dispatch_nodes = MagicMock() <NEW_LINE> dispatch_queues = MagicMock() <NEW_LINE> dispatch_exchanges = MagicMock() <NEW_LINE> self.collectd_plugin.dispatch_nodes = dispatch_nodes <NEW_LINE> self.collectd_plugin.dispatch_queues = dispatch_queues <NEW_LINE> self.collectd_plugin.dispatch_exchanges = dispatch_exchanges <NEW_LINE> self.collectd_plugin.read() <NEW_LINE> self.assertTrue(dispatch_nodes.called) <NEW_LINE> self.assertTrue(dispatch_queues.called) <NEW_LINE> self.assertTrue(dispatch_exchanges.called) <NEW_LINE> <DEDENT> def test_read_no_vhosts(self): <NEW_LINE> <INDENT> dispatch_nodes = MagicMock() <NEW_LINE> dispatch_queues = MagicMock() <NEW_LINE> dispatch_exchanges = MagicMock() <NEW_LINE> self.collectd_plugin.dispatch_nodes = dispatch_nodes <NEW_LINE> self.collectd_plugin.dispatch_queues = dispatch_queues <NEW_LINE> self.collectd_plugin.dispatch_exchanges = dispatch_exchanges <NEW_LINE> self.collectd_plugin.read() <NEW_LINE> self.assertTrue(dispatch_nodes.called) <NEW_LINE> self.assertFalse(dispatch_queues.called) <NEW_LINE> self.assertFalse(dispatch_exchanges.called) | Test that the read method dispatches the proper data. | 62598fc4bf627c535bcb175b |
class LegacyAddressMapper(AddressMapper): <NEW_LINE> <INDENT> def __init__(self, scheduler, engine, build_root): <NEW_LINE> <INDENT> self._scheduler = scheduler <NEW_LINE> self._engine = engine <NEW_LINE> self._build_root = build_root <NEW_LINE> <DEDENT> def scan_build_files(self, base_path): <NEW_LINE> <INDENT> subject = DescendantAddresses(base_path) <NEW_LINE> selector = SelectDependencies(BuildFiles, BuildDirs, field_types=(Dir,)) <NEW_LINE> request = self._scheduler.selection_request([(selector, subject)]) <NEW_LINE> result = self._engine.execute(request) <NEW_LINE> if result.error: <NEW_LINE> <INDENT> raise result.error <NEW_LINE> <DEDENT> build_files_set = set() <NEW_LINE> for state in result.root_products.values(): <NEW_LINE> <INDENT> for build_files in state.value: <NEW_LINE> <INDENT> build_files_set.update(f.path for f in build_files.files) <NEW_LINE> <DEDENT> <DEDENT> return build_files_set <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_declaring_file(address, file_path): <NEW_LINE> <INDENT> return (os.path.dirname(file_path) == address.spec_path and BuildFile._is_buildfile_name(os.path.basename(file_path))) <NEW_LINE> <DEDENT> def addresses_in_spec_path(self, spec_path): <NEW_LINE> <INDENT> return self.scan_specs([SiblingAddresses(spec_path)]) <NEW_LINE> <DEDENT> def scan_specs(self, specs, fail_fast=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> addresses = set(address for a in self._engine.product_request(Addresses, specs) for address in a.dependencies) <NEW_LINE> <DEDENT> except ExecutionError as e: <NEW_LINE> <INDENT> raise self.BuildFileScanError(str(e)) <NEW_LINE> <DEDENT> return addresses <NEW_LINE> <DEDENT> def scan_addresses(self, root=None): <NEW_LINE> <INDENT> if root: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> base_path = fast_relpath(root, self._build_root) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> raise self.InvalidRootError(e) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> base_path = '' 
<NEW_LINE> <DEDENT> return self.scan_specs([DescendantAddresses(base_path)]) | Provides an implementation of AddressMapper using v2 engine.
This allows tasks to use the context's address_mapper when the v2 engine is enabled. | 62598fc4d486a94d0ba2c285 |
class FauxSocket: <NEW_LINE> <INDENT> def _reuse(self): <NEW_LINE> <INDENT> pass | Faux socket with the minimal interface required by pypy. | 62598fc4ff9c53063f51a902 |
class PorukaView(View): <NEW_LINE> <INDENT> def __init__(self, poruka): <NEW_LINE> <INDENT> View.__init__(self) <NEW_LINE> self.poruka=poruka <NEW_LINE> <DEDENT> def showChild(self): <NEW_LINE> <INDENT> self.ispis(self.poruka) <NEW_LINE> self.unos("Unesite Enter za dalje...") | classdocs | 62598fc4a05bb46b3848ab20 |
class Richards_lateral(lateral_sub_surface_flux): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> flow_thickness = _swig_property(_cmf_core.Richards_lateral_flow_thickness_get, _cmf_core.Richards_lateral_flow_thickness_set) <NEW_LINE> wet_right_node = _swig_property(_cmf_core.Richards_lateral_wet_right_node_get, _cmf_core.Richards_lateral_wet_right_node_set) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _cmf_core.Richards_lateral_swiginit(self, _cmf_core.new_Richards_lateral(*args, **kwargs)) <NEW_LINE> <DEDENT> def usebaseflow(*args, **kwargs): <NEW_LINE> <INDENT> return _cmf_core.Richards_lateral_usebaseflow(*args, **kwargs) <NEW_LINE> <DEDENT> usebaseflow = staticmethod(usebaseflow) <NEW_LINE> __swig_destroy__ = _cmf_core.delete_Richards_lateral | Calculates the flux using Richard's equation for adjacent layers
.. math::
q_{lat} = \frac{\Psi_1 - \Psi_2}{\|C_1-C_2\|}
K(\theta) A
where:
:math:`q_{lat}` the lateral flow in :math:`m^3/day`
:math:`\Psi_i` the head of node i
:math:`\|C_1-C_2\|` is the distance from Cell 1 to Cell 2
:math:`K(\theta_{1,2}) = \sqrt{K(\theta_1) K(\theta_2)}`
:math:`A` the crosssectional area of the interface between storages 1 and 2
C++ includes: subsurfacefluxes.h | 62598fc4be7bc26dc9251fb6 |
class Solution1: <NEW_LINE> <INDENT> def productExceptSelf(self, nums): <NEW_LINE> <INDENT> n = len(nums) <NEW_LINE> from_left = [1] * n <NEW_LINE> from_right = [1] * n <NEW_LINE> res = [1] * n <NEW_LINE> for i in range(1, n): <NEW_LINE> <INDENT> from_left[i] = nums[i - 1] * from_left[i - 1] <NEW_LINE> <DEDENT> for i in range(n - 2, -1, -1): <NEW_LINE> <INDENT> from_right[i] = nums[i + 1] * from_right[i + 1] <NEW_LINE> <DEDENT> for i in range(n): <NEW_LINE> <INDENT> res[i] = from_left[i] * from_right[i] <NEW_LINE> <DEDENT> print(from_left) <NEW_LINE> print(from_right) <NEW_LINE> print(res) <NEW_LINE> return res | @param nums: an array of integers
@return: the product of all the elements of nums except nums[i]. | 62598fc4656771135c489924 |
class ConflictingValues(ValueError): <NEW_LINE> <INDENT> pass | Raised when an incoming value collides with an existing value.
In one sense, it is both a KeyError and a ValueError. | 62598fc42c8b7c6e89bd3a78 |
class ColoredFormatter(logging.Formatter): <NEW_LINE> <INDENT> def __init__(self, fmt=None, datefmt=None, level_styles=None, field_styles=None): <NEW_LINE> <INDENT> self.nn = NameNormalizer() <NEW_LINE> fmt = fmt or DEFAULT_LOG_FORMAT <NEW_LINE> datefmt = datefmt or DEFAULT_DATE_FORMAT <NEW_LINE> self.level_styles = self.nn.normalize_keys(DEFAULT_LEVEL_STYLES if level_styles is None else level_styles) <NEW_LINE> self.field_styles = self.nn.normalize_keys(DEFAULT_FIELD_STYLES if field_styles is None else field_styles) <NEW_LINE> logging.Formatter.__init__(self, self.colorize_format(fmt), datefmt) <NEW_LINE> <DEDENT> def colorize_format(self, fmt): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for token in fmt.split(): <NEW_LINE> <INDENT> for match in re.finditer(r'%\((\w+)\)', token): <NEW_LINE> <INDENT> style = self.nn.get(self.field_styles, match.group(1)) <NEW_LINE> if style: <NEW_LINE> <INDENT> token = ansi_wrap(token, **style) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> result.append(token) <NEW_LINE> <DEDENT> return ' '.join(result) <NEW_LINE> <DEDENT> def format(self, record): <NEW_LINE> <INDENT> style = self.nn.get(self.level_styles, record.levelname) <NEW_LINE> if style: <NEW_LINE> <INDENT> record = copy.copy(record) <NEW_LINE> record.msg = ansi_wrap(coerce_string(record.msg), **style) <NEW_LINE> <DEDENT> return logging.Formatter.format(self, record) | Log :class:`~logging.Formatter` that uses `ANSI escape sequences`_ to create colored logs. | 62598fc45166f23b2e243696 |
class Insertion(object): <NEW_LINE> <INDENT> def __init__(self, element, location, offset, s): <NEW_LINE> <INDENT> self.element = element <NEW_LINE> self.location = location <NEW_LINE> self.offset = offset <NEW_LINE> self.s = s <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Insertion({!r}, {!r}, {!r}, {!r})'.format( self.element, self.location, self.offset, self.s ) <NEW_LINE> <DEDENT> def _fulltext(self): <NEW_LINE> <INDENT> return (self.element.text or '') if self.location == Location.TEXT else (self.element.tail or '') <NEW_LINE> <DEDENT> def _set_fulltext(self, s): <NEW_LINE> <INDENT> if self.location == Location.TEXT: <NEW_LINE> <INDENT> self.element.text = s <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.element.tail = s <NEW_LINE> <DEDENT> <DEDENT> def apply(self, additional_offset=0): <NEW_LINE> <INDENT> self._set_fulltext(_str_insert(self._fulltext(), self.offset + additional_offset, self.s)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def apply_all(insertions): <NEW_LINE> <INDENT> grouped = defaultdict(lambda: []) <NEW_LINE> for i in insertions: <NEW_LINE> <INDENT> key = i.element, i.location <NEW_LINE> grouped[key].append(i) <NEW_LINE> <DEDENT> for key, group in grouped.items(): <NEW_LINE> <INDENT> group.sort(key=lambda i: i.offset) <NEW_LINE> accumulated_offset = 0 <NEW_LINE> for i in group: <NEW_LINE> <INDENT> i.apply(accumulated_offset) <NEW_LINE> accumulated_offset += len(i.s) | An object representing inserting `s` into text of `element` at `offset`. | 62598fc4283ffb24f3cf3b3a |
class IParentTitleAsCreator(form.Schema): <NEW_LINE> <INDENT> containers_as_creators = schema.Tuple( title=_(u'label_containers_as_creators', default=u'List Content Types for Parent\'s Title as Creator'), description=_( u'help_containers_as_creators', default=(u'List each content type in a new line. ' u'Enable Parent\'s Title as Creator Behavior for child ' u'content type.') ), value_type=schema.TextLine(), required=False, missing_value="", ) | Define controlpanel Data data structure | 62598fc4aad79263cf42ea8b |
@implementer(ISplitter) <NEW_LINE> class Splitter(object): <NEW_LINE> <INDENT> rx = re.compile(r"(?u)\w+") <NEW_LINE> rxGlob = re.compile(r"(?u)\w+[\w*?]*") <NEW_LINE> def process(self, lst): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for s in lst: <NEW_LINE> <INDENT> result += self.rx.findall(s) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def processGlob(self, lst): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for s in lst: <NEW_LINE> <INDENT> result += self.rxGlob.findall(s) <NEW_LINE> <DEDENT> return result | A simple :class:`zope.index.text.interfaces.ISplitter`. | 62598fc4851cf427c66b856b |
class BaseDistOptimizer(BaseOptimizer): <NEW_LINE> <INDENT> construct_initial = BaseOptimizer.construct_uniform_initial <NEW_LINE> def __init__(self, dist, marginals, rv_mode=None): <NEW_LINE> <INDENT> super().__init__(dist, dist.rvs, crvs=[], rv_mode='indices') <NEW_LINE> self._all_vars = self._rvs <NEW_LINE> self.dist = prepare_dist(dist) <NEW_LINE> self._vpmf = self.dist.pmf.copy() <NEW_LINE> self._A, self._b = marginal_constraints_generic(self.dist, marginals, rv_mode) <NEW_LINE> self._shape = [len(alpha) for alpha in self.dist.alphabet] <NEW_LINE> self._free = infer_free_values(self._A, self._b) <NEW_LINE> self.constraints = [{'type': 'eq', 'fun': self.constraint_match_marginals, }, ] <NEW_LINE> self._optvec_size = len(self._free) <NEW_LINE> self._default_hops = 50 <NEW_LINE> self._additional_options = {'options': {'maxiter': 1000, 'ftol': 1e-7, 'eps': 1.4901161193847656e-08, } } <NEW_LINE> <DEDENT> def optimize(self, x0=None, niter=None, maxiter=None, polish=1e-8, callback=False): <NEW_LINE> <INDENT> if len(self._free) == 0: <NEW_LINE> <INDENT> self._optima = self._vpmf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if x0 is not None and len(x0) == len(self._vpmf): <NEW_LINE> <INDENT> x0 = x0[self._free] <NEW_LINE> <DEDENT> result = super().optimize(x0=x0, niter=niter, maxiter=maxiter, polish=polish, callback=callback) <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def construct_vector(self, x): <NEW_LINE> <INDENT> if self._free: <NEW_LINE> <INDENT> self._vpmf[self._free] = x <NEW_LINE> <DEDENT> return self._vpmf <NEW_LINE> <DEDENT> def construct_joint(self, x): <NEW_LINE> <INDENT> vec = self.construct_vector(x) <NEW_LINE> pmf = vec.reshape(self._shape) <NEW_LINE> return pmf <NEW_LINE> <DEDENT> def constraint_match_marginals(self, x): <NEW_LINE> <INDENT> pmf = self.construct_vector(x) <NEW_LINE> return sum((np.dot(self._A, pmf) - self._b)**2) <NEW_LINE> <DEDENT> def construct_dist(self, x=None, cutoff=1e-6, sparse=True): <NEW_LINE> <INDENT> if x is 
None: <NEW_LINE> <INDENT> x = self._optima.copy() <NEW_LINE> <DEDENT> pmf = self.construct_vector(x) <NEW_LINE> pmf[pmf < cutoff] = 0 <NEW_LINE> pmf /= pmf.sum() <NEW_LINE> new_dist = self.dist.copy() <NEW_LINE> new_dist.pmf = pmf.ravel() <NEW_LINE> if sparse: <NEW_LINE> <INDENT> new_dist.make_sparse() <NEW_LINE> <DEDENT> new_dist.set_rv_names(self.dist.get_rv_names()) <NEW_LINE> return new_dist | Calculate an optimized distribution consistent with the given marginal constraints. | 62598fc45fdd1c0f98e5e249 |
class QuarterHPI(struct): <NEW_LINE> <INDENT> _slots = ((int, 'year'),(int, 'qtr'),(float, 'index')) | QuarterHPI class | 62598fc450812a4eaa620d40 |
class YamlOutputHandler(output.CementOutputHandler): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> interface = output.IOutput <NEW_LINE> label = 'yaml' <NEW_LINE> <DEDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> super(YamlOutputHandler, self).__init__(*args, **kw) <NEW_LINE> self.config = None <NEW_LINE> <DEDENT> def _setup(self, app_obj): <NEW_LINE> <INDENT> self.app = app_obj <NEW_LINE> <DEDENT> def render(self, data_dict, template=None): <NEW_LINE> <INDENT> LOG.debug("rendering output as Yaml via %s" % self.__module__) <NEW_LINE> sys.stdout = backend.__saved_stdout__ <NEW_LINE> sys.stderr = backend.__saved_stderr__ <NEW_LINE> return yaml.dump(data_dict) | This class implements the :ref:`IOutput <cement.core.output>`
interface. It provides YAML output from a data dictionary and uses
`pyYAML <http://pyyaml.org/wiki/PyYAMLDocumentation>`_ to dump it to
STDOUT.
Note: The cement framework detects the '--yaml' option and suppresses
output (same as if passing --quiet). Therefore, if debugging or
troubleshooting issues you must pass the --debug option to see whats
going on. | 62598fc4adb09d7d5dc0a832 |
class LiveChatWindow(): <NEW_LINE> <INDENT> @skip('manual') <NEW_LINE> @priority("Low") <NEW_LINE> def test_live_chat_minimaze_or_maximize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @skip('manual') <NEW_LINE> @priority("Low") <NEW_LINE> def test_live_chat_close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @skip('manual') <NEW_LINE> @priority("Low") <NEW_LINE> def test_live_chat_view(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @skip('manual') <NEW_LINE> @priority("Low") <NEW_LINE> def test_live_on_all_pages(self): <NEW_LINE> <INDENT> pass | Story: Окно LiveChat | 62598fc426068e7796d4cc12 |
class CausalMeanValueImputation(MissingValueImputation): <NEW_LINE> <INDENT> def __call__(self, values: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> if len(values) == 1 or np.isnan(values).all(): <NEW_LINE> <INDENT> return DummyValueImputation()(values) <NEW_LINE> <DEDENT> mask = np.isnan(values) <NEW_LINE> last_value_imputation = LastValueImputation() <NEW_LINE> value_no_nans = last_value_imputation(values) <NEW_LINE> adjusted_values_to_causality = np.concatenate( (np.repeat(0.0, 1), value_no_nans[:-1]) ) <NEW_LINE> cumsum = np.cumsum(adjusted_values_to_causality) <NEW_LINE> indices = np.linspace(0, len(value_no_nans) - 1, len(value_no_nans)) <NEW_LINE> ar_res = cumsum / indices.astype(float) <NEW_LINE> values[mask] = ar_res[mask] <NEW_LINE> values[0] = value_no_nans[0] <NEW_LINE> return values | This class replaces each missing value with the average of all the values up to this point.
(If the first values are missing, they are replaced by the closest non missing value.) | 62598fc4f548e778e596b855 |
class FileSynchronisation(Synchronisation): <NEW_LINE> <INDENT> def __init__(self, source: str, destination: str, overwrite: bool=False): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> self.destination = destination <NEW_LINE> self.overwrite = overwrite | File synchronisation configuration. | 62598fc4656771135c489926 |
class RedisBackend(KeyValueStoreBackend): <NEW_LINE> <INDENT> redis = redis <NEW_LINE> host = 'localhost' <NEW_LINE> port = 6379 <NEW_LINE> db = 0 <NEW_LINE> password = None <NEW_LINE> max_connections = None <NEW_LINE> supports_native_join = True <NEW_LINE> implements_incr = True <NEW_LINE> def __init__(self, host=None, port=None, db=None, password=None, expires=None, max_connections=None, url=None, **kwargs): <NEW_LINE> <INDENT> super(RedisBackend, self).__init__(**kwargs) <NEW_LINE> conf = self.app.conf <NEW_LINE> if self.redis is None: <NEW_LINE> <INDENT> raise ImproperlyConfigured( 'You need to install the redis library in order to use ' + 'the Redis result store backend.') <NEW_LINE> <DEDENT> def _get(key): <NEW_LINE> <INDENT> for prefix in 'CELERY_REDIS_%s', 'REDIS_%s': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return conf[prefix % key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if host and '://' in host: <NEW_LINE> <INDENT> url, host = host, None <NEW_LINE> <DEDENT> self.url = url <NEW_LINE> uhost = uport = upass = udb = None <NEW_LINE> if url: <NEW_LINE> <INDENT> _, uhost, uport, _, upass, udb, _ = _parse_url(url) <NEW_LINE> udb = udb.strip('/') if udb else 0 <NEW_LINE> <DEDENT> self.host = uhost or host or _get('HOST') or self.host <NEW_LINE> self.port = int(uport or port or _get('PORT') or self.port) <NEW_LINE> self.db = udb or db or _get('DB') or self.db <NEW_LINE> self.password = upass or password or _get('PASSWORD') or self.password <NEW_LINE> self.expires = self.prepare_expires(expires, type=int) <NEW_LINE> self.max_connections = (max_connections or _get('MAX_CONNECTIONS') or self.max_connections) <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> return self.client.get(key) <NEW_LINE> <DEDENT> def mget(self, keys): <NEW_LINE> <INDENT> return self.client.mget(keys) <NEW_LINE> <DEDENT> def set(self, key, value): <NEW_LINE> <INDENT> client = self.client <NEW_LINE> if self.expires is 
not None: <NEW_LINE> <INDENT> client.setex(key, value, self.expires) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> client.set(key, value) <NEW_LINE> <DEDENT> client.publish(key, value) <NEW_LINE> <DEDENT> def delete(self, key): <NEW_LINE> <INDENT> self.client.delete(key) <NEW_LINE> <DEDENT> def incr(self, key): <NEW_LINE> <INDENT> return self.client.incr(key) <NEW_LINE> <DEDENT> def expire(self, key, value): <NEW_LINE> <INDENT> return self.client.expire(key, value) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def client(self): <NEW_LINE> <INDENT> pool = self.redis.ConnectionPool(host=self.host, port=self.port, db=self.db, password=self.password, max_connections=self.max_connections) <NEW_LINE> return self.redis.Redis(connection_pool=pool) <NEW_LINE> <DEDENT> def __reduce__(self, args=(), kwargs={}): <NEW_LINE> <INDENT> kwargs.update( dict(host=self.host, port=self.port, db=self.db, password=self.password, expires=self.expires, max_connections=self.max_connections)) <NEW_LINE> return super(RedisBackend, self).__reduce__(args, kwargs) | Redis task result store. | 62598fc499fddb7c1ca62f48 |
class VALue(SCPINode, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "VALue" <NEW_LINE> args = ["1"] | DIGital:SKEW:VALue
Arguments: 1 | 62598fc44a966d76dd5ef18c |
class SubtractMaximumDatasetPlugin(_OneOutputDatasetPlugin): <NEW_LINE> <INDENT> menu = (_('Subtract'), _('Maximum'),) <NEW_LINE> name = 'Subtract Maximum' <NEW_LINE> description_short = _('Subtract maximum from dataset') <NEW_LINE> description_full = _('Subtract the maximum value from a dataset') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.fields = [ field.FieldDataset('ds_in', _('Input dataset 1')), field.FieldDataset('ds_out', _('Output dataset name')), ] <NEW_LINE> <DEDENT> def updateDatasets(self, fields, helper): <NEW_LINE> <INDENT> dsin = helper.getDataset(fields['ds_in']) <NEW_LINE> data = dsin.data <NEW_LINE> filtered = data[N.isfinite(data)] <NEW_LINE> if len(filtered) != 0: <NEW_LINE> <INDENT> data = data - filtered.max() <NEW_LINE> <DEDENT> self.dsout.update( data=data, serr=dsin.serr, perr=dsin.perr, nerr=dsin.nerr) | Dataset plugin to subtract minimum from dataset. | 62598fc463b5f9789fe8542b |
class CreateMirror12Test(BaseTest): <NEW_LINE> <INDENT> runCmd = "aptly mirror create --keyring=aptlytest.gpg mirror12 http://mirror.yandex.ru/debian/ squeeze" <NEW_LINE> fixtureGpg = False <NEW_LINE> gold_processor = BaseTest.expand_environ <NEW_LINE> outputMatchPrepare = lambda _, s: re.sub(r'Signature made .* using|gpgv: keyblock resource .*$|gpgv: Can\'t check signature: .*$', '', s, flags=re.MULTILINE) <NEW_LINE> expectedCode = 1 | create mirror: repo with Release+Release.gpg verification, failure | 62598fc497e22403b383b1c1 |
class TestFillCyclicInitZero(GridFillTest): <NEW_LINE> <INDENT> cyclic = True <NEW_LINE> initzonal = False | Cyclic, initialized with zeros. | 62598fc4ad47b63b2c5a7b0f |
class LinearDecayGreedyEpsilonPolicy(GreedyEpsilonPolicy): <NEW_LINE> <INDENT> def __init__(self, start_eps, end_eps, num_steps): <NEW_LINE> <INDENT> super(LinearDecayGreedyEpsilonPolicy, self).__init__(start_eps) <NEW_LINE> self.num_steps = num_steps <NEW_LINE> self.decay_rate = (start_eps - end_eps) / float(num_steps) <NEW_LINE> <DEDENT> def select_action(self, q_values): <NEW_LINE> <INDENT> action = super(LinearDecayGreedyEpsilonPolicy, self).select_action(q_values) <NEW_LINE> if self.num_steps > 0: <NEW_LINE> <INDENT> self.epsilon -= self.decay_rate <NEW_LINE> self.num_steps -= 1 <NEW_LINE> <DEDENT> return action | Policy with a parameter that decays linearly.
Like GreedyEpsilonPolicy but the epsilon decays from a start value
to an end value over k steps.
Parameters
----------
start_value: int, float
The initial value of the parameter
end_value: int, float
The value of the policy at the end of the decay.
num_steps: int
The number of steps over which to decay the value. | 62598fc43d592f4c4edbb16f |
class SFQClass(_BasicFilterHTBClass): <NEW_LINE> <INDENT> perturb = None <NEW_LINE> def __init__(self, perturb=10, *args, **kwargs): <NEW_LINE> <INDENT> self.perturb = perturb <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def _add_qdisc(self): <NEW_LINE> <INDENT> tools.qdisc_add(self._interface, parent=self.classid, handle=tools.get_child_qdiscid(self.classid), algorithm="sfq", perturb=self.perturb) | HTB class with a SFQ qdisc builtin | 62598fc4283ffb24f3cf3b3c |
class UserEditForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Identity <NEW_LINE> fields = ('first_name', 'last_name', 'email', ) | Edit user model form | 62598fc4f9cc0f698b1c542d |
class ActivityNotFound(Exception): <NEW_LINE> <INDENT> pass | Raised when the activity is not present in the aggregated Activity | 62598fc47cff6e4e811b5cdd |
class MSVCModule(Package, DownloadableModule): <NEW_LINE> <INDENT> type = 'msvc' <NEW_LINE> PHASE_CHECKOUT = DownloadableModule.PHASE_CHECKOUT <NEW_LINE> PHASE_FORCE_CHECKOUT = DownloadableModule.PHASE_FORCE_CHECKOUT <NEW_LINE> PHASE_BUILD = 'build' <NEW_LINE> PHASE_INSTALL = 'install' <NEW_LINE> def __init__(self, name, branch=None, solution='', msvcargs=''): <NEW_LINE> <INDENT> Package.__init__(self, name, branch=branch) <NEW_LINE> self.solution = solution <NEW_LINE> self.msvcargs = msvcargs <NEW_LINE> <DEDENT> def get_srcdir(self, buildscript): <NEW_LINE> <INDENT> return self.branch.srcdir <NEW_LINE> <DEDENT> def get_builddir(self, buildscript): <NEW_LINE> <INDENT> return self.get_srcdir(buildscript) <NEW_LINE> <DEDENT> def do_build(self, buildscript): <NEW_LINE> <INDENT> buildscript.set_action('Building', self) <NEW_LINE> srcdir = self.get_srcdir(buildscript) <NEW_LINE> msbuild = buildscript.config.msbuild <NEW_LINE> cmd = [ msbuild, self.solution, self.makeargs ] <NEW_LINE> buildscript.execute(cmd, cwd = srcdir) <NEW_LINE> <DEDENT> do_build.depends = [PHASE_CHECKOUT] <NEW_LINE> do_build.error_phases = [PHASE_FORCE_CHECKOUT] <NEW_LINE> def do_install(self, buildscript): <NEW_LINE> <INDENT> buildscript.set_action('Installing', self) <NEW_LINE> <DEDENT> do_install.depends = [PHASE_BUILD] <NEW_LINE> def xml_tag_and_attrs(self): <NEW_LINE> <INDENT> return 'msvc', [('id', 'name', None)] | Base type for modules that use MSBuild build system. | 62598fc4ec188e330fdf8b4c |
class ButtonsGuiMixin: <NEW_LINE> <INDENT> buttons = [] <NEW_LINE> def deactivate_buttons(self): <NEW_LINE> <INDENT> for b in self.buttons: <NEW_LINE> <INDENT> b['state'] = tkinter.DISABLED <NEW_LINE> <DEDENT> <DEDENT> def activate_buttons(self): <NEW_LINE> <INDENT> for b in self.buttons: <NEW_LINE> <INDENT> b['state'] = tkinter.NORMAL | The class shall add the Tkinter buttons to the self.buttons array | 62598fc4d486a94d0ba2c289 |
class TurnoverPartner(Report): <NEW_LINE> <INDENT> _name = "ekd.balances.party.turnovers" <NEW_LINE> def parse(self, report, objects, datas, localcontext={}): <NEW_LINE> <INDENT> tmp_objects = [] <NEW_LINE> tmp_account = [] <NEW_LINE> context = Transaction().context <NEW_LINE> user = self.pool.get('res.user').browse(Transaction().user) <NEW_LINE> localcontext['company'] = self.pool.get('company.company').browse(context.get('company', False)) <NEW_LINE> localcontext['period'] = self.pool.get('ekd.period').browse(context.get('current_period', False)) <NEW_LINE> localcontext['start_date'] = context.get('start_period', False) <NEW_LINE> localcontext['end_date'] = context.get('end_period', False) <NEW_LINE> localcontext['current_date'] = context.get('current_date', False) <NEW_LINE> localcontext['total_balance_dt'] = sum((x['balance_dt'] for x in objects)) <NEW_LINE> localcontext['total_balance_ct'] = sum((x['balance_ct'] for x in objects)) <NEW_LINE> localcontext['total_debit'] = sum((x['debit'] for x in objects)) <NEW_LINE> localcontext['total_credit'] = sum((x['credit'] for x in objects)) <NEW_LINE> localcontext['total_balance_dt_end'] = sum((x['balance_dt_end'] for x in objects)) <NEW_LINE> localcontext['total_balance_ct_end'] = sum((x['balance_ct_end'] for x in objects)) <NEW_LINE> for obj in objects: <NEW_LINE> <INDENT> if obj['account'] not in tmp_account: <NEW_LINE> <INDENT> tmp_account.append(obj['account']) <NEW_LINE> <DEDENT> <DEDENT> if len(tmp_account) == 1: <NEW_LINE> <INDENT> res = super(TurnoverPartner, self).parse(report, [{ 'total_dt': sum((x['balance_dt'] for x in objects)), 'total_ct': sum((x['balance_ct'] for x in objects)), 'total_debit': sum((x['debit'] for x in objects)), 'total_credit': sum((x['credit'] for x in objects)), 'total_dt_end': sum((x['balance_dt_end'] for x in objects)), 'total_ct_end': sum((x['balance_ct_end'] for x in objects)), 'acc_code': '10', 'acc_name':'sdfsdfsdf', 'lines':objects, },], datas, localcontext) <NEW_LINE> <DEDENT> 
else: <NEW_LINE> <INDENT> res = super(TurnoverPartner, self).parse(report, [{ 'total_dt': sum((x['balance_dt'] for x in objects)), 'total_ct': sum((x['balance_ct'] for x in objects)), 'total_debit': sum((x['debit'] for x in objects)), 'total_credit': sum((x['credit'] for x in objects)), 'total_dt_end': sum((x['balance_dt_end'] for x in objects)), 'total_ct_end': sum((x['balance_ct_end'] for x in objects)), 'acc_code': '10', 'acc_name':'sdfsdfsdf', 'lines':objects, },], datas, localcontext) <NEW_LINE> <DEDENT> return res | Turnover parties | 62598fc4f548e778e596b857 |
class AttentionWithContext(Layer): <NEW_LINE> <INDENT> def __init__(self, hidden_dim=300, W_regularizer=None, u_regularizer=None, b_regularizer=None, W_constraint=None, u_constraint=None, b_constraint=None, bias=True, **kwargs): <NEW_LINE> <INDENT> self.dim = hidden_dim <NEW_LINE> self.supports_masking = True <NEW_LINE> self.init = initializers.get('he_normal') <NEW_LINE> self.W_regularizer = regularizers.get(W_regularizer) <NEW_LINE> self.u_regularizer = regularizers.get(u_regularizer) <NEW_LINE> self.b_regularizer = regularizers.get(b_regularizer) <NEW_LINE> self.W_constraint = constraints.get(W_constraint) <NEW_LINE> self.u_constraint = constraints.get(u_constraint) <NEW_LINE> self.b_constraint = constraints.get(b_constraint) <NEW_LINE> self.bias = bias <NEW_LINE> super(AttentionWithContext, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> a = len(input_shape) <NEW_LINE> self.W = self.add_weight((input_shape[0][-1], self.dim,), initializer=self.init, name='{}_W'.format(self.name), regularizer=self.W_regularizer, constraint=self.W_constraint) <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> self.b = self.add_weight((len(input_shape),self.dim), initializer='zero', name='{}_b'.format(self.name), regularizer=self.b_regularizer, constraint=self.b_constraint) <NEW_LINE> <DEDENT> self.u = self.add_weight((self.dim,), initializer=self.init, name='{}_u'.format(self.name), regularizer=self.u_regularizer, constraint=self.u_constraint) <NEW_LINE> super(AttentionWithContext, self).build(input_shape) <NEW_LINE> <DEDENT> def compute_mask(self, input, input_mask=None): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def call(self, inp, mask=None): <NEW_LINE> <INDENT> x = K.stack(inp, axis=1) <NEW_LINE> uit = K.dot(x, self.W) <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> uit += self.b <NEW_LINE> <DEDENT> uit = K.sigmoid(uit) <NEW_LINE> ait = dot_product(uit, self.u) <NEW_LINE> a = K.softmax(ait) <NEW_LINE> a = K.expand_dims(a) 
<NEW_LINE> weighted_input = x * a <NEW_LINE> a = K.transpose(a) <NEW_LINE> result = K.sum(weighted_input, axis=1) <NEW_LINE> return [result, a] <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return [(input_shape[0][0], input_shape[0][-1]), (input_shape[0][0], 1, len(input_shape))] | Attention operation, with a context/query vector, for temporal data.
Supports Masking.
Follows the work of Yang et al. [https://www.cs.cmu.edu/~diyiy/docs/naacl16.pdf]
"Hierarchical Attention Networks for Document Classification"
by using a context vector to assist the attention
# Input shape
3D tensor with shape: `(samples, steps, features)`.
# Output shape
2D tensor with shape: `(samples, features)`.
How to use:
Just put it on top of an RNN Layer (GRU/LSTM/SimpleRNN) with return_sequences=True.
The dimensions are inferred based on the output shape of the RNN.
Note: The layer has been tested with Keras 2.0.6
Example:
save_model.add(LSTM(64, return_sequences=True))
save_model.add(AttentionWithContext())
# next add a Dense layer (for classification/regression) or whatever... | 62598fc4099cdd3c6367553e |
class SimpleStatus: <NEW_LINE> <INDENT> def __init__(self, *, done=False, success=False): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._lock = RLock() <NEW_LINE> self._cb = None <NEW_LINE> self.done = done <NEW_LINE> self.success = success <NEW_LINE> <DEDENT> def _finished(self, success=True, **kwargs): <NEW_LINE> <INDENT> if self.done: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> with self._lock: <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.done = True <NEW_LINE> if self._cb is not None: <NEW_LINE> <INDENT> self._cb() <NEW_LINE> self._cb = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def finished_cb(self): <NEW_LINE> <INDENT> return self._cb <NEW_LINE> <DEDENT> @finished_cb.setter <NEW_LINE> def finished_cb(self, cb): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if self._cb is not None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot change the call back") <NEW_LINE> <DEDENT> if self.done: <NEW_LINE> <INDENT> cb() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._cb = cb <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('{0}(done={1.done}, ' 'success={1.success})' ''.format(self.__class__.__name__, self) ) <NEW_LINE> <DEDENT> __repr__ = __str__ | This provides a single-slot callback for when the operation has finished.
It is "simple" because it does not support a timeout or a settling time. | 62598fc4be7bc26dc9251fb8 |
class Spider(object): <NEW_LINE> <INDENT> start_urls = [] <NEW_LINE> def start_requests(self): <NEW_LINE> <INDENT> for url in self.start_urls: <NEW_LINE> <INDENT> yield Request(url, callback="parse") <NEW_LINE> <DEDENT> <DEDENT> def parse(self, response): <NEW_LINE> <INDENT> raise Exception("Must overwrite parse func") | 框架提供的Spider爬虫原型类,用户可以通过继承 重写类属性和类方法 | 62598fc44f88993c371f0667 |
class Profile(object): <NEW_LINE> <INDENT> import lnt.testing.profile <NEW_LINE> def __init__(self, impl): <NEW_LINE> <INDENT> assert isinstance(impl, ProfileImpl) <NEW_LINE> self.impl = impl <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromFile(f): <NEW_LINE> <INDENT> for impl in lnt.testing.profile.IMPLEMENTATIONS.values(): <NEW_LINE> <INDENT> if impl.checkFile(f): <NEW_LINE> <INDENT> ret = impl.deserialize(open(f, 'rb')) <NEW_LINE> if ret: <NEW_LINE> <INDENT> return Profile(ret) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise RuntimeError('No profile implementations could read this file!') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromRendered(s): <NEW_LINE> <INDENT> s = base64.b64decode(s) <NEW_LINE> with tempfile.NamedTemporaryFile() as fd: <NEW_LINE> <INDENT> fd.write(s) <NEW_LINE> fd.flush() <NEW_LINE> fd.seek(0) <NEW_LINE> for impl in lnt.testing.profile.IMPLEMENTATIONS.values(): <NEW_LINE> <INDENT> if impl.checkFile(fd.name): <NEW_LINE> <INDENT> ret = impl.deserialize(fd) <NEW_LINE> if ret: <NEW_LINE> <INDENT> return Profile(ret) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> raise RuntimeError('No profile implementations could read this file!') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def saveFromRendered(s, filename=None, profileDir=None, prefix=''): <NEW_LINE> <INDENT> s = base64.b64decode(s) <NEW_LINE> if not filename: <NEW_LINE> <INDENT> assert profileDir is not None <NEW_LINE> if not os.path.exists(profileDir): <NEW_LINE> <INDENT> os.makedirs(profileDir) <NEW_LINE> <DEDENT> tf = tempfile.NamedTemporaryFile(prefix=prefix, suffix='.lntprof', dir=profileDir, delete=False) <NEW_LINE> tf.write(s) <NEW_LINE> return tf.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> open(filename, 'w').write(s) <NEW_LINE> return filename <NEW_LINE> <DEDENT> <DEDENT> def save(self, filename=None, profileDir=None, prefix=''): <NEW_LINE> <INDENT> if 
filename: <NEW_LINE> <INDENT> self.impl.serialize(filename) <NEW_LINE> return filename <NEW_LINE> <DEDENT> assert profileDir is not None <NEW_LINE> if not os.path.exists(profileDir): <NEW_LINE> <INDENT> os.makedirs(profileDir) <NEW_LINE> <DEDENT> tf = tempfile.NamedTemporaryFile(prefix=prefix, suffix='.lntprof', dir=profileDir, delete=False) <NEW_LINE> self.impl.serialize(tf.name) <NEW_LINE> return os.path.relpath(tf.name, profileDir) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return base64.b64encode(self.impl.serialize()) <NEW_LINE> <DEDENT> def upgrade(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> version = self.impl.getVersion() <NEW_LINE> new_version = version + 1 <NEW_LINE> if new_version not in lnt.testing.profile.IMPLEMENTATIONS: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> new_impl = lnt.testing.profile.IMPLEMENTATIONS[new_version] <NEW_LINE> self.impl = new_impl.upgrade(self.impl) <NEW_LINE> <DEDENT> <DEDENT> def getVersion(self): <NEW_LINE> <INDENT> return self.impl.getVersion() <NEW_LINE> <DEDENT> def getTopLevelCounters(self): <NEW_LINE> <INDENT> return self.impl.getTopLevelCounters() <NEW_LINE> <DEDENT> def getDisassemblyFormat(self): <NEW_LINE> <INDENT> return self.impl.getDisassemblyFormat() <NEW_LINE> <DEDENT> def getFunctions(self): <NEW_LINE> <INDENT> return self.impl.getFunctions() <NEW_LINE> <DEDENT> def getCodeForFunction(self, fname): <NEW_LINE> <INDENT> return self.impl.getCodeForFunction(fname) | Profile objects hold a performance profile.
The Profile class itself is a thin wrapper around a ProfileImpl
object, which is what actually holds, reads, writes and dispenses
the profile information. | 62598fc499fddb7c1ca62f49 |
class StrArray(object): <NEW_LINE> <INDENT> def __init__(self, l): <NEW_LINE> <INDENT> if not isinstance(l, list): <NEW_LINE> <INDENT> raise TypeError("Value must be a list") <NEW_LINE> <DEDENT> arr = ffi.new('git_strarray *') <NEW_LINE> strings = [None] * len(l) <NEW_LINE> for i in range(len(l)): <NEW_LINE> <INDENT> if not is_string(l[i]): <NEW_LINE> <INDENT> raise TypeError("Value must be a string") <NEW_LINE> <DEDENT> strings[i] = ffi.new('char []', to_bytes(l[i])) <NEW_LINE> <DEDENT> self._arr = ffi.new('char *[]', strings) <NEW_LINE> self._strings = strings <NEW_LINE> self.array = ffi.new('git_strarray *', [self._arr, len(strings)]) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self.array <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> pass | A git_strarray wrapper
Use this in order to get a git_strarray* to pass to libgit2 out of a
list of strings. This has a context manager, which you should use, e.g.
with StrArray(list_of_strings) as arr:
C.git_function_that_takes_strarray(arr) | 62598fc471ff763f4b5e7a36 |
class Append(Actor): <NEW_LINE> <INDENT> @manage() <NEW_LINE> def init(self, inside_base): <NEW_LINE> <INDENT> self.inside_base = bool(inside_base) <NEW_LINE> self.use('calvinsys.native.python-os-path', shorthand='path') <NEW_LINE> <DEDENT> def gen_path(self, base, append): <NEW_LINE> <INDENT> base = self['path'].abspath(base) <NEW_LINE> if self['path'].isabs(append): <NEW_LINE> <INDENT> path = base + append <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = self['path'].join(base, append) <NEW_LINE> <DEDENT> path = self['path'].abspath(path) <NEW_LINE> invalid_path = False <NEW_LINE> if self.inside_base: <NEW_LINE> <INDENT> if not path.startswith(base): <NEW_LINE> <INDENT> path = base <NEW_LINE> invalid_path = True <NEW_LINE> <DEDENT> <DEDENT> return (path, invalid_path) <NEW_LINE> <DEDENT> @condition(['base', 'append'], ['path', 'error']) <NEW_LINE> def path(self, base, append): <NEW_LINE> <INDENT> prod = self.gen_path(base, append) <NEW_LINE> return ActionResult(production=prod) <NEW_LINE> <DEDENT> action_priority = (path, ) <NEW_LINE> require = ['calvinsys.native.python-os-path'] <NEW_LINE> def test_set_inside_base(self): <NEW_LINE> <INDENT> self.inside_base = 1 <NEW_LINE> <DEDENT> def test_clear_inside_base(self): <NEW_LINE> <INDENT> self.inside_base = 0 <NEW_LINE> <DEDENT> test_args = [0] <NEW_LINE> test_set = [ {'in': {'base': ["./path"], 'append': ["relpath"]}, 'out': {'path': ["%s" % (test_helper_abspath(test_helper_join("./path", "relpath")),)], 'error': [0]} } ] <NEW_LINE> test_set += [ {'setup': [test_set_inside_base], 'in': {'base': ["./path"], 'append': ["relpath/../../../"]}, 'out': {'path': ["%s" % (test_helper_abspath("./path"),)], 'error': [1]} } ] | Append 'append' to 'base'.
If inside_base is true, generate an error status if
resulting path is not inside 'base' directory.
Inputs:
base : Base path
append : Relative path
Outputs:
path : Absolute path formed from 'base' + 'append', or 'base' on error
error : True if checking enabled and not inside 'base' (error), false otherwise | 62598fc497e22403b383b1c3 |
class NScript(): <NEW_LINE> <INDENT> def extract(): <NEW_LINE> <INDENT> file_all = os.listdir('input') <NEW_LINE> data = open_file_b('input/nscript.dat') <NEW_LINE> if 'decoded' not in file_all: <NEW_LINE> <INDENT> data = bytearray(data) <NEW_LINE> idx = 0 <NEW_LINE> while idx < len(data): <NEW_LINE> <INDENT> data[idx] ^= 0x84 <NEW_LINE> idx += 1 <NEW_LINE> <DEDENT> save_file_b('input/nscript.dat', data) <NEW_LINE> save_file('input/decoded', '') <NEW_LINE> print('file decoded') <NEW_LINE> <DEDENT> file_data = data.decode('cp932') <NEW_LINE> jp_all = file_data.splitlines() <NEW_LINE> ans = [] <NEW_LINE> for line in jp_all: <NEW_LINE> <INDENT> if has_jp(line) and line[0] not in '*; abcdefghijklmnopqrstuvwxyz': <NEW_LINE> <INDENT> ans.append(line) <NEW_LINE> <DEDENT> <DEDENT> save_file('intermediate_file/jp_all.txt', '\n'.join(ans)) <NEW_LINE> <DEDENT> def output(encrypt=True): <NEW_LINE> <INDENT> jp_chs = open_json('intermediate_file/jp_chs.json') <NEW_LINE> jp = open_file('input/nscript.dat', encoding='cp932').splitlines() <NEW_LINE> cnt = 0 <NEW_LINE> failed = [] <NEW_LINE> for idx, line in enumerate(jp): <NEW_LINE> <INDENT> if has_jp(line) and line[0] not in '*; abcdefghijklmnopqrstuvwxyz': <NEW_LINE> <INDENT> if line in jp_chs and jp_chs[line]: <NEW_LINE> <INDENT> jp[idx] = strB2Q(jp_chs[line], '\\') <NEW_LINE> cnt += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> failed.append(line) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> data = bytearray('\n'.join(jp).encode('gbk', errors='ignore')) <NEW_LINE> if encrypt: <NEW_LINE> <INDENT> idx = 0 <NEW_LINE> while idx < len(data): <NEW_LINE> <INDENT> data[idx] ^= 0x84 <NEW_LINE> idx += 1 <NEW_LINE> <DEDENT> <DEDENT> save_file_b('output/nscript.dat', data) <NEW_LINE> save_file('intermediate_file/failed.txt', '\n'.join(failed)) <NEW_LINE> print('替换:', cnt) <NEW_LINE> print('失败:', len(failed)) | textouta
createfonta push 0x80 -> push 0x86 | 62598fc4d486a94d0ba2c28b |
class Location: <NEW_LINE> <INDENT> def __init__(self, x, y, z): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.z = z <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> assert 0 <= item <= 2 <NEW_LINE> if item == 0: <NEW_LINE> <INDENT> return self.z <NEW_LINE> <DEDENT> if item == 1: <NEW_LINE> <INDENT> return self.y <NEW_LINE> <DEDENT> return self.x <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{{x={x:d}, y={y:d}, z={z:d}}}".format( x=self.x, y=self.y, z=self.z) | A coordinate location | 62598fc423849d37ff85136d |
class HomeKitHumidifier(HomeKitEntity, HumidifierEntity): <NEW_LINE> <INDENT> _attr_device_class = HumidifierDeviceClass.HUMIDIFIER <NEW_LINE> def get_characteristic_types(self) -> list[str]: <NEW_LINE> <INDENT> return [ CharacteristicsTypes.ACTIVE, CharacteristicsTypes.CURRENT_HUMIDIFIER_DEHUMIDIFIER_STATE, CharacteristicsTypes.TARGET_HUMIDIFIER_DEHUMIDIFIER_STATE, CharacteristicsTypes.RELATIVE_HUMIDITY_HUMIDIFIER_THRESHOLD, ] <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self) -> int: <NEW_LINE> <INDENT> return SUPPORT_FLAGS | SUPPORT_MODES <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self) -> bool: <NEW_LINE> <INDENT> return self.service.value(CharacteristicsTypes.ACTIVE) <NEW_LINE> <DEDENT> async def async_turn_on(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: True}) <NEW_LINE> <DEDENT> async def async_turn_off(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: False}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_humidity(self) -> int | None: <NEW_LINE> <INDENT> return self.service.value( CharacteristicsTypes.RELATIVE_HUMIDITY_HUMIDIFIER_THRESHOLD ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def mode(self) -> str | None: <NEW_LINE> <INDENT> mode = self.service.value( CharacteristicsTypes.CURRENT_HUMIDIFIER_DEHUMIDIFIER_STATE ) <NEW_LINE> return MODE_AUTO if mode == 1 else MODE_NORMAL <NEW_LINE> <DEDENT> @property <NEW_LINE> def available_modes(self) -> list[str] | None: <NEW_LINE> <INDENT> available_modes = [ MODE_NORMAL, MODE_AUTO, ] <NEW_LINE> return available_modes <NEW_LINE> <DEDENT> async def async_set_humidity(self, humidity: int) -> None: <NEW_LINE> <INDENT> await self.async_put_characteristics( {CharacteristicsTypes.RELATIVE_HUMIDITY_HUMIDIFIER_THRESHOLD: humidity} ) <NEW_LINE> <DEDENT> async def async_set_mode(self, mode: str) -> None: <NEW_LINE> <INDENT> if mode == MODE_AUTO: <NEW_LINE> 
<INDENT> await self.async_put_characteristics( { CharacteristicsTypes.TARGET_HUMIDIFIER_DEHUMIDIFIER_STATE: 0, CharacteristicsTypes.ACTIVE: True, } ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await self.async_put_characteristics( { CharacteristicsTypes.TARGET_HUMIDIFIER_DEHUMIDIFIER_STATE: 1, CharacteristicsTypes.ACTIVE: True, } ) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def min_humidity(self) -> int: <NEW_LINE> <INDENT> return int( self.service[ CharacteristicsTypes.RELATIVE_HUMIDITY_HUMIDIFIER_THRESHOLD ].minValue or DEFAULT_MIN_HUMIDITY ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_humidity(self) -> int: <NEW_LINE> <INDENT> return int( self.service[ CharacteristicsTypes.RELATIVE_HUMIDITY_HUMIDIFIER_THRESHOLD ].maxValue or DEFAULT_MAX_HUMIDITY ) | Representation of a HomeKit Controller Humidifier. | 62598fc450812a4eaa620d42 |
class AgeFitnessOrganism(Organism): <NEW_LINE> <INDENT> @comparable <NEW_LINE> def __cmp__(self, other): <NEW_LINE> <INDENT> return cmp(self.fitness(), other.fitness()) <NEW_LINE> <DEDENT> def fitness(self): <NEW_LINE> <INDENT> return self.age <NEW_LINE> <DEDENT> phenotypes = {Challenge: fitness} | An organism with no genotype whose fitness is the same as its age | 62598fc44527f215b58ea18a |
class HikesView(CategoryRegion, ListView): <NEW_LINE> <INDENT> model = Hike <NEW_LINE> queryset = Hike.objects.filter(draft=False) | List of hikes | 62598fc4adb09d7d5dc0a836 |
class ReLU(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.params = [] <NEW_LINE> <DEDENT> def forward(self, X): <NEW_LINE> <INDENT> self.X = X <NEW_LINE> return np.maximum(X, 0) <NEW_LINE> <DEDENT> def backward(self, dout): <NEW_LINE> <INDENT> dX = dout.copy() <NEW_LINE> dX[self.X <= 0] = 0 <NEW_LINE> return dX, [] | Implements activation function rectified linear unit (ReLU)
ReLU activation function is defined as the positive part of
its argument. Todo: insert arxiv paper reference | 62598fc4fff4ab517ebcdaa1 |
class HookResult(object): <NEW_LINE> <INDENT> def __init__(self, hook, project, commit, error, files=(), fixup_func=None): <NEW_LINE> <INDENT> self.hook = hook <NEW_LINE> self.project = project <NEW_LINE> self.commit = commit <NEW_LINE> self.error = error <NEW_LINE> self.files = files <NEW_LINE> self.fixup_func = fixup_func <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return bool(self.error) <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return self.__bool__() | A single hook result. | 62598fc4656771135c48992a |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.