code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Linear3d(Camera): <NEW_LINE> <INDENT> def __init__(self, data=None): <NEW_LINE> <INDENT> Camera.__init__(self, data) <NEW_LINE> self.model = 'Linear3d' <NEW_LINE> <DEDENT> def set_calibration(self, calib): <NEW_LINE> <INDENT> assert calib.shape == (3,4) <NEW_LINE> self.calib = calib <NEW_LINE> <DEDENT> def read_camera(self, filename): <NEW_LINE> <INDENT> lines = open(filename).readlines() <NEW_LINE> nlines = len(lines) <NEW_LINE> n = 0 <NEW_LINE> while n < nlines: <NEW_LINE> <INDENT> line = lines[n] <NEW_LINE> if line.lower().find('calibration') == 0: <NEW_LINE> <INDENT> if line.lower().find('linear3d') > 0: <NEW_LINE> <INDENT> calib = numpy.array([ [float(x) for x in lines[n+1].split()], [float(x) for x in lines[n+2].split()], [float(x) for x in lines[n+3].split()] ]) <NEW_LINE> self.set_calibration(calib) <NEW_LINE> n += 3 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.set_keyword(line) <NEW_LINE> <DEDENT> n += 1 <NEW_LINE> <DEDENT> self.shape = self.pixels <NEW_LINE> <DEDENT> def save_camera(self, filename): <NEW_LINE> <INDENT> f = open(filename,'w') <NEW_LINE> f.write('# par2vel camera file\n') <NEW_LINE> f.write("model = '{:}'\n".format(self.model)) <NEW_LINE> self.save_keywords(f) <NEW_LINE> print('Calibration Linear3d', file=f) <NEW_LINE> for row in self.calib: <NEW_LINE> <INDENT> for number in row: <NEW_LINE> <INDENT> print(repr(number), end=' ', file=f) <NEW_LINE> <DEDENT> print(file=f) <NEW_LINE> <DEDENT> f.close() <NEW_LINE> <DEDENT> def X2x(self, X): <NEW_LINE> <INDENT> from numpy import vstack, dot, ones <NEW_LINE> ni, nj = X.shape <NEW_LINE> Xone = vstack((X,ones(nj))) <NEW_LINE> k = dot(self.calib,Xone) <NEW_LINE> x = k[0:2,:] / k[2,:] <NEW_LINE> return x <NEW_LINE> <DEDENT> def calibrate(self, X, x, print_residual=False): <NEW_LINE> <INDENT> from scipy.optimize import minimize <NEW_LINE> from numpy import array <NEW_LINE> calib = array([[1.0, 0, 0, 0], [ 0, 1.0, 0, 0], [ 0, 0, 0, 1]]) <NEW_LINE> a0 = calib.flatten() <NEW_LINE> def 
func(a): <NEW_LINE> <INDENT> calib = a.reshape((3,4)) <NEW_LINE> self.set_calibration(calib) <NEW_LINE> return ((self.X2x(X) - x)**2).sum() <NEW_LINE> <DEDENT> res = minimize(func, a0) <NEW_LINE> calib = res.x.reshape((3,4)) <NEW_LINE> self.set_calibration(calib)
Camera model using Direct Linear Transform (DFT)
62598fc8f548e778e596b8e4
class salesContract(View): <NEW_LINE> <INDENT> template_name = 'sales_contract.html' <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> return render(request, self.template_name)
docstring for 销售合同管理.
62598fc83346ee7daa3377ec
class PyTerminado(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://pypi.python.org/pypi/terminado" <NEW_LINE> url = "https://pypi.io/packages/source/t/terminado/terminado-0.6.tar.gz" <NEW_LINE> version('0.6', '5b6c65da27fe1ed07a9f80f0588cdaba') <NEW_LINE> depends_on('py-tornado@4:', type=('build', 'run')) <NEW_LINE> depends_on('py-ptyprocess', type=('build', 'run'))
Terminals served to term.js using Tornado websockets
62598fc8be7bc26dc9251fff
class TestMeasurementPeriod(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testMeasurementPeriod(self): <NEW_LINE> <INDENT> model = swagger_client.models.measurement_period.MeasurementPeriod()
MeasurementPeriod unit test stubs
62598fc863b5f9789fe854bd
class TimelapseSettings(models.Model): <NEW_LINE> <INDENT> camera = models.ForeignKey( 'Camera', related_name='+', blank=True, null=True, default=None ) <NEW_LINE> name = models.CharField( max_length=30, blank=False ) <NEW_LINE> enabled = models.BooleanField( default=False ) <NEW_LINE> interval = models.IntegerField( default=0, validators=[ MinValueValidator(0), MaxValueValidator(1000) ] ) <NEW_LINE> duration = models.IntegerField( default=0, validators=[ MinValueValidator(0), MaxValueValidator(1000) ] ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s' % (self.name) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> validate_only_one_instance(self) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Camera, self).save(*args, **kwargs) <NEW_LINE> self.full_clean() <NEW_LINE> if not self.name: <NEW_LINE> <INDENT> self.name = 'Timelapse ' + str(self.id) <NEW_LINE> self.save()
Model for Timelapse Settings. - camera (FK) - name - enabled - interval - duration
62598fc8a05bb46b3848abb5
class Solution: <NEW_LINE> <INDENT> def getAnswer(self, A, B): <NEW_LINE> <INDENT> union=len(set(A).union(set(B))) <NEW_LINE> intersection=len(set(A).intersection(set(B))) <NEW_LINE> difference=len(set(A).difference(set(B))) <NEW_LINE> return [union,intersection,difference]
@param A: The set A @param B: The set B @return: Return the size of three sets
62598fc85fc7496912d4841e
class WelcomePage(MainHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> user_id, user = self.get_user() <NEW_LINE> if not user: <NEW_LINE> <INDENT> self.redirect('/blog/login') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template = 'blog-welcome-page.html' <NEW_LINE> params = {'user': user} <NEW_LINE> self.render(template, **params)
Landing page after successful signup/login
62598fc84c3428357761a607
class itkImageToVTKImageFilterIUS2(ITKCommonBasePython.itkProcessObject): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def GetOutput(self): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_GetOutput(self) <NEW_LINE> <DEDENT> def SetInput(self, *args): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_SetInput(self, *args) <NEW_LINE> <DEDENT> def GetImporter(self): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_GetImporter(self) <NEW_LINE> <DEDENT> def GetExporter(self): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_GetExporter(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkImageToVTKImageFilterPython.delete_itkImageToVTKImageFilterIUS2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkImageToVTKImageFilterPython.itkImageToVTKImageFilterIUS2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkImageToVTKImageFilterIUS2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkImageToVTKImageFilterIUS2 class
62598fc8656771135c4899ba
class IPFS_API: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ipfslist = ['Addresses', 'ID', 'AgentVersion', "ProtocolVersion", "PublicKey" ] <NEW_LINE> self.ipfsapi_ip = '127.0.0.1' <NEW_LINE> self.ipfsapi_port = 5001 <NEW_LINE> self.ipfsapi_port2 = 5002 <NEW_LINE> self.debug = False <NEW_LINE> self._api_connection = self.__initialize_ipfsapi_connection() <NEW_LINE> self._api_id_info = self.__api_id_info <NEW_LINE> self.reader = self._ipfs_reader <NEW_LINE> self.writer = self._ipfs_writer <NEW_LINE> self.bitswap = self._ipfs_bitswap_stat <NEW_LINE> self.ipfs_client = self._ipfs_client <NEW_LINE> <DEDENT> def __initialize_ipfsapi_connection(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.api_connection = ipfsapi.connect(self.ipfsapi_ip, self.ipfsapi_port) <NEW_LINE> <DEDENT> except ConnectionRefusedError: <NEW_LINE> <INDENT> self.api_connection = ipfsapi.connect(self.ipfsapi_ip, self.ipfsapi_port2) <NEW_LINE> <DEDENT> return self.api_connection <NEW_LINE> <DEDENT> def __api_id_info(self): <NEW_LINE> <INDENT> self.apiid = self.api_connection.id() <NEW_LINE> self.ipfs_addresses = self.apiid[self.ipfslist[0]] <NEW_LINE> if self.debug != False: <NEW_LINE> <INDENT> for i in range(1, len(self.ipfslist)): <NEW_LINE> <INDENT> print(self.ipfslist[i],'\n' + self.apiid[self.ipfslist[i]] + '\n') <NEW_LINE> <DEDENT> print(self.ipfslist[0]) <NEW_LINE> for i in range(0, len(self.ipfs_addresses)): <NEW_LINE> <INDENT> print(self.ipfs_addresses[i]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _ipfs_client(self): <NEW_LINE> <INDENT> return self._api_connection <NEW_LINE> <DEDENT> def _ipfs_reader(self, _filehash): <NEW_LINE> <INDENT> return self._api_connection.cat(_filehash) <NEW_LINE> <DEDENT> def _ipfs_writer(self, _file): <NEW_LINE> <INDENT> return self._api_connection.add(_file) <NEW_LINE> <DEDENT> def _ipfs_bitswap_stat(self): <NEW_LINE> <INDENT> return self._api_connection.bitswap_stat
USE: # first initialize the connection ipfsnode = Ipfs_API()
62598fc8adb09d7d5dc0a8c5
class DeleteDocxTableRowRangeResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'successful': 'bool', 'edited_document_url': 'str' } <NEW_LINE> attribute_map = { 'successful': 'Successful', 'edited_document_url': 'EditedDocumentURL' } <NEW_LINE> def __init__(self, successful=None, edited_document_url=None): <NEW_LINE> <INDENT> self._successful = None <NEW_LINE> self._edited_document_url = None <NEW_LINE> self.discriminator = None <NEW_LINE> if successful is not None: <NEW_LINE> <INDENT> self.successful = successful <NEW_LINE> <DEDENT> if edited_document_url is not None: <NEW_LINE> <INDENT> self.edited_document_url = edited_document_url <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def successful(self): <NEW_LINE> <INDENT> return self._successful <NEW_LINE> <DEDENT> @successful.setter <NEW_LINE> def successful(self, successful): <NEW_LINE> <INDENT> self._successful = successful <NEW_LINE> <DEDENT> @property <NEW_LINE> def edited_document_url(self): <NEW_LINE> <INDENT> return self._edited_document_url <NEW_LINE> <DEDENT> @edited_document_url.setter <NEW_LINE> def edited_document_url(self, edited_document_url): <NEW_LINE> <INDENT> self._edited_document_url = edited_document_url <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(DeleteDocxTableRowRangeResponse, dict): <NEW_LINE> <INDENT> 
for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DeleteDocxTableRowRangeResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fc863b5f9789fe854bf
class FilterAccessLogger(AccessLogger): <NEW_LINE> <INDENT> def log(self, request, response, time): <NEW_LINE> <INDENT> if self.logger.level != logging.DEBUG and response.status == 200 and request.path in ['/health','/metrics']: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super().log(request, response, time)
/health and /metrics filter Hidding those requests if we have a 200 OK when we are not in DEBUG
62598fc87b180e01f3e491f5
class BaseFeaturedFeedTriggerView(BaseTriggerView): <NEW_LINE> <INDENT> _base_url = 'https://{0.wiki}/w/api.php?action=featuredfeed&feed={0.feed}' <NEW_LINE> def get_feed(self): <NEW_LINE> <INDENT> url = self._base_url.format(self) <NEW_LINE> feed = cache.get(url) <NEW_LINE> if not feed: <NEW_LINE> <INDENT> feed = feedparser.parse(urllib2.urlopen(url)) <NEW_LINE> cache.set(url, feed, timeout=CACHE_EXPIRATION) <NEW_LINE> <DEDENT> return feed <NEW_LINE> <DEDENT> def parse_entry(self, entry): <NEW_LINE> <INDENT> meta_id = url_to_uuid5(entry.id.replace('http:', 'https:')) <NEW_LINE> date = entry.published_parsed <NEW_LINE> created_at = utc_to_iso8601(date) <NEW_LINE> ts = utc_to_epoch(date) <NEW_LINE> return {'created_at': created_at, 'entry_id': meta_id, 'url': entry.id, 'meta': {'id': meta_id, 'timestamp': ts}} <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> feed = self.get_feed() <NEW_LINE> feed.entries.sort(key=operator.attrgetter('published_parsed'), reverse=True) <NEW_LINE> return map(self.parse_entry, feed.entries)
Generic view for IFTT Triggers based on FeaturedFeeds.
62598fc8ec188e330fdf8be0
class Act(object): <NEW_LINE> <INDENT> def __init__(self, id, name, proctime, start, end, is_fixed, is_opt): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> self._name = name <NEW_LINE> self._proctime = proctime <NEW_LINE> self._start = start[:-3] <NEW_LINE> self._end = end[:-3] <NEW_LINE> self._fixed = is_fixed <NEW_LINE> self._optimized = is_opt <NEW_LINE> <DEDENT> def act_id(self): return self._id <NEW_LINE> def start(self): return self._start <NEW_LINE> def end(self): return self._end <NEW_LINE> def name(self): return self._name
hold one activity
62598fc850812a4eaa620d8a
class SqlRelation(nodes.ExpressionNode): <NEW_LINE> <INDENT> __slots__ = ('incarnation', 'sqlCode') <NEW_LINE> def __init__(self, incarnation, sqlCode): <NEW_LINE> <INDENT> super(SqlRelation, self).__init__() <NEW_LINE> self.incarnation = incarnation <NEW_LINE> self.sqlCode = sqlCode <NEW_LINE> <DEDENT> def attributesRepr(self): <NEW_LINE> <INDENT> return '%s, %s' % (repr(self.sqlCode), repr(self.incarnation)) <NEW_LINE> <DEDENT> def prettyPrintAttributes(self, stream, indentLevel): <NEW_LINE> <INDENT> stream.write(' %s %s' % (self.sqlCode, self.incarnation))
An expression node corresponding to an SQL expression producing a relation, with incarnation.
62598fc871ff763f4b5e7acb
class ComfoConnectFan(FanEntity): <NEW_LINE> <INDENT> current_speed = None <NEW_LINE> def __init__(self, ccb: ComfoConnectBridge) -> None: <NEW_LINE> <INDENT> self._ccb = ccb <NEW_LINE> <DEDENT> async def async_added_to_hass(self) -> None: <NEW_LINE> <INDENT> _LOGGER.debug("Registering for fan speed") <NEW_LINE> self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_COMFOCONNECT_UPDATE_RECEIVED.format(SENSOR_FAN_SPEED_MODE), self._handle_update, ) ) <NEW_LINE> await self.hass.async_add_executor_job( self._ccb.comfoconnect.register_sensor, SENSOR_FAN_SPEED_MODE ) <NEW_LINE> <DEDENT> def _handle_update(self, value): <NEW_LINE> <INDENT> _LOGGER.debug( "Handle update for fan speed (%d): %s", SENSOR_FAN_SPEED_MODE, value ) <NEW_LINE> self.current_speed = value <NEW_LINE> self.schedule_update_ha_state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._ccb.unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._ccb.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return "mdi:air-conditioner" <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self) -> int: <NEW_LINE> <INDENT> return SUPPORT_SET_SPEED <NEW_LINE> <DEDENT> @property <NEW_LINE> def percentage(self) -> int | None: <NEW_LINE> <INDENT> if self.current_speed is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return ranged_value_to_percentage(SPEED_RANGE, self.current_speed) <NEW_LINE> <DEDENT> @property <NEW_LINE> def speed_count(self) -> int: <NEW_LINE> <INDENT> return int_states_in_range(SPEED_RANGE) <NEW_LINE> <DEDENT> def turn_on( self, speed: str | None = None, percentage: int | None = None, preset_mode: str | None = None, **kwargs, ) -> None: <NEW_LINE> <INDENT> if percentage is None: <NEW_LINE> <INDENT> self.set_percentage(1) <NEW_LINE> <DEDENT> 
else: <NEW_LINE> <INDENT> self.set_percentage(percentage) <NEW_LINE> <DEDENT> <DEDENT> def turn_off(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> self.set_percentage(0) <NEW_LINE> <DEDENT> def set_percentage(self, percentage: int) -> None: <NEW_LINE> <INDENT> _LOGGER.debug("Changing fan speed percentage to %s", percentage) <NEW_LINE> if percentage == 0: <NEW_LINE> <INDENT> cmd = CMD_FAN_MODE_AWAY <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> speed = math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage)) <NEW_LINE> cmd = CMD_MAPPING[speed] <NEW_LINE> <DEDENT> self._ccb.comfoconnect.cmd_rmi_request(cmd)
Representation of the ComfoConnect fan platform.
62598fc8adb09d7d5dc0a8c7
class _WeightedMeanLossMetric(keras_metrics.Metric): <NEW_LINE> <INDENT> def __init__(self, name='loss', dtype=tf.float32): <NEW_LINE> <INDENT> super(_WeightedMeanLossMetric, self).__init__(name, dtype) <NEW_LINE> self._total_loss = self.add_weight('total_loss', initializer='zeros') <NEW_LINE> self._total_weight = self.add_weight( 'total_weight', initializer='zeros') <NEW_LINE> self._loss_fn = loss_func <NEW_LINE> <DEDENT> def update_state(self, y_true, y_pred, sample_weight=None): <NEW_LINE> <INDENT> y_true = tf.cast(y_true, self._dtype) <NEW_LINE> y_pred = tf.cast(y_pred, self._dtype) <NEW_LINE> batch_size = tf.cast(tf.shape(y_pred)[0], self._dtype) <NEW_LINE> batch_total_loss = self._loss_fn(y_true, y_pred) * batch_size <NEW_LINE> op = self._total_loss.assign_add(batch_total_loss) <NEW_LINE> with tf.control_dependencies([op]): <NEW_LINE> <INDENT> return self._total_weight.assign_add(batch_size) <NEW_LINE> <DEDENT> <DEDENT> def result(self): <NEW_LINE> <INDENT> return tf.div_no_nan(self._total_loss, self._total_weight)
A `tf.keras.metrics.Metric` wrapper for the loss function.
62598fc89f28863672818a22
@attr.s <NEW_LINE> class Transform(Operation): <NEW_LINE> <INDENT> def apply_to(self, group): <NEW_LINE> <INDENT> return [ group[i] for i in self.a ]
Not a real instruction, but maybe useful for optimizing? a is expected to be a list(length=len(group)) with transformation positions ex: a = [4, 1, 2, 0, 3]
62598fc83346ee7daa3377ee
class RedisSessionHandler(SessionHandler): <NEW_LINE> <INDENT> def __init__(self, engine): <NEW_LINE> <INDENT> SessionHandler.__init__(self, engine) <NEW_LINE> self.data_source = None <NEW_LINE> <DEDENT> def configure(self): <NEW_LINE> <INDENT> self.life_time = firenado.conf.session['life_time'] <NEW_LINE> self.data_source = ( self.engine.get_session_aware_instance().get_data_source( firenado.conf.session['redis']['data']['source'] ) ) <NEW_LINE> <DEDENT> def create_session(self, session_id, data): <NEW_LINE> <INDENT> self.write_stored_session(session_id, data) <NEW_LINE> <DEDENT> def read_stored_session(self, session_id): <NEW_LINE> <INDENT> key = self.__get_key(session_id) <NEW_LINE> self.data_source.get_connection().expire(key, self.life_time) <NEW_LINE> return self.data_source.get_connection().get(key) <NEW_LINE> <DEDENT> def write_stored_session(self, session_id, data): <NEW_LINE> <INDENT> key = self.__get_key(session_id) <NEW_LINE> self.data_source.get_connection().set(key, data) <NEW_LINE> self.data_source.get_connection().expire(key, self.life_time) <NEW_LINE> <DEDENT> def destroy_stored_session(self, session_id): <NEW_LINE> <INDENT> logger.debug("Destroying session %s." % session_id) <NEW_LINE> key = self.__get_key(session_id) <NEW_LINE> self.data_source.get_connection().delete(key) <NEW_LINE> logger.debug("Session %s destroyed." % session_id) <NEW_LINE> <DEDENT> def purge_expired_sessions(self): <NEW_LINE> <INDENT> logger.debug("Redis handler looking for sessions without ttl.") <NEW_LINE> self.engine.session_callback.stop() <NEW_LINE> logging.debug("Session periodic callback stopped by the redis " "handler.") <NEW_LINE> keys = self.data_source.get_connection().keys(self.__get_key("*")) <NEW_LINE> purge_count = 0 <NEW_LINE> purge_hiccup = False <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> ttl = self.data_source.get_connection().ttl(key) <NEW_LINE> if ttl == -1: <NEW_LINE> <INDENT> logger.warning( "Session %s without ttl. Setting expiration now." 
% key ) <NEW_LINE> self.data_source.get_connection().expire(key, self.life_time) <NEW_LINE> purge_count += 1 <NEW_LINE> if purge_count == firenado.conf.session['purge_limit']: <NEW_LINE> <INDENT> purge_hiccup = True <NEW_LINE> logger.warning( "Set ttl to 500 sessions. Exiting the call and waiting" " for purge hiccup." ) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if purge_hiccup: <NEW_LINE> <INDENT> self.engine.set_purge_hiccup() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.engine.set_purge_normal() <NEW_LINE> <DEDENT> self.engine.session_callback.start() <NEW_LINE> logging.debug("Session periodic callback resumed by the redis " "handler.") <NEW_LINE> <DEDENT> def is_session_stored(self, session_id): <NEW_LINE> <INDENT> key = self.__get_key(session_id) <NEW_LINE> return self.data_source.get_connection().get(key) is not None <NEW_LINE> <DEDENT> def __get_key(self, session_id): <NEW_LINE> <INDENT> if firenado.conf.app['id'] is not None: <NEW_LINE> <INDENT> return '%s:%s:%s' % ( firenado.conf.session['prefix'], firenado.conf.app['id'], session_id ) <NEW_LINE> <DEDENT> return '%s:%s' % ( firenado.conf.session['prefix'], session_id )
Session handler that deals with file data stored in a redis database.
62598fc87b180e01f3e491f6
class TopicCategoryResource(TypeFilteredResource): <NEW_LINE> <INDENT> def dehydrate_count(self, bundle): <NEW_LINE> <INDENT> resources = bundle.obj.resourcebase_set.all() <NEW_LINE> if settings.RESOURCE_PUBLISHING: <NEW_LINE> <INDENT> resources = resources.filter(is_published=True) <NEW_LINE> <DEDENT> if self.type_filter: <NEW_LINE> <INDENT> resources = resources.instance_of(self.type_filter) <NEW_LINE> <DEDENT> if self.title_filter: <NEW_LINE> <INDENT> resources = resources.filter(title__icontains=self.title_filter) <NEW_LINE> <DEDENT> if not settings.SKIP_PERMS_FILTER: <NEW_LINE> <INDENT> permitted = get_objects_for_user( bundle.request.user, 'base.view_resourcebase').values_list( 'id', flat=True) <NEW_LINE> resources = resources.filter(id__in=permitted) <NEW_LINE> <DEDENT> return resources.count() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> queryset = TopicCategory.objects.all() <NEW_LINE> resource_name = 'categories' <NEW_LINE> allowed_methods = ['get'] <NEW_LINE> filtering = { 'identifier': ALL, }
Category api
62598fc8cc40096d6161a37e
class PyMemprof(PythonPackage): <NEW_LINE> <INDENT> homepage = "http://jmdana.github.io/memprof/" <NEW_LINE> pypi = "memprof/memprof-0.3.6.tar.gz" <NEW_LINE> version('0.3.6', sha256='a8376ce476bf82a5eb465d1a30b8ffc86cc55b0b6de7aa4cdeccb4c99586d967') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-cython', type='build') <NEW_LINE> depends_on('py-argparse', when='^python@:2.6', type=('build', 'run')) <NEW_LINE> depends_on('py-matplotlib', type=('build', 'run'))
memprof logs and plots the memory usage of all the variables during the execution of the decorated methods.
62598fc8956e5f7376df5824
class TestAlerts(object): <NEW_LINE> <INDENT> def test_not_implemented(self, api_client): <NEW_LINE> <INDENT> runner = CliRunner() <NEW_LINE> expected_output = "Error: 'alerts' subcommand is not implemented yet.\n" <NEW_LINE> api_client.not_implemented.side_effect = RequestFailure(501) <NEW_LINE> result = runner.invoke(subcommand.alerts) <NEW_LINE> api_client.not_implemented.assert_called_with("alerts") <NEW_LINE> assert result.exit_code == 1 <NEW_LINE> assert result.output == expected_output
Alerts subcommand test cases.
62598fc8bf627c535bcb17f6
@ddt.ddt <NEW_LINE> class Message_detail(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.client = Client(url=self.url, method=Method.POST, type=Type.URL_ENCODE) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.client.result() <NEW_LINE> <DEDENT> def testcase01(self): <NEW_LINE> <INDENT> client = self.client <NEW_LINE> client.set_header('token', token) <NEW_LINE> client.send() <NEW_LINE> client.status_code_is_200() <NEW_LINE> client.res_times_less_than(200) <NEW_LINE> client.incloud('ok', client.json.get('message')) <NEW_LINE> <DEDENT> def testcase02(self): <NEW_LINE> <INDENT> client = self.client <NEW_LINE> client.set_header('token', token1) <NEW_LINE> client.send() <NEW_LINE> client.status_code_is_200() <NEW_LINE> client.res_times_less_than(200) <NEW_LINE> client.incloud('无效', client.json.get('message'))
冒烟-02-消息列表
62598fc897e22403b383b252
class SameValuesBank(BaseBank): <NEW_LINE> <INDENT> def setup(self, value, purities): <NEW_LINE> <INDENT> self.values = value * np.ones_like(purities) <NEW_LINE> self.weights = self.values / np.array(purities)
The values of all the pieces in the bank are equal (egalitarian bank). But they might have different purities. Weights adapt accordingly.
62598fc83617ad0b5ee06495
class UnitTetrahedronMesh(Mesh): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "unittetra" <NEW_LINE> coords = [[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]] <NEW_LINE> cells = [[1, 0, 3, 2]] <NEW_LINE> plex = dmplex._from_cell_list(3, cells, coords) <NEW_LINE> super(UnitTetrahedronMesh, self).__init__(self.name, plex=plex)
Class that represents a tetrahedron mesh that is composed of one element.
62598fc87cff6e4e811b5d75
class Parser(argparse.ArgumentParser): <NEW_LINE> <INDENT> def error(self, message): <NEW_LINE> <INDENT> sys.stderr.write('error: %s\n' % message) <NEW_LINE> self.print_help() <NEW_LINE> sys.exit(2)
override the default behavior of the error method of argument parser
62598fc84a966d76dd5ef224
class Arc: <NEW_LINE> <INDENT> def __init__( self, *, routes: Optional[Sequence[Route]] = None, middleware: Optional[Sequence[tuple[Type[T], dict]]] = None, ): <NEW_LINE> <INDENT> self.router = Router(self, routes) <NEW_LINE> self.middleware = ( self.router ) <NEW_LINE> if middleware is not None: <NEW_LINE> <INDENT> for cls, args in reversed(middleware): <NEW_LINE> <INDENT> self.middleware = cls(**args) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def __call__( self, scope: dict, receive: CoroutineFunction, send: CoroutineFunction ): <NEW_LINE> <INDENT> await self.router(scope, receive, send) <NEW_LINE> <DEDENT> def route(self, path: str, methods: Optional[Sequence[str]]) -> DCallable: <NEW_LINE> <INDENT> def wrapper(handler: Callable): <NEW_LINE> <INDENT> self.router.register(path, handler, methods) <NEW_LINE> return handler <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> uvicorn.run(self, host="127.0.0.1", port=5000)
The main ASGI application An ASGI application. Args: routes: A sequence of routes that the Arc application will use. middleware: A sequence of objects that are used as the middleware for the ASGI app. Attributes: router: The router for the ASGI app. middleware: The middleware for the ASGI app.
62598fc8adb09d7d5dc0a8c9
class TestListMaintenanceResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testListMaintenanceResponse(self): <NEW_LINE> <INDENT> pass
ListMaintenanceResponse unit test stubs
62598fc8283ffb24f3cf3bd4
class GuestUsagesResourceList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[GuestUsagesResource]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(GuestUsagesResourceList, self).__init__(**kwargs) <NEW_LINE> self.value = None
The collection of guest usages resources. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of guest usages resources. :vartype value: list[~$(python-base-namespace).v2020_05_01_preview.models.GuestUsagesResource]
62598fc8ec188e330fdf8be4
class ClearCustomDescriptorsHandler(BaseStackdriverCommandHandler): <NEW_LINE> <INDENT> def __do_clear(self, options): <NEW_LINE> <INDENT> project = options.get('project', None) <NEW_LINE> stackdriver = stackdriver_service.make_service(options) <NEW_LINE> type_map = stackdriver.fetch_all_custom_descriptors(project) <NEW_LINE> delete_method = (stackdriver.stub.projects().metricDescriptors().delete) <NEW_LINE> def delete_invocation(descriptor): <NEW_LINE> <INDENT> name = descriptor['name'] <NEW_LINE> logging.info('batch DELETE %s', name) <NEW_LINE> return delete_method(name=name) <NEW_LINE> <DEDENT> get_descriptor_name = lambda descriptor: descriptor['name'] <NEW_LINE> processor = BatchProcessor( project, stackdriver, type_map.values(), delete_invocation, get_descriptor_name) <NEW_LINE> processor.process() <NEW_LINE> return type_map, processor <NEW_LINE> <DEDENT> def process_commandline_request(self, options): <NEW_LINE> <INDENT> type_map, processor = self.__do_clear(options) <NEW_LINE> headers, body = processor.make_response( None, False, 'Deleted', 'Cleared Time Series') <NEW_LINE> self.output(options, body) <NEW_LINE> <DEDENT> def process_web_request(self, request, path, params, fragment): <NEW_LINE> <INDENT> type_map, processor = self.__do_clear(params) <NEW_LINE> response_code = (httplib.OK if processor.num_ok == len(type_map) else httplib.INTERNAL_SERVER_ERROR) <NEW_LINE> headers, body = processor.make_response( request, accepts_content_type(request, 'text/html'), 'Deleted', 'Cleared Time Series') <NEW_LINE> request.respond(response_code, headers, body)
Administrative handler to clear all the known descriptors. This clears all the TimeSeries history as well.
62598fc8167d2b6e312b72c6
class Error(Exception): <NEW_LINE> <INDENT> pass
Base class to be used for other module's exceptions.
62598fc826068e7796d4ccab
class Producer: <NEW_LINE> <INDENT> def produce(self): <NEW_LINE> <INDENT> print("Producer is working hard!") <NEW_LINE> <DEDENT> def meet(self): <NEW_LINE> <INDENT> print("Produce has time to meet you now!")
Define the 'resourse-intensive objects to instantiate'
62598fc88a349b6b4368658e
class MEMM(_StationaryModel): <NEW_LINE> <INDENT> def __init__(self, models, f_therm, pi=None, f=None, label='ground state'): <NEW_LINE> <INDENT> self.set_model_params(models=models, f_therm=f_therm, pi=pi, f=f, label=label) <NEW_LINE> <DEDENT> @property <NEW_LINE> def unbiased_state(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._unbiased_state <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def msm(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.models[self.unbiased_state] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def msm_active_set(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.model_active_set[self.unbiased_state] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def set_model_params(self, models=None, f_therm=None, pi=None, f=None, label='ground state'): <NEW_LINE> <INDENT> _StationaryModel.set_model_params(self, pi=pi, f=f, normalize_f=False) <NEW_LINE> _types.assert_array(f_therm, ndim=1, kind='numeric') <NEW_LINE> f_therm = _np.array(f_therm, dtype=float) <NEW_LINE> for m in models: <NEW_LINE> <INDENT> assert issubclass(m.__class__, _Model) <NEW_LINE> <DEDENT> self.update_model_params(models=models, f_therm=f_therm) <NEW_LINE> <DEDENT> def meval(self, f, *args, **kw): <NEW_LINE> <INDENT> return [_call_member(M, f, *args, **kw) for M in self.models]
Coupled set of Models at multiple thermodynamic states Parameters ---------- models : list of Model objects List of Model objects, e.g. StationaryModel or MSM objects, at the different thermodynamic states. This list may include the ground state, such that self.pi = self.models[0].pi holds. An example for that is data obtained from parallel tempering or replica-exchange, where the lowest simulated temperature is usually identical to the thermodynamic ground state. However, the list does not have to include the thermodynamic ground state. For example, when obtaining data from umbrella sampling, models might be the list of stationary models for n umbrellas (biased ensembles), while the thermodynamic ground state is the unbiased ensemble. In that case, self.pi would be different from any self.models[i].pi f_therm : ndarray(k) free energies at the different thermodynamic states pi : ndarray(n), default=None Stationary distribution of the thermodynamic ground state. If not already normalized, pi will be scaled to fulfill :math:`\sum_i \pi_i = 1`. If None, models[0].pi will be used f : ndarray(n) Discrete-state free energies of the thermodynamic ground state. label : str, default='ground state' Human-readable description for the thermodynamic ground state or reference state of this multiensemble. May contain a temperature description, such as '300 K' or a description of bias energy such as 'unbiased'.
62598fc850812a4eaa620d8c
class ellipse(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def centerRadius(parent, centerPoint, radiusX, radiusY, offset=[0, 0], label='circle', lineStyle=lineStyle.setSimpleBlack()): <NEW_LINE> <INDENT> arcStringA = ' a %f,%f 0 1 1 %f,%f' % (radiusX, radiusY, -2 * radiusX, 0) <NEW_LINE> arcStringB = ' a %f,%f 0 1 1 %f,%f' % (radiusX, radiusY, 2 * radiusX, 0) <NEW_LINE> Attribs = {inkex.addNS('label', 'inkscape'): label, 'style': simplestyle.formatStyle(lineStyle), inkex.addNS('type', 'sodipodi'): 'arc', inkex.addNS('rx', 'sodipodi'): str(radiusX), inkex.addNS('ry', 'sodipodi'): str(radiusY), inkex.addNS('cx', 'sodipodi'): str(centerPoint[0] + offset[0]), inkex.addNS('cy', 'sodipodi'): str(centerPoint[1] + offset[1]), inkex.addNS('start', 'sodipodi'): '0', inkex.addNS('end', 'sodipodi'): str(2 * math.pi), 'd': 'M ' + str(centerPoint[0] + offset[0] + radiusX) + ' ' + str( centerPoint[1] + offset[1]) + arcStringA + ' ' + arcStringB + ' z'} <NEW_LINE> return inkex.etree.SubElement(parent, inkex.addNS('path', 'svg'), Attribs)
This is a class with different methods for drawing ellipses. This class contains only static methods so that you don't have to inherit this in your class
62598fc8f548e778e596b8ec
class Language(models.Model): <NEW_LINE> <INDENT> name = models.CharField( max_length=100, help_text='Enter language book is written in') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Model representing the book language
62598fc8f9cc0f698b1c547a
class ProportionalValveSymbolIcon(BaseSymbolIcon): <NEW_LINE> <INDENT> def __init__(self, parent=None, **kwargs): <NEW_LINE> <INDENT> super(ProportionalValveSymbolIcon, self).__init__(parent, **kwargs) <NEW_LINE> self._interlock_brush = QBrush(QColor(0, 255, 0), Qt.SolidPattern) <NEW_LINE> <DEDENT> @Property(QBrush) <NEW_LINE> def interlockBrush(self): <NEW_LINE> <INDENT> return self._interlock_brush <NEW_LINE> <DEDENT> @interlockBrush.setter <NEW_LINE> def interlockBrush(self, new_brush): <NEW_LINE> <INDENT> if new_brush != self._interlock_brush: <NEW_LINE> <INDENT> self._interlock_brush = new_brush <NEW_LINE> self.update() <NEW_LINE> <DEDENT> <DEDENT> def draw_icon(self, painter): <NEW_LINE> <INDENT> path = QPainterPath(QPointF(0, 0.3)) <NEW_LINE> path.lineTo(0, 0.9) <NEW_LINE> path.lineTo(1, 0.3) <NEW_LINE> path.lineTo(1, 0.9) <NEW_LINE> path.closeSubpath() <NEW_LINE> painter.drawPath(path) <NEW_LINE> painter.drawLine(QPointF(0.5, 0.6), QPointF(0.5, 0.15)) <NEW_LINE> painter.setBrush(self._interlock_brush) <NEW_LINE> painter.drawRect(QRectF(0.35, 0, 0.3, 0.3)) <NEW_LINE> painter.setBrush(QBrush(QColor(0, 0, 0))) <NEW_LINE> top_arrow_point = QPointF(0.65, 0.42) <NEW_LINE> arrow = QPolygonF( [QPointF(-0.07, 0.0), QPointF(-0.005, 0.0), QPointF(-0.005, 0.8), QPointF(0.005, 0.8), QPointF(0.005, 0.0), QPointF(0.07, 0.0), QPointF(0.00, -0.25)] ) <NEW_LINE> t = QTransform() <NEW_LINE> t.rotate(40) <NEW_LINE> top_arrow_r = t.map(arrow) <NEW_LINE> arrow_l = top_arrow_r.translated(top_arrow_point) <NEW_LINE> painter.drawPolygon(arrow_l) <NEW_LINE> t_x = 0.4 <NEW_LINE> t_y = 0.05 <NEW_LINE> painter.drawLines([QLineF(0.0+t_x, 0.0+t_y, 0.0+t_x, 0.2+t_y), QLineF(0.0+t_x, 0.0+t_y, 0.1+t_x, 0.2+t_y), QLineF(0.1+t_x, 0.2+t_y, 0.2+t_x, 0.0+t_y), QLineF(0.2+t_x, 0.0+t_y, 0.2+t_x, 0.2+t_y)])
A widget with a proportional valve symbol drawn in it. Parameters ---------- parent : QWidget The parent widget for the icon
62598fc866673b3332c30726
class CategorySerializer(serializers.Serializer): <NEW_LINE> <INDENT> pk = serializers.IntegerField(read_only=True) <NEW_LINE> name = serializers.CharField(required=True, max_length=100) <NEW_LINE> def create(self, validated_data): <NEW_LINE> <INDENT> return Category.objects.create(**validated_data)
Serializer for category model
62598fc863b5f9789fe854c5
class Config: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.gmaps_key = os.getenv('GMAPKEY', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.pstack_key = os.getenv('PSTACKEY', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.opencage_key = os.getenv('OPCAGEKEY', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.es_host = os.getenv('ESHOST', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.es_port = os.getenv('ESPORT', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.es_pass = os.getenv('ESPASS', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.es_user = os.getenv('ESUSER', 'XXXXXXXXXXXXXXXXXXXXXXXX') <NEW_LINE> self.es_index = os.getenv('EINDEX ', 'XXXXXXXXXXXXXXXXXXXXXXXX')
Configuration class to store all the env vars
62598fc8d486a94d0ba2c322
class ParameterizedForEach(ForEachBase): <NEW_LINE> <INDENT> def on_failure(self, values, history, *args, **kwargs): <NEW_LINE> <INDENT> return self._on_failure(values, history) <NEW_LINE> <DEDENT> def execute(self, values, history, *args, **kwargs): <NEW_LINE> <INDENT> return self._get_next_value(values, history)
Applies a dynamically provided collection of strategies. Accepts a collection of decision strategies from a predecessor (or from storage) as a parameter and returns the next element of that collection on each try.
62598fc8be7bc26dc9252003
class ReadDataFromKinesis(ExternalTransform): <NEW_LINE> <INDENT> URN = 'beam:external:java:kinesis:read_data:v1' <NEW_LINE> def __init__( self, stream_name, aws_access_key, aws_secret_key, region, service_endpoint=None, verify_certificate=None, max_num_records=None, max_read_time=None, initial_position_in_stream=None, initial_timestamp_in_stream=None, request_records_limit=None, up_to_date_threshold=None, max_capacity_per_shard=None, watermark_policy=None, watermark_idle_duration_threshold=None, rate_limit=None, expansion_service=None, ): <NEW_LINE> <INDENT> WatermarkPolicy.validate_param(watermark_policy) <NEW_LINE> InitialPositionInStream.validate_param(initial_position_in_stream) <NEW_LINE> if watermark_idle_duration_threshold: <NEW_LINE> <INDENT> assert WatermarkPolicy.ARRIVAL_TIME == watermark_policy <NEW_LINE> <DEDENT> if request_records_limit: <NEW_LINE> <INDENT> assert 0 < request_records_limit <= 10000 <NEW_LINE> <DEDENT> initial_timestamp_in_stream = int( initial_timestamp_in_stream) if initial_timestamp_in_stream else None <NEW_LINE> if initial_timestamp_in_stream and initial_timestamp_in_stream < time.time( ): <NEW_LINE> <INDENT> logging.warning('Provided timestamp emplaced not in the past.') <NEW_LINE> <DEDENT> super(ReadDataFromKinesis, self).__init__( self.URN, NamedTupleBasedPayloadBuilder( ReadFromKinesisSchema( stream_name=stream_name, aws_access_key=aws_access_key, aws_secret_key=aws_secret_key, region=region, service_endpoint=service_endpoint, verify_certificate=verify_certificate, max_num_records=max_num_records, max_read_time=max_read_time, initial_position_in_stream=initial_position_in_stream, initial_timestamp_in_stream=initial_timestamp_in_stream, request_records_limit=request_records_limit, up_to_date_threshold=up_to_date_threshold, max_capacity_per_shard=max_capacity_per_shard, watermark_policy=watermark_policy, watermark_idle_duration_threshold= watermark_idle_duration_threshold, rate_limit=rate_limit, )), expansion_service or 
default_io_expansion_service(), )
An external PTransform which reads byte array stream from Amazon Kinesis. Experimental; no backwards compatibility guarantees.
62598fc80fa83653e46f5237
class Twitt_listener(StreamListener): <NEW_LINE> <INDENT> def __init__(self, twitt_list, twitt_num): <NEW_LINE> <INDENT> self.invocation_count = 0 <NEW_LINE> self.twitt_list = twitt_list <NEW_LINE> self.twitt_num = twitt_num <NEW_LINE> <DEDENT> def on_data(self, data): <NEW_LINE> <INDENT> self.invocation_count += 1 <NEW_LINE> if (self.invocation_count == self.twitt_num): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.twitt_list.append(data) <NEW_LINE> print("twitt %d" % self.invocation_count) <NEW_LINE> return True <NEW_LINE> <DEDENT> def on_error(self, status): <NEW_LINE> <INDENT> print(status)
A listener that handles twitts that are the received from the twitter streaming API. Receives twitt_num number of twitts and appends them to twitt_list
62598fc897e22403b383b256
class SetIdentityProvider(command.Command): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + '.SetIdentityProvider') <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(SetIdentityProvider, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'identity_provider', metavar='<identity-provider>', help='Identity provider to modify', ) <NEW_LINE> enable_identity_provider = parser.add_mutually_exclusive_group() <NEW_LINE> enable_identity_provider.add_argument( '--enable', action='store_true', help='Enable the identity provider', ) <NEW_LINE> enable_identity_provider.add_argument( '--disable', action='store_true', help='Disable the identity provider', ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> self.log.debug('take_action(%s)', parsed_args) <NEW_LINE> federation_client = self.app.client_manager.identity.federation <NEW_LINE> if parsed_args.enable is True: <NEW_LINE> <INDENT> enabled = True <NEW_LINE> <DEDENT> elif parsed_args.disable is True: <NEW_LINE> <INDENT> enabled = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.error("No changes requested") <NEW_LINE> return (None, None) <NEW_LINE> <DEDENT> identity_provider = federation_client.identity_providers.update( parsed_args.identity_provider, enabled=enabled) <NEW_LINE> identity_provider._info.pop('links', None) <NEW_LINE> return zip(*sorted(six.iteritems(identity_provider._info)))
Set identity provider properties
62598fc823849d37ff851403
class String(Serializer): <NEW_LINE> <INDENT> def __init__(self, min_length=None, max_length=None, *args, **kwargs): <NEW_LINE> <INDENT> self._min = min_length <NEW_LINE> self._max = max_length <NEW_LINE> super(String, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def validate(self, key, value): <NEW_LINE> <INDENT> super(String, self).validate(key, value) <NEW_LINE> self.u(value) <NEW_LINE> if self._min is not None: <NEW_LINE> <INDENT> if len(value) < self._min: <NEW_LINE> <INDENT> raise self.ValidationError( "Length of '%s's value should be more" "that %s character" % (key, self._min)) <NEW_LINE> <DEDENT> <DEDENT> if self._max: <NEW_LINE> <INDENT> if len(value) > self._max: <NEW_LINE> <INDENT> raise self.ValidationError("Length of '%s's value should be" "less that %s character""" % (key, self._max)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def is_valid_value(self, value): <NEW_LINE> <INDENT> if isinstance(value, six.string_types): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def serialize(self, value, **kwargs): <NEW_LINE> <INDENT> return six.u(value) <NEW_LINE> <DEDENT> def deserialize(self, value): <NEW_LINE> <INDENT> return self.u(value) <NEW_LINE> <DEDENT> def form_field_factory(self, name, **kwargs): <NEW_LINE> <INDENT> return super(String, self).form_field_factory(name, min_length=self._min, max_length=self._max) <NEW_LINE> <DEDENT> def u(self, value): <NEW_LINE> <INDENT> if six.PY3: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> return unicode(value) <NEW_LINE> <DEDENT> elif isinstance(value, unicode): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return six.u(value)
Serializer for string data. :param min_length: This parameter specify the minimum length of the string. :param max_length: This parameter specify the maximum length of the string.
62598fc8f9cc0f698b1c547b
class Command(BaseCommand): <NEW_LINE> <INDENT> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument('args', nargs='*') <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> users = [User.objects.get(username=arg) for arg in args] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> users = User.objects.all() <NEW_LINE> <DEDENT> print("Superusers: ") <NEW_LINE> for user in [u for u in users if u.is_superuser]: <NEW_LINE> <INDENT> print("%15s %40s %s %s" % (user.username, user.email, user.first_name, user.last_name)) <NEW_LINE> <DEDENT> print("\nStaff: ") <NEW_LINE> for user in [u for u in users if u.is_staff]: <NEW_LINE> <INDENT> print("%15s %40s %s %s" % (user.username, user.email, user.first_name, user.last_name)) <NEW_LINE> <DEDENT> print("\nAll non-superusers, non-staff: " ) <NEW_LINE> all_other_users = sorted([u for u in users if not u.is_staff and not u.is_superuser], key=lambda u: u.email) <NEW_LINE> import collections <NEW_LINE> emails = collections.defaultdict(int) <NEW_LINE> user_emails_filename = "xbrowse_user_emails.tsv" <NEW_LINE> f = open(user_emails_filename, "w") <NEW_LINE> print("\nWriting all user emails to %s" % os.path.abspath(user_emails_filename)) <NEW_LINE> for user in all_other_users: <NEW_LINE> <INDENT> emails[user.email] += 1 <NEW_LINE> print("%15s %40s %10s %10s %s" % (user.username, user.email, user.first_name, user.last_name, [p.project_id for p in Project.objects.all().order_by('project_id') if p.can_view(user)])) <NEW_LINE> f.write("%s\n" % user.email) <NEW_LINE> <DEDENT> f.close() <NEW_LINE> print("\nWrote all user emails to %s" % os.path.abspath(user_emails_filename)) <NEW_LINE> print("\nDuplicate accounts with same email address:") <NEW_LINE> found = False <NEW_LINE> for email, counter in emails.items(): <NEW_LINE> <INDENT> if counter > 1: <NEW_LINE> <INDENT> print("%s - count: %s" % (email, counter)) <NEW_LINE> found = True <NEW_LINE> <DEDENT> <DEDENT> if not found: 
<NEW_LINE> <INDENT> print(" None found")
Command to print out basic stats on some or all projects. Optionally takes a list of project_ids.
62598fc871ff763f4b5e7ad1
class ImuHw(object): <NEW_LINE> <INDENT> def __init__(self, driver=None, callback=None, logger=None): <NEW_LINE> <INDENT> self._driver = driver <NEW_LINE> self._callback = callback <NEW_LINE> self._logger = logger or Logger() <NEW_LINE> <DEDENT> def _imu_callback(self, msg): <NEW_LINE> <INDENT> self._callback(msg) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._driver.start(self._imu_callback) <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> start = time.time() <NEW_LINE> self._logger.info("ImuHw starting shutdown ...") <NEW_LINE> self._driver.stop() <NEW_LINE> self._logger.info("ImuHw shutdown time: {}".format(time.time() - start)) <NEW_LINE> <DEDENT> def calibrate(self, data): <NEW_LINE> <INDENT> self._logger.info("imuhw calibrating") <NEW_LINE> self._driver.calibrate() <NEW_LINE> <DEDENT> def get_status(self): <NEW_LINE> <INDENT> pass
The ImuHw wraps the IMU driver which can be implemented using various physical IMU hardware devices and translates IMU data into messages delivered to the HAL node.
62598fc84c3428357761a60f
class MeetmeJoin(_Message): <NEW_LINE> <INDENT> pass
Indicates that a user has joined a Meetme bridge. - 'Channel' : The channel that was bridged - 'Meetme' : The ID of the Meetme bridge, typically a number formatted as a string - 'Uniqueid' : An Asterisk unique value - 'Usernum' : The bridge-specific participant ID assigned to the channel
62598fc87c178a314d78d7f1
class DeclarativeColumnsMetaclass(type): <NEW_LINE> <INDENT> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> attrs['_meta'] = opts = TableOptions(attrs.get('Meta', None)) <NEW_LINE> cols, remainder = [], {} <NEW_LINE> for attr_name, attr in attrs.items(): <NEW_LINE> <INDENT> if isinstance(attr, columns.Column): <NEW_LINE> <INDENT> attr._explicit = True <NEW_LINE> cols.append((attr_name, attr)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> remainder[attr_name] = attr <NEW_LINE> <DEDENT> <DEDENT> attrs = remainder <NEW_LINE> cols.sort(key=lambda x: x[1].creation_counter) <NEW_LINE> parent_columns = [] <NEW_LINE> for base in bases[::-1]: <NEW_LINE> <INDENT> if hasattr(base, 'base_columns'): <NEW_LINE> <INDENT> parent_columns = list(base.base_columns.items()) + parent_columns <NEW_LINE> <DEDENT> <DEDENT> base_columns = OrderedDict(parent_columns) <NEW_LINE> if opts.model: <NEW_LINE> <INDENT> extra = OrderedDict() <NEW_LINE> if opts.fields is not None: <NEW_LINE> <INDENT> for field_name in opts.fields: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> field = opts.model._meta.get_field(field_name) <NEW_LINE> <DEDENT> except FieldDoesNotExist: <NEW_LINE> <INDENT> extra[field_name] = columns.Column() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> extra[field_name] = columns.library.column_for_field(field) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for field in opts.model._meta.fields: <NEW_LINE> <INDENT> extra[field.name] = columns.library.column_for_field(field) <NEW_LINE> <DEDENT> <DEDENT> for key, col in extra.items(): <NEW_LINE> <INDENT> if key in base_columns and base_columns[key]._explicit is True: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> base_columns[key] = col <NEW_LINE> <DEDENT> <DEDENT> base_columns.update(OrderedDict(cols)) <NEW_LINE> for exclusion in opts.exclude: <NEW_LINE> <INDENT> if exclusion in base_columns: <NEW_LINE> <INDENT> base_columns.pop(exclusion) <NEW_LINE> <DEDENT> <DEDENT> if opts.sequence: <NEW_LINE> <INDENT> 
opts.sequence.expand(base_columns.keys()) <NEW_LINE> base_columns = OrderedDict(( (x, base_columns[x]) for x in opts.sequence if x in base_columns )) <NEW_LINE> <DEDENT> for col_name in base_columns.keys(): <NEW_LINE> <INDENT> localize_column = None <NEW_LINE> if col_name in opts.localize: <NEW_LINE> <INDENT> localize_column = True <NEW_LINE> <DEDENT> if col_name in opts.unlocalize: <NEW_LINE> <INDENT> localize_column = False <NEW_LINE> <DEDENT> if localize_column is not None: <NEW_LINE> <INDENT> base_columns[col_name].localize = localize_column <NEW_LINE> <DEDENT> <DEDENT> attrs['base_columns'] = base_columns <NEW_LINE> return super(DeclarativeColumnsMetaclass, mcs).__new__(mcs, name, bases, attrs)
Metaclass that converts `.Column` objects defined on a class to the dictionary `.Table.base_columns`, taking into account parent class `base_columns` as well.
62598fc8ad47b63b2c5a7bac
class TestCreateValidationFailureMessage(cros_test_lib.TestCase): <NEW_LINE> <INDENT> def GetPatches(self, how_many=1): <NEW_LINE> <INDENT> patches = [SimplePatch() for _ in xrange(how_many)] <NEW_LINE> if how_many == 1: <NEW_LINE> <INDENT> return patches[0] <NEW_LINE> <DEDENT> return patches <NEW_LINE> <DEDENT> def _AssertMessage(self, change, suspects, messages): <NEW_LINE> <INDENT> msg = validation_pool.ValidationPool._CreateValidationFailureMessage( False, change, set(suspects), messages) <NEW_LINE> for x in messages: <NEW_LINE> <INDENT> self.assertTrue(x in msg) <NEW_LINE> <DEDENT> return msg <NEW_LINE> <DEDENT> def testSuspectChange(self): <NEW_LINE> <INDENT> patch = self.GetPatches(1) <NEW_LINE> self._AssertMessage(patch, [patch], ['%s failed' % patch]) <NEW_LINE> <DEDENT> def testInnocentChange(self): <NEW_LINE> <INDENT> patch1, patch2 = self.GetPatches(2) <NEW_LINE> self._AssertMessage(patch1, [patch2], ['%s failed' % patch2]) <NEW_LINE> <DEDENT> def testSuspectChanges(self): <NEW_LINE> <INDENT> patches = self.GetPatches(2) <NEW_LINE> self._AssertMessage(patches[0], patches, ['%s and %s failed' % tuple(patches)]) <NEW_LINE> <DEDENT> def testInnocentChangeWithMultipleSuspects(self): <NEW_LINE> <INDENT> patches = self.GetPatches(3) <NEW_LINE> self._AssertMessage(patches[0], patches[1:], ['%s and %s failed' % tuple(patches[1:])]) <NEW_LINE> <DEDENT> def testNoSuspects(self): <NEW_LINE> <INDENT> self._AssertMessage(self.GetPatches(1), [], ['Internal error']) <NEW_LINE> <DEDENT> def testNoMessages(self): <NEW_LINE> <INDENT> patch1 = self.GetPatches(1) <NEW_LINE> self._AssertMessage(patch1, [patch1], [])
Tests validation_pool.ValidationPool._CreateValidationFailureMessage
62598fc87b180e01f3e491f9
class Triangle: <NEW_LINE> <INDENT> def __init__(self, lenght): <NEW_LINE> <INDENT> self.lenght = lenght <NEW_LINE> <DEDENT> def get_triangle(self): <NEW_LINE> <INDENT> triangle = "" <NEW_LINE> down_triangle = "" <NEW_LINE> for i in range(self.lenght + 1): <NEW_LINE> <INDENT> triangle += "*" * i <NEW_LINE> triangle += "\n" <NEW_LINE> <DEDENT> for i in range(self.lenght - 1, 0, -1): <NEW_LINE> <INDENT> down_triangle += "*" * i <NEW_LINE> down_triangle += "\n" <NEW_LINE> <DEDENT> return triangle + down_triangle
Class that returns a triangle.
62598fc84527f215b58ea222
class InsertPatternException(KnittingError): <NEW_LINE> <INDENT> pass
Exception raised for errors in the insert of the pattern. Attributes: expression -- insert pattern expression in which the error occurred message -- explanation of the error
62598fc8377c676e912f6f1f
class NiftiGeneratorBase(BaseInterface): <NEW_LINE> <INDENT> def _get_out_path(self, meta): <NEW_LINE> <INDENT> if self.inputs.out_format: <NEW_LINE> <INDENT> out_fmt = self.inputs.out_format <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_fmt = [] <NEW_LINE> if 'SeriesNumber' in meta: <NEW_LINE> <INDENT> out_fmt.append('%(SeriesNumber)03d') <NEW_LINE> <DEDENT> if 'ProtocolName' in meta: <NEW_LINE> <INDENT> out_fmt.append('%(ProtocolName)s') <NEW_LINE> <DEDENT> elif 'SeriesDescription' in meta: <NEW_LINE> <INDENT> out_fmt.append('%(SeriesDescription)s') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_fmt.append('sequence') <NEW_LINE> <DEDENT> out_fmt = '-'.join(out_fmt) <NEW_LINE> <DEDENT> out_fn = (out_fmt % meta) + self.inputs.out_ext <NEW_LINE> out_fn = sanitize_path_comp(out_fn) <NEW_LINE> return path.join(os.getcwd(), out_fn)
Base class for interfaces that produce Nifti files, potentially with embeded meta data.
62598fc826068e7796d4ccaf
class Embedding(): <NEW_LINE> <INDENT> def __init__(self, size=None, ngram=1): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.ngram = ngram <NEW_LINE> self.vocabulary = dict() <NEW_LINE> self.vocab_size = 0 <NEW_LINE> self.embedding_matrix = None <NEW_LINE> <DEDENT> def load(self, load_path): <NEW_LINE> <INDENT> self.vocab_size = file_len(load_path) <NEW_LINE> with open(load_path, mode='r', newline='') as embedding_file: <NEW_LINE> <INDENT> reader = csv.reader( embedding_file, delimiter=' ', quoting=csv.QUOTE_NONE) <NEW_LINE> for index, embedding in tqdm(enumerate(reader)): <NEW_LINE> <INDENT> if self.embedding_matrix is None: <NEW_LINE> <INDENT> self.size = len(embedding) - 1 <NEW_LINE> self.embedding_matrix = np.zeros( (self.vocab_size, self.size), dtype=float) <NEW_LINE> <DEDENT> self.vocabulary[embedding[0]] = index <NEW_LINE> self.embedding_matrix[index] = embedding[1:] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def initialize_embeddings_from_sentences(self, sentences): <NEW_LINE> <INDENT> assert sentences is not None and self.size is not None <NEW_LINE> print('Building Vocabulary') <NEW_LINE> self._build_vocabulary(sentences) <NEW_LINE> self.embedding_matrix = np.random.uniform(-1, 1, (self.vocab_size, self.size)) <NEW_LINE> <DEDENT> def _build_vocabulary(self, sentences): <NEW_LINE> <INDENT> unique_words = set() <NEW_LINE> unique_words.add('<unknown>') <NEW_LINE> for sentence in tqdm(sentences): <NEW_LINE> <INDENT> grams = ngrams(sentence, self.ngram) <NEW_LINE> for gram in grams: <NEW_LINE> <INDENT> unique_words.add('_'.join(gram)) <NEW_LINE> <DEDENT> <DEDENT> self.vocabulary = { word: index for index, word in enumerate(sorted(unique_words)) } <NEW_LINE> self.vocab_size = len(self.vocabulary) <NEW_LINE> <DEDENT> def lookup(self, words): <NEW_LINE> <INDENT> ids = lookup_ids(self.vocabulary, words) <NEW_LINE> return self.embedding_matrix[ids, ] <NEW_LINE> <DEDENT> def save(self, save_path): <NEW_LINE> <INDENT> makedirs(dirname(save_path), exist_ok=True) 
<NEW_LINE> with open(save_path, 'w', newline='') as save_file: <NEW_LINE> <INDENT> writer = csv.writer( save_file, delimiter=' ', quoting=csv.QUOTE_NONE, escapechar='', quotechar='') <NEW_LINE> print('Writing embeddings to file...') <NEW_LINE> for word in sorted( self.vocabulary, key=lambda k: self.vocabulary[k]): <NEW_LINE> <INDENT> writer.writerow([word] + list(self.lookup([word])[0])) <NEW_LINE> <DEDENT> print('Writing vocabulary to file...') <NEW_LINE> vocab_path = splitext(save_path)[0] + '.vocab' <NEW_LINE> save_vocab(self.vocabulary, vocab_path)
Class wrapping Embedding initialization, loading, lookup and saving operations.
62598fc823849d37ff851405
class VersionInfo(Persistent): <NEW_LINE> <INDENT> __allow_access_to_unprotected_subobjects__ = 1 <NEW_LINE> def __init__(self, history_id, version_id, status): <NEW_LINE> <INDENT> self.timestamp = time.time() <NEW_LINE> self.history_id = history_id <NEW_LINE> self.version_id = version_id <NEW_LINE> self.status = status <NEW_LINE> self.user_id = _findUserId() <NEW_LINE> <DEDENT> sticky = None <NEW_LINE> CHECKED_OUT = 0 <NEW_LINE> CHECKED_IN = 1 <NEW_LINE> def branchName(self): <NEW_LINE> <INDENT> if self.sticky is not None and self.sticky[0] == 'B': <NEW_LINE> <INDENT> return self.sticky[1] <NEW_LINE> <DEDENT> return 'mainline' <NEW_LINE> <DEDENT> def clone(self, clear_sticky=0): <NEW_LINE> <INDENT> info = VersionInfo(self.history_id, self.version_id, self.status) <NEW_LINE> dict = info.__dict__ <NEW_LINE> for name, value in self.__dict__.items(): <NEW_LINE> <INDENT> dict[name] = value <NEW_LINE> <DEDENT> if clear_sticky: <NEW_LINE> <INDENT> if dict.has_key('sticky'): <NEW_LINE> <INDENT> del dict['sticky'] <NEW_LINE> <DEDENT> <DEDENT> info.user_id = _findUserId() <NEW_LINE> info.timestamp = time.time() <NEW_LINE> return info
A VersionInfo object contains bookkeeping information for version controlled objects. The bookkeeping information can be read (but not changed) by restricted code.
62598fc8a05bb46b3848abbf
class Route(ContainerAware): <NEW_LINE> <INDENT> def __call__(self, event): <NEW_LINE> <INDENT> auth_config = self.container.get('application').config['auth'] <NEW_LINE> request = event.params['context']['request'] <NEW_LINE> for provider in auth_config['providers']: <NEW_LINE> <INDENT> provider = self.container.get(provider) <NEW_LINE> provider.handle_request(request)
Listens for a route event and attempts to inject the user into the request if one has been authenticated.
62598fc8a219f33f346c6b5d
class RunFile(FileObject): <NEW_LINE> <INDENT> __tablename__ = 'run_file' <NEW_LINE> run_id = Column( String(32), ForeignKey('workflow_run.run_id') ) <NEW_LINE> UniqueConstraint('run_id', 'name') <NEW_LINE> run = relationship('RunObject', back_populates='files')
File resources that are created by successful workflow runs.
62598fc8ab23a570cc2d4f18
class TestKeyReset(SimTestCase): <NEW_LINE> <INDENT> _MENU = ["--propagate", "key", "reset"] <NEW_LINE> _KEYNAME = "testkey" <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self._key_file = RandomKeyTmpFile() <NEW_LINE> RUNNER( ["key", "set", self._KEYNAME, "--keyfile", self._key_file.tmpfile_name()] ) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super().tearDown() <NEW_LINE> self._key_file.close() <NEW_LINE> <DEDENT> def test_reset(self): <NEW_LINE> <INDENT> with RandomKeyTmpFile() as fname: <NEW_LINE> <INDENT> command_line = self._MENU + [self._KEYNAME, "--keyfile-path", fname] <NEW_LINE> TEST_RUNNER(command_line) <NEW_LINE> <DEDENT> <DEDENT> def test_reset_no_change(self): <NEW_LINE> <INDENT> command_line = self._MENU + [ self._KEYNAME, "--keyfile-path", self._key_file.tmpfile_name(), ] <NEW_LINE> self.check_error(StratisCliNoChangeError, command_line, _ERROR) <NEW_LINE> <DEDENT> def test_reset_does_not_exist(self): <NEW_LINE> <INDENT> command_line = self._MENU + [ "notakey", "--keyfile-path", self._key_file.tmpfile_name(), ] <NEW_LINE> self.check_error(StratisCliResourceNotFoundError, command_line, _ERROR) <NEW_LINE> <DEDENT> def test_reset_key_too_long(self): <NEW_LINE> <INDENT> with RandomKeyTmpFile(128) as fname: <NEW_LINE> <INDENT> command_line = self._MENU + [self._KEYNAME, "--keyfile-path", fname] <NEW_LINE> self.check_error(StratisCliEngineError, command_line, _ERROR)
Test resetting a key in the keyring.
62598fc8bf627c535bcb17fe
class TextPost(Entry): <NEW_LINE> <INDENT> _TYPE = ENTRY_TYPE_TEXT <NEW_LINE> _MIMETYPE = 'text/html' <NEW_LINE> post = db.TextProperty() <NEW_LINE> def toHTML(self): <NEW_LINE> <INDENT> return self.post <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self.post
Text post.
62598fc897e22403b383b25a
class StatusMonitor(cherrypy.Tool): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._point = 'on_start_resource' <NEW_LINE> self._name = 'status' <NEW_LINE> self._priority = 50 <NEW_LINE> self.seenThreads = {} <NEW_LINE> <DEDENT> def callable(self): <NEW_LINE> <INDENT> threadId = threading.current_thread().ident <NEW_LINE> self.seenThreads[threadId] = { 'start': cherrypy.response.time, 'url': cherrypy.url()} <NEW_LINE> <DEDENT> def unregister(self): <NEW_LINE> <INDENT> threadID = threading.current_thread().ident <NEW_LINE> if threadID in self.seenThreads: <NEW_LINE> <INDENT> self.seenThreads[threadID]['end'] = time.time() <NEW_LINE> <DEDENT> <DEDENT> def _setup(self): <NEW_LINE> <INDENT> cherrypy.Tool._setup(self) <NEW_LINE> cherrypy.request.hooks.attach('on_end_request', self.unregister)
Register the status of each thread.
62598fc85fc7496912d48424
class Hashed(Expiring): <NEW_LINE> <INDENT> prefix_length = 4 <NEW_LINE> algorithm = "sha512" <NEW_LINE> rounds = 16384 <NEW_LINE> salt_bytes = 8 <NEW_LINE> min_length = 8 <NEW_LINE> generated = True <NEW_LINE> generated_salt_bytes = 8 <NEW_LINE> generated_rounds = 1 <NEW_LINE> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> raise AttributeError("token is write-only") <NEW_LINE> <DEDENT> @token.setter <NEW_LINE> def token(self, token): <NEW_LINE> <INDENT> self.prefix = token[: self.prefix_length] <NEW_LINE> if self.generated: <NEW_LINE> <INDENT> rounds = self.generated_rounds <NEW_LINE> salt_bytes = self.generated_salt_bytes <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rounds = self.rounds <NEW_LINE> salt_bytes = self.salt_bytes <NEW_LINE> <DEDENT> self.hashed = hash_token( token, rounds=rounds, salt=salt_bytes, algorithm=self.algorithm ) <NEW_LINE> <DEDENT> def match(self, token): <NEW_LINE> <INDENT> return compare_token(self.hashed, token) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def check_token(cls, db, token): <NEW_LINE> <INDENT> if len(token) < cls.min_length: <NEW_LINE> <INDENT> raise ValueError( "Tokens must be at least %i characters, got %r" % (cls.min_length, token) ) <NEW_LINE> <DEDENT> found = cls.find(db, token) <NEW_LINE> if found: <NEW_LINE> <INDENT> raise ValueError("Collision on token: %s..." 
% token[: cls.prefix_length]) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def find_prefix(cls, db, token): <NEW_LINE> <INDENT> prefix = token[: cls.prefix_length] <NEW_LINE> prefix_match = db.query(cls).filter( bindparam('prefix', prefix).startswith(cls.prefix) ) <NEW_LINE> prefix_match = prefix_match.filter( or_(cls.expires_at == None, cls.expires_at >= cls.now()) ) <NEW_LINE> return prefix_match <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find(cls, db, token): <NEW_LINE> <INDENT> prefix_match = cls.find_prefix(db, token) <NEW_LINE> for orm_token in prefix_match: <NEW_LINE> <INDENT> if orm_token.match(token): <NEW_LINE> <INDENT> return orm_token
Mixin for tables with hashed tokens
62598fc9aad79263cf42eb24
class Trainer(object): <NEW_LINE> <INDENT> min_count = 10 <NEW_LINE> size = 200 <NEW_LINE> window = 10 <NEW_LINE> model = None <NEW_LINE> def __init__(self, corpus_iterator=None, result_name=None, retrain=True): <NEW_LINE> <INDENT> self.corpus_iterator = corpus_iterator <NEW_LINE> self.result_name = os.path.join('..', 'model', result_name) if result_name else None <NEW_LINE> self.retrain = retrain <NEW_LINE> <DEDENT> def load_model(self, name=None, bin=False): <NEW_LINE> <INDENT> if name: <NEW_LINE> <INDENT> self.result_name = os.path.join('..', 'model', name) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.model = models.KeyedVectors.load_word2vec_format(self.result_name, binary=True) if bin else models.Word2Vec.load(self.result_name) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error("加载模型失败:{}".format(e)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def train_model(self): <NEW_LINE> <INDENT> if not os.path.exists(self.result_name): <NEW_LINE> <INDENT> model = models.Word2Vec(self.corpus_iterator, min_count=self.min_count, size=self.size, window=self.window) <NEW_LINE> <DEDENT> elif self.retrain: <NEW_LINE> <INDENT> logging.info("模型已存在, 再次训练") <NEW_LINE> model = models.Word2Vec.load(self.result_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error("模型存在,禁止再次训练") <NEW_LINE> return <NEW_LINE> <DEDENT> model.train(self.corpus_iterator, total_examples=model.corpus_count, epochs=model.iter) <NEW_LINE> model.save(self.result_name) <NEW_LINE> self.model = model <NEW_LINE> <DEDENT> def test_model(self, function): <NEW_LINE> <INDENT> print(self.model) <NEW_LINE> if not self.model: <NEW_LINE> <INDENT> logging.error("模型未加载或未训练") <NEW_LINE> return <NEW_LINE> <DEDENT> if not hasattr(function, "__call__"): <NEW_LINE> <INDENT> logging.error("未传入测试函数") <NEW_LINE> <DEDENT> function(self.model)
模型训练器
62598fc9283ffb24f3cf3bdb
class ObjectappApphook(CMSApp): <NEW_LINE> <INDENT> name = _('Objectapp App Hook') <NEW_LINE> urls = ['objectapp.urls'] <NEW_LINE> menus = APP_MENUS
Objectapp's Apphook
62598fc9ad47b63b2c5a7bb0
class SearchTestMixin(DBTestMixin): <NEW_LINE> <INDENT> _used_search = False <NEW_LINE> def init_search(self): <NEW_LINE> <INDENT> self._used_search = True <NEW_LINE> self.app.config['AUTO_INDEX'] = True <NEW_LINE> es.initialize() <NEW_LINE> es.cluster.health(wait_for_status='yellow', request_timeout=10) <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def autoindex(self): <NEW_LINE> <INDENT> self.init_search() <NEW_LINE> yield <NEW_LINE> es.indices.refresh(index=es.index_name) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(SearchTestMixin, self).tearDown() <NEW_LINE> if self._used_search and es.indices.exists(index=es.index_name): <NEW_LINE> <INDENT> es.indices.delete(index=es.index_name)
A mixin allowing to optionnaly enable indexation and cleanup after
62598fc9dc8b845886d53912
class Pr(object): <NEW_LINE> <INDENT> def __init__(self, P0=0.2, f=1.769, tau_F=67.351, d=0.878, tau_D=92.918, dt=0.01): <NEW_LINE> <INDENT> self.P0 = P0 <NEW_LINE> self.f = f <NEW_LINE> self.tau_F = tau_F <NEW_LINE> self.d = d <NEW_LINE> self.tau_D = tau_D <NEW_LINE> self.P = P0 <NEW_LINE> self.dt = dt <NEW_LINE> self.tlast = None <NEW_LINE> self.F0 = 1. <NEW_LINE> self.D0 = 1. <NEW_LINE> self.F = None <NEW_LINE> self.D = None <NEW_LINE> self.t_array = None <NEW_LINE> self.P_array = None <NEW_LINE> <DEDENT> def stim(self, stim_time): <NEW_LINE> <INDENT> if self.tlast is not None: <NEW_LINE> <INDENT> t_segment = np.arange(self.t_array[-1], stim_time, self.dt) <NEW_LINE> self.F = 1. + (self.F0 - 1.) * np.exp(-(t_segment - self.t_array[-1]) / self.tau_F) <NEW_LINE> self.D = 1. - (1. - self.D0) * np.exp(-(t_segment - self.t_array[-1]) / self.tau_D) <NEW_LINE> P_segment = np.minimum(np.ones_like(t_segment), np.multiply(self.F, self.D) * self.P0) <NEW_LINE> self.P_array = np.append(self.P_array, P_segment) <NEW_LINE> self.t_array = np.append(self.t_array, t_segment) <NEW_LINE> self.F0 = self.F[-1] + self.f <NEW_LINE> self.D0 = self.D[-1] * self.d <NEW_LINE> self.P = self.P_array[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.t_array = np.arange(0., stim_time, self.dt) <NEW_LINE> self.P_array = np.ones_like(self.t_array) * self.P0 <NEW_LINE> self.F0 += self.f <NEW_LINE> self.D0 *= self.d <NEW_LINE> <DEDENT> self.tlast = stim_time
This object contains internal variables to track the evolution in time of parameters governing synaptic release probability, used during optimization, and then exported to pr.mod for use during patterned input simulations.
62598fc9be7bc26dc9252006
class ImageCreateView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = Image.objects.all() <NEW_LINE> serializer_class = ImageSerializer <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save()
This class defines the create behavior of our rest api.
62598fc93346ee7daa3377f3
class Post(): <NEW_LINE> <INDENT> author = None <NEW_LINE> time = None <NEW_LINE> device = None <NEW_LINE> content = None <NEW_LINE> post_id = None <NEW_LINE> thread_id = None <NEW_LINE> REPR = "{} wirte; {} on {} at {}" <NEW_LINE> INSERT_SQL = "insert into vclub_post_info (post_no, thread_no, content, author , post_time, device) VALUE ('{}', '{}','{}','{}','{}','{}')" <NEW_LINE> def __init__(self, content, author="None", time="None", device="None", post_id=None, thread_id=None): <NEW_LINE> <INDENT> if (len(content) > 10000): <NEW_LINE> <INDENT> print("the content is too long for now", len(content)) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.post_id = post_id <NEW_LINE> self.thread_id = thread_id <NEW_LINE> self.content = content <NEW_LINE> self.author = author <NEW_LINE> self.device = device <NEW_LINE> self.time = time <NEW_LINE> <DEDENT> <DEDENT> def get_time(self): <NEW_LINE> <INDENT> return self.time <NEW_LINE> <DEDENT> def save_to_database(self, conns=None): <NEW_LINE> <INDENT> if conns is None: <NEW_LINE> <INDENT> print("the conns is None ,please check") <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with conns.cursor() as cursor: <NEW_LINE> <INDENT> print(self.INSERT_SQL.format(self.post_id, self.thread_id, self.content, self.author, self.time,self.device)) <NEW_LINE> cursor.execute( self.INSERT_SQL.format(self.post_id, self.thread_id, self.content, self.author, self.time, self.device)) <NEW_LINE> conns.commit() <NEW_LINE> <DEDENT> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> print(error) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.REPR.format(self.author, self.content, self.device, self.time)
用来表示一个post类型
62598fc94428ac0f6e65887e
class Suspend(base.HandlerPluginBase): <NEW_LINE> <INDENT> DESCRIPTION = "Suspend/Resume VM" <NEW_LINE> NAME = "suspend" <NEW_LINE> def start(self, ctx, action, block=False): <NEW_LINE> <INDENT> if not self.can_execute(action): <NEW_LINE> <INDENT> self.register_action(action, discard=True) <NEW_LINE> raise exceptions.ActionInProgress() <NEW_LINE> <DEDENT> self.register_action(action) <NEW_LINE> try: <NEW_LINE> <INDENT> client = utils.get_nova_client(ctx) <NEW_LINE> config = action.action_meta_obj.get('data') or {} <NEW_LINE> nova_action = config.get('action', 'suspend') <NEW_LINE> if nova_action == 'resume': <NEW_LINE> <INDENT> output = client.servers.resume(action.target_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output = client.servers.suspend(action.target_id) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.exception(e) <NEW_LINE> self.error(action, message=e.message) <NEW_LINE> return None <NEW_LINE> <DEDENT> self.finish(action, "") <NEW_LINE> return self.current_action.id <NEW_LINE> <DEDENT> def can_execute(self, action, ctx=None): <NEW_LINE> <INDENT> return super(Suspend, self).can_execute(action, ctx=ctx)
Suspend VM
62598fc95fcc89381b2662f9
class SolutionRemoveDuplicatesfromSortedList: <NEW_LINE> <INDENT> def deleteDuplicates(self, head): <NEW_LINE> <INDENT> p = head <NEW_LINE> while (p != None): <NEW_LINE> <INDENT> while ((p.next != None) and (p.val == p.next.val)): <NEW_LINE> <INDENT> p.next = p.next.next <NEW_LINE> <DEDENT> p = p.next <NEW_LINE> <DEDENT> return head
@param head: A ListNode @return: A ListNode
62598fc9ec188e330fdf8bee
@six.python_2_unicode_compatible <NEW_LINE> class SolumException(Exception): <NEW_LINE> <INDENT> message = _("An unknown exception occurred.") <NEW_LINE> code = 500 <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.kwargs = kwargs <NEW_LINE> if CONF.fatal_exception_format_errors: <NEW_LINE> <INDENT> assert isinstance(self.msg_fmt, six.text_type) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.message = self.msg_fmt % kwargs <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> LOG.exception(_('Exception in string format operation'), extra=dict( private=dict( msg=self.msg_fmt, args=kwargs ) ) ) <NEW_LINE> if CONF.fatal_exception_format_errors: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message
Base Solum Exception To correctly use this class, inherit from it and define a 'msg_fmt' property. That msg_fmt will get printf'd with the keyword arguments provided to the constructor.
62598fc9adb09d7d5dc0a8d3
class V1GcsType(BaseTypeConfig, polyaxon_sdk.V1GcsType): <NEW_LINE> <INDENT> IDENTIFIER = "gcs" <NEW_LINE> SCHEMA = GcsTypeSchema <NEW_LINE> REDUCED_ATTRIBUTES = ["bucket", "blob"] <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> path = "gs://{}".format(self.bucket) <NEW_LINE> if self.blob: <NEW_LINE> <INDENT> path = os.path.join(path, self.blob) <NEW_LINE> <DEDENT> return path <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def to_param(self): <NEW_LINE> <INDENT> return str(self)
GCS type. Args: bucket: str blob: str ### YAML usage The inputs definition ```yaml >>> inputs: >>> - name: test1 >>> type: gcs >>> - name: test2 >>> type: gcs ``` The params usage ```yaml >>> params: >>> test1: {value: {bucket: "gs://bucket1"}} >>> test1: {value: {bucket: "gs://bucket2", blob: "blobName"}} ``` ### Python usage The inputs definition ```python >>> from polyaxon import types >>> from polyaxon.schemas import types >>> from polyaxon.polyflow import V1IO >>> inputs = [ >>> V1IO( >>> name="test1", >>> type=types.GCS, >>> ), >>> V1IO( >>> name="test2", >>> type=types.GCS, >>> ), >>> ] ``` The params usage ```python >>> from polyaxon import types >>> from polyaxon.schemas import types >>> from polyaxon.polyflow import V1Param >>> params = { >>> "test1": V1Param(value=types.V1GcsType(bucket="gs://bucket1")), >>> "test2": V1Param(value=types.V1GcsType(bucket="gs://bucket1", blob="blobName")), >>> } ```
62598fc94a966d76dd5ef230
class TestUniqueIdentity(TestCaseBase): <NEW_LINE> <INDENT> def test_unique_uuid(self): <NEW_LINE> <INDENT> with self.assertRaisesRegexp(IntegrityError, DUP_CHECK_ERROR): <NEW_LINE> <INDENT> uid1 = UniqueIdentity(uuid='John Smith') <NEW_LINE> uid2 = UniqueIdentity(uuid='John Smith') <NEW_LINE> self.session.add(uid1) <NEW_LINE> self.session.add(uid2) <NEW_LINE> self.session.commit() <NEW_LINE> <DEDENT> <DEDENT> def test_to_dict(self): <NEW_LINE> <INDENT> c = Country(code='US', name='United States of America', alpha3='USA') <NEW_LINE> self.session.add(c) <NEW_LINE> uid = UniqueIdentity(uuid='John Smith') <NEW_LINE> self.session.add(uid) <NEW_LINE> id1 = Identity(id='A', name='John Smith', email='jsmith@example.com', username='jsmith', source='scm', uuid='John Smith') <NEW_LINE> id2 = Identity(id='B', name=None, email='jsmith@example.net', username=None, source='scm', uuid='John Smith') <NEW_LINE> self.session.add(id1) <NEW_LINE> self.session.add(id2) <NEW_LINE> self.session.commit() <NEW_LINE> d = uid.to_dict() <NEW_LINE> self.assertIsInstance(d, dict) <NEW_LINE> self.assertEqual(d['uuid'], 'John Smith') <NEW_LINE> self.assertEqual(d['profile'], None) <NEW_LINE> identities = d['identities'] <NEW_LINE> self.assertEqual(len(identities), 2) <NEW_LINE> d0 = d['identities'][0] <NEW_LINE> self.assertEqual(d0['id'], 'A') <NEW_LINE> self.assertEqual(d0['name'], 'John Smith') <NEW_LINE> self.assertEqual(d0['email'], 'jsmith@example.com') <NEW_LINE> self.assertEqual(d0['username'], 'jsmith') <NEW_LINE> self.assertEqual(d0['source'], 'scm') <NEW_LINE> self.assertEqual(d0['uuid'], 'John Smith') <NEW_LINE> d1 = d['identities'][1] <NEW_LINE> self.assertEqual(d1['id'], 'B') <NEW_LINE> self.assertEqual(d1['name'], None) <NEW_LINE> self.assertEqual(d1['email'], 'jsmith@example.net') <NEW_LINE> self.assertEqual(d1['username'], None) <NEW_LINE> self.assertEqual(d1['source'], 'scm') <NEW_LINE> self.assertEqual(d1['uuid'], 'John Smith') <NEW_LINE> prf = Profile(uuid='John Smith', 
name='Smith, J.', email='jsmith@example.com', is_bot=True, country_code='US') <NEW_LINE> self.session.add(prf) <NEW_LINE> self.session.commit() <NEW_LINE> d = uid.to_dict() <NEW_LINE> dp = d['profile'] <NEW_LINE> self.assertEqual(dp['uuid'], 'John Smith') <NEW_LINE> self.assertEqual(dp['name'], 'Smith, J.') <NEW_LINE> self.assertEqual(dp['email'], 'jsmith@example.com') <NEW_LINE> self.assertEqual(dp['is_bot'], True) <NEW_LINE> self.assertEqual(dp['country']['code'], 'US') <NEW_LINE> self.assertEqual(dp['country']['name'], 'United States of America')
Unit tests for UniqueIdentity class
62598fc99f28863672818a29
class _StructField(AbstractProtoWrapper): <NEW_LINE> <INDENT> def __init__(self, proto): <NEW_LINE> <INDENT> assert proto <NEW_LINE> self._proto = proto <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._proto.field_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return DataType(self._proto.field_type)
A field in a struct.
62598fc9fbf16365ca794413
class ClasseFonction(Fonction): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def init_types(cls): <NEW_LINE> <INDENT> cls.ajouter_types(cls.entre_salle, "Salle", "Salle") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def entre_salle(origine, destination): <NEW_LINE> <INDENT> if origine is destination: <NEW_LINE> <INDENT> raise ErreurExecution("{} est identique à {}".format( origine, destination)) <NEW_LINE> <DEDENT> if not origine.coords.valide: <NEW_LINE> <INDENT> raise ErreurExecution("{} n'a pas de coordonnées valides".format( origine)) <NEW_LINE> <DEDENT> if not destination.coords.valide: <NEW_LINE> <INDENT> raise ErreurExecution("{} n'a pas de coordonnées valides".format( destination)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> chemin = origine.trouver_chemin(destination, explicite=True) <NEW_LINE> <DEDENT> except ValueError as err: <NEW_LINE> <INDENT> raise ErreurExecution("Erreur lors de la recherche du " "chemin entre {} et {} : {}".format(origine, destination, str(err))) <NEW_LINE> <DEDENT> if chemin is None: <NEW_LINE> <INDENT> raise ErreurExecution("Chemin entre {} et {} introuvable".format( origine, destination)) <NEW_LINE> <DEDENT> if not chemin.droit: <NEW_LINE> <INDENT> raise ErreurExecution("Chemin entre {} et {} non droit : " "{}".format(origine, destination, chemin)) <NEW_LINE> <DEDENT> intermediaires = [] <NEW_LINE> for sortie in chemin: <NEW_LINE> <INDENT> intermediaires.append(sortie.salle_dest) <NEW_LINE> <DEDENT> intermediaires.remove(destination) <NEW_LINE> return intermediaires
Retourne les salles entre deux autres salles.
62598fc9ad47b63b2c5a7bb4
class GeventServer(ServerAdapter): <NEW_LINE> <INDENT> def run(self, handler): <NEW_LINE> <INDENT> from gevent import wsgi as wsgi_fast, pywsgi as wsgi, monkey <NEW_LINE> if self.options.get('monkey', True): <NEW_LINE> <INDENT> monkey.patch_all() <NEW_LINE> <DEDENT> if self.options.get('fast', False): <NEW_LINE> <INDENT> wsgi = wsgi_fast <NEW_LINE> <DEDENT> wsgi.WSGIServer((self.host, self.port), handler).serve_forever()
Untested. Options: * `monkey` (default: True) fixes the stdlib to use greenthreads. * `fast` (default: False) uses libevent's http server, but has some issues: No streaming, no pipelining, no SSL.
62598fc9a8370b77170f0735
class SupervisedDataset(Dataset): <NEW_LINE> <INDENT> @drop_unused_kws <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def split_names(self) -> Dict[Split, str]: <NEW_LINE> <INDENT> return SplitIndexer.default_split_names <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def response_shape(self) -> Tuple[int, ...]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def predictor_shape(self) -> Tuple[int, ...]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __len__(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __getitem__(self, index: int): <NEW_LINE> <INDENT> raise NotImplementedError
Supervised Dataset is comprised of separate Splits
62598fc97b180e01f3e491fd
class ParallelInstruction(messages.Message): <NEW_LINE> <INDENT> flatten = messages.MessageField('FlattenInstruction', 1) <NEW_LINE> name = messages.StringField(2) <NEW_LINE> outputs = messages.MessageField('InstructionOutput', 3, repeated=True) <NEW_LINE> parDo = messages.MessageField('ParDoInstruction', 4) <NEW_LINE> partialGroupByKey = messages.MessageField('PartialGroupByKeyInstruction', 5) <NEW_LINE> read = messages.MessageField('ReadInstruction', 6) <NEW_LINE> systemName = messages.StringField(7) <NEW_LINE> write = messages.MessageField('WriteInstruction', 8)
A ParallelInstruction object. Fields: flatten: A FlattenInstruction attribute. name: A string attribute. outputs: A InstructionOutput attribute. parDo: A ParDoInstruction attribute. partialGroupByKey: A PartialGroupByKeyInstruction attribute. read: A ReadInstruction attribute. systemName: A string attribute. write: A WriteInstruction attribute.
62598fc9ab23a570cc2d4f1b
class RedisStorage(DataStorage): <NEW_LINE> <INDENT> def __init__(self, host="localhost", port=6379, db=0, password=None, socket_timeout=None, connection_pool=None, charset='utf-8', errors='strict', unix_socket_path=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.conn = None <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.db = db <NEW_LINE> self.password = password <NEW_LINE> self.socket_timeout = socket_timeout <NEW_LINE> self.connection_pool = connection_pool <NEW_LINE> self.charset = charset <NEW_LINE> self.errors = errors <NEW_LINE> self.unix_socket_path = unix_socket_path <NEW_LINE> <DEDENT> def is_connected(self): <NEW_LINE> <INDENT> return self.conn is not None <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> self.conn = redis.Redis(host=self.host, port=self.port, db=self.db, password=self.password, socket_timeout=self.socket_timeout, connection_pool=self.connection_pool, charset=self.charset, errors=self.errors) <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> if self.is_connected(): <NEW_LINE> <INDENT> self.conn.connection_pool.disconnect() <NEW_LINE> self.conn = None <NEW_LINE> <DEDENT> <DEDENT> def retrieve(self, flow_name, task_name, task_id): <NEW_LINE> <INDENT> assert self.is_connected() <NEW_LINE> ret = self.conn.get(task_id) <NEW_LINE> if ret is None: <NEW_LINE> <INDENT> raise FileNotFoundError("Record not found in database") <NEW_LINE> <DEDENT> record = json.loads(ret.decode(self.charset)) <NEW_LINE> assert record.get('task_name') == task_name <NEW_LINE> return record.get('result') <NEW_LINE> <DEDENT> def store(self, node_args, flow_name, task_name, task_id, result): <NEW_LINE> <INDENT> assert self.is_connected() <NEW_LINE> record = { 'node_args': node_args, 'flow_name': flow_name, 'task_name': task_name, 'task_id': task_id, 'result': result } <NEW_LINE> self.conn.set(task_id, json.dumps(record)) <NEW_LINE> return task_id <NEW_LINE> <DEDENT> def store_error(self, node_args, flow_name, 
task_name, task_id, exc_info): <NEW_LINE> <INDENT> raise NotImplementedError()
Selinon adapter for Redis database.
62598fc9bf627c535bcb1804
class HybridVirtualInterfacesNegativeTestJSON(test_virtual_interfaces_negative.VirtualInterfacesNegativeTestJSON): <NEW_LINE> <INDENT> pass
Test virtual interfaces negative
62598fc926068e7796d4ccb7
class bone_bake_locations(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.bone_bake_locations" <NEW_LINE> bl_label = "bone_bake_locations" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object != None <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> ob = context.active_object <NEW_LINE> func_bake(loc=True, rot=False) <NEW_LINE> return ('FINISHED')
Bake rotation for selected bones
62598fc923849d37ff85140d
@registry.add('classical_damage') <NEW_LINE> class ClassicalDamage(Damage): <NEW_LINE> <INDENT> def __init__(self, imt, taxonomy, fragility_functions, hazard_imtls, investigation_time, risk_investigation_time): <NEW_LINE> <INDENT> self.imt = imt <NEW_LINE> self.taxonomy = taxonomy <NEW_LINE> self.risk_functions = fragility_functions <NEW_LINE> self.curves = functools.partial( scientific.classical_damage, fragility_functions['damage'], hazard_imtls[imt], investigation_time=investigation_time, risk_investigation_time=risk_investigation_time) <NEW_LINE> <DEDENT> def __call__(self, loss_type, assets, hazard_curves, _epsilons=None, _tags=None): <NEW_LINE> <INDENT> fractions = utils.numpy_map(self.curves, hazard_curves) <NEW_LINE> damages = [asset.number * fraction for asset, fraction in zip(assets, fractions)] <NEW_LINE> return scientific.Output(assets, 'damage', damages=damages) <NEW_LINE> <DEDENT> compute_all_outputs = ( Classical.compute_all_outputs if sys.version > '3' else Classical.compute_all_outputs.__func__)
Implements the ClassicalDamage workflow
62598fc9851cf427c66b860f
class Response(ResponseBase, swob.Response): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> swob.Response.__init__(self, *args, **kwargs) <NEW_LINE> if self.etag: <NEW_LINE> <INDENT> self.etag = self.etag <NEW_LINE> <DEDENT> sw_sysmeta_headers = swob.HeaderKeyDict() <NEW_LINE> sw_headers = swob.HeaderKeyDict() <NEW_LINE> headers = HeaderKeyDict() <NEW_LINE> for key, val in self.headers.iteritems(): <NEW_LINE> <INDENT> _key = key.lower() <NEW_LINE> if _key.startswith(sysmeta_prefix('object')) or _key.startswith(sysmeta_prefix('container')): <NEW_LINE> <INDENT> sw_sysmeta_headers[key] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sw_headers[key] = val <NEW_LINE> <DEDENT> <DEDENT> for key, val in sw_headers.iteritems(): <NEW_LINE> <INDENT> _key = key.lower() <NEW_LINE> if _key.startswith('x-object-meta-'): <NEW_LINE> <INDENT> headers['x-amz-meta-' + _key[14:]] = val <NEW_LINE> <DEDENT> elif _key in ('content-length', 'content-type', 'content-range', 'content-encoding', 'etag', 'last-modified'): <NEW_LINE> <INDENT> headers[key] = val <NEW_LINE> <DEDENT> elif _key == 'x-container-object-count': <NEW_LINE> <INDENT> headers['x-rgw-object-count'] = val <NEW_LINE> <DEDENT> elif _key == 'x-container-bytes-used': <NEW_LINE> <INDENT> headers['x-rgw-bytes-used'] = val <NEW_LINE> <DEDENT> <DEDENT> self.headers = headers <NEW_LINE> self.sysmeta_headers = sw_sysmeta_headers <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_swift_resp(cls, sw_resp): <NEW_LINE> <INDENT> if sw_resp.app_iter: <NEW_LINE> <INDENT> body = None <NEW_LINE> app_iter = sw_resp.app_iter <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> body = sw_resp.body <NEW_LINE> app_iter = None <NEW_LINE> <DEDENT> resp = Response(status=sw_resp.status, headers=sw_resp.headers, request=sw_resp.request, body=body, app_iter=app_iter, conditional_response=sw_resp.conditional_response) <NEW_LINE> resp.environ.update(sw_resp.environ) <NEW_LINE> return resp <NEW_LINE> <DEDENT> def 
append_copy_resp_body(self, controller_name): <NEW_LINE> <INDENT> elem = Element('Copy%sResult' % controller_name) <NEW_LINE> SubElement(elem, 'LastModified').text = self.last_modified.isoformat()[:-6] + '.000Z' <NEW_LINE> SubElement(elem, 'ETag').text = '"%s"' % self.etag <NEW_LINE> self.headers['Content-Type'] = 'application/xml' <NEW_LINE> self.body = tostring(elem) <NEW_LINE> self.etag = None
Similar to the Response class in Swift, but uses our HeaderKeyDict for headers instead of Swift's HeaderKeyDict. This also translates Swift specific headers to S3 headers.
62598fc9ff9c53063f51a9a8
class DeltaEvent(DeltaObject): <NEW_LINE> <INDENT> def __init__(self, name, function): <NEW_LINE> <INDENT> DeltaObject.__init__(self) <NEW_LINE> self._set_name_(name) <NEW_LINE> self._enabled = True <NEW_LINE> self._handlers = [] <NEW_LINE> self._function = function <NEW_LINE> <DEDENT> def set_enable(self, enable): <NEW_LINE> <INDENT> self._enabled = enable <NEW_LINE> <DEDENT> def is_enable(self): <NEW_LINE> <INDENT> return self._enabled <NEW_LINE> <DEDENT> def fire(self, *args, **kwargs): <NEW_LINE> <INDENT> params = {} <NEW_LINE> reversed_handlers = [] <NEW_LINE> if self._enabled: <NEW_LINE> <INDENT> params['event'] = self <NEW_LINE> params['function'] = self._function <NEW_LINE> params['args'] = args <NEW_LINE> params['kwargs'] = kwargs <NEW_LINE> for handler in self._handlers: <NEW_LINE> <INDENT> if handler.is_enable(): <NEW_LINE> <INDENT> handler.before(params) <NEW_LINE> reversed_handlers.insert(0, handler) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> params['result'] = self._function(*args, **kwargs) <NEW_LINE> if self._enabled: <NEW_LINE> <INDENT> for handler in reversed_handlers: <NEW_LINE> <INDENT> handler.after(params) <NEW_LINE> <DEDENT> <DEDENT> return params['result'] <NEW_LINE> <DEDENT> def insert_handler(self, handler, index): <NEW_LINE> <INDENT> self._handlers.insert(index, handler) <NEW_LINE> <DEDENT> def add_handler(self, handler): <NEW_LINE> <INDENT> self._handlers.append(handler) <NEW_LINE> <DEDENT> def get_handlers(self): <NEW_LINE> <INDENT> return iter(self._handlers) <NEW_LINE> <DEDENT> def get_handler(self, name): <NEW_LINE> <INDENT> for handler in self._handlers: <NEW_LINE> <INDENT> if handler.get_name() == name: <NEW_LINE> <INDENT> return handler <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def remove_handler(self, name): <NEW_LINE> <INDENT> handler = self.get_handler(name) <NEW_LINE> if handler is not None: <NEW_LINE> <INDENT> self._handlers.remove(handler) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._handlers = []
Delta event class.
62598fc95fc7496912d48427
class Comment(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name='Пользователь', related_name='band_commentator') <NEW_LINE> band = models.ForeignKey(Band, on_delete=models.CASCADE, verbose_name='Запись') <NEW_LINE> content = models.CharField(max_length=250, verbose_name='Содержание') <NEW_LINE> published = models.BooleanField(default=True, verbose_name='Активно') <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True) <NEW_LINE> objects = CommentManager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{}: {}".format(self.user, self.content) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['created_at', 'user'] <NEW_LINE> verbose_name = 'Комментарий' <NEW_LINE> verbose_name_plural = 'Комментарии'
Events comment model.
62598fc9d8ef3951e32c8009
class CCDistrMoeModel(models.BaseModel):
    """Classifier-chain mixture-of-experts model.

    Predicts the vocabulary in fixed-size groups ("chain links"). Each step
    conditions on a compressed encoding of the distribution predicted so far,
    then refines its own slice with a mixture of experts.
    """

    def create_model(self, model_input, vocab_size, num_mixtures=None, l2_penalty=1e-8, is_training=True, **unused_params):
        reduced_distr_size = 1024
        batch_size = tf.shape(model_input)[0]
        # Size of each chain link; assumes vocab_size is a multiple of
        # part_size — TODO confirm against the dataset's vocabulary.
        part_size = 393
        time_steps = vocab_size // part_size
        num_experts = 8
        prob_chains = []
        # Running estimate of the full class distribution, refined per step.
        distribution = tf.zeros([batch_size, vocab_size], dtype=tf.float32)
        # Shared projection compressing the running distribution into the
        # context fed to every chain link.
        distr_weights = tf.get_variable("distr_weights", [vocab_size, reduced_distr_size], initializer=tf.random_normal_initializer())
        distr_biases = tf.get_variable("distr_biases", [reduced_distr_size], initializer=tf.constant_initializer(0.0))
        # NOTE(review): xrange is Python 2 only; under Python 3 this must be range.
        for step in xrange(time_steps):
            if step > 0:
                # Share batch_norm (and any scoped) variables across steps.
                tf.get_variable_scope().reuse_variables()
            reduced_distr_activations = tf.matmul(distribution, distr_weights) + distr_biases
            reduced_distr = tf.nn.relu(slim.batch_norm(reduced_distr_activations, is_training=is_training))
            # Condition this link on both the input features and chain context.
            group_input = tf.concat([reduced_distr, model_input], 1)
            group_expert_activations = slim.fully_connected(
                group_input, part_size * num_experts, activation_fn=None,
                weights_regularizer=slim.l2_regularizer(l2_penalty),
                scope="pred_" + str(step))
            # One extra gate column acts as a "no expert" option.
            group_gate_activations = slim.fully_connected(
                group_input, part_size * (num_experts + 1), activation_fn=None,
                biases_initializer=None,
                weights_regularizer=slim.l2_regularizer(l2_penalty),
                scope="gate_" + str(step))
            expert_distribution = tf.nn.sigmoid(tf.reshape(
                group_expert_activations, [-1, num_experts]))
            gate_distribution = tf.nn.softmax(tf.reshape(
                group_gate_activations, [-1, num_experts + 1]))
            # Gate-weighted sum over experts, per (batch, class) row.
            expert_distr_by_class_and_batch = tf.reduce_sum(
                gate_distribution[:, :num_experts] * expert_distribution, 1)
            group_predictions = tf.reshape(expert_distr_by_class_and_batch, [-1, part_size])
            prob_chains.append(group_predictions)
            # Splice this link's predictions into the running distribution,
            # keeping the untouched slices on either side.
            pre = distribution[:, :step * part_size] if step > 0 else None
            aft = distribution[:, (step + 1) * part_size:] if step + 1 < time_steps else None
            distribution = group_predictions
            if pre is not None:
                distribution = tf.concat([pre, distribution], 1)
            if aft is not None:
                distribution = tf.concat([distribution, aft], 1)
        final_probabilities = tf.concat(prob_chains, 1)
        return {"predictions": final_probabilities}
Classifiers Chain Moe
62598fc94a966d76dd5ef232
class BlogSectionForm(FlaskForm):
    """Admin form for creating, editing or deleting a blog section."""

    # Display name of the section; required.
    name = StringField('Blog Section', validators=[DataRequired()])
    submit = SubmitField('Submit')
Form for admin to add, edit, delete blog section
62598fc9fbf16365ca794415
class TestStochRSIIndicator(unittest.TestCase):
    """Regression tests for StochRSIIndicator against StockCharts fixture data.

    https://school.stockcharts.com/doku.php?id=technical_indicators:stochrsi
    """

    _filename = "test/data/cs-stochrsi.csv"

    @classmethod
    def setUpClass(cls):
        cls._df = pd.read_csv(cls._filename, sep=",")
        cls._params = dict(
            close=cls._df["Close"], window=14, smooth1=3, smooth2=3, fillna=False
        )
        cls._indicator = StochRSIIndicator(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df

    def _assert_tail_matches(self, column, series):
        """Compare the tail of *series* against the expected fixture column."""
        pd.testing.assert_series_equal(
            self._df[column].tail(), series.tail(), check_names=False
        )

    def test_stochrsi(self):
        self._assert_tail_matches("StochRSI(14)", self._indicator.stochrsi())

    def test_stochrsi2(self):
        self._assert_tail_matches("StochRSI(14)", stochrsi(**self._params))
https://school.stockcharts.com/doku.php?id=technical_indicators:stochrsi
62598fc9dc8b845886d53918
class Animal():
    """A simple animal defined by its species and the noise it makes."""

    def __init__(self, species, animal_noise):
        self.species = species
        self.animal_noise = animal_noise

    def speak(self):
        """Print an introduction line for this animal."""
        message = f'I am a {self.species} and I say {self.animal_noise}'
        print(message)
This represents an animal
62598fc9ab23a570cc2d4f1c
class Number:
    """Integer wrapper that counts comparison operations.

    Every call to __eq__, __lt__ or __le__ increments the class attribute
    `comparisons`.  Read it with ``Number.comparisons`` and reset it with
    ``Number.comparisons = 0``.
    """

    comparisons = 0

    def __init__(self, value):
        self._value = value

    def __str__(self):
        return str(self._value)

    def __eq__(self, target):
        Number.comparisons += 1
        return self._value == target._value

    def __lt__(self, target):
        Number.comparisons += 1
        return self._value < target._value

    def __le__(self, target):
        Number.comparisons += 1
        return self._value <= target._value
------------------------------------------------------- Wraps a class definition around integers. Uses class attribute comparisons to determine how many times comparison functions are called on the class. Use: print(Number.comparisons) Use: Number.comparisons = 0 -------------------------------------------------------
62598fc9be7bc26dc9252009
class LoadRowFileAction(argparse.Action):
    """argparse action that loads a file into the option's value.

    The destination is set to a list with one element per non-empty line of
    the file (trailing whitespace stripped; blank lines are dropped).
    """

    def __call__(self, parser, namespace, filename, option_string=None):
        # Fix: the original logged a constant f-string with no placeholder,
        # so the filename never appeared; use lazy %-style logging instead.
        logging.debug("opening %s as CSV", filename)
        with open(filename) as f:
            values = [s for s in (line.rstrip() for line in f) if s]
        setattr(namespace, self.dest, values)
Load a file line by line into an argparse option value; trailing whitespace is stripped and blank lines are skipped.
62598fc9ec188e330fdf8bf2
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for the NOAA Aurora integration."""

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        # Expose the options flow so users can adjust settings after setup.
        return OptionsFlowHandler(config_entry)

    async def async_step_user(self, user_input=None):
        """Handle the initial, user-driven setup step.

        First call renders the form; subsequent calls validate the input by
        probing the forecast API, then create the config entry.
        """
        errors = {}
        if user_input is not None:
            name = user_input[CONF_NAME]
            longitude = user_input[CONF_LONGITUDE]
            latitude = user_input[CONF_LATITUDE]
            session = aiohttp_client.async_get_clientsession(self.hass)
            api = AuroraForecast(session=session)
            try:
                # Probe the API once to validate connectivity/coordinates.
                await api.get_forecast_data(longitude, latitude)
            except ClientError:
                errors["base"] = "cannot_connect"
            except Exception:  # broad on purpose: surface anything as "unknown"
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                # One entry per coordinate pair.
                await self.async_set_unique_id(
                    f"{user_input[CONF_LONGITUDE]}_{user_input[CONF_LATITUDE]}"
                )
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=f"Aurora - {name}", data=user_input
                )
        # First visit or validation failure: (re)render the form,
        # defaulting to the Home Assistant instance's own location.
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
                    vol.Required(
                        CONF_LONGITUDE,
                        default=self.hass.config.longitude,
                    ): vol.All(
                        vol.Coerce(float),
                        vol.Range(min=-180, max=180),
                    ),
                    vol.Required(
                        CONF_LATITUDE,
                        default=self.hass.config.latitude,
                    ): vol.All(
                        vol.Coerce(float),
                        vol.Range(min=-90, max=90),
                    ),
                }
            ),
            errors=errors,
        )
Handle a config flow for NOAA Aurora Integration.
62598fc94527f215b58ea22c
class ProfilePhoto(Entity):
    """A profile photo of a user, group or Outlook contact (Exchange Online).

    Exposes the photo's pixel dimensions from the entity's property bag.
    """

    @property
    def height(self):
        """Photo height in pixels, or None when not reported."""
        return self.properties.get('height')

    @property
    def width(self):
        """Photo width in pixels, or None when not reported."""
        return self.properties.get('width')
A profile photo of a user, group or an Outlook contact accessed from Exchange Online. It's binary data not encoded in base-64. The supported sizes of HD photos on Exchange Online are as follows: '48x48', '64x64', '96x96', '120x120', '240x240', '360x360','432x432', '504x504', and '648x648'.
62598fc9cc40096d6161a386
class Row(metaclass=abc.ABCMeta):
    """Conceptually, a row in a SQL database.

    Subclass and set __slots__ to define the columns, in order.  Instances
    are built from a sequence of raw column values and can be serialized
    back with to_row().
    """

    __slots__ = ()

    def __init__(self, row, conv_bool=False):
        """Populate slots positionally from *row*.

        When conv_bool is true, raw 0/1 values are coerced to bool.
        Raises TypeError when Row itself is instantiated.
        """
        if self.__class__ == Row:
            raise TypeError("Can't instantiate a non-subclassed row")
        for index, slot in enumerate(self.__slots__):
            value = row[index]
            if conv_bool and (value == 0 or value == 1):
                value = bool(value)
            setattr(self, slot, value)

    def __str__(self):
        return f"{type(self).__name__}({', '.join(str(getattr(self, x)) for x in self.__slots__)})"

    def __repr__(self):
        return f"{type(self).__name__}([{', '.join(repr(getattr(self, x)) for x in self.__slots__)}])"

    def __eq__(self, other):
        """Slot-wise equality; rows with any unset slot compare unequal."""
        if not isinstance(other, Row):
            return NotImplemented
        for slot in self.__slots__:
            sval = getattr(self, slot, _Empty)
            oval = getattr(other, slot, _Empty)
            # Fix: compare against the sentinel by identity, not equality —
            # a slot value with a permissive custom __eq__ could otherwise
            # spoof "missing" and short-circuit the comparison.
            if sval is _Empty or oval is _Empty:
                return False
            if sval != oval:
                return False
        return True

    def to_row(self):
        """Return the slot values as a list, converting SqlConvertable values."""
        out = []
        for slot in self.__slots__:
            value = getattr(self, slot)
            if isinstance(value, SqlConvertable):
                value = value.sql_safe()
            out.append(value)
        return out

    @classmethod
    def table_name(cls):
        """Return the subclass's TABLE_NAME, or None when undeclared."""
        if hasattr(cls, "TABLE_NAME"):
            return cls.TABLE_NAME
        return None
Conceptually, a Row in a SQL database. Subclass to define a table, __slots__ is used to define the columns in order. Can be saved and loaded from a database
62598fc926068e7796d4ccb9
class _nxm_ip (object):
    """Mixin allowing an NXM entry's IP value to be set in many formats.

    The value can be any format known by IPAddr.  A string may carry a
    trailing /netmask or /cidr-bits.  A two-element tuple/list is taken as
    (ip, mask), where mask is either a netmask or a bit count.
    """

    @property
    def value (self):
        return self._unpack_value(self._value)

    @value.setter
    def value (self, value):
        if isinstance(value, tuple) or isinstance(value, list):
            # (ip, mask) pair: mask is stored separately on self.mask.
            assert len(value) == 2
            ip = value[0]
            self.mask = value[1]
        elif isinstance(value, str) and len(value)>4 and '/' in value:
            # "a.b.c.d/bits" or "a.b.c.d/netmask"; a missing suffix means /32.
            temp = parse_cidr(value, infer=False)
            ip = temp[0]
            self.mask = 32 if temp[1] is None else temp[1]
        else:
            ip = value
        self._value = self._pack_value(ip)

    def _pack_value (self, v):
        # Stored internally as raw (network-order) bytes.
        return IPAddr(v, networkOrder=False).toRaw()

    def _unpack_value (self, v):
        return IPAddr(v, networkOrder=True)

    def _pack_mask (self, v):
        """Pack a mask to raw bytes; an int is treated as a clamped /bits count."""
        if isinstance(v, int):
            # Clamp the prefix length to [0, 32] and expand to a netmask.
            if v > 32: v = 32
            elif v < 0: v = 0
            n = (0xffFFffFF << (32-v)) & 0xffFFffFF
            return IPAddr(n, networkOrder=False).toRaw()
        else:
            return IPAddr(v).toRaw()
Allows setting of IP address in many formats The value can be any format known by IPAddr. If it's a string, it can also have a trailing /netmask or /cidr-bits. If it's a tuple, the first is assumed to be any kind of IP address and the second is either a netmask or the number of network bits.
62598fc9851cf427c66b8611
class uninstantiated_subprogram_name(parser.name):
    """Name node for the uninstantiated subprogram in a
    subprogram_instantiation_declaration."""

    def __init__(self, sString):
        super().__init__(sString)
unique_id = subprogram_instantiation_declaration : uninstantiated_subprogram_name
62598fc9d8ef3951e32c800a
class TuyaDevice(Entity):
    """Tuya base device entity."""

    def __init__(self, tuya):
        # `tuya` is the Tuya device object from the cloud client — it is
        # expected to expose object_id()/name()/available()/update();
        # confirm against the platform setup code.
        self.tuya = tuya

    async def async_added_to_hass(self):
        """Register this entity and hook the delete/update dispatcher signals."""
        dev_id = self.tuya.object_id()
        self.hass.data[DOMAIN]['entities'][dev_id] = self.entity_id
        async_dispatcher_connect(
            self.hass, SIGNAL_DELETE_ENTITY, self._delete_callback)
        async_dispatcher_connect(
            self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback)

    @property
    def object_id(self):
        """Return the Tuya device id."""
        return self.tuya.object_id()

    @property
    def unique_id(self):
        """Return a unique id, namespaced under 'tuya.'."""
        return 'tuya.{}'.format(self.tuya.object_id())

    @property
    def name(self):
        """Return the device name reported by Tuya."""
        return self.tuya.name()

    @property
    def available(self):
        """Return True if the device is reachable."""
        return self.tuya.available()

    def update(self):
        """Refresh the device state from the Tuya service."""
        self.tuya.update()

    @callback
    def _delete_callback(self, dev_id):
        # Remove this entity when its device id is deleted upstream.
        if dev_id == self.object_id:
            self.hass.async_add_job(self.async_remove())

    @callback
    def _update_callback(self):
        # Schedule a forced state refresh when an update signal arrives.
        self.async_schedule_update_ha_state(True)
Tuya base device.
62598fc9091ae35668704f87
class SubmissionJSONFeedGenerator(SyndicationFeed):
    """JSON feed generator for demo Submissions, optionally JSONP-wrapped."""

    mime_type = 'application/json'

    def _encode_complex(self, obj):
        # json.dumps fallback: serialize datetimes as ISO 8601 strings;
        # anything else falls through to None and json raises.
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()

    def write(self, outfile, encoding):
        """Serialize self.items to *outfile* as a JSON array.

        A valid ?callback= query parameter wraps the output as JSONP:
        callback(<json>).  Invalid callback names are silently ignored.
        """
        request = self.feed['request']
        callback = request.GET.get('callback', None)
        if callback is not None:
            if not valid_jsonp_callback_value(callback):
                callback = None
        items_out = []
        for item in self.items:
            # Pass through the standard feed fields verbatim.
            item_out = dict((x, item[x]) for x in (
                'link', 'title', 'pubdate', 'author_name', 'author_link',
            ))
            if item['obj'].creator.email:
                item_out['author_avatar'] = gravatar_url(item['obj'].creator.email)
            # Map each tag name to its absolute listing URL.
            item_out['categories'] = dict(
                (x, request.build_absolute_uri(reverse('demos_tag', kwargs={'tag': x})))
                for x in item['categories']
            )
            # NOTE(review): `unicode` is Python 2 only — this module predates
            # Python 3; under Python 3 it would be str.
            item_out.update((x, unicode(getattr(item['obj'], x))) for x in (
                'summary', 'description',
            ))
            item_out['featured'] = item['obj'].featured
            item_out['screenshot'] = request.build_absolute_uri(
                item['obj'].screenshot_url(1))
            item_out['thumbnail'] = request.build_absolute_uri(
                item['obj'].thumbnail_url(1))
            items_out.append(item_out)
        data = items_out
        if callback:
            outfile.write('%s(' % callback)
        outfile.write(json.dumps(data, default=self._encode_complex))
        if callback:
            outfile.write(')')
JSON feed generator for Submissions TODO: Someday maybe make this into a JSON Activity Stream?
62598fc97cff6e4e811b5d85
class IndexView(ListView):
    """Main-page view: lists all cosplay posts, newest first."""

    queryset = CosplayPost.objects.all().order_by('-created')
    template_name = 'core/index.html'
    context_object_name = 'posts'
    # Six posts per page.
    paginate_by = 6
View for the main page: lists all posts.
62598fc99f28863672818a2b
class NoDataReturnedError(InvalidDataReturnedError):
    """Raised when an empty JSON file was downloaded from 4chan.

    This usually means that the thread is dead/deleted.
    """
    pass
An empty JSON file was downloaded from 4chan. This usually means that the thread is dead/deleted.
62598fc97c178a314d78d7fd
class GradsOfGradsTest(unittest.TestCase):
    """Tests for taking gradients of gradients.

    Uses a quadratic layer as the example: unlike a linear layer it yields
    non-trivial second-order results whose expected values can be derived
    by hand.
    """

    def setUp(self):
        # 1x2 row vector and 2x1 column vector so the loss is a scalar.
        self.left_input = torch.tensor([[1., 1.]], requires_grad=True)
        self.right_input = torch.tensor([[0.32], [0.72]], requires_grad=True)
        # Hand-computed gradient of the second loss w.r.t. the original weight.
        self.expected_grad = torch.tensor([[0.3972, 0.6762], [0.2869, 0.4458]])

    def test_auto_grad_with_quadratic_function(self):
        """Check autograd's double backward against the manual derivation."""
        weight = Quadratic().weight
        a = self.left_input
        x = self.right_input
        # loss = a @ (W^T @ (W @ x)) — quadratic in W.
        loss = torch.matmul(weight, x)
        loss = torch.matmul(weight.transpose(1, 0), loss)
        loss = torch.matmul(a, loss)
        # create_graph=True so the gradient itself is differentiable.
        loss.backward(retain_graph=True, create_graph=True)
        # No-op access; presumably left over from debugging — TODO confirm.
        weight.grad
        with torch.no_grad():
            # d(loss)/dW = W (x a + (x a)^T).
            m = torch.matmul(x, a)
            m = m + m.transpose(1, 0)
            w_grad_expected = torch.matmul(weight, m)
        self.assertTrue(weight.grad.allclose(w_grad_expected, atol=1e-8))
        lr = 0.1
        # One manual SGD step; weight2 stays connected to the graph.
        weight2 = weight - lr * weight.grad
        weight2.retain_grad()
        weight.grad = None
        loss2 = torch.matmul(weight2, x)
        loss2 = torch.matmul(weight2.transpose(1, 0), loss2)
        loss2 = torch.matmul(a, loss2)
        loss2.backward()
        m = torch.matmul(x, a)
        m = m + m.transpose(1, 0)
        w2_grad_expected = torch.matmul(weight2, m)
        # Chain rule through the update: dW2/dW = I - lr * m.
        w_grad_expected = torch.matmul(w2_grad_expected, (torch.eye(2) - lr * m))
        self.assertTrue(weight.grad.allclose(w_grad_expected, atol=1e-8))
        self.assertTrue(weight2.grad.allclose(w2_grad_expected, atol=1e-8))
        self.assertTrue(weight.grad.allclose(self.expected_grad, atol=1e-4))

    def test_update_params_with_quadratic_layer(self):
        """update_params must produce the same second-order grads as the
        manual computation above."""
        quad = Quadratic()
        quad_clone = clone_model(quad)
        x = self.right_input
        out = quad_clone(x)
        loss = out.sum()
        # Functional update keeping the graph back to the original parameters.
        update_params(quad_clone.named_parameters(), quad_clone, loss, lr=0.1)
        out2 = quad_clone(x)
        loss2 = out2.sum()
        loss2.backward()
        self.assertTrue(torch.allclose(quad.weight.grad, self.expected_grad, atol=1e-4))
Perform tests for taking gradients of gradients. Specifically, this uses a quadratic layer as a test example since it yields non-trivial results (unlike a linear layer) and enables manually calculated expected values.
62598fc9a219f33f346c6b67
class PDT_OT_ModalDrawOperator(bpy.types.Operator):
    """Show/Hide the PDT Pivot Point by toggling a 3D-view draw handler."""

    bl_idname = "pdt.modaldraw"
    bl_label = "PDT Modal Draw"
    bl_options = {"REGISTER", "UNDO"}

    # Class-level handle so the single draw callback is shared across
    # operator invocations.
    _handle = None

    # NOTE(review): declared @staticmethod yet takes `self` and is called as
    # self.handle_add(self, context) — works, but unconventional.
    @staticmethod
    def handle_add(self, context):
        """Install the OpenGL draw callback (idempotent)."""
        if PDT_OT_ModalDrawOperator._handle is None:
            PDT_OT_ModalDrawOperator._handle = SpaceView3D.draw_handler_add(
                draw_callback_3d, (self, context), "WINDOW", "POST_VIEW"
            )
            context.window_manager.pdt_run_opengl = True

    @staticmethod
    def handle_remove(self, context):
        """Remove the draw callback if installed and clear the run flag."""
        if PDT_OT_ModalDrawOperator._handle is not None:
            SpaceView3D.draw_handler_remove(PDT_OT_ModalDrawOperator._handle, "WINDOW")
        PDT_OT_ModalDrawOperator._handle = None
        context.window_manager.pdt_run_opengl = False

    def execute(self, context):
        """Toggle the overlay; errors out when not invoked from a 3D View."""
        if context.area.type == "VIEW_3D":
            if context.window_manager.pdt_run_opengl is False:
                self.handle_add(self, context)
                context.area.tag_redraw()
            else:
                self.handle_remove(self, context)
                context.area.tag_redraw()
            return {"FINISHED"}
        self.report({"ERROR"}, PDT_ERR_NO3DVIEW)
        return {"CANCELLED"}
Show/Hide Pivot Point
62598fc997e22403b383b264
class CfgReader:
    """Config reader with three lookup layers, in priority order:

    1. System environment: ``MM__<SECTION>_<VALUE>`` (upper-cased).
    2. The config file.
    3. The caller-supplied default.
    """

    # Fix: the original annotated parameters with typing.AnyStr, which is a
    # constrained TypeVar meant for generic functions, not a plain type;
    # these parameters are simply str.
    def __init__(self, default_settings_path: str):
        self.config = ConfigParser(interpolation=ExtendedInterpolation())
        self.config.read([get_config_path(default_settings_path)])

    def get_opt(self, s: str, v: str, default: Any = None):
        """Return option *v* of section *s*: env first, then config, then default."""
        val = CfgReader._get_env_val(s, v)
        if val is not None:
            return val
        return self.config.get(s, v) if self.config.has_option(s, v) else default

    def get_opt_path(self, s: str, v: str, default: str = "") -> str:
        """Like get_opt, but expands a leading '~' in the result."""
        return os.path.expanduser(self.get_opt(s, v, default))

    @staticmethod
    def _get_env_val(s: str, v: str):
        # Environment override key: MM__<SECTION>_<VALUE>, upper-cased.
        return os.environ.get(f"MM__{s.upper()}_{v.upper()}")
Config reader. There are 3 way of getting an option. In priority order: 1. From system env. 2. From config. 3. From default values. For using the option from system env you can build an option name as MM__ + [SECTION_NAME] + _ + [VALUE_NAME].
62598fc960cbc95b0636469c
class OrderedBase:
    """Marks a class as being ordered.

    Every instance of a given subclass tree shares one global creation
    counter; each new instance is stamped with the next value.
    """

    CREATION_COUNTER_FIELD = '_creation_counter'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # The bare base class itself is never counted.
        if type(self) is not OrderedBase:
            self.touch_creation_counter()

    def touch_creation_counter(self):
        """Stamp this instance with the next counter value and advance it."""
        mro = type(self).__mro__
        # The counter lives on the subclass sitting directly above
        # OrderedBase in the MRO, so all of its descendants share it.
        root = mro[mro.index(OrderedBase) - 1]
        field = self.CREATION_COUNTER_FIELD
        current = getattr(root, field, 0)
        setattr(self, field, current)
        setattr(root, field, current + 1)
Marks a class as being ordered. Each instance (even from subclasses) will share a global creation counter.
62598fc94527f215b58ea22e