code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class User(Base0): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> Base0.__init__(self) <NEW_LINE> self.username = require_value_from_dict(obj, 'username') <NEW_LINE> self.password = require_value_from_dict(obj, 'password') <NEW_LINE> self.email = require_value_from_dict(obj, 'email') <NEW_LINE> self.mobile = require_value_from_dict(obj, 'mobile') <NEW_LINE> self.sex = require_value_from_dict(obj, 'sex') <NEW_LINE> self.date = time.time() <NEW_LINE> self.modified = time.time() <NEW_LINE> self.permission = require_value_from_dict(obj, 'permission') <NEW_LINE> self.status = 0 | 用户 | 62598fb2d7e4931a7ef3c118 |
class TCA6424(object): <NEW_LINE> <INDENT> pins_list = [ ( 'PWREN-CLK-MGT156MHz', 'NETCLK-CE', 'NETCLK-RESETn', 'NETCLK-PR0', 'NETCLK-PR1', 'NETCLK-OD0', 'NETCLK-OD1', 'NETCLK-OD2', 'PWREN-CLK-MAINREF', 'CLK-MAINSEL-25MHz', 'CLK-MAINSEL-EX_B', '12', 'CLK-MAINSEL-GPS', 'FPGA-GPIO-EN', 'PWREN-CLK-WB-20MHz', 'PWREN-CLK-WB-25MHz', 'GPS-PHASELOCK', 'GPS-nINITSURV', 'GPS-nRESET', 'GPS-WARMUP', 'GPS-SURVEY', 'GPS-LOCKOK', 'GPS-ALARM', 'PWREN-GPS', ), ( 'NETCLK-PR1', 'NETCLK-PR0', 'NETCLK-CE', 'NETCLK-RESETn', 'NETCLK-OD2', 'NETCLK-OD1', 'NETCLK-OD0', 'PWREN-CLK-MGT156MHz', 'PWREN-CLK-MAINREF', 'CLK-MAINSEL-25MHz', 'CLK-MAINSEL-EX_B', '12', 'CLK-MAINSEL-GPS', 'FPGA-GPIO-EN', 'PWREN-CLK-WB-20MHz', 'PWREN-CLK-WB-25MHz', 'GPS-PHASELOCK', 'GPS-nINITSURV', 'GPS-nRESET', 'GPS-WARMUP', 'GPS-SURVEY', 'GPS-LOCKOK', 'GPS-ALARM', 'PWREN-GPS', )] <NEW_LINE> def __init__(self, rev): <NEW_LINE> <INDENT> if rev == 2: <NEW_LINE> <INDENT> self.pins = self.pins_list[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pins = self.pins_list[1] <NEW_LINE> <DEDENT> default_val = 0x860101 if rev == 2 else 0x860780 <NEW_LINE> self._gpios = SysFSGPIO({'label': 'tca6424', 'device/of_node/name': 'gpio'}, 0xFFF7FF, 0x86F7FF, default_val) <NEW_LINE> <DEDENT> def set(self, name, value=None): <NEW_LINE> <INDENT> assert name in self.pins <NEW_LINE> self._gpios.set(self.pins.index(name), value=value) <NEW_LINE> <DEDENT> def reset(self, name): <NEW_LINE> <INDENT> self.set(name, value=0) <NEW_LINE> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> assert name in self.pins <NEW_LINE> return self._gpios.get(self.pins.index(name)) | Abstraction layer for the port/gpio expander
pins_list is an array of different version of TCA6424 pins map.
First element of this array corresponding to revC, second is revD etc... | 62598fb2379a373c97d99099 |
class G03_LMC2(BaseExtModel): <NEW_LINE> <INDENT> x_range = [0.3, 10.0] <NEW_LINE> Rv = 2.76 <NEW_LINE> obsdata_x = np.array( [0.455, 0.606, 0.800, 1.818, 2.273, 2.703, 3.375, 3.625, 3.875, 4.125, 4.375, 4.625, 4.875, 5.125, 5.375, 5.625, 5.875, 6.125, 6.375, 6.625, 6.875, 7.125, 7.375, 7.625, 7.875, 8.125] ) <NEW_LINE> obsdata_axav = np.array( [0.101, 0.150, 0.299, 1.000, 1.349, 1.665, 1.899, 2.067, 2.249, 2.447, 2.777, 2.922, 2.921, 2.812, 2.805, 2.863, 2.932, 3.060, 3.110, 3.299, 3.408, 3.515, 3.670, 3.862, 3.937, 4.055] ) <NEW_LINE> obsdata_tolerance = 6e-2 <NEW_LINE> def evaluate(self, in_x): <NEW_LINE> <INDENT> C1 = -1.475 <NEW_LINE> C2 = 1.132 <NEW_LINE> C3 = 1.463 <NEW_LINE> C4 = 0.294 <NEW_LINE> xo = 4.558 <NEW_LINE> gamma = 0.945 <NEW_LINE> optnir_axav_x = 1.0 / np.array([2.198, 1.65, 1.25, 0.55, 0.44, 0.37]) <NEW_LINE> optnir_axav_y = [0.101, 0.15, 0.299, 1.000, 1.349, 1.665] <NEW_LINE> return _curve_F99_method( in_x, self.Rv, C1, C2, C3, C4, xo, gamma, optnir_axav_x, optnir_axav_y, self.x_range, self.__class__.__name__, ) | Gordon et al (2003) LMC2 Average Extinction Curve
Parameters
----------
None
Raises
------
None
Notes
-----
From Gordon et al. (2003, ApJ, 594, 279)
Example showing the average curve
.. plot::
:include-source:
import numpy as np
import matplotlib.pyplot as plt
import astropy.units as u
from dust_extinction.averages import G03_LMC2
fig, ax = plt.subplots()
# generate the curves and plot them
x = np.arange(0.3,10.0,0.1)/u.micron
# define the extinction model
ext_model = G03_LMC2()
# generate the curves and plot them
x = np.arange(ext_model.x_range[0], ext_model.x_range[1],0.1)/u.micron
ax.plot(x,ext_model(x),label='G03 LMC2')
ax.plot(ext_model.obsdata_x, ext_model.obsdata_axav, 'ko',
label='obsdata')
ax.set_xlabel(r'$x$ [$\mu m^{-1}$]')
ax.set_ylabel(r'$A(x)/A(V)$')
ax.legend(loc='best')
plt.show() | 62598fb27d847024c075c446 |
class FakeServer(object): <NEW_LINE> <INDENT> def __init__(self, server_factory, auto_accept=True, on_connect=None): <NEW_LINE> <INDENT> self.server_factory = server_factory <NEW_LINE> self.auto_accept = auto_accept <NEW_LINE> self.connection_queue = DeferredQueue() <NEW_LINE> self.on_connect = on_connect <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def for_protocol(cls, protocol, *args, **kw): <NEW_LINE> <INDENT> factory = ServerFactory.forProtocol(protocol) <NEW_LINE> return cls(factory, *args, **kw) <NEW_LINE> <DEDENT> @property <NEW_LINE> def endpoint(self): <NEW_LINE> <INDENT> return FakeServerEndpoint(self) <NEW_LINE> <DEDENT> def await_connection(self): <NEW_LINE> <INDENT> return self.connection_queue.get() <NEW_LINE> <DEDENT> def _handle_connection(self): <NEW_LINE> <INDENT> conn = FakeConnection(self) <NEW_LINE> if self.on_connect is not None: <NEW_LINE> <INDENT> conn._connected_d.addCallback(lambda _: self.on_connect(conn)) <NEW_LINE> <DEDENT> self.connection_queue.put(conn) <NEW_LINE> if self.auto_accept: <NEW_LINE> <INDENT> conn.accept_connection() <NEW_LINE> <DEDENT> return conn._accept_d | Fake server container for testing client/server interactions. | 62598fb221bff66bcd722ceb |
class QueueGroupItem(Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> app_label = "core" <NEW_LINE> db_table = "norc_queuegroupitem" <NEW_LINE> ordering = ["priority"] <NEW_LINE> unique_together = ("group", "queue_type", "queue_id") <NEW_LINE> <DEDENT> group = ForeignKey(QueueGroup, related_name="items") <NEW_LINE> queue_type = ForeignKey(ContentType) <NEW_LINE> queue_id = PositiveIntegerField() <NEW_LINE> queue = GenericForeignKey("queue_type", "queue_id") <NEW_LINE> priority = PositiveIntegerField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u'[QueueGroupItem G:%s Q:%s P:%s]' % (self.group, self.queue, self.priority) <NEW_LINE> <DEDENT> __repr__ = __unicode__ | Maps queues to QueueGroups. | 62598fb2ff9c53063f51a6d1 |
class DOMHTMLReleaseinfoParser(DOMParserBase): <NEW_LINE> <INDENT> extractors = [Extractor(label='release dates', path="//th[@class='xxxx']/../../tr", attrs=Attribute(key='release dates', multi=True, path={'country': ".//td[1]//text()", 'date': ".//td[2]//text()", 'notes': ".//td[3]//text()"})), Extractor(label='akas', path="//div[@class='_imdbpy_akas']/table/tr", attrs=Attribute(key='akas', multi=True, path={'title': "./td[1]/text()", 'countries': "./td[2]/text()"}))] <NEW_LINE> preprocessors = [ (re.compile('(<h5><a name="?akas"?.*</table>)', re.I | re.M | re.S), r'<div class="_imdbpy_akas">\1</div>')] <NEW_LINE> def postprocess_data(self, data): <NEW_LINE> <INDENT> if not ('release dates' in data or 'akas' in data): return data <NEW_LINE> releases = data.get('release dates') or [] <NEW_LINE> rl = [] <NEW_LINE> for i in releases: <NEW_LINE> <INDENT> country = i.get('country') <NEW_LINE> date = i.get('date') <NEW_LINE> if not (country and date): continue <NEW_LINE> country = country.strip() <NEW_LINE> date = date.strip() <NEW_LINE> if not (country and date): continue <NEW_LINE> notes = i['notes'] <NEW_LINE> info = u'%s::%s' % (country, date) <NEW_LINE> if notes: <NEW_LINE> <INDENT> info += notes <NEW_LINE> <DEDENT> rl.append(info) <NEW_LINE> <DEDENT> if releases: <NEW_LINE> <INDENT> del data['release dates'] <NEW_LINE> <DEDENT> if rl: <NEW_LINE> <INDENT> data['release dates'] = rl <NEW_LINE> <DEDENT> akas = data.get('akas') or [] <NEW_LINE> nakas = [] <NEW_LINE> for aka in akas: <NEW_LINE> <INDENT> title = aka.get('title', '').strip() <NEW_LINE> if not title: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> countries = aka.get('countries', '').split('/') <NEW_LINE> if not countries: <NEW_LINE> <INDENT> nakas.append(title) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for country in countries: <NEW_LINE> <INDENT> nakas.append('%s::%s' % (title, country.strip())) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if akas: <NEW_LINE> <INDENT> del data['akas'] <NEW_LINE> <DEDENT> 
if nakas: <NEW_LINE> <INDENT> data['akas from release info'] = nakas <NEW_LINE> <DEDENT> return data | Parser for the "release dates" page of a given movie.
The page should be provided as a string, as taken from
the akas.imdb.com server. The final result will be a
dictionary, with a key for every relevant section.
Example:
rdparser = DOMHTMLReleaseinfoParser()
result = rdparser.parse(releaseinfo_html_string) | 62598fb25fdd1c0f98e5e011 |
class HyperlinkedRelatedField(RelatedField): <NEW_LINE> <INDENT> pk_url_kwarg = 'pk' <NEW_LINE> slug_field = 'slug' <NEW_LINE> slug_url_kwarg = None <NEW_LINE> default_read_only = False <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.view_name = kwargs.pop('view_name') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise ValueError("Hyperlinked field requires 'view_name' kwarg") <NEW_LINE> <DEDENT> self.slug_field = kwargs.pop('slug_field', self.slug_field) <NEW_LINE> default_slug_kwarg = self.slug_url_kwarg or self.slug_field <NEW_LINE> self.slug_url_kwarg = kwargs.pop('slug_url_kwarg', default_slug_kwarg) <NEW_LINE> self.format = kwargs.pop('format', None) <NEW_LINE> super(HyperlinkedRelatedField, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def get_slug_field(self): <NEW_LINE> <INDENT> return self.slug_field <NEW_LINE> <DEDENT> def to_native(self, obj): <NEW_LINE> <INDENT> view_name = self.view_name <NEW_LINE> request = self.context.get('request', None) <NEW_LINE> format = self.format or self.context.get('format', None) <NEW_LINE> kwargs = {self.pk_url_kwarg: obj.pk} <NEW_LINE> try: <NEW_LINE> <INDENT> return reverse(view_name, kwargs=kwargs, request=request, format=format) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> slug = getattr(obj, self.slug_field, None) <NEW_LINE> if not slug: <NEW_LINE> <INDENT> raise ValidationError('Could not resolve URL for field using view name "%s"' % view_name) <NEW_LINE> <DEDENT> kwargs = {self.slug_url_kwarg: slug} <NEW_LINE> try: <NEW_LINE> <INDENT> return reverse(self.view_name, kwargs=kwargs, request=request, format=format) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> kwargs = {self.pk_url_kwarg: obj.pk, self.slug_url_kwarg: slug} <NEW_LINE> try: <NEW_LINE> <INDENT> return reverse(self.view_name, kwargs=kwargs, request=request, format=format) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> 
raise ValidationError('Could not resolve URL for field using view name "%s"', view_name) <NEW_LINE> <DEDENT> def from_native(self, value): <NEW_LINE> <INDENT> if self.queryset is None: <NEW_LINE> <INDENT> raise Exception('Writable related fields must include a `queryset` argument') <NEW_LINE> <DEDENT> if value.startswith('http:') or value.startswith('https:'): <NEW_LINE> <INDENT> value = urlparse(value).path <NEW_LINE> prefix = get_script_prefix() <NEW_LINE> if value.startswith(prefix): <NEW_LINE> <INDENT> value = '/' + value[len(prefix):] <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> match = resolve(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise ValidationError('Invalid hyperlink - No URL match') <NEW_LINE> <DEDENT> if match.url_name != self.view_name: <NEW_LINE> <INDENT> raise ValidationError('Invalid hyperlink - Incorrect URL match') <NEW_LINE> <DEDENT> pk = match.kwargs.get(self.pk_url_kwarg, None) <NEW_LINE> slug = match.kwargs.get(self.slug_url_kwarg, None) <NEW_LINE> if pk is not None: <NEW_LINE> <INDENT> queryset = self.queryset.filter(pk=pk) <NEW_LINE> <DEDENT> elif slug is not None: <NEW_LINE> <INDENT> slug_field = self.get_slug_field() <NEW_LINE> queryset = self.queryset.filter(**{slug_field: slug}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValidationError('Invalid hyperlink') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> obj = queryset.get() <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> raise ValidationError('Invalid hyperlink - object does not exist.') <NEW_LINE> <DEDENT> return obj | Represents a to-one relationship, using hyperlinking. | 62598fb22c8b7c6e89bd384a |
class ImplementsNone(ObjectType, VirtualMethods): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ObjectType.__init__(self) <NEW_LINE> VirtualMethods.__init__(self) | Implements no virtual methods | 62598fb25fdd1c0f98e5e012 |
class TestTachographDriverFile(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return TachographDriverFile( card_number = '1000000492436002', created_at_time = '2020-01-02T15:04:05Z07:00', id = '4aff772c-a7bb-45e6-8e41-6a53e34feb83', url = 'https://samsara-tachograph-files.s3.us-west-2.amazonaws.com/123/456/789/4aff772c-a7bb-45e6-8e41-6a53e34feb83.ddd' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return TachographDriverFile( ) <NEW_LINE> <DEDENT> <DEDENT> def testTachographDriverFile(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | TachographDriverFile unit test stubs | 62598fb2e1aae11d1e7ce866 |
class MyPlugin(CubePlugin): <NEW_LINE> <INDENT> label = "Information Plugin" <NEW_LINE> userLevel = 1 <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.my_float = SpecFloat(label='My Float', minval=0, maxval=100, stepsize=0.1, defaultValue=10.4) <NEW_LINE> self.my_float.units = 'my units' <NEW_LINE> self.my_float.help = 'my mouse hover help' <NEW_LINE> self.my_float.interfaceType = INTERFACES.SLIDER <NEW_LINE> <DEDENT> def action(self): <NEW_LINE> <INDENT> bands = self.datacube.getBandCount() <NEW_LINE> samples = self.datacube.getSampleCount() <NEW_LINE> lines = self.datacube.getLineCount() <NEW_LINE> self.wb.postMessage("Bands: " + str(bands)) <NEW_LINE> self.wb.postMessage("Samples: " + str(samples)) <NEW_LINE> self.wb.postMessage("Lines: " + str(lines)) <NEW_LINE> self.wb.postMessage(skimage.__version__) <NEW_LINE> self.wb.postMessage(str(vars(self.datacube))) <NEW_LINE> self.wb.postMessage(str(dir(self.datacube))) <NEW_LINE> self.wb.postMessage(str(dir(self.wb))) | Extracts useful information about a datacube and displays it | 62598fb28e7ae83300ee9128 |
class MockRepo(object): <NEW_LINE> <INDENT> def __init__(self, name="", repoid="", enabled=True): <NEW_LINE> <INDENT> self.id = repoid <NEW_LINE> self.name = name <NEW_LINE> self.enabled = enabled <NEW_LINE> self.priority = 99 | mock class for repo | 62598fb230dc7b766599f8d1 |
class AsyncioEventLoop(_AbstractEventLoop): <NEW_LINE> <INDENT> def __init__(self, loop=None): <NEW_LINE> <INDENT> loop = loop or _real_asyncio.get_event_loop() <NEW_LINE> self._loop = loop <NEW_LINE> self.run_until_complete = ( self._run_until_complete if portage._internal_caller else loop.run_until_complete ) <NEW_LINE> self.call_soon = loop.call_soon <NEW_LINE> self.call_soon_threadsafe = loop.call_soon_threadsafe <NEW_LINE> self.call_later = loop.call_later <NEW_LINE> self.call_at = loop.call_at <NEW_LINE> self.is_running = loop.is_running <NEW_LINE> self.is_closed = loop.is_closed <NEW_LINE> self.close = loop.close <NEW_LINE> self.create_future = ( loop.create_future if hasattr(loop, "create_future") else self._create_future ) <NEW_LINE> self.create_task = loop.create_task <NEW_LINE> self.add_reader = loop.add_reader <NEW_LINE> self.remove_reader = loop.remove_reader <NEW_LINE> self.add_writer = loop.add_writer <NEW_LINE> self.remove_writer = loop.remove_writer <NEW_LINE> self.run_in_executor = loop.run_in_executor <NEW_LINE> self.time = loop.time <NEW_LINE> self.default_exception_handler = loop.default_exception_handler <NEW_LINE> self.call_exception_handler = loop.call_exception_handler <NEW_LINE> self.set_debug = loop.set_debug <NEW_LINE> self.get_debug = loop.get_debug <NEW_LINE> self._wakeup_fd = -1 <NEW_LINE> self._child_watcher = None <NEW_LINE> if portage._internal_caller: <NEW_LINE> <INDENT> loop.set_exception_handler(self._internal_caller_exception_handler) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _internal_caller_exception_handler(loop, context): <NEW_LINE> <INDENT> loop.default_exception_handler(context) <NEW_LINE> if "exception" in context: <NEW_LINE> <INDENT> signal.signal(signal.SIGTERM, signal.SIG_DFL) <NEW_LINE> os.kill(portage.getpid(), signal.SIGTERM) <NEW_LINE> <DEDENT> <DEDENT> def _create_future(self): <NEW_LINE> <INDENT> return _real_asyncio.Future(loop=self._loop) <NEW_LINE> <DEDENT> @property <NEW_LINE> def 
_asyncio_child_watcher(self): <NEW_LINE> <INDENT> if self._child_watcher is None: <NEW_LINE> <INDENT> self._child_watcher = _ChildWatcherThreadSafetyWrapper( self, _real_asyncio.get_child_watcher() ) <NEW_LINE> <DEDENT> return self._child_watcher <NEW_LINE> <DEDENT> @property <NEW_LINE> def _asyncio_wrapper(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def _run_until_complete(self, future): <NEW_LINE> <INDENT> if self._wakeup_fd != -1: <NEW_LINE> <INDENT> signal.set_wakeup_fd(self._wakeup_fd) <NEW_LINE> self._wakeup_fd = -1 <NEW_LINE> os.kill(portage.getpid(), signal.SIGCHLD) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self._loop.run_until_complete(future) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._wakeup_fd = signal.set_wakeup_fd(-1) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass | Implementation of asyncio.AbstractEventLoop which wraps asyncio's
event loop and is minimally compatible with _PortageEventLoop. | 62598fb2442bda511e95c4dc |
class Shapes: <NEW_LINE> <INDENT> numShapes = 0 | Shapes Generic Class | 62598fb2a8370b77170f0462 |
class CustomUserManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, country, password=None, is_active=True): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('User\'s email address must be set') <NEW_LINE> <DEDENT> if not country: <NEW_LINE> <INDENT> raise ValueError('User\'s country must be set') <NEW_LINE> <DEDENT> user = self.model( email=self.normalize_email(email), country=country, is_active=is_active, ) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=country) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, country, password): <NEW_LINE> <INDENT> user = self.create_user( email, password=password, country=country, ) <NEW_LINE> user.is_admin = True <NEW_LINE> user.save(using=country) <NEW_LINE> return user <NEW_LINE> <DEDENT> def get_by_natural_key(self, username): <NEW_LINE> <INDENT> return get_user_from_localized_databases(username) | Custom user manager for CustomUser. | 62598fb2283ffb24f3cf3912 |
class LocationModelTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> app = create_app() <NEW_LINE> db.drop_all() <NEW_LINE> db.create_all() <NEW_LINE> seed_db() <NEW_LINE> db.session.commit() <NEW_LINE> self.client = app.test_client() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> db.session.commit() <NEW_LINE> db.drop_all() <NEW_LINE> <DEDENT> def test_create_and_read(self): <NEW_LINE> <INDENT> user = User(name='Nicholas Cage', age=49, gender='m') <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> user = User.query.filter_by(name='Nicholas Cage').first() <NEW_LINE> location1 = Location( city="Los Angeles", latitude=34.063566, longitude=-118.421092) <NEW_LINE> location2 = Location( city="San Francisco", latitude=37.69841, longitude=-122.454232) <NEW_LINE> user.locations.extend([location1, location2]) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> user = User.query.filter_by(name='Nicholas Cage').first() <NEW_LINE> locations = Location.query.all() <NEW_LINE> self.assertGreater(len(locations), 2) <NEW_LINE> location = Location.query.filter_by(city='Los Angeles').first() <NEW_LINE> self.assertEqual(location.city, 'Los Angeles') <NEW_LINE> self.assertEqual(location.latitude, 34.062264) <NEW_LINE> self.assertEqual(location.longitude, -118.340361) <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> location = Location.query.filter_by(user_id=1).first() <NEW_LINE> user = User.query.filter_by(id=1).first() <NEW_LINE> location = Location( city="Los Angeles", latitude=34.063566, longitude=-118.421092) <NEW_LINE> user.locations.append(location) <NEW_LINE> num_locations = len(user.locations) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> location.query.delete() <NEW_LINE> db.session.commit() <NEW_LINE> user = User.query.filter_by(id=1).first() <NEW_LINE> self.assertLess(len(user.locations), num_locations) <NEW_LINE> <DEDENT> def 
test_cascade_delete(self): <NEW_LINE> <INDENT> user = User.query.filter_by(name='Taylor Swift').first() <NEW_LINE> location = Location( city="San Francisco", latitude=37.69841, longitude=-122.454232) <NEW_LINE> user.locations.append(location) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> user.query.delete() <NEW_LINE> db.session.commit() <NEW_LINE> result = User.query.filter_by(name='Taylor Swift').first() <NEW_LINE> self.assertIsNone(result) | Test Location model. | 62598fb2cc0a2c111447b098 |
class Complex: <NEW_LINE> <INDENT> r = '' <NEW_LINE> i = '' <NEW_LINE> def __init__(self, realpart, imagpart): <NEW_LINE> <INDENT> self.r = realpart <NEW_LINE> self.i = imagpart | a simple class example | 62598fb23346ee7daa33768a |
class LoadJobConfig(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._properties = {} <NEW_LINE> self._schema = () <NEW_LINE> <DEDENT> allow_jagged_rows = _TypedApiResourceProperty( 'allow_jagged_rows', 'allowJaggedRows', bool) <NEW_LINE> allow_quoted_newlines = _TypedApiResourceProperty( 'allow_quoted_newlines', 'allowQuotedNewlines', bool) <NEW_LINE> autodetect = AutoDetectSchema('autodetect', 'autodetect', bool) <NEW_LINE> create_disposition = CreateDisposition('create_disposition', 'createDisposition') <NEW_LINE> encoding = Encoding('encoding', 'encoding') <NEW_LINE> field_delimiter = _TypedApiResourceProperty( 'field_delimiter', 'fieldDelimiter', six.string_types) <NEW_LINE> ignore_unknown_values = _TypedApiResourceProperty( 'ignore_unknown_values', 'ignoreUnknownValues', bool) <NEW_LINE> max_bad_records = _TypedApiResourceProperty( 'max_bad_records', 'maxBadRecords', six.integer_types) <NEW_LINE> null_marker = _TypedApiResourceProperty( 'null_marker', 'nullMarker', six.string_types) <NEW_LINE> quote_character = _TypedApiResourceProperty( 'quote_character', 'quote', six.string_types) <NEW_LINE> skip_leading_rows = _TypedApiResourceProperty( 'skip_leading_rows', 'skipLeadingRows', six.integer_types) <NEW_LINE> source_format = SourceFormat('source_format', 'sourceFormat') <NEW_LINE> write_disposition = WriteDisposition('write_disposition', 'writeDisposition') <NEW_LINE> @property <NEW_LINE> def schema(self): <NEW_LINE> <INDENT> return list(self._schema) <NEW_LINE> <DEDENT> @schema.setter <NEW_LINE> def schema(self, value): <NEW_LINE> <INDENT> if not all(isinstance(field, SchemaField) for field in value): <NEW_LINE> <INDENT> raise ValueError('Schema items must be fields') <NEW_LINE> <DEDENT> self._schema = tuple(value) <NEW_LINE> <DEDENT> def to_api_repr(self): <NEW_LINE> <INDENT> config = copy.deepcopy(self._properties) <NEW_LINE> if len(self.schema) > 0: <NEW_LINE> <INDENT> config['schema'] = {'fields': 
_build_schema_resource(self.schema)} <NEW_LINE> <DEDENT> slr = config.get('skipLeadingRows') <NEW_LINE> if slr is not None: <NEW_LINE> <INDENT> config['skipLeadingRows'] = str(slr) <NEW_LINE> <DEDENT> return config <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_api_repr(cls, resource): <NEW_LINE> <INDENT> schema = resource.pop('schema', {'fields': ()}) <NEW_LINE> slr = resource.pop('skipLeadingRows', None) <NEW_LINE> config = cls() <NEW_LINE> config._properties = copy.deepcopy(resource) <NEW_LINE> config.schema = _parse_schema_resource(schema) <NEW_LINE> config.skip_leading_rows = _int_or_none(slr) | Configuration options for load jobs.
All properties in this class are optional. Values which are ``None`` ->
server defaults. | 62598fb256ac1b37e6302270 |
class SimHeapFreelist(SimHeapLibc): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> return self.chunks() <NEW_LINE> <DEDENT> def chunks(self): <NEW_LINE> <INDENT> raise NotImplementedError("%s not implemented for %s" % (self.chunks.__func__.__name__, self.__class__.__name__)) <NEW_LINE> <DEDENT> def allocated_chunks(self): <NEW_LINE> <INDENT> raise NotImplementedError("%s not implemented for %s" % (self.allocated_chunks.__func__.__name__, self.__class__.__name__)) <NEW_LINE> <DEDENT> def free_chunks(self): <NEW_LINE> <INDENT> raise NotImplementedError("%s not implemented for %s" % (self.free_chunks.__func__.__name__, self.__class__.__name__)) <NEW_LINE> <DEDENT> def chunk_from_mem(self, ptr): <NEW_LINE> <INDENT> raise NotImplementedError("%s not implemented for %s" % (self.chunk_from_mem.__func__.__name__, self.__class__.__name__)) <NEW_LINE> <DEDENT> def print_heap_state(self): <NEW_LINE> <INDENT> print("┌───────────────────────────────┐") <NEW_LINE> print("├───────── HEAP CHUNKS ─────────┤") <NEW_LINE> for ck in self.chunks(): <NEW_LINE> <INDENT> print("│ " + str(ck) + " │") <NEW_LINE> <DEDENT> print("├───────── USED CHUNKS ─────────┤") <NEW_LINE> for ck in self.allocated_chunks(): <NEW_LINE> <INDENT> print("│ " + str(ck) + " │") <NEW_LINE> <DEDENT> print("├───────── FREE CHUNKS ─────────┤") <NEW_LINE> for ck in self.free_chunks(): <NEW_LINE> <INDENT> print("│ " + str(ck) + " │") <NEW_LINE> <DEDENT> print("└───────────────────────────────┘") <NEW_LINE> <DEDENT> def print_all_chunks(self): <NEW_LINE> <INDENT> print("┌───────────────────────────────┐") <NEW_LINE> print("├───────── HEAP CHUNKS ─────────┤") <NEW_LINE> for ck in self.chunks(): <NEW_LINE> <INDENT> print("│ " + str(ck) + " │") <NEW_LINE> <DEDENT> print("└───────────────────────────────┘") | A freelist-style heap implementation. Distinguishing features of such heaps include chunks containing heap
metadata in addition to user data and at least (but often more than) one linked list of free chunks. | 62598fb2090684286d593720 |
class TestAngleBetweenPlanes(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> unittest.TestCase.setUp(self) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> unittest.TestCase.tearDown(self) <NEW_LINE> <DEDENT> def testSkeleton(self): <NEW_LINE> <INDENT> self.assert_(True) <NEW_LINE> <DEDENT> def test_angleBetweenPlanesCubic_deg(self): <NEW_LINE> <INDENT> plane1 = (1, 0, 0) <NEW_LINE> plane2 = (1, 0, 0) <NEW_LINE> expected_angle_deg = 0.0 <NEW_LINE> actual_angle_deg = angleBetweenPlanesCubic_deg(plane1, plane2) <NEW_LINE> self.assertAlmostEquals(expected_angle_deg, actual_angle_deg) <NEW_LINE> plane2 = (0, 1, 0) <NEW_LINE> expected_angle_deg = 90.0 <NEW_LINE> actual_angle_deg = angleBetweenPlanesCubic_deg(plane1, plane2) <NEW_LINE> self.assertAlmostEquals(expected_angle_deg, actual_angle_deg) <NEW_LINE> plane2 = (1, 1, 0) <NEW_LINE> expected_angle_deg = 45.0 <NEW_LINE> actual_angle_deg = angleBetweenPlanesCubic_deg(plane1, plane2) <NEW_LINE> self.assertAlmostEquals(expected_angle_deg, actual_angle_deg) <NEW_LINE> plane2 = (0, 1, 1) <NEW_LINE> expected_angle_deg = 90.0 <NEW_LINE> actual_angle_deg = angleBetweenPlanesCubic_deg(plane1, plane2) <NEW_LINE> self.assertAlmostEquals(expected_angle_deg, actual_angle_deg) <NEW_LINE> plane2 = (1, 1, 1) <NEW_LINE> expected_angle_deg = 54.735610317245339 <NEW_LINE> actual_angle_deg = angleBetweenPlanesCubic_deg(plane1, plane2) <NEW_LINE> self.assertAlmostEquals(expected_angle_deg, actual_angle_deg) <NEW_LINE> self.assert_(True) | TestCase class for the module `electronmicroscopy.tem.angle_between_planes`. | 62598fb2379a373c97d9909b |
class ScDrawerButton(Gtk.ToggleButton): <NEW_LINE> <INDENT> revealer = None <NEW_LINE> spinner = None <NEW_LINE> context = None <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> Gtk.ToggleButton.__init__(self) <NEW_LINE> self.context = context <NEW_LINE> box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 0) <NEW_LINE> self.add(box) <NEW_LINE> self.img = Gtk.Image.new_from_icon_name( "open-menu-symbolic", Gtk.IconSize.SMALL_TOOLBAR) <NEW_LINE> box.pack_start(self.img, True, True, 0) <NEW_LINE> self.img.set_halign(Gtk.Align.CENTER) <NEW_LINE> self.set_can_focus(False) <NEW_LINE> self.get_style_context().add_class("image-button") <NEW_LINE> self.revealer = Gtk.Revealer.new() <NEW_LINE> self.revealer.set_reveal_child(False) <NEW_LINE> self.revealer.set_transition_type( Gtk.RevealerTransitionType.SLIDE_RIGHT) <NEW_LINE> self.spinner = Gtk.Spinner.new() <NEW_LINE> self.spinner.stop() <NEW_LINE> self.revealer.add(self.spinner) <NEW_LINE> box.pack_start(self.revealer, False, False, 0) <NEW_LINE> self.revealer.set_property("margin", 0) <NEW_LINE> self.revealer.set_border_width(0) <NEW_LINE> self.spinner.set_margin_start(3) <NEW_LINE> self.spinner.set_margin_end(3) <NEW_LINE> self.context.executor.connect('execution-started', self.start_exec) <NEW_LINE> self.context.executor.connect('execution-ended', self.end_exec) <NEW_LINE> <DEDENT> def start_exec(self, executor): <NEW_LINE> <INDENT> self.spinner.start() <NEW_LINE> self.revealer.set_reveal_child(True) <NEW_LINE> <DEDENT> def end_exec(self, executor): <NEW_LINE> <INDENT> self.spinner.stop() <NEW_LINE> self.revealer.set_reveal_child(False) | Simple button to control the draw visibility, and also sliding
a spinner into view when jobs are currently running | 62598fb23539df3088ecc338 |
class GPXLoader(xml.sax.handler.ContentHandler): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(GPXLoader, self).__init__() <NEW_LINE> self.tag_list = [] <NEW_LINE> self.gpx_data = [] <NEW_LINE> self.gpx_data.append([]) <NEW_LINE> self.gpx_data.append([]) <NEW_LINE> self.gpx_data.append([]) <NEW_LINE> self.char_buffer = "" <NEW_LINE> <DEDENT> def startElement(self, tag, attributes): <NEW_LINE> <INDENT> self.tag_list.append(tag) <NEW_LINE> self.char_buffer = "" <NEW_LINE> if tag == "trkpt": <NEW_LINE> <INDENT> self.gpx_data[0].append(attributes["lat"]) <NEW_LINE> self.gpx_data[1].append(attributes["lon"]) <NEW_LINE> <DEDENT> <DEDENT> def endElement(self, name): <NEW_LINE> <INDENT> if self.tag_list[-1] == "time": <NEW_LINE> <INDENT> if self.tag_list[-2] == "trkpt": <NEW_LINE> <INDENT> if self.tag_list[-3] == "trkseg": <NEW_LINE> <INDENT> self.gpx_data[2].append(self.char_buffer) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> del self.tag_list[-1] <NEW_LINE> <DEDENT> def characters(self, content): <NEW_LINE> <INDENT> self.char_buffer += str(content) | This class loads in to memory the GPX [lat, lon, time] data generated
from a Garmin GPS device, uses | 62598fb2cc0a2c111447b099 |
class XspeedGateway(VtGateway):
    """XSPEED counter gateway: wires the market-data and trading APIs
    together and drives a round-robin query timer."""

    def __init__(self, eventEngine, gatewayName='XSPEED'):
        """Constructor: create the md/td API wrappers and locate the
        JSON connection-settings file next to this module."""
        super(XspeedGateway, self).__init__(eventEngine, gatewayName)
        self.mdApi = XspeedMdApi(self)  # market data API
        self.tdApi = XspeedTdApi(self)  # trading API
        self.mdConnected = False
        self.tdConnected = False
        self.qryEnabled = False  # whether the periodic query loop runs
        self.fileName = self.gatewayName + '_connect.json'
        self.filePath = getJsonPath(self.fileName, __file__)

    def connect(self):
        """Read the JSON settings file and connect both APIs."""
        try:
            f = open(self.filePath)
        except IOError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'读取连接配置出错,请检查'
            self.onLog(log)
            return
        setting = json.load(f)
        f.close()
        try:
            accountID = str(setting['accountID'])
            password = str(setting['password'])
            tdAddress = str(setting['tdAddress'])
            mdAddress = str(setting['mdAddress'])
        except KeyError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'连接配置缺少字段,请检查'
            self.onLog(log)
            return
        self.mdApi.connect(accountID, password, mdAddress)
        self.tdApi.connect(accountID, password, tdAddress)
        # Start the periodic account/position queries (if enabled).
        self.initQuery()

    def subscribe(self, subscribeReq):
        """Subscribe to market data for a contract."""
        self.mdApi.subscribe(subscribeReq)

    def sendOrder(self, orderReq):
        """Send an order; returns whatever the trading API returns
        (presumably the vt order id — TODO confirm against XspeedTdApi)."""
        return self.tdApi.sendOrder(orderReq)

    def cancelOrder(self, cancelOrderReq):
        """Cancel a previously-sent order."""
        self.tdApi.cancelOrder(cancelOrderReq)

    def qryAccount(self):
        """Query account funds."""
        self.tdApi.qryAccount()

    def qryFuturesPosition(self):
        """Query futures positions."""
        self.tdApi.qryFuturesPosition()

    def qryOptionPosition(self):
        """Query option positions."""
        self.tdApi.qryOptionPosition()

    def close(self):
        """Close whichever API connections were established."""
        if self.mdConnected:
            self.mdApi.close()
        if self.tdConnected:
            self.tdApi.close()

    def initQuery(self):
        """Initialise the round-robin query loop (only when enabled)."""
        if self.qryEnabled:
            self.qryFunctionList = [self.qryAccount, self.qryFuturesPosition,
                                    self.qryOptionPosition]
            self.qryCount = 0         # timer ticks since last query
            self.qryTrigger = 1       # ticks between queries
            self.qryNextFunction = 0  # index of next query to fire
            self.startQuery()

    def query(self, event):
        """Timer handler: fire the next query every qryTrigger ticks."""
        self.qryCount += 1
        if self.qryCount > self.qryTrigger:
            self.qryCount = 0
            function = self.qryFunctionList[self.qryNextFunction]
            function()
            self.qryNextFunction += 1
            if self.qryNextFunction == len(self.qryFunctionList):
                self.qryNextFunction = 0

    def startQuery(self):
        """Register the query handler on the engine's timer event."""
        self.eventEngine.register(EVENT_TIMER, self.query)

    def setQryEnabled(self, qryEnabled):
        """Enable/disable the periodic query loop (takes effect at
        the next initQuery call)."""
        self.qryEnabled = qryEnabled
class MemoryBufferedJSONList(MemoryBufferedJSONCollection, SyncedList): <NEW_LINE> <INDENT> def __init__( self, filename=None, write_concern=False, data=None, parent=None, *args, **kwargs, ): <NEW_LINE> <INDENT> super().__init__( filename=filename, write_concern=write_concern, data=data, parent=parent, *args, **kwargs, ) | A buffered :class:`JSONList`. | 62598fb266656f66f7d5a476 |
class WalmartSellersSpider(scrapy.Spider):
    """Scrapy spider that crawls a single Walmart sellers page.

    Run it like::

        scrapy crawl walmart_sellers -o output_file.json \
            -a category='product/3921879/sellers'

    ``category`` is appended to ``https://www.walmart.com/`` to build
    ``start_urls``.  For each seller it adds the item to the cart with
    Selenium and scrapes the shopping-cart price, since the cart price
    can differ from the listed price.
    """

    name = "walmart_sellers"
    allowed_domains = ["walmart.com"]

    def __init__(self, category='', domain=None, *args, **kwargs):
        super(WalmartSellersSpider, self).__init__(*args, **kwargs)
        self.start_urls = ['https://www.walmart.com/%s' % category]

    def parse(self, response):
        # NOTE(review): these absolute XPaths are tied to a specific
        # walmart.com page layout and will break when the DOM changes.
        xpath_price = "/html/body/div/div/div[1]/div[1]/div/div/div[1]/div[2]/div/div[1]/div/div[4]/span[2]/span/span[%s]/text()"
        xpath_seller_name = '/html/body/div/div/div/div/div[2]/div/div/div[4]/div/div[3]/div/div[1]/div[1]/a/text()'
        xpath_seller_url = '/html/body/div/div/div/div/div[2]/div/div/div[4]/div/div[3]/div/div[1]/div[1]/a/@href'
        xpath_remove_button = '/html/body/div/div/div[1]/div[1]/div/div/div[1]/div[1]/div/div[3]/div[1]/div/div[2]/div/div/div/div/div[6]/div/div/button[2]'
        xpath_add_to_cart_button = '/html/body/div/div/div/div/div[2]/div/div/div[4]/div[%s]/div[3]/div/div[2]/div[1]/div[2]/div/button'
        cart_url = "https://www.walmart.com/cart"
        walmart_sellers = WalmartSellersDetails()
        walmart_sellers["seller_name"] = ""
        walmart_sellers["seller_url"] = ""
        walmart_sellers["product_shoppingcartprice"] = []
        walmart_sellers["seller_name"] = response.xpath(xpath_seller_name).extract()
        walmart_sellers["seller_url"] = response.xpath(xpath_seller_url).extract()
        num_sellers = len(walmart_sellers["seller_url"])
        for i in range(num_sellers):
            # Fresh browser per seller: add to cart, read the cart
            # price, then remove the item again.
            # NOTE(review): driver is not closed if a step raises;
            # consider try/finally.  Also `webdriver` and `lxml` must be
            # imported at file level (not visible in this chunk).
            driver = webdriver.Chrome()
            driver.get(response.url)
            # Buttons presumably start at index 2 in the page markup —
            # TODO confirm against the live DOM.
            driver.find_element_by_xpath(xpath_add_to_cart_button % str(i + 2)).click()
            time.sleep(5)
            driver.get(cart_url)
            dom = lxml.html.fromstring(driver.page_source)
            driver.find_element_by_xpath(xpath_remove_button).click()
            driver.close()
            # Price is split over four spans (e.g. "$", "12", ".", "34").
            price = dom.xpath(xpath_price % '2')[0] + dom.xpath(xpath_price % '3')[0] + dom.xpath(xpath_price % '4')[0] + dom.xpath(xpath_price % '5')[0]
            walmart_sellers["product_shoppingcartprice"].append(price)
        yield walmart_sellers
class UBusTestSystemReboot(rootfs_boot.RootFSBootTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> board = self.dev.board <NEW_LINE> lan = self.dev.lan <NEW_LINE> for i in range(1000): <NEW_LINE> <INDENT> print( "\nRunning iteration of ubus json-rpc system reboot nubmer %s\n" % i) <NEW_LINE> session_id = ubus_login_session(lan) <NEW_LINE> print("\nLogged in with sessionid = %s\n" % session_id) <NEW_LINE> ubus_system_reboot(session_id, lan) <NEW_LINE> board.wait_for_linux() | Various UBus tests | 62598fb27c178a314d78d523 |
class MusicPlayer(commands.Cog):
    """A class which is assigned to each guild using the bot for Music.

    This class implements a queue and loop, which allows different
    guilds to listen to different playlists simultaneously.

    When the bot disconnects from the Voice its instance will be
    destroyed.
    """

    __slots__ = ('bot', '_guild', '_channel', '_cog', 'queue', 'next',
                 'current', 'np', 'volume')

    def __init__(self, ctx):
        self.bot = ctx.bot
        self._guild = ctx.guild
        self._channel = ctx.channel
        self._cog = ctx.cog
        self.queue = asyncio.Queue()   # pending sources for this guild
        self.next = asyncio.Event()    # set when the current song ends
        self.np = None                 # "now playing" message, if any
        self.volume = .5
        self.current = None            # source currently playing
        # Start the per-guild playback loop in the background.
        ctx.bot.loop.create_task(self.player_loop())

    async def player_loop(self):
        """Main playback loop: dequeue and play sources until the bot
        closes, destroying the player after 5 idle minutes."""
        await self.bot.wait_until_ready()
        while not self.bot.is_closed():
            self.next.clear()
            try:
                # If nothing is queued for 5 minutes, tear down.
                async with timeout(300):
                    source = await self.queue.get()
            except asyncio.TimeoutError:
                return self.destroy(self._guild)
            if not isinstance(source, YTDLSource):
                # Not yet a playable source — regather the stream
                # (presumably re-resolving an expired URL; TODO confirm
                # against YTDLSource.regather_stream).
                try:
                    source = await YTDLSource.regather_stream(source, loop=self.bot.loop)
                except Exception as e:
                    await self._channel.send(f'There was an error processing your song.\n'
                                             f'```css\n[{e}]\n```')
                    continue
            source.volume = self.volume
            self.current = source
            # `after` fires from the audio thread, so hop back onto the
            # event loop thread before setting the event.
            self._guild.voice_client.play(
                source,
                after=lambda _: self.bot.loop.call_soon_threadsafe(self.next.set))
            self.np = await self._channel.send(f'**Now Playing:** `{source.title}` requested by '
                                               f'`{source.requester}`')
            await self.next.wait()
            # Song finished: release the source and delete the message.
            source.cleanup()
            self.current = None
            try:
                await self.np.delete()
            except discord.HTTPException:
                pass

    def destroy(self, guild):
        """Disconnect and clean up the player via the parent cog."""
        return self.bot.loop.create_task(self._cog.cleanup(guild))
class Time(): <NEW_LINE> <INDENT> ps = 1 <NEW_LINE> ns = 1000 <NEW_LINE> us = ns * 1000 <NEW_LINE> ms = us * 1000 <NEW_LINE> s = ms * 1000 | Time units | 62598fb28a43f66fc4bf2201 |
class QueueInput(FeedfreeInput):
    """Enqueue datapoints from a DataFlow to a TF queue; the model
    receives dequeued tensors.

    Calling :meth:`refill_queue` drains the queue and resets the
    dataflow.
    """

    def __init__(self, ds, queue=None):
        """
        Args:
            ds (DataFlow): the dataflow whose datapoints are enqueued.
            queue (tf.QueueBase or None): an existing queue to use; a
                50-element FIFOQueue is created in _setup otherwise.
        """
        if not isinstance(ds, DataFlow):
            raise ValueError("QueueInput takes a DataFlow! Got {}".format(ds))
        self.queue = queue
        self.ds = ds
        # Repeat the dataflow forever so the enqueue thread never stops.
        self._inf_ds = RepeatedData(ds, -1)
        self._started = False

    def _size(self):
        return self.ds.size()

    def _setup(self, inputs):
        """Build placeholders, the queue, and the enqueue thread."""
        self._input_placehdrs = [v.build_placeholder_reuse() for v in inputs]
        assert len(self._input_placehdrs) > 0, "QueueInput has to be used with some inputs!"
        with self.cached_name_scope():
            if self.queue is None:
                self.queue = tf.FIFOQueue(
                    50, [x.dtype for x in self._input_placehdrs],
                    name='input_queue')
            logger.info("Setting up the queue '{}' for CPU prefetching ...".format(self.queue.name))
            self.thread = EnqueueThread(self.queue, self._inf_ds, self._input_placehdrs)
            # Separate dequeue op used only by refill_queue() to drain.
            self._dequeue_op = self.queue.dequeue(name='dequeue_for_reset')

    def refill_queue(self):
        """Pause the enqueue thread, drain the queue, reset the
        dataflow, and resume — used to restart epochs cleanly."""
        self.thread.pause()
        opt = tf.RunOptions()
        opt.timeout_in_ms = 2000
        sess = tf.get_default_session()
        try:
            # Dequeue until the 2s timeout fires, i.e. queue is empty.
            while True:
                sess.run(self._dequeue_op, options=opt)
        except tf.errors.DeadlineExceededError:
            pass
        self.thread.reinitialize_dataflow()
        self.thread.resume()

    def _create_ema_callback(self):
        """Create a callback that maintains a moving average of the
        queue size, as a training summary."""
        with self.cached_name_scope():
            size = tf.cast(self.queue.size(), tf.float32, name='queue_size')
        size_ema_op = add_moving_summary(size, collection=None, decay=0.5)[0].op
        return RunOp(
            lambda: size_ema_op,
            run_before=False,
            run_as_trigger=False,
            run_step=True)

    def _get_callbacks(self):
        from ..callbacks.concurrency import StartProcOrThread
        cb = StartProcOrThread(self.thread)
        return [cb, self._create_ema_callback(), _get_reset_callback(self._inf_ds)]

    def _get_input_tensors(self):
        """Dequeue one datapoint; pin the op to CPU and restore static
        shapes lost by the queue."""
        with tf.device('/cpu:0'), self.cached_name_scope():
            ret = self.queue.dequeue(name='input_deque')
            if isinstance(ret, tf.Tensor):
                # A single-component queue returns a bare tensor.
                ret = [ret]
            assert len(ret) == len(self._input_placehdrs)
            for qv, v in zip(ret, self._input_placehdrs):
                qv.set_shape(v.get_shape())
            return ret
class TcpSensor(Entity):
    """Implementation of a TCP socket based sensor: on each update it
    connects, sends a payload, and parses the response into the state."""

    required = tuple()

    def __init__(self, hass, config):
        value_template = config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            value_template.hass = hass
        self._hass = hass
        # Snapshot of the relevant config keys for this sensor.
        self._config = {
            CONF_NAME: config.get(CONF_NAME),
            CONF_HOST: config.get(CONF_HOST),
            CONF_PORT: config.get(CONF_PORT),
            CONF_TIMEOUT: config.get(CONF_TIMEOUT),
            CONF_PAYLOAD: config.get(CONF_PAYLOAD),
            CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT),
            CONF_VALUE_TEMPLATE: value_template,
            CONF_VALUE_ON: config.get(CONF_VALUE_ON),
            CONF_BUFFER_SIZE: config.get(CONF_BUFFER_SIZE),
        }
        self._state = None
        self.update()

    @property
    def name(self):
        """Return the configured name, falling back to the default."""
        name = self._config[CONF_NAME]
        if name is not None:
            return name
        return super().name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the configured unit of measurement."""
        return self._config[CONF_UNIT_OF_MEASUREMENT]

    def update(self):
        """Get the latest value: connect, send the payload, wait for a
        response (with timeout), then optionally apply the template."""
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(self._config[CONF_TIMEOUT])
            try:
                sock.connect((self._config[CONF_HOST], self._config[CONF_PORT]))
            except socket.error as err:
                _LOGGER.error(
                    "Unable to connect to %s on port %s: %s",
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                    err,
                )
                return
            try:
                sock.send(self._config[CONF_PAYLOAD].encode())
            except socket.error as err:
                _LOGGER.error(
                    "Unable to send payload %r to %s on port %s: %s",
                    self._config[CONF_PAYLOAD],
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                    err,
                )
                return
            # select() lets us time out waiting for the reply without
            # relying on the socket timeout for the recv.
            readable, _, _ = select.select([sock], [], [], self._config[CONF_TIMEOUT])
            if not readable:
                _LOGGER.warning(
                    "Timeout (%s second(s)) waiting for a response after "
                    "sending %r to %s on port %s.",
                    self._config[CONF_TIMEOUT],
                    self._config[CONF_PAYLOAD],
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                )
                return
            value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode()
        if self._config[CONF_VALUE_TEMPLATE] is not None:
            try:
                self._state = self._config[CONF_VALUE_TEMPLATE].render(value=value)
                return
            except TemplateError:
                _LOGGER.error(
                    "Unable to render template of %r with value: %r",
                    self._config[CONF_VALUE_TEMPLATE],
                    value,
                )
                # On template failure the previous state is kept.
                return
        self._state = value
class Env(object):
    """The main OpenAI Gym class, encapsulating an environment with
    arbitrary behind-the-scenes dynamics.

    Subclasses should implement :meth:`step`, :meth:`reset`,
    :meth:`render`, :meth:`close` and :meth:`seed`, and set
    ``action_space``, ``observation_space`` and (optionally a narrower)
    ``reward_range``.  Instances may also be used as context managers,
    closing themselves on exit.
    """

    metadata = {'render.modes': []}
    reward_range = (-float('inf'), float('inf'))
    spec = None

    action_space = None
    observation_space = None

    def step(self, action):
        """Run one timestep of the environment's dynamics."""
        raise NotImplementedError

    def reset(self):
        """Reset the environment to an initial state."""
        raise NotImplementedError

    def render(self, mode='human'):
        """Render the environment in the given mode."""
        raise NotImplementedError

    def close(self):
        """Perform any necessary cleanup; no-op by default."""
        pass

    def seed(self, seed=None):
        """Seed the environment's RNG; no-op by default."""
        return

    @property
    def unwrapped(self):
        """Completely unwrap this env (the base env is itself)."""
        return self

    def __str__(self):
        if self.spec is not None:
            return '<{}<{}>>'.format(type(self).__name__, self.spec.id)
        return '<{} instance>'.format(type(self).__name__)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
        # Never suppress exceptions raised inside the `with` block.
        return False
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.ALPHA) <NEW_LINE> class CreateBeta(Create): <NEW_LINE> <INDENT> _ALLOW_RSA_ENCRYPTED_CSEK_KEYS = True <NEW_LINE> @classmethod <NEW_LINE> def Args(cls, parser): <NEW_LINE> <INDENT> _Args(parser, cls.ReleaseTrack()) <NEW_LINE> parser.display_info.AddCacheUpdater(flags.ImagesCompleter) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> return self._Run(args) | Create Google Compute Engine images. | 62598fb2d58c6744b42dc31c |
class ValueRegex:
    """Allows filtering based on the output of a regex capture.

    Instead of comparing the raw 'resource value' with the 'value', the
    regex in ``expr`` is matched against the resource value and the
    first capture group's contents are compared instead — so the regex
    must define exactly one capture group.  When the regex does not
    match (or the input is unusable), ``None`` is returned.

    Example of getting a datetime string for an 'expiration' check::

        type: value
        value_regex: ".*delete_after=([0-9]{4}-[0-9]{2}-[0-9]{2}).*"
        key: "tag:company_mandated_metadata"
        value_type: expiration
        op: lte
        value: 0
    """

    def __init__(self, expr):
        self.expr = expr

    def get_resource_value(self, resource):
        """Return the first capture group of ``expr`` matched against
        ``resource``, or ``None`` on no match / bad input."""
        if resource is None:
            return resource
        try:
            match = re.match(self.expr, resource)
        except (ValueError, TypeError):
            # Non-string resources (or bad patterns) simply don't match.
            return None
        return None if match is None else match.group(1)
class AuthenticationFailed(GitHubError):
    """Exception class for 401 responses.

    Possible reasons:

    - Need one time password (for two-factor authentication)
    - You are not authorized to access the resource
    """

    pass
class BenchmarkConfigSpec(spec.BaseSpec):
    """Configurable options of a benchmark run.

    Attributes:
      description: None or string. Description of the benchmark to run.
      flags: flags.FlagValues. Values to use for each flag while executing
          the benchmark.
      vm_groups: dict mapping VM group name string to _VmGroupSpec.
          Configurable options for each VM group used by the benchmark.
    """

    def __init__(self, component_full_name, expected_os_types=None, **kwargs):
        """Initializes the spec and optionally validates VM group OS types.

        Args:
          component_full_name: string. Fully qualified name of this spec,
              used in error messages.
          expected_os_types: None, or iterable of allowed os_type strings.
          **kwargs: Keyword arguments for the BaseSpec constructor.

        Raises:
          errors.Config.InvalidValue: If any VM group has an os_type not in
              expected_os_types.
        """
        super(BenchmarkConfigSpec, self).__init__(component_full_name, **kwargs)
        if expected_os_types is not None:
            mismatched_os_types = []
            # FIX: use items() instead of the Python-2-only iteritems() so
            # this also runs under Python 3 (identical behavior on 2.x).
            for group_name, group_spec in sorted(self.vm_groups.items()):
                if group_spec.os_type not in expected_os_types:
                    mismatched_os_types.append('{0}.vm_groups[{1}].os_type: {2}'.format(
                        component_full_name, repr(group_name),
                        repr(group_spec.os_type)))
            if mismatched_os_types:
                raise errors.Config.InvalidValue(
                    'VM groups in {0} may only have the following OS types: {1}. The '
                    'following VM group options are invalid:{2}{3}'.format(
                        component_full_name,
                        ', '.join(repr(os_type) for os_type in expected_os_types),
                        os.linesep, os.linesep.join(mismatched_os_types)))

    @classmethod
    def _GetOptionDecoderConstructions(cls):
        """Gets decoder classes and constructor args for each option."""
        result = super(BenchmarkConfigSpec, cls)._GetOptionDecoderConstructions()
        result.update({
            'description': (option_decoders.StringDecoder, {'default': None}),
            'flags': (_FlagsDecoder, {}),
            'vm_groups': (_VmGroupsDecoder, {'default': {}}),
            'spark_service': (_SparkServiceDecoder, {'default': None})})
        return result

    def _DecodeAndInit(self, component_full_name, config, decoders, flag_values):
        """Decodes the config, handling 'flags' first so that the rest of
        the options are decoded under the redirected flag values."""
        decoders = decoders.copy()
        self.flags = decoders.pop('flags').Decode(config.pop('flags', None),
                                                  component_full_name,
                                                  flag_values)
        with self.RedirectFlags(flag_values):
            super(BenchmarkConfigSpec, self)._DecodeAndInit(
                component_full_name, config, decoders, flag_values)

    @contextlib.contextmanager
    def RedirectFlags(self, flag_values):
        """Context manager under which flag_values reads from self.flags."""
        with flag_util.FlagDictSubstitution(flag_values, lambda: self.flags):
            yield
class Worktree(object):
    """Wrapper for accessing a git worktree for a specific process.

    To access the worktree call .get().  Mutating methods are guarded
    by @mut() and must be used via as_mut(lock).
    """

    def __init__(self, repo, process_name):
        self.repo = repo
        self.pygit2_repo = pygit2_get(repo)
        self._worktree = None  # lazily-created git.Repo for the worktree
        self.process_name = process_name
        # Worktree name is the process name tuple joined with dashes.
        self.worktree_name = "-".join(str(item) for item in self.process_name.as_tuple())
        self.path = os.path.join(env.config["root"],
                                 env.config["paths"]["worktrees"],
                                 os.path.basename(repo.working_dir),
                                 process_name.subtype,
                                 process_name.obj_id)
        self._lock = None

    def as_mut(self, lock):
        """Return a guard granting mutation rights under `lock`."""
        return MutGuard(lock, self)

    @property
    def lock_key(self):
        return (self.process_name.subtype, self.process_name.obj_id)

    @mut()
    def get(self):
        """Return a git.Repo for the worktree, creating it on demand."""
        if self._worktree is None:
            all_worktrees = {item.name: item for item in worktrees(self.pygit2_repo)}
            count = len(all_worktrees)
            max_count = get_max_worktree_count(self.repo)
            if max_count and count >= max_count:
                # Make room before adding one more worktree.
                cleanup_repo(self.pygit2_repo, max_count - 1)
            path_exists = os.path.exists(self.path)
            if self.worktree_name in all_worktrees and not path_exists:
                # Registered but missing on disk: prune the stale entry.
                prune_worktrees(self.pygit2_repo)
                del all_worktrees[self.worktree_name]
            if self.worktree_name not in all_worktrees:
                if path_exists:
                    # Unregistered leftovers on disk: remove before creating.
                    logger.warning("Found existing content in worktree path %s, removing" % self.path)
                    shutil.rmtree(self.path)
                logger.info("Creating worktree %s at %s" % (self.worktree_name, self.path))
                if not os.path.exists(os.path.dirname(self.path)):
                    os.makedirs(os.path.dirname(self.path))
                worktree = self.pygit2_repo.add_worktree(
                    self.worktree_name,
                    os.path.abspath(self.path),
                    self.pygit2_repo.lookup_reference(
                        "refs/heads/%s" % self.process_name))
            else:
                worktree = self.pygit2_repo.lookup_worktree(self.worktree_name)
            assert os.path.exists(self.path)
            assert worktree.path == self.path
            self._worktree = git.Repo(self.path)
        return self._worktree

    @mut()
    def delete(self):
        """Delete the worktree from disk and from the repo, if present."""
        if not os.path.exists(self.path):
            return
        try:
            worktree = self.pygit2_repo.lookup_worktree(self.worktree_name)
        except Exception:
            # Lookup can fail for stale/renamed worktrees; fall back to scan.
            worktree = None
        if worktree is None:
            for worktree in worktrees(self.pygit2_repo):
                if worktree.path == self.path:
                    break
            else:
                # Nothing registered at this path; nothing to delete.
                return
        assert worktree.path == self.path
        delete_worktree(self.process_name, worktree)
class ExternalAddressbook(AddressBook): <NEW_LINE> <INDENT> def __init__(self, commandline, regex, reflags=0, external_filtering=True, **kwargs): <NEW_LINE> <INDENT> AddressBook.__init__(self, **kwargs) <NEW_LINE> self.commandline = commandline <NEW_LINE> self.regex = regex <NEW_LINE> self.reflags = reflags <NEW_LINE> self.external_filtering = external_filtering <NEW_LINE> <DEDENT> def get_contacts(self): <NEW_LINE> <INDENT> return self._call_and_parse(self.commandline) <NEW_LINE> <DEDENT> def lookup(self, prefix): <NEW_LINE> <INDENT> if self.external_filtering: <NEW_LINE> <INDENT> return self._call_and_parse(self.commandline + " " + prefix) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return AddressBook.lookup(self, prefix) <NEW_LINE> <DEDENT> <DEDENT> def _call_and_parse(self, commandline): <NEW_LINE> <INDENT> cmdlist = split_commandstring(commandline) <NEW_LINE> resultstring, errmsg, retval = call_cmd(cmdlist) <NEW_LINE> if retval != 0: <NEW_LINE> <INDENT> msg = 'abook command "%s" returned with ' % commandline <NEW_LINE> msg += 'return code %d' % retval <NEW_LINE> if errmsg: <NEW_LINE> <INDENT> msg += ':\n%s' % errmsg <NEW_LINE> <DEDENT> raise AddressbookError(msg) <NEW_LINE> <DEDENT> if not resultstring: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> lines = resultstring.splitlines() <NEW_LINE> res = [] <NEW_LINE> for l in lines: <NEW_LINE> <INDENT> m = re.match(self.regex, l, self.reflags) <NEW_LINE> if m: <NEW_LINE> <INDENT> info = m.groupdict() <NEW_LINE> if 'email' and 'name' in info: <NEW_LINE> <INDENT> email = info['email'].strip() <NEW_LINE> name = info['name'] <NEW_LINE> res.append((name, email)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return res | :class:`AddressBook` that parses a shell command's output | 62598fb25166f23b2e243462 |
class AGSDiscoveryImageCatalogPage(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{A3D46959-CF28-47FA-BCB8-BA69D1DFA56A}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C0FC1503-7E6F-11D2-AABF-00C04FA375F1}', 10, 2) | Esri AGS Image Catalog Parameters Property Page. | 62598fb2097d151d1a2c10b5 |
class ApplicationAPIError(APIError): <NEW_LINE> <INDENT> pass | Exception raised when an :class:`ApplicationAPI` request fails. | 62598fb27047854f4633f463 |
@dataclass <NEW_LINE> class Lesson(DB.base, StandardFields): <NEW_LINE> <INDENT> language: str <NEW_LINE> __tablename__ = "lesson" <NEW_LINE> language = Column(String(LANG_ID_LEN), nullable=False) <NEW_LINE> versions = relationship( "LessonVersion", back_populates="lesson", cascade="all, delete" ) | Represent a logical lesson. | 62598fb256ac1b37e6302273 |
class RunnerResults(dict):
    """Wraps the results of parsed module_runner output.

    The result may be used just like it is in Ansible::

        result['contacted']['server']['rc']

    or alternatively via attribute-style accessors::

        result.rc('server')
    """

    def __init__(self, results):
        self.update(results)

    def __getattr__(self, key):
        # Any unknown attribute becomes a per-server lookup callable.
        return lambda server: self.acquire(server, key)

    def acquire(self, server, key):
        """Return `key` for `server`, or None if it wasn't contacted."""
        if server in self['contacted']:
            return self['contacted'][server][key]
        return None
class SplitRoute(plugin.InputPreparationPlugin):
    """Input preparation plugin: reads the BOM production orders and,
    for every component, splits the design steps (ENG/CAD) out of the
    route into a separate Dream.OrderDesign component."""

    # All technology steps a route step may legally declare.
    ROUTE_STEPS_SET = set(["ENG", "CAD", "CAM", "MILL", "MILL-SET", "TURN",
                           "DRILL", "QUAL", "EDM", "EDM-SET", "ASSM", "MAN",
                           "INJM", "INJM-MAN", "INJM-SET"])
    # Steps belonging to the design phase, to be split off.
    DESIGN_ROUTE_STEPS_SET = set(["ENG", "CAD"])

    def preprocess(self, data):
        """Split component routes in-place and return the data dict.

        Raises AssertionError if a step declares an unknown technology.
        """
        orders = data["input"]["BOM"]["productionOrders"]
        for order in orders:
            orderComponents = order.get("componentsList", [])
            componentsToAdd = []
            for index, component in enumerate(orderComponents):
                route = component.get("route", [])
                design_step_list = []
                # Iterate over a copy so popping from `route` is safe.
                routeList = copy.deepcopy(route)
                i = 0
                for step in routeList:
                    stepTechnology = step.get('technology', [])
                    assert stepTechnology in self.ROUTE_STEPS_SET, 'the technology provided does not exist'
                    if stepTechnology in self.DESIGN_ROUTE_STEPS_SET:
                        design_step_list.append(step)
                        route.pop(i)
                    else:
                        i += 1
                if design_step_list:
                    design = {"name": component.get("name", "") + "_Design",
                              "id": component.get("id", "") + "_D",
                              "quantity": component.get("quantity", 1),
                              "route": design_step_list,
                              "_class": "Dream.OrderDesign"}
                    componentsToAdd.append(design)
                    component["_class"] = "Dream.Mould"
                else:
                    # FIX: the original wrote component["class"] here while
                    # every other class marker in this plugin uses "_class".
                    component["_class"] = "Dream.OrderComponent"
            # Append the new design components after iterating.
            for design in componentsToAdd:
                orderComponents.append(design)
        return data
class Item(GameObject): <NEW_LINE> <INDENT> def __init__(self, x, y, levelnumber, char, **kwargs): <NEW_LINE> <INDENT> GameObject.__init__(self,x,y,levelnumber, char, **kwargs) <NEW_LINE> if self.char == ":": <NEW_LINE> <INDENT> self.longtext = self.generate_text() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.longtext = Game.tiledict[self.char][1] <NEW_LINE> <DEDENT> self.hitpoints = 0 <NEW_LINE> self.power = 0 <NEW_LINE> if self.char =="t": <NEW_LINE> <INDENT> self.hitpoints = 5 <NEW_LINE> self.power = random.randint(1,10) <NEW_LINE> <DEDENT> <DEDENT> def generate_text(self): <NEW_LINE> <INDENT> word1 = random.choice(("a big", "a small", "a medium", "an epic", "a handsome","a rotting", "an expensive", "a cheap")) <NEW_LINE> word2 = random.choice(("yellow", "green", "blue", "red", "white", "black","rusty", "shiny", "blood-smeared")) <NEW_LINE> word3 = random.choice(("ring", "drink", "flower", "wand", "fruit")) <NEW_LINE> return " ".join((word1, word2, word3)) | individual Item with all attributes | 62598fb2bf627c535bcb1528 |
class ZWaveSensor(zwave.ZWaveDeviceEntity): <NEW_LINE> <INDENT> def __init__(self, values): <NEW_LINE> <INDENT> zwave.ZWaveDeviceEntity.__init__(self, values, DOMAIN) <NEW_LINE> self.update_properties() <NEW_LINE> <DEDENT> def update_properties(self): <NEW_LINE> <INDENT> self._state = self.values.primary.data <NEW_LINE> self._units = self.values.primary.units <NEW_LINE> <DEDENT> @property <NEW_LINE> def force_update(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._units | Representation of a Z-Wave sensor. | 62598fb2460517430c4320a2 |
class ComponentTests(ossie.utils.testing.ScaComponentTestCase): <NEW_LINE> <INDENT> def testScaBasicBehavior(self): <NEW_LINE> <INDENT> execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) <NEW_LINE> execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) <NEW_LINE> self.launch(execparams) <NEW_LINE> self.assertNotEqual(self.comp, None) <NEW_LINE> self.assertEqual(self.comp.ref._non_existent(), False) <NEW_LINE> self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) <NEW_LINE> expectedProps = [] <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) <NEW_LINE> props = self.comp.query([]) <NEW_LINE> props = dict((x.id, any.from_any(x.value)) for x in props) <NEW_LINE> for expectedProp in expectedProps: <NEW_LINE> <INDENT> self.assertEquals(props.has_key(expectedProp.id), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_uses(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_usesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_provides(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_providesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a(port.get_repid()), True) <NEW_LINE> <DEDENT> self.comp.start() <NEW_LINE> self.comp.stop() <NEW_LINE> self.comp.releaseObject() | Test for all component implementations in skiphead_ss | 62598fb2a8370b77170f0465 |
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class AuthMethodHandler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def authenticate(self, context, auth_payload, auth_context): <NEW_LINE> <INDENT> raise exception.Unauthorized() | Abstract base class for an authentication plugin. | 62598fb2e5267d203ee6b98d |
class SplitConv(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, num_ways, divisible_by=8, batch_norm=False, activation_fn=torch.nn.ReLU(inplace=True)): <NEW_LINE> <INDENT> super(SplitConv, self).__init__() <NEW_LINE> if num_ways == 1 or min(in_channels // num_ways, out_channels // num_ways) < divisible_by: <NEW_LINE> <INDENT> input_splits = [in_channels] <NEW_LINE> output_splits = [out_channels] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> input_splits = _split_divisible(in_channels, num_ways, divisible_by=divisible_by) <NEW_LINE> output_splits = _split_divisible(out_channels, num_ways, divisible_by=divisible_by) <NEW_LINE> <DEDENT> branches = [] <NEW_LINE> for in_size, out_size in zip(input_splits, output_splits): <NEW_LINE> <INDENT> branch = [torch.nn.Conv2d(in_channels=in_size, out_channels=out_size, kernel_size=1, bias=False)] <NEW_LINE> if batch_norm: <NEW_LINE> <INDENT> branch += [torch.nn.BatchNorm2d(num_features=out_size, **_BATCH_NORM_PARAMS)] <NEW_LINE> <DEDENT> if activation_fn is not None: <NEW_LINE> <INDENT> branch += [activation_fn] <NEW_LINE> <DEDENT> branches.append(torch.nn.Sequential(*branch)) <NEW_LINE> <DEDENT> self._input_splits = input_splits <NEW_LINE> self._branches = torch.nn.Sequential(*branches) <NEW_LINE> <DEDENT> def forward(self, input_tensor): <NEW_LINE> <INDENT> inputs = [input_tensor] <NEW_LINE> if len(self._input_splits) > 1: <NEW_LINE> <INDENT> inputs = torch.split(input_tensor, self._input_splits, dim=1) <NEW_LINE> <DEDENT> outputs = [branch(x) for branch, x in zip(self._branches, inputs)] <NEW_LINE> return torch.cat(outputs, dim=1) | Creates a split convolution. | 62598fb28a349b6b436862c5 |
class Dialog_Xorg(QtGui.QDialog): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> QtGui.QDialog.__init__(self) <NEW_LINE> self.__the_dialog = Ui_DialogXorg() <NEW_LINE> self.__the_dialog.setupUi(self) <NEW_LINE> self.xorg_list = Utils.get_x11_windows_id_list() <NEW_LINE> self.add_xorg_windows() <NEW_LINE> self.valid = False <NEW_LINE> self.connect(self.__the_dialog.ButtonBox,QtCore.SIGNAL('accepted()'),self.__accepted) <NEW_LINE> self.connect(self.__the_dialog.ButtonBox,QtCore.SIGNAL('rejected()'),self.__reject) <NEW_LINE> <DEDENT> def add_xorg_windows(self): <NEW_LINE> <INDENT> if self.xorg_list: <NEW_LINE> <INDENT> the_list = [] <NEW_LINE> for x in self.xorg_list: <NEW_LINE> <INDENT> the_list.append(x[0]) <NEW_LINE> <DEDENT> self.__the_dialog.ComboBoxXorg.addItems(the_list) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__the_dialog.ComboBoxXorg.addItem("No disponible") <NEW_LINE> <DEDENT> <DEDENT> def remove_list_elements(self): <NEW_LINE> <INDENT> self.__the_dialog.ComboBoxXorg.clear() <NEW_LINE> <DEDENT> def get_xorg_window_id_selected(self): <NEW_LINE> <INDENT> item_selected = self.__the_dialog.ComboBoxXorg.currentIndex() <NEW_LINE> xid = self.xorg_list[item_selected][1] <NEW_LINE> if xid!=0: <NEW_LINE> <INDENT> xid = int(xid,0) <NEW_LINE> <DEDENT> return (self.xorg_list[item_selected][0],xid) <NEW_LINE> <DEDENT> def __accepted(self): <NEW_LINE> <INDENT> self.valid = True <NEW_LINE> self.close() <NEW_LINE> <DEDENT> def __reject(self): <NEW_LINE> <INDENT> self.close() | classdocs | 62598fb2dc8b845886d53640 |
class PackException(Exception): <NEW_LINE> <INDENT> pass | Error while unpacking. | 62598fb25fdd1c0f98e5e016 |
class NoTimeWaitTCPServer(socketserver.ThreadingTCPServer): <NEW_LINE> <INDENT> def server_bind(self): <NEW_LINE> <INDENT> self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> self.socket.bind(self.server_address) | when a socket does is shutdown dance, it ends up in a TIME-WAIT state,
which can prevent rebinding on it quickly. Here we say "shut up, socket,
let me rebind anyway even if you're in TIME-WAIT." That will teach it. | 62598fb230bbd722464699bd |
class ValidatedLineEdit(QtWidgets.QLineEdit): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.message_field = kwargs.pop('message_field', None) <NEW_LINE> super(ValidatedLineEdit, self).__init__(*args, **kwargs) <NEW_LINE> self.message_field.setVisible(False) <NEW_LINE> self.icon = None <NEW_LINE> self.is_valid = True <NEW_LINE> self.message = '' <NEW_LINE> <DEDENT> def set_valid(self): <NEW_LINE> <INDENT> self.set_icon(None) <NEW_LINE> self.is_valid = True <NEW_LINE> self.message = '' <NEW_LINE> if self.message_field: <NEW_LINE> <INDENT> self.message_field.setVisible(False) <NEW_LINE> <DEDENT> <DEDENT> def set_invalid(self, message=''): <NEW_LINE> <INDENT> self.icon = self.style() .standardIcon(QtWidgets.QStyle.SP_MessageBoxCritical) <NEW_LINE> self.set_icon(self.icon) <NEW_LINE> self.is_valid = False <NEW_LINE> if self.message_field: <NEW_LINE> <INDENT> self.message_field.setText(message) <NEW_LINE> if self.isVisible(): <NEW_LINE> <INDENT> self.message_field.setVisible(True) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def set_icon(self, icon=None): <NEW_LINE> <INDENT> self.icon = icon <NEW_LINE> if icon is None: <NEW_LINE> <INDENT> self.setTextMargins(1, 1, 1, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setTextMargins(1, 1, 20, 1) <NEW_LINE> <DEDENT> <DEDENT> def paintEvent(self, event): <NEW_LINE> <INDENT> super(ValidatedLineEdit, self).paintEvent(event) <NEW_LINE> if self.icon is not None: <NEW_LINE> <INDENT> painter = QtGui.QPainter(self) <NEW_LINE> pixmap = self.icon.pixmap(self.height() - 6, self.height() - 6) <NEW_LINE> x = self.width() - self.height() + 4 <NEW_LINE> painter.drawPixmap(x, 3, pixmap) <NEW_LINE> painter.setPen(QtGui.QColor("lightgrey")) <NEW_LINE> painter.drawLine(x - 2, 3, x - 2, self.height() - 4) <NEW_LINE> <DEDENT> <DEDENT> def setVisible(self, vis): <NEW_LINE> <INDENT> super(ValidatedLineEdit, self).setVisible(vis) <NEW_LINE> if vis: <NEW_LINE> <INDENT> if not self.is_valid: <NEW_LINE> <INDENT> 
self.message_field.setVisible(vis) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.message_field.setVisible(vis) | A custom line edit that can display an icon
| 62598fb2a17c0f6771d5c2be |
class CallbackManager(object): <NEW_LINE> <INDENT> def __init__(self, pre=None, post=None): <NEW_LINE> <INDENT> self.pre = pre if pre is not None else _nop <NEW_LINE> self.post = post if post is not None else _nop <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return _ManagedCallbackContext(self.pre, self.post, args, kwargs) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self.pre() <NEW_LINE> <DEDENT> def __exit__(self, *excinfo): <NEW_LINE> <INDENT> self.post() | Create a context manager from a pre-execution callback and a
post-execution callback.
Parameters
----------
pre : (...) -> any, optional
A pre-execution callback. This will be passed ``*args`` and
``**kwargs``.
post : (...) -> any, optional
A post-execution callback. This will be passed ``*args`` and
``**kwargs``.
Notes
-----
The enter value of this context manager will be the result of calling
``pre(*args, **kwargs)``
Examples
--------
>>> def pre(where):
... print('entering %s block' % where)
>>> def post(where):
... print('exiting %s block' % where)
>>> manager = CallbackManager(pre, post)
>>> with manager('example'):
... print('inside example block')
entering example block
inside example block
exiting example block
These are reusable with different args:
>>> with manager('another'):
... print('inside another block')
entering another block
inside another block
exiting another block | 62598fb244b2445a339b69b6 |
class AriStatusBinFile(AriBinFile): <NEW_LINE> <INDENT> def __init__(self, sdir, status): <NEW_LINE> <INDENT> fn = self._getOutFname(status) <NEW_LINE> fname = "{0}/{1}".format(sdir, fn) <NEW_LINE> super( AriStatusBinFile, self ).__init__(fname) <NEW_LINE> <DEDENT> def _getOutFname(self, status): <NEW_LINE> <INDENT> return 'status.{0}.{1:08d}-{2:06d}.dat'.format( status.GetMacAdrAsStr(), status.GetStatusTime().GetDate(), status.GetStatusTime().GetTime()) | An AriBinFile that knows how to set its own name given a
TSnStatusUpdate object. A unique filename is guaranteed by
AriBinFile. | 62598fb22ae34c7f260ab16b |
class QueryTestablePermissionsRequest(_messages.Message): <NEW_LINE> <INDENT> fullResourceName = _messages.StringField(1) <NEW_LINE> pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32) <NEW_LINE> pageToken = _messages.StringField(3) | A request to get permissions which can be tested on a resource.
Fields:
fullResourceName: Required. The full resource name to query from the list
of testable permissions. The name follows the Google Cloud Platform
resource format. For example, a Cloud Platform project with id `my-
project` will be named `//cloudresourcemanager.googleapis.com/projects
/my-project`.
pageSize: Optional limit on the number of permissions to include in the
response.
pageToken: Optional pagination token returned in an earlier
QueryTestablePermissionsRequest. | 62598fb21f5feb6acb162ca8 |
class AddURLResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | Retrieve the value for the "Response" output from this choreography execution. ((integer) The response from Instapaper. Successful requests will return a 201 status code.) | 62598fb255399d3f056265a3 |
class DigitalVILMapper(DataMapper): <NEW_LINE> <INDENT> def __init__(self, prod): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> lin_scale = float16(prod.thresholds[0]) <NEW_LINE> lin_offset = float16(prod.thresholds[1]) <NEW_LINE> log_start = prod.thresholds[2] <NEW_LINE> log_scale = float16(prod.thresholds[3]) <NEW_LINE> log_offset = float16(prod.thresholds[4]) <NEW_LINE> ind = np.arange(255) <NEW_LINE> self.lut[2:log_start] = (ind[2:log_start] - lin_offset) / lin_scale <NEW_LINE> self.lut[log_start:-1] = np.exp((ind[log_start:] - log_offset) / log_scale) | Mapper for digital VIL products. | 62598fb2851cf427c66b8345 |
class BeerIngredients(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'ingredients' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> ingredient_name = db.Column(db.String, nullable=False) <NEW_LINE> beer_id = db.Column(db.Integer, db.ForeignKey('beers.id'), nullable=False) <NEW_LINE> def save(self): <NEW_LINE> <INDENT> db.session.add(self) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> db.session.delete(self) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def update(ingredient, data): <NEW_LINE> <INDENT> ingredient.ingredient_name = data['ingredient_name'] <NEW_LINE> ingredient.beer_id = data['beer_id'] <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_ingredients(): <NEW_LINE> <INDENT> return BeerIngredients.query.all() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_ingredient_id(_id): <NEW_LINE> <INDENT> return BeerIngredients.query.get(_id) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def filter_ingredient_name(name): <NEW_LINE> <INDENT> return BeerIngredients.query.filter(BeerIngredients.ingredient_name == name).all() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def filter_beer_id(beer_id): <NEW_LINE> <INDENT> return BeerIngredients.query.filter(BeerIngredients.beer_id == beer_id).all() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'ingredients(id={self.id}, name={self.ingredient_name}, beer_id={self.beer_id})' | Beer Ingredients Model | 62598fb2442bda511e95c4e1 |
class RealMatrix(Matrix): <NEW_LINE> <INDENT> def __new__(cls, stream): <NEW_LINE> <INDENT> return Matrix.__new__(cls, stream, precision=stream.precision) | Matrix with floats/doubles, depending on precision of stream. With index.
This requires the stream to have an attribute 'precision', which
determines whether floating points are encoded in single (4) or
in double (8) precision. | 62598fb27047854f4633f464 |
class TestLocalApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = isi_sdk_8_2_2.api.local_api.LocalApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_cluster_time(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_upgrade_cluster_firmware_status(self): <NEW_LINE> <INDENT> pass | LocalApi unit test stubs | 62598fb21b99ca400228f575 |
class Buttons(Device): <NEW_LINE> <INDENT> CLICKED, PRESSED, RELEASE = range(3) <NEW_LINE> PUD_UP = gpio.PUD_UP <NEW_LINE> PUD_DOWN = gpio.PUD_DOWN <NEW_LINE> def __init__(self, iface, name=None, clicked=None, pressed=None, release=None): <NEW_LINE> <INDENT> if not isinstance(iface, InterfaceGPIO): <NEW_LINE> <INDENT> raise InterfaceNoSupported(self.__class__, iface.__class__) <NEW_LINE> <DEDENT> super(Buttons, self).__init__(iface, name) <NEW_LINE> if not any((clicked, pressed, release)): <NEW_LINE> <INDENT> raise Exception('The button object needs at least a callback to work') <NEW_LINE> <DEDENT> if clicked and not hasattr(clicked, '__call__'): <NEW_LINE> <INDENT> raise InvalidFunctionError('clicked') <NEW_LINE> <DEDENT> if pressed and not hasattr(pressed, '__call__'): <NEW_LINE> <INDENT> raise InvalidFunctionError('pressed') <NEW_LINE> <DEDENT> if release and not hasattr(release, '__call__'): <NEW_LINE> <INDENT> raise InvalidFunctionError('release') <NEW_LINE> <DEDENT> self._on_clicked = clicked <NEW_LINE> self._on_pressed = pressed <NEW_LINE> self._on_release = release <NEW_LINE> <DEDENT> def __on_clicked__(self, pin): <NEW_LINE> <INDENT> if not self._on_clicked: <NEW_LINE> <INDENT> logger.error('The "clicked" callback was not defined') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._on_clicked(self, pin, self.CLICKED) <NEW_LINE> <DEDENT> <DEDENT> def __on_pressed__(self, pin): <NEW_LINE> <INDENT> if not self._on_pressed: <NEW_LINE> <INDENT> logger.error('The "pressed" callback was not defined') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._on_pressed(self, pin, self.PRESSED) <NEW_LINE> <DEDENT> <DEDENT> def __on_release__(self, pin): <NEW_LINE> <INDENT> if not self._on_release: <NEW_LINE> <INDENT> logger.error('The "release" callback was not defined') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._on_release(self, pin, self.RELEASE) <NEW_LINE> <DEDENT> <DEDENT> def setup(self, pin, event, pud=gpio.PUD_UP, bouncetime=200): <NEW_LINE> <INDENT> 
iface = self.get_interface() <NEW_LINE> if not pin in iface.get_input_channels(): <NEW_LINE> <INDENT> raise NoChannelInterfaceError(pin, 'input') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> edge, callback = {self.CLICKED: (gpio.RISING, self.__on_clicked__), self.PRESSED: (gpio.BOTH, self.__on_pressed__), self.RELEASE: (gpio.FALLING, self.__on_release__)}[event] <NEW_LINE> iface.setup(pin, gpio.IN, 0, callback, pud, edge, bouncetime) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise Exception('Configuring channel {0} in an invalid event mode.'.format(pin)) | Supervise the state of one or more inputs channels when they change the state
to up or down. It calls a callback function and passes the channel that generated
the event and the new channel status. | 62598fb25fcc89381b266191 |
class GradResearchArea(GradHappening): <NEW_LINE> <INDENT> trans_areas, trans_choices, trans_acad_org = None, None, None <NEW_LINE> def __init__(self, emplid, adm_appl_nbr, acad_org, area, choice): <NEW_LINE> <INDENT> if not GradResearchArea.trans_areas: <NEW_LINE> <INDENT> GradMetadata.trans_areas, GradMetadata.trans_choices = research_translation_tables() <NEW_LINE> units = Unit.objects.exclude(acad_org__isnull=True).exclude(acad_org='') <NEW_LINE> GradResearchArea.trans_acad_org = dict((u.acad_org, u) for u in units) <NEW_LINE> <DEDENT> self.emplid = emplid <NEW_LINE> self.adm_appl_nbr = adm_appl_nbr <NEW_LINE> self.acad_org = acad_org <NEW_LINE> self.area = area <NEW_LINE> self.choice = choice <NEW_LINE> self.unit = GradResearchArea.trans_acad_org.get(acad_org, None) <NEW_LINE> self.strm = '9999' <NEW_LINE> self.effdt = datetime.date(3000, 1, 1) <NEW_LINE> self.grad_program = True <NEW_LINE> self.stdnt_car_nbr = None <NEW_LINE> self.in_career = False <NEW_LINE> <DEDENT> def find_local_data(self, student_info, verbosity): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update_local_data(self, student_info, verbosity, dry_run): <NEW_LINE> <INDENT> career = student_info['career'] <NEW_LINE> ch = GradMetadata.trans_choices.get((self.acad_org, self.area, self.choice), None) <NEW_LINE> if ch: <NEW_LINE> <INDENT> career.research_areas.add(ch) | The research area given by this student on his/her application.
There may be several of these: they end up joined together into a text field. | 62598fb2cc0a2c111447b09c |
class LoginForm(Form): <NEW_LINE> <INDENT> name = StringField('Username', validators=[ Required(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0, 'Usernames must have only letters, ' 'numbers, dots or underscores')]) <NEW_LINE> password = PasswordField('password', validators=[InputRequired('Please enter your password.')]) <NEW_LINE> remember_me = BooleanField('remember_me', default=False) | 用户登陆 | 62598fb221bff66bcd722cf1 |
class LSUN(data.Dataset): <NEW_LINE> <INDENT> def __init__(self, db_path, classes='train', transform=None, target_transform=None): <NEW_LINE> <INDENT> categories = ['bedroom', 'bridge', 'church_outdoor', 'classroom', 'conference_room', 'dining_room', 'kitchen', 'living_room', 'restaurant', 'tower'] <NEW_LINE> dset_opts = ['train', 'val', 'test'] <NEW_LINE> self.db_path = db_path <NEW_LINE> if type(classes) == str and classes in dset_opts: <NEW_LINE> <INDENT> if classes == 'test': <NEW_LINE> <INDENT> classes = [classes] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> classes = [c + '_' + classes for c in categories] <NEW_LINE> <DEDENT> <DEDENT> if type(classes) == list: <NEW_LINE> <INDENT> for c in classes: <NEW_LINE> <INDENT> c_short = c.split('_') <NEW_LINE> c_short.pop(len(c_short) - 1) <NEW_LINE> c_short = '_'.join(c_short) <NEW_LINE> if c_short not in categories: <NEW_LINE> <INDENT> raise(ValueError('Unknown LSUN class: ' + c_short + '.' 'Options are: ' + str(categories))) <NEW_LINE> <DEDENT> c_short = c.split('_') <NEW_LINE> c_short = c_short.pop(len(c_short) - 1) <NEW_LINE> if c_short not in dset_opts: <NEW_LINE> <INDENT> raise(ValueError('Unknown postfix: ' + c_short + '.' 
'Options are: ' + str(dset_opts))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise(ValueError('Unknown option for classes')) <NEW_LINE> <DEDENT> self.classes = classes <NEW_LINE> self.dbs = [] <NEW_LINE> for c in self.classes: <NEW_LINE> <INDENT> self.dbs.append(LSUNClass( db_path=db_path + '/' + c + '_lmdb', transform=transform)) <NEW_LINE> <DEDENT> self.indices = [] <NEW_LINE> count = 0 <NEW_LINE> for db in self.dbs: <NEW_LINE> <INDENT> count += len(db) <NEW_LINE> self.indices.append(count) <NEW_LINE> <DEDENT> self.length = count <NEW_LINE> self.target_transform = target_transform <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> target = 0 <NEW_LINE> sub = 0 <NEW_LINE> for ind in self.indices: <NEW_LINE> <INDENT> if index < ind: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> target += 1 <NEW_LINE> sub += ind <NEW_LINE> <DEDENT> db = self.dbs[target] <NEW_LINE> index = index - sub <NEW_LINE> if self.target_transform is not None: <NEW_LINE> <INDENT> target = self.target_transform(target) <NEW_LINE> <DEDENT> img, _ = db[index] <NEW_LINE> return img, target <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.length <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__class__.__name__ + ' (' + self.db_path + ')' | db_path = root directory for the database files
classes = 'train' | 'val' | 'test' | ['bedroom_train', 'church_train', ...] | 62598fb24e4d5625663724b1 |
class RDFS_ClassMeta(type): <NEW_LINE> <INDENT> def __new__(meta_cls, name, bases, dct): <NEW_LINE> <INDENT> dct.setdefault("__properties__", []) <NEW_LINE> dct.setdefault("__uri__", None) <NEW_LINE> return super(RDFS_ClassMeta, meta_cls).__new__(meta_cls, name, bases, dct) <NEW_LINE> <DEDENT> def __init__(cls, name, bases, dct): <NEW_LINE> <INDENT> cls.label = LiteralPropertyProxy(name="label", uri=RDFS.label) <NEW_LINE> cls.comment = LiteralPropertyProxy(name="comment", uri=RDFS.comment) <NEW_LINE> cls.seeAlso = PropertyProxy(name="seeAlso", uri=RDFS.seeAlso) <NEW_LINE> cls.isDefinedBy = PropertyProxy(name="isDefinedBy", uri=RDFS.isDefinedBy) <NEW_LINE> cls.value = PropertyProxy(name="value", uri=RDF.value) <NEW_LINE> cls.prefLabel = LiteralPropertyProxy(name="prefLabel", uri=SKOS.prefLabel) <NEW_LINE> Session.get_current().register_class(cls) <NEW_LINE> return super(RDFS_ClassMeta, cls).__init__(name, bases, dct) | Metaclass for the `RDFS_Class` class.
This metaclass governs the creation of all classes which correspond
to an RDFS.Class resource. | 62598fb2167d2b6e312b6ffd |
class FontSet(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'ComplexScript': 'str', 'EastAsian': 'str', 'Latin': 'str' } <NEW_LINE> self.attributeMap = { 'ComplexScript': 'ComplexScript','EastAsian': 'EastAsian','Latin': 'Latin'} <NEW_LINE> self.ComplexScript = None <NEW_LINE> self.EastAsian = None <NEW_LINE> self.Latin = None | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb2460517430c4320a3 |
class JSONEncoderWithRDFPrimitivesSupport(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, (rdfvalue.RDFInteger, rdfvalue.RDFBool, rdfvalue.RDFString)): <NEW_LINE> <INDENT> return obj.SerializeToDataStore() <NEW_LINE> <DEDENT> return json.JSONEncoder.default(self, obj) | Custom JSON encoder that encodes renderers output.
Custom encoder is required to facilitate usage of primitive values -
booleans, integers and strings - in renderers responses.
If renderer references an RDFString, RDFInteger or and RDFBOol when building a
response, it will lead to JSON encoding failure when response encoded,
unless this custom encoder is used. Another way to solve this issue would be
to explicitly call api_value_renderers.RenderValue on every value returned
from the renderer, but it will make the code look overly verbose and dirty. | 62598fb2ff9c53063f51a6d7 |
class MockModel(nn.Module): <NEW_LINE> <INDENT> r <NEW_LINE> def __init__(self, *args, use_ner=False, **kwargs): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.use_ner = use_ner <NEW_LINE> self.device = torch.device("cpu") <NEW_LINE> self.transformer = nn.Linear(512, 2) <NEW_LINE> self.head = nn.Linear(2, 2) <NEW_LINE> <DEDENT> def reset_weights(self): <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def forward(self, features: Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]): <NEW_LINE> <INDENT> if self.use_ner: <NEW_LINE> <INDENT> ner_out, transformer_inp = features <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> transformer_inp = features <NEW_LINE> <DEDENT> first_tensor_from_features = transformer_inp[0].float() <NEW_LINE> transformer_out = self.get_transformer()(first_tensor_from_features) <NEW_LINE> return self.get_head()(transformer_out) <NEW_LINE> <DEDENT> def to(self, device): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.get_head().to(device) <NEW_LINE> super().to(device) <NEW_LINE> <DEDENT> def get_transformer(self): <NEW_LINE> <INDENT> return self.transformer <NEW_LINE> <DEDENT> def get_head(self): <NEW_LINE> <INDENT> return self.head <NEW_LINE> <DEDENT> def get_init_kwargs(self): <NEW_LINE> <INDENT> return {} | Model imitating model for NTI RUCOS to test other components without problems with RAM | 62598fb28a349b6b436862c7 |
class Activity: <NEW_LINE> <INDENT> def __init__(self, activity_id, predecessor, duration, resource): <NEW_LINE> <INDENT> self.id = activity_id <NEW_LINE> self.predecessor = predecessor <NEW_LINE> self.duration = duration <NEW_LINE> self.resource = resource if resource is not None else None <NEW_LINE> self.est = None <NEW_LINE> self.lst = None <NEW_LINE> self.is_critical = False <NEW_LINE> self.level = [[], []] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.id | basic structure of an activity having an id, predecessor, duration and a resource | 62598fb2dd821e528d6d8fb9 |
class OAuth2Decorator(object): <NEW_LINE> <INDENT> def __init__(self, client_id, client_secret, scope, user_agent, auth_uri='https://accounts.google.com/o/oauth2/auth', token_uri='https://accounts.google.com/o/oauth2/token'): <NEW_LINE> <INDENT> self.flow = OAuth2WebServerFlow(client_id, client_secret, scope, user_agent, auth_uri, token_uri) <NEW_LINE> self.credentials = None <NEW_LINE> self._request_handler = None <NEW_LINE> <DEDENT> def oauth_required(self, method): <NEW_LINE> <INDENT> def check_oauth(request_handler, *args): <NEW_LINE> <INDENT> user = users.get_current_user() <NEW_LINE> if not user: <NEW_LINE> <INDENT> request_handler.redirect(users.create_login_url( request_handler.request.uri)) <NEW_LINE> return <NEW_LINE> <DEDENT> self.flow.params['state'] = request_handler.request.url <NEW_LINE> self._request_handler = request_handler <NEW_LINE> self.credentials = StorageByKeyName( CredentialsModel, user.user_id(), 'credentials').get() <NEW_LINE> if not self.has_credentials(): <NEW_LINE> <INDENT> return request_handler.redirect(self.authorize_url()) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> method(request_handler, *args) <NEW_LINE> <DEDENT> except AccessTokenRefreshError: <NEW_LINE> <INDENT> return request_handler.redirect(self.authorize_url()) <NEW_LINE> <DEDENT> <DEDENT> return check_oauth <NEW_LINE> <DEDENT> def oauth_aware(self, method): <NEW_LINE> <INDENT> def setup_oauth(request_handler, *args): <NEW_LINE> <INDENT> user = users.get_current_user() <NEW_LINE> if not user: <NEW_LINE> <INDENT> request_handler.redirect(users.create_login_url( request_handler.request.uri)) <NEW_LINE> return <NEW_LINE> <DEDENT> self.flow.params['state'] = request_handler.request.url <NEW_LINE> self._request_handler = request_handler <NEW_LINE> self.credentials = StorageByKeyName( CredentialsModel, user.user_id(), 'credentials').get() <NEW_LINE> method(request_handler, *args) <NEW_LINE> <DEDENT> return setup_oauth <NEW_LINE> <DEDENT> def has_credentials(self): <NEW_LINE> 
<INDENT> return self.credentials is not None and not self.credentials.invalid <NEW_LINE> <DEDENT> def authorize_url(self): <NEW_LINE> <INDENT> callback = self._request_handler.request.relative_url('/oauth2callback') <NEW_LINE> url = self.flow.step1_get_authorize_url(callback) <NEW_LINE> user = users.get_current_user() <NEW_LINE> memcache.set(user.user_id(), pickle.dumps(self.flow), namespace=OAUTH2CLIENT_NAMESPACE) <NEW_LINE> return url <NEW_LINE> <DEDENT> def http(self): <NEW_LINE> <INDENT> return self.credentials.authorize(httplib2.Http()) | Utility for making OAuth 2.0 easier.
Instantiate and then use with oauth_required or oauth_aware
as decorators on webapp.RequestHandler methods.
Example:
decorator = OAuth2Decorator(
client_id='837...ent.com',
client_secret='Qh...wwI',
scope='https://www.googleapis.com/auth/buzz',
user_agent='my-sample-app/1.0')
class MainHandler(webapp.RequestHandler):
@decorator.oauth_required
def get(self):
http = decorator.http()
# http is authorized with the user's Credentials and can be used
# in API calls | 62598fb2adb09d7d5dc0a616 |
class DataSpecificationUnknownTypeException(DataSpecificationException): <NEW_LINE> <INDENT> def __init__(self, type_id, command): <NEW_LINE> <INDENT> Exception.__init__( self, "Unknown id value {0:d} for data type during command " "{1:s}".format(type_id, command)) | An exception that indicates that the value of the requested type is unknown
| 62598fb25fc7496912d482c1 |
class S3Bucket: <NEW_LINE> <INDENT> def __init__(self, bucket): <NEW_LINE> <INDENT> self.bucket = bucket <NEW_LINE> <DEDENT> @property <NEW_LINE> def arn(self): <NEW_LINE> <INDENT> return s3_arn(self.bucket.name) <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> return aws.create_bucket( self.bucket.name, self.bucket.is_data_warehouse ) <NEW_LINE> <DEDENT> def mark_for_archival(self): <NEW_LINE> <INDENT> aws.tag_bucket(self.bucket.name, {"to-archive": "true"}) | Wraps a S3Bucket model to provide convenience methods for AWS | 62598fb2796e427e5384e820 |
class GroupPlacementViewServiceGrpcTransport(object): <NEW_LINE> <INDENT> _OAUTH_SCOPES = () <NEW_LINE> def __init__(self, channel=None, credentials=None, address='googleads.googleapis.com:443'): <NEW_LINE> <INDENT> if channel is not None and credentials is not None: <NEW_LINE> <INDENT> raise ValueError( 'The `channel` and `credentials` arguments are mutually ' 'exclusive.', ) <NEW_LINE> <DEDENT> if channel is None: <NEW_LINE> <INDENT> channel = self.create_channel( address=address, credentials=credentials, ) <NEW_LINE> <DEDENT> self._channel = channel <NEW_LINE> self._stubs = { 'group_placement_view_service_stub': group_placement_view_service_pb2_grpc.GroupPlacementViewServiceStub(channel), } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_channel( cls, address='googleads.googleapis.com:443', credentials=None, **kwargs): <NEW_LINE> <INDENT> return google.api_core.grpc_helpers.create_channel( address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def channel(self): <NEW_LINE> <INDENT> return self._channel <NEW_LINE> <DEDENT> @property <NEW_LINE> def get_group_placement_view(self): <NEW_LINE> <INDENT> return self._stubs[ 'group_placement_view_service_stub'].GetGroupPlacementView | gRPC transport class providing stubs for
google.ads.googleads.v1.services GroupPlacementViewService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC. | 62598fb297e22403b383af9a |
class Agent(): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, seed): <NEW_LINE> <INDENT> self.state_size = state_size <NEW_LINE> self.action_size = action_size <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> self.qnetwork_local = QNetwork(state_size, action_size, seed).to(device) <NEW_LINE> self.qnetwork_target = QNetwork(state_size, action_size, seed).to(device) <NEW_LINE> self.optimizer = optim.Adam(self.qnetwork_local.parameters(), lr=LR) <NEW_LINE> self.memory = ReplayBuffer(action_size, BUFFER_SIZE, BATCH_SIZE, seed, ALPHA, BETA) <NEW_LINE> self.t_step = 0 <NEW_LINE> self.beta = BETA <NEW_LINE> <DEDENT> def step(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> self.memory.add(self.t_step,state, action, reward, next_state, done) <NEW_LINE> self.t_step = (self.t_step + 1) <NEW_LINE> if self.t_step % UPDATE_EVERY == 0: <NEW_LINE> <INDENT> if len(self.memory) > BATCH_SIZE: <NEW_LINE> <INDENT> experiences = self.memory.sample(self.t_step) <NEW_LINE> self.learn(experiences, GAMMA, self.beta) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def act(self, state, eps=0.): <NEW_LINE> <INDENT> state = torch.from_numpy(state).float().unsqueeze(0).to(device) <NEW_LINE> self.qnetwork_local.eval() <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> action_values = self.qnetwork_local(state) <NEW_LINE> <DEDENT> self.qnetwork_local.train() <NEW_LINE> if random.random() > eps: <NEW_LINE> <INDENT> return np.argmax(action_values.cpu().data.numpy()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return random.choice(np.arange(self.action_size)) <NEW_LINE> <DEDENT> <DEDENT> def learn(self, experiences, gamma, beta): <NEW_LINE> <INDENT> sampled_times, states, actions, rewards, next_states, dones, priorities, sampling_weight = experiences <NEW_LINE> Q_max_action = torch.argmax(self.qnetwork_local(next_states), dim=1) <NEW_LINE> Q_targets_next = self.qnetwork_target(next_states).gather(1,Q_max_action.unsqueeze(1)) <NEW_LINE> Q_targets = rewards + (gamma * 
Q_targets_next * (1 - dones)) <NEW_LINE> Q_expected = self.qnetwork_local(states).gather(1, actions) <NEW_LINE> TD_Error= torch.abs(Q_targets-Q_expected).cpu().data.numpy() + EPSILON <NEW_LINE> self.memory.update_priorities(sampled_times ,TD_Error) <NEW_LINE> beta = beta**(1-beta) <NEW_LINE> is_weight = ALPHA*(1/BATCH_SIZE*1/sampling_weight)**beta <NEW_LINE> is_weight = torch.from_numpy(is_weight).float().to(device) <NEW_LINE> loss = F.mse_loss(is_weight * Q_expected, is_weight * Q_targets) <NEW_LINE> self.optimizer.zero_grad() <NEW_LINE> loss.backward() <NEW_LINE> self.optimizer.step() <NEW_LINE> self.soft_update(self.qnetwork_local, self.qnetwork_target, TAU) <NEW_LINE> <DEDENT> def soft_update(self, local_model, target_model, tau): <NEW_LINE> <INDENT> for target_param, local_param in zip(target_model.parameters(), local_model.parameters()): <NEW_LINE> <INDENT> target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data) | Interacts with and learns from the environment. | 62598fb2be8e80087fbbf0f1 |
class UsersAttentionMiddlePeopleCreateView(View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> json_dict = json.loads(request.body.decode()) <NEW_LINE> AttentionMiddlePeople.objects.create(**json_dict) <NEW_LINE> return JsonResponse({"statue": 200, 'data': '添加成功'}, safe=False) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> context = {"Result": 'false', 'Msg': {e}} <NEW_LINE> return JsonResponse(context) | 用户关注的置业顾问记录/上传 | 62598fb2a79ad1619776a0f4 |
class UpdateNike(APIView): <NEW_LINE> <INDENT> permission_classes = [permissions.IsAuthenticated] <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> prof = Profile.objects.get(user=request.user) <NEW_LINE> ser = EditNike(prof, data=request.data) <NEW_LINE> if ser.is_valid(): <NEW_LINE> <INDENT> ser.save() <NEW_LINE> return Response(status=201) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(status=400) | Редактирование ника пользователя | 62598fb2f548e778e596b630 |
@pytest.mark.draft <NEW_LINE> @pytest.mark.components <NEW_LINE> @pytest.allure.story('Clients') <NEW_LINE> @pytest.allure.feature('PATCH') <NEW_LINE> class Test_PFE_Components(object): <NEW_LINE> <INDENT> @pytest.allure.link('https://jira.qumu.com/browse/TC-42301') <NEW_LINE> @pytest.mark.Clients <NEW_LINE> @pytest.mark.PATCH <NEW_LINE> def test_TC_42301_PATCH_Clients_Id(self, context): <NEW_LINE> <INDENT> with pytest.allure.step("""Verify that User is unable to Create/Edit/View/Delete, any entity on any page of the QED, using token with "Provision" permission within the token expiration time."""): <NEW_LINE> <INDENT> clientDetails = context.sc.ClientDetails( id=None, matchingRule={ 'operator': 'ALL', 'rules': [{ 'expressionType': 'Single', 'contextField': 'remoteAddress', 'operator': 'IPMATCH', 'contextFieldType': 'String', 'matchValue': '172.30.2.49/32', 'contextFieldKey': None }], 'groups': [] }, name='POST: Client Name Updated', sourceSelectionRule=[]) <NEW_LINE> response = check( context.cl.Clients.updateEntity( body=clientDetails, id='clientUpdate' ) ) <NEW_LINE> <DEDENT> with pytest.allure.step("""Verify that User is unable to Create/Edit/View/Delete, any entity on any page of the QED, using token with "Provision" permission within the token expiration time."""): <NEW_LINE> <INDENT> clientDetails = context.sc.ClientDetails( id=None, matchingRule={ 'operator': 'ALL', 'rules': [{ 'expressionType': 'Single', 'contextField': 'remoteAddress', 'operator': 'IPMATCH', 'contextFieldType': 'String', 'matchValue': '172.30.2.49/32', 'contextFieldKey': None }], 'groups': [] }, name='POST: Client Name Updated', sourceSelectionRule=[]) <NEW_LINE> request = context.cl.Clients.updateEntity( body=clientDetails, id='clientUpdate' ) <NEW_LINE> try: <NEW_LINE> <INDENT> client, response = check( request, quiet=True, returnResponse=True ) <NEW_LINE> <DEDENT> except (HTTPBadRequest, HTTPForbidden) as e: <NEW_LINE> <INDENT> get_error_message(e) | expect.any( should.start_with('may 
not be empty'), should.start_with('Invalid page parameter specified'), should.contain('Invalid Authorization Token') ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( "Expected error message, got {} status code instead.".format( response.status_code)) | PFE Clients test cases. | 62598fb27047854f4633f466 |
class LoginWithGoogleDto(object): <NEW_LINE> <INDENT> swagger_types = { 'id_token': 'str' } <NEW_LINE> attribute_map = { 'id_token': 'idToken' } <NEW_LINE> def __init__(self, id_token=None): <NEW_LINE> <INDENT> self._id_token = None <NEW_LINE> self.discriminator = None <NEW_LINE> if id_token is not None: <NEW_LINE> <INDENT> self.id_token = id_token <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def id_token(self): <NEW_LINE> <INDENT> return self._id_token <NEW_LINE> <DEDENT> @id_token.setter <NEW_LINE> def id_token(self, id_token): <NEW_LINE> <INDENT> self._id_token = id_token <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(LoginWithGoogleDto, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, LoginWithGoogleDto): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the 
swagger code generator program.
Do not edit the class manually. | 62598fb27d43ff2487427448 |
class UFuncOperator(Operator): <NEW_LINE> <INDENT> symbol = None <NEW_LINE> nin = None <NEW_LINE> nout = None <NEW_LINE> operation = None <NEW_LINE> def __init__(self, op): <NEW_LINE> <INDENT> Operator.__init__(self, name=op.__name__) <NEW_LINE> self.symbol = op.__name__ <NEW_LINE> self.nin = op.nin <NEW_LINE> self.nout = op.nout <NEW_LINE> self.operation = op <NEW_LINE> return | A operator wrapper around a numpy ufunc.
The name and symbol attributes are set equal to the ufunc.__name__
attribute. nin and nout are also taken from the ufunc. | 62598fb2283ffb24f3cf3918 |
class HealthchecksioBinarySensor(BinarySensorEntity): <NEW_LINE> <INDENT> def __init__(self, hass, config, config_entry): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.attr = {} <NEW_LINE> self.config_entry = config_entry <NEW_LINE> self._status = None <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> await self.hass.data[DOMAIN_DATA]["client"].update_data() <NEW_LINE> for check in self.hass.data[DOMAIN_DATA]["data"]["checks"]: <NEW_LINE> <INDENT> if self.unique_id == check.get("ping_url").split("/")[-1]: <NEW_LINE> <INDENT> self.config = check <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self._status = self.config.get("status") == "up" <NEW_LINE> self.attr["attribution"] = ATTRIBUTION <NEW_LINE> self.attr["last_ping"] = self.config.get("last_ping") <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self.config.get("ping_url").split("/")[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return { "identifiers": {(DOMAIN, self.config_entry.entry_id)}, "name": "Healthchecks.io", "manufacturer": "SIA Monkey See Monkey Do", } <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.config.get("name") <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_class(self): <NEW_LINE> <INDENT> return BINARY_SENSOR_DEVICE_CLASS <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return self.attr | Healthchecksio binary_sensor class. | 62598fb29c8ee823130401b8 |
class ConfigFilter(metaclass=MetaclassRegistry): <NEW_LINE> <INDENT> name = "identity" <NEW_LINE> sensitive_arg = False <NEW_LINE> def Filter(self, data: Text) -> Text: <NEW_LINE> <INDENT> precondition.AssertType(data, Text) <NEW_LINE> return data | A configuration filter can transform a configuration parameter. | 62598fb2cc0a2c111447b09e |
class Serializer(PythonSerializer): <NEW_LINE> <INDENT> internal_use_only = False <NEW_LINE> def handle_field(self, obj, field): <NEW_LINE> <INDENT> if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None: <NEW_LINE> <INDENT> self._current[field.name] = str(getattr(obj, field.name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super().handle_field(obj, field) <NEW_LINE> <DEDENT> <DEDENT> def end_serialization(self): <NEW_LINE> <INDENT> self.options.setdefault('allow_unicode', True) <NEW_LINE> yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options) <NEW_LINE> <DEDENT> def getvalue(self): <NEW_LINE> <INDENT> return super(PythonSerializer, self).getvalue() | Convert a queryset to YAML. | 62598fb2aad79263cf42e85f |
class ParticleRestartTestHarness(TestHarness): <NEW_LINE> <INDENT> def _run_openmc(self): <NEW_LINE> <INDENT> args = {'openmc_exec': self._opts.exe} <NEW_LINE> if self._opts.mpi_exec is not None: <NEW_LINE> <INDENT> args.update({'mpi_procs': self._opts.mpi_np, 'mpi_exec': self._opts.mpi_exec}) <NEW_LINE> <DEDENT> returncode = openmc.run(**args) <NEW_LINE> assert returncode == 0, 'OpenMC did not exit successfully.' <NEW_LINE> args.update({'restart_file': self._sp_name}) <NEW_LINE> returncode = openmc.run(**args) <NEW_LINE> assert returncode == 0, 'OpenMC did not exit successfully.' <NEW_LINE> <DEDENT> def _test_output_created(self): <NEW_LINE> <INDENT> particle = glob.glob(os.path.join(os.getcwd(), self._sp_name)) <NEW_LINE> assert len(particle) == 1, 'Either multiple or no particle restart ' 'files exist.' <NEW_LINE> assert particle[0].endswith('h5'), 'Particle restart file is not a HDF5 file.' <NEW_LINE> <DEDENT> def _get_results(self): <NEW_LINE> <INDENT> particle = glob.glob(os.path.join(os.getcwd(), self._sp_name))[0] <NEW_LINE> p = openmc.Particle(particle) <NEW_LINE> outstr = '' <NEW_LINE> outstr += 'current batch:\n' <NEW_LINE> outstr += "{0:12.6E}\n".format(p.current_batch) <NEW_LINE> outstr += 'current gen:\n' <NEW_LINE> outstr += "{0:12.6E}\n".format(p.current_gen) <NEW_LINE> outstr += 'particle id:\n' <NEW_LINE> outstr += "{0:12.6E}\n".format(p.id) <NEW_LINE> outstr += 'run mode:\n' <NEW_LINE> outstr += "{0}\n".format(p.run_mode) <NEW_LINE> outstr += 'particle weight:\n' <NEW_LINE> outstr += "{0:12.6E}\n".format(p.weight) <NEW_LINE> outstr += 'particle energy:\n' <NEW_LINE> outstr += "{0:12.6E}\n".format(p.energy) <NEW_LINE> outstr += 'particle xyz:\n' <NEW_LINE> outstr += "{0:12.6E} {1:12.6E} {2:12.6E}\n".format(p.xyz[0], p.xyz[1], p.xyz[2]) <NEW_LINE> outstr += 'particle uvw:\n' <NEW_LINE> outstr += "{0:12.6E} {1:12.6E} {2:12.6E}\n".format(p.uvw[0], p.uvw[1], p.uvw[2]) <NEW_LINE> return outstr | Specialized TestHarness for running OpenMC particle 
restart tests. | 62598fb23346ee7daa33768d |
class HeaderExists(Rule): <NEW_LINE> <INDENT> NAME = 'header-exists' <NEW_LINE> _log = logging.getLogger(NAME) <NEW_LINE> def __init__(self, rule_data, cfg): <NEW_LINE> <INDENT> super().__init__(rule_data, cfg) <NEW_LINE> self._header_name = rule_data['name'] <NEW_LINE> <DEDENT> def check(self, message): <NEW_LINE> <INDENT> self._log.debug('%r exists', self._header_name) <NEW_LINE> return self._header_name in message | Looks for a message to have a given header. | 62598fb221bff66bcd722cf3 |
class AgentAssistantRecord(proto.Message): <NEW_LINE> <INDENT> article_suggestion_answer = proto.Field( proto.MESSAGE, number=5, oneof="answer", message=participant.ArticleAnswer, ) <NEW_LINE> faq_answer = proto.Field( proto.MESSAGE, number=6, oneof="answer", message=participant.FaqAnswer, ) | Represents a record of a human agent assist answer.
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
article_suggestion_answer (google.cloud.dialogflow_v2.types.ArticleAnswer):
Output only. The article suggestion answer.
This field is a member of `oneof`_ ``answer``.
faq_answer (google.cloud.dialogflow_v2.types.FaqAnswer):
Output only. The FAQ answer.
This field is a member of `oneof`_ ``answer``. | 62598fb2379a373c97d990a1 |
class LetterGridFieldGroup(_GridFieldGroup): <NEW_LINE> <INDENT> def __init__(self, grid: Grid, horizontal_start: int, vertical_start: int, num_fields: int, field_length: int, field_orientation: geometry_utils.Orientation): <NEW_LINE> <INDENT> fields_vertical = field_orientation is geometry_utils.Orientation.VERTICAL <NEW_LINE> self.fields = [ LetterGridField( grid, horizontal_start + i if fields_vertical else horizontal_start, vertical_start + i if not fields_vertical else vertical_start, field_orientation, field_length) for i in range(num_fields) ] <NEW_LINE> <DEDENT> def read_value(self, threshold: float, fill_percents: tp.List[tp.List[float]] ) -> tp.List[tp.List[str]]: <NEW_LINE> <INDENT> return tp.cast(tp.List[tp.List[str]], super().read_value(threshold, fill_percents)) | A letter grid field group is one group of fields that represents an
entire string. | 62598fb2be7bc26dc9251ea2 |
class Vereador(Politico): <NEW_LINE> <INDENT> def __init__(self, nome, partido, municipio, estado): <NEW_LINE> <INDENT> Politico.__init__(self) <NEW_LINE> self.set_nome(nome) <NEW_LINE> self.set_salario(5000) <NEW_LINE> self.set_partido(partido) <NEW_LINE> self.set_estado(estado) <NEW_LINE> self.__municipio = municipio <NEW_LINE> self.set_funcao("propor leis municipais em benefício da população.") <NEW_LINE> <DEDENT> def set_municipio(self, municipio): <NEW_LINE> <INDENT> if type(municipio) == str: <NEW_LINE> <INDENT> self.__municipio = municipio <NEW_LINE> <DEDENT> <DEDENT> def get_municipio(self): <NEW_LINE> <INDENT> return self.__municipio <NEW_LINE> <DEDENT> def apresentacao(self): <NEW_LINE> <INDENT> Politico.apresentacao(self) <NEW_LINE> print ('sou vereador: ' + self.get_municipio() + '/' + self.get_estado()) <NEW_LINE> print ('Minha função é ' + self.get_funcao()) <NEW_LINE> print ('Fui eleito por ' + self.get_estado()) <NEW_LINE> print ('============================') | Classe Senador | 62598fb2d486a94d0ba2c05c |
class ReceptiveFieldTransformer(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, width): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def transform(self, X, y=None): <NEW_LINE> <INDENT> no_channels = False <NEW_LINE> if X.ndim < 5: <NEW_LINE> <INDENT> X = np.expand_dims(X, axis=-1) <NEW_LINE> no_channels = True <NEW_LINE> <DEDENT> assert X.ndim == 5, 'Input dimensions must be (n_samples, n_pixels_x, n_pixels_y, n_pixels_z [, n_c])' <NEW_LINE> padding_x, padding_y, padding_z = int((self.width[0] - 1) / 2), int((self.width[1] - 1) / 2), int((self.width[2] - 1) / 2) <NEW_LINE> padded_data = np.pad(X, ((0, 0), (padding_x, padding_x), (padding_y, padding_y), (padding_z, padding_z), (0, 0)), mode='constant', constant_values=0) <NEW_LINE> Xt = rolling_window(padded_data, (0, self.width[0], self.width[1], self.width[2], 0)) <NEW_LINE> if no_channels: <NEW_LINE> <INDENT> Xt = Xt[:, :, :, :, 0] <NEW_LINE> <DEDENT> return Xt | Obtain regularly spaced subimages belonging to the masked area of images in a collection.
Parameters
----------
width: width along every dimension [wx, wy, wz] - must be unpair | 62598fb2ff9c53063f51a6d9 |
class Card(NumType): <NEW_LINE> <INDENT> pass | cardinal number or corresponding interrogative / relative / indefinite / demonstrative word | 62598fb27d847024c075c44f |
class DeleteShadowResponse(awsiot.ModeledClass): <NEW_LINE> <INDENT> __slots__ = ['client_token', 'timestamp', 'version'] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.client_token = kwargs.get('client_token') <NEW_LINE> self.timestamp = kwargs.get('timestamp') <NEW_LINE> self.version = kwargs.get('version') <NEW_LINE> for key, val in zip(['timestamp', 'version'], args): <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_payload(cls, payload): <NEW_LINE> <INDENT> new = cls() <NEW_LINE> val = payload.get('clientToken') <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> new.client_token = val <NEW_LINE> <DEDENT> val = payload.get('timestamp') <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> new.timestamp = datetime.datetime.fromtimestamp(val) <NEW_LINE> <DEDENT> val = payload.get('version') <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> new.version = val <NEW_LINE> <DEDENT> return new | Response payload to a DeleteShadow request.
All attributes are None by default, and may be set by keyword in the constructor.
Keyword Args:
client_token (str): A client token used to correlate requests and responses.
timestamp (datetime.datetime): The time the response was generated by AWS IoT.
version (int): The current version of the document for the device's shadow.
Attributes:
client_token (str): A client token used to correlate requests and responses.
timestamp (datetime.datetime): The time the response was generated by AWS IoT.
version (int): The current version of the document for the device's shadow. | 62598fb25fdd1c0f98e5e019 |
class Query(object): <NEW_LINE> <INDENT> def __init__(self, tables=None, where_clause=None, where_clause_params=None, joins=None): <NEW_LINE> <INDENT> self.tables = tables or [] <NEW_LINE> self.where_clause = where_clause or [] <NEW_LINE> self.where_clause_params = where_clause_params or [] <NEW_LINE> self.joins = joins or {} <NEW_LINE> <DEDENT> def join(self, connection, outer=False): <NEW_LINE> <INDENT> (lhs, table, lhs_col, col) = connection <NEW_LINE> lhs = _quote(lhs) <NEW_LINE> table = _quote(table) <NEW_LINE> assert lhs in self.tables, "Left-hand-side table must already be part of the query!" <NEW_LINE> if table in self.tables: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tables.append(table) <NEW_LINE> self.joins.setdefault(lhs, []).append((table, lhs_col, col, outer and 'LEFT JOIN' or 'JOIN')) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def get_sql(self): <NEW_LINE> <INDENT> query_from = '' <NEW_LINE> tables_to_process = list(self.tables) <NEW_LINE> def add_joins_for_table(table, query_from): <NEW_LINE> <INDENT> for (dest_table, lhs_col, col, join) in self.joins.get(table,[]): <NEW_LINE> <INDENT> tables_to_process.remove(dest_table) <NEW_LINE> query_from += ' %s %s ON (%s."%s" = %s."%s")' % (join, dest_table, table, lhs_col, dest_table, col) <NEW_LINE> query_from = add_joins_for_table(dest_table, query_from) <NEW_LINE> <DEDENT> return query_from <NEW_LINE> <DEDENT> for table in tables_to_process: <NEW_LINE> <INDENT> query_from += table <NEW_LINE> if table in self.joins: <NEW_LINE> <INDENT> query_from = add_joins_for_table(table, query_from) <NEW_LINE> <DEDENT> query_from += ',' <NEW_LINE> <DEDENT> query_from = query_from[:-1] <NEW_LINE> return (query_from, " AND ".join(self.where_clause), self.where_clause_params) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<osv.Query: "SELECT ... 
FROM %s WHERE %s" with params: %r>' % self.get_sql() | Dumb implementation of a Query object, using 3 string lists so far
for backwards compatibility with the (table, where_clause, where_params) previously used.
TODO: To be improved after v6.0 to rewrite part of the ORM and add support for:
- auto-generated multiple table aliases
- multiple joins to the same table with different conditions
- dynamic right-hand-side values in domains (e.g. a.name = a.description)
- etc. | 62598fb216aa5153ce40058f |
class Muster(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://github.com/llnl/muster" <NEW_LINE> url = "https://github.com/llnl/muster/archive/v1.0.tar.gz" <NEW_LINE> version('1.0.1', 'd709787db7e080447afb6571ac17723c') <NEW_LINE> version('1.0', '2eec6979a4a36d3a65a792d12969be16') <NEW_LINE> depends_on('boost') <NEW_LINE> depends_on('mpi') <NEW_LINE> depends_on('cmake@2.8:', type='build') | The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes. | 62598fb3e5267d203ee6b991 |
class CrossrefDOIAndISSNList(CrossrefTask): <NEW_LINE> <INDENT> date = ClosestDateParameter(default=datetime.date.today()) <NEW_LINE> def requires(self): <NEW_LINE> <INDENT> return {'input': CrossrefIntermediateSchema(date=self.date), 'jq': Executable(name='jq', message='https://github.com/stedolan/jq')} <NEW_LINE> <DEDENT> @timed <NEW_LINE> def run(self): <NEW_LINE> <INDENT> _, stopover = tempfile.mkstemp(prefix='siskin-') <NEW_LINE> temp = shellout("unpigz -c {input} > {output}", input=self.input().get('input').path) <NEW_LINE> output = shellout("""jq -r '[.doi?, .["rft.issn"][]?, .["rft.eissn"][]?] | @csv' {input} | LC_ALL=C sort -S50% > {output} """, input=temp, output=stopover) <NEW_LINE> os.remove(temp) <NEW_LINE> luigi.LocalTarget(output).move(self.output().path) <NEW_LINE> <DEDENT> def output(self): <NEW_LINE> <INDENT> return luigi.LocalTarget(path=self.path(ext='csv')) | A list of Crossref DOIs with their ISSNs. | 62598fb38e7ae83300ee9130 |
class PaginatorEmbedInterface(PaginatorInterface): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._embed = kwargs.pop('embed', None) or discord.Embed() <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def send_kwargs(self): <NEW_LINE> <INDENT> display_page = self.display_page <NEW_LINE> self._embed.description = self.pages[display_page] <NEW_LINE> self._embed.set_footer(text=f'Page {display_page + 1}/{self.page_count}') <NEW_LINE> return {'embed': self._embed} <NEW_LINE> <DEDENT> max_page_size = 2048 <NEW_LINE> @property <NEW_LINE> def page_size(self): <NEW_LINE> <INDENT> return self.paginator.max_size | A paginator interface that encloses content in an embed. | 62598fb391f36d47f2230eee |
class DescribeInsurePacksResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.InsurePacks = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("InsurePacks") is not None: <NEW_LINE> <INDENT> self.InsurePacks = [] <NEW_LINE> for item in params.get("InsurePacks"): <NEW_LINE> <INDENT> obj = KeyValueRecord() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.InsurePacks.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId") | DescribeInsurePacks返回参数结构体
| 62598fb397e22403b383af9c |
@use_strategy(BUILD_STRATEGY) <NEW_LINE> class LeafDescriptorFactory(Factory): <NEW_LINE> <INDENT> FACTORY_FOR = XModuleDescriptor <NEW_LINE> runtime = SubFactory(DescriptorSystemFactory) <NEW_LINE> url_name = LazyAttributeSequence('{.block_type}_{}'.format) <NEW_LINE> @lazy_attribute <NEW_LINE> def location(self): <NEW_LINE> <INDENT> return Location('i4x://org/course/category/{}'.format(self.url_name)) <NEW_LINE> <DEDENT> @lazy_attribute <NEW_LINE> def block_type(self): <NEW_LINE> <INDENT> return self.descriptor_cls.__name__ <NEW_LINE> <DEDENT> @lazy_attribute <NEW_LINE> def definition_id(self): <NEW_LINE> <INDENT> return self.location <NEW_LINE> <DEDENT> @lazy_attribute <NEW_LINE> def usage_id(self): <NEW_LINE> <INDENT> return self.location <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _build(cls, target_class, *args, **kwargs): <NEW_LINE> <INDENT> runtime = kwargs.pop('runtime') <NEW_LINE> desc_cls = kwargs.pop('descriptor_cls') <NEW_LINE> block_type = kwargs.pop('block_type') <NEW_LINE> def_id = kwargs.pop('definition_id') <NEW_LINE> usage_id = kwargs.pop('usage_id') <NEW_LINE> block = runtime.construct_xblock_from_class( desc_cls, ScopeIds(None, block_type, def_id, usage_id), DictFieldData(dict(**kwargs)) ) <NEW_LINE> block.save() <NEW_LINE> return block | Factory to generate leaf XModuleDescriptors. | 62598fb3be8e80087fbbf0f3 |
class DescribePublishSubscribeResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TotalCount = None <NEW_LINE> self.PublishSubscribeSet = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> if params.get("PublishSubscribeSet") is not None: <NEW_LINE> <INDENT> self.PublishSubscribeSet = [] <NEW_LINE> for item in params.get("PublishSubscribeSet"): <NEW_LINE> <INDENT> obj = PublishSubscribe() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.PublishSubscribeSet.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId") | DescribePublishSubscribe返回参数结构体
| 62598fb3097d151d1a2c10bb |
class GridWorld(object): <NEW_LINE> <INDENT> def __init__(self, m, n): <NEW_LINE> <INDENT> self.m = m <NEW_LINE> self.n = n <NEW_LINE> self.grid = np.zeros((m,n)) <NEW_LINE> self.stateSpace = [i+1 for i in range(self.m*self.n-2)] <NEW_LINE> self.stateSpacePlus = [i for i in range(self.m*self.n)] <NEW_LINE> self.actionSpace = {'up': -self.m, 'down': self.m, 'left': -1, 'right': 1} <NEW_LINE> self.p = self.initP() <NEW_LINE> <DEDENT> def initP(self): <NEW_LINE> <INDENT> P = {} <NEW_LINE> def offGridMove(newState, oldState): <NEW_LINE> <INDENT> if newState not in self.stateSpacePlus: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif oldState % self.m == 0 and newState % self.m == self.m - 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif oldState % self.m == self.m - 1 and newState % self.m == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> for state in self.stateSpace: <NEW_LINE> <INDENT> for action in self.actionSpace: <NEW_LINE> <INDENT> resultingState = state + self.actionSpace[action] <NEW_LINE> key = (state, -1, state, action) if offGridMove(resultingState, state) else (resultingState, -1, state, action) <NEW_LINE> P[key] = 1 <NEW_LINE> <DEDENT> <DEDENT> return P | Gridworld defined by m x n matrix with
terminal states at top left corner and bottom right corner.
State transitions are deterministic; attempting to move
off the grid leaves the state unchanged, and rewards are -1 on
each step.
In this implementation we model the environment as a system of
equations to be solved, rather than as a game to be played. | 62598fb338b623060ffa912a |
class TraceTracker(Printer): <NEW_LINE> <INDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> self.out = StringIO() <NEW_LINE> super(TraceTracker, self).__init__(self.out, *args, **kw) <NEW_LINE> self.checker = MinimockOutputChecker() <NEW_LINE> self.options = doctest.ELLIPSIS <NEW_LINE> self.options |= doctest.NORMALIZE_INDENTATION <NEW_LINE> self.options |= doctest.NORMALIZE_FUNCTION_PARAMETERS <NEW_LINE> self.options |= doctest.REPORT_UDIFF <NEW_LINE> <DEDENT> def check(self, want): <NEW_LINE> <INDENT> return self.checker.check_output(want, self.dump(), optionflags=self.options) <NEW_LINE> <DEDENT> def diff(self, want): <NEW_LINE> <INDENT> if self.check(want): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.checker.output_difference(doctest.Example("", want), self.dump(), optionflags=self.options) <NEW_LINE> <DEDENT> <DEDENT> def dump(self): <NEW_LINE> <INDENT> return self.out.getvalue() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.out.truncate(0) | :class:`AbstractTracker` implementation for using MiniMock in non-
:mod:`doctest` tests. Follows the pattern of recording minimocked
object usage as strings, then using the facilities of :mod:`doctest`
to assert the correctness of these usage strings. | 62598fb3498bea3a75a57bad |
class CFFICDataWrapper(object):
    """Base class for exposing Python types and interfaces to pywincffi users.

    * Wraps a CFFI cdata object in ``self._cdata``.
    * Delegates attribute getting/setting to ``self._cdata``, supporting
      structs.
    * Delegates item getting/setting to ``self._cdata``, supporting arrays.

    Attribute access is not delegated to the wrapped object if the class
    itself contains such an attribute and that attribute is a descriptor;
    this is in place to support ``@property`` in sub-classes.

    :param str cdecl:
        C type specification as used in ``ffi.new(cdecl)``.
    :param cffi.api.FFI ffi:
        FFI instance used to create the wrapped cdata object.
    """

    def __init__(self, cdecl, ffi):
        # Allocate the underlying cdata object; all attribute/item access
        # below is delegated to it.
        self._cdata = ffi.new(cdecl)

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails, so attributes
        # stored on the wrapper itself (e.g. _cdata) are never intercepted.
        return getattr(self._cdata, name)

    def __setattr__(self, name, value):
        if name == "_cdata":
            # The one attribute stored on the wrapper itself; bypass the
            # delegation logic (this also makes __init__ work).
            super(CFFICDataWrapper, self).__setattr__(name, value)
            return
        # Give class-level descriptors (e.g. @property setters declared on
        # a sub-class) first crack at the assignment.
        if hasattr(self.__class__, name):
            try:
                attr = getattr(self.__class__, name)
                attr.__set__(self, value)
                return
            except AttributeError:
                # The class attribute is not a data descriptor (no
                # __set__); fall through and write to the cdata instead.
                pass
        # Default: delegate the assignment to the wrapped cdata (struct
        # field assignment).
        setattr(self._cdata, name, value)

    def __getitem__(self, key):
        # Array-style reads go straight to the wrapped cdata.
        return self._cdata.__getitem__(key)

    def __setitem__(self, key, value):
        # Array-style writes go straight to the wrapped cdata.
        return self._cdata.__setitem__(key, value)
class CMSCalendarEntriesPlugin(ZinniaCMSPluginBase): <NEW_LINE> <INDENT> model = CalendarEntriesPlugin <NEW_LINE> name = _('Calendar entries') <NEW_LINE> render_template = 'cmsplugin_zinnia/calendar.html' <NEW_LINE> fieldsets = ((None, { 'fields': (('year', 'month'),), 'description': _("If you don't set year and month, " "the current month will be used.")}),) <NEW_LINE> form = CalendarEntriesAdminForm | Plugin for including calendar of published entries | 62598fb33346ee7daa33768e |
class SquareFeet:
    """Process square-footage related features (GrLivArea, TotalBsmtSF).

    Total living space is generally very important when people buy houses,
    so this adds a predictor ('TotalArea') that sums the living spaces.
    """

    def create_totalSqFeet(self, dataset):
        """Add a 'TotalArea' column to *dataset* in place (idempotent).

        TotalArea = GrLivArea + TotalBsmtSF.  If the column already
        exists it is left untouched.
        """
        if 'TotalArea' not in dataset:
            # PERF FIX: vectorised column addition replaces the original
            # per-row DataFrame.apply; identical result, far faster.
            dataset['TotalArea'] = dataset['GrLivArea'] + dataset['TotalBsmtSF']

    def add_GrLivArea_TotalBsmtSF(self, row):
        """Per-row sum of GrLivArea and TotalBsmtSF.

        Kept for backward compatibility with callers that still use it
        with DataFrame.apply.
        """
        return row['GrLivArea'] + row['TotalBsmtSF']

    def plot_scatter_totalSqFeet(self, dataset):
        """Scatter/regression plot of SalePrice vs GrLivArea, with each
        point labelled by its Id (via :meth:`label_point`)."""
        sns.set_theme(color_codes=True)
        sns.regplot(x='GrLivArea', y='SalePrice', data=dataset)
        plt.title('Scatter plot of SalePrice and GrLivArea')
        self.label_point(dataset['TotalBsmtSF'], dataset['SalePrice'],
                         dataset['Id'], plt.gca())

    def label_point(self, x, y, val, ax):
        """Annotate axis *ax* with *val* at each (x, y) point.

        *x*, *y* and *val* are aligned pandas Series.
        """
        a = pd.concat({'x': x, 'y': y, 'val': val}, axis=1)
        for i, point in a.iterrows():
            # Small horizontal offset so the label does not sit on the dot.
            ax.text(point['x'] + .02, point['y'], str(point['val']))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.