code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class CryptoPrice: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self.url = "https://coinmarketcap.com/" <NEW_LINE> self.session = aiohttp.ClientSession() <NEW_LINE> <DEDENT> def __unload(self): <NEW_LINE> <INDENT> self.session.close() <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def cprice(self, *, currency: str=None): <NEW_LINE> <INDENT> if currency is None: <NEW_LINE> <INDENT> search = 'id' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> search = currency <NEW_LINE> <DEDENT> search = search.replace(' ', '-').lower() <NEW_LINE> async with self.session.get(self.url) as response: <NEW_LINE> <INDENT> cryptosoup = BeautifulSoup(await response.text(), "html.parser") <NEW_LINE> <DEDENT> results = cryptosoup.find_all("tr", id=re.compile(search)) <NEW_LINE> size = len(results) <NEW_LINE> if size == 0: <NEW_LINE> <INDENT> await self.bot.say("Couldn't find a currency matching your query.") <NEW_LINE> <DEDENT> elif size <= 5: <NEW_LINE> <INDENT> text = self.make_table(results) <NEW_LINE> await self.bot.say("```" + text + "```") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if currency is None: <NEW_LINE> <INDENT> results = results[:5] <NEW_LINE> text = self.make_table(results) <NEW_LINE> await self.bot.say("```" + text + "```") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text = ('Your query matched {} results. 
Try adding more ' 'characters to your search.'.format(size)) <NEW_LINE> await self.bot.say(text) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def make_table(self, results, limit: int=None): <NEW_LINE> <INDENT> headers = ['Name', 'Price', '24h Change'] <NEW_LINE> rows = [] <NEW_LINE> for row in results: <NEW_LINE> <INDENT> column = [] <NEW_LINE> column.append(row.find("td", class_="currency-name").a.get_text().strip()) <NEW_LINE> column.append(row.find("a", class_="price").get_text().strip()) <NEW_LINE> column.append(row.find("td", class_="percent-change").get_text().strip()) <NEW_LINE> rows.append(column) <NEW_LINE> <DEDENT> if limit: <NEW_LINE> <INDENT> rows.sort(key=lambda column: len(column[0])) <NEW_LINE> rows = rows[:limit] <NEW_LINE> <DEDENT> text = tabulate(rows, headers=headers) <NEW_LINE> return text
|
Fetches cryptocurrency information
|
6259900621a7993f00c66abe
|
class BoundedWString(AbstractWString): <NEW_LINE> <INDENT> __slots__ = ('maximum_size', ) <NEW_LINE> def __init__(self, maximum_size: int): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.maximum_size = maximum_size <NEW_LINE> <DEDENT> def has_maximum_size(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return super().__eq__(other) and self.maximum_size == other.maximum_size
|
A 16-bit string type.
|
62599006bf627c535bcb1ff0
|
class ActionGroupResourcePaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[ActionGroupResource]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ActionGroupResourcePaged, self).__init__(*args, **kwargs)
|
A paging container for iterating over a list of :class:`ActionGroupResource <azure.mgmt.monitor.models.ActionGroupResource>` object
|
62599006462c4b4f79dbc54a
|
class Media(Segment): <NEW_LINE> <INDENT> ui_name = "Unknown media" <NEW_LINE> extra_serializable_attributes = [] <NEW_LINE> def __init__(self, container, signature=None, force=False): <NEW_LINE> <INDENT> container.header = self.calc_header(container) <NEW_LINE> size = len(container) - container.header_length <NEW_LINE> Segment.__init__(self, container, container.header_length, name=self.ui_name, length=size) <NEW_LINE> self.signature = signature <NEW_LINE> if container.header is not None: <NEW_LINE> <INDENT> self.check_header(container.header) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.check_media_size() <NEW_LINE> <DEDENT> except errors.PossibleCandidateMedia: <NEW_LINE> <INDENT> if not force: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> self.check_magic() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> desc = f"{self.ui_name}, size={len(self)}" <NEW_LINE> if self.filesystem is not None: <NEW_LINE> <INDENT> desc += f", filesystem={self.filesystem.ui_name}" <NEW_LINE> <DEDENT> return desc <NEW_LINE> <DEDENT> @property <NEW_LINE> def filesystem(self): <NEW_LINE> <INDENT> return self.container.filesystem <NEW_LINE> <DEDENT> def calc_header(self, container): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check_header(self, header): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check_media_size(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check_magic(self): <NEW_LINE> <INDENT> pass
|
Base class for what is typically the root segment in a Container,
describing the type of media the data represents: floppy disk image,
cassette image, cartridge, etc.
Media can contain a filesystem, which is used to further subdivide the data
from the container into segments to represent each logical section in the
filesystem (i.e. boot sectors, VTOC, directory structure, files, etc.)
|
625990060a366e3fb87dd535
|
class PhantomJSDriver(Process): <NEW_LINE> <INDENT> def __init__(self, exe_path='phantomjs', extra_args=None, params=None): <NEW_LINE> <INDENT> script_path = wpull.util.get_package_filename('driver/phantomjs.js') <NEW_LINE> self._config_file = tempfile.NamedTemporaryFile( prefix='tmp-wpull-', suffix='.json', delete=False ) <NEW_LINE> args = [exe_path] + (extra_args or []) + [script_path, self._config_file.name] <NEW_LINE> super().__init__(args, stderr_callback=self._stderr_callback) <NEW_LINE> self._params = params <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def _stderr_callback(self, line): <NEW_LINE> <INDENT> _logger.warning(line.decode('utf-8', 'replace').rstrip()) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def start(self, use_atexit=True): <NEW_LINE> <INDENT> _logger.debug('PhantomJS start.') <NEW_LINE> self._write_config() <NEW_LINE> yield from super().start(use_atexit) <NEW_LINE> <DEDENT> def _write_config(self): <NEW_LINE> <INDENT> param_dict = { 'url': self._params.url, 'snapshot_paths': self._params.snapshot_paths, 'wait_time': self._params.wait_time, 'num_scrolls': self._params.num_scrolls, 'smart_scroll': self._params.smart_scroll, 'snapshot': self._params.snapshot, 'viewport_width': self._params.viewport_size[0], 'viewport_height': self._params.viewport_size[1], 'paper_width': self._params.paper_size[0], 'paper_height': self._params.paper_size[1], 'custom_headers': self._params.custom_headers, 'page_settings': self._params.page_settings, } <NEW_LINE> if self._params.event_log_filename: <NEW_LINE> <INDENT> param_dict['event_log_filename'] = os.path.abspath(self._params.event_log_filename) <NEW_LINE> <DEDENT> if self._params.action_log_filename: <NEW_LINE> <INDENT> param_dict['action_log_filename'] = os.path.abspath(self._params.action_log_filename) <NEW_LINE> <DEDENT> config_text = json.dumps(param_dict) <NEW_LINE> self._config_file.write(config_text.encode('utf-8')) <NEW_LINE> self._config_file.close() <NEW_LINE> <DEDENT> def close(self): 
<NEW_LINE> <INDENT> _logger.debug('Terminate phantomjs process.') <NEW_LINE> super().close() <NEW_LINE> if os.path.exists(self._config_file.name): <NEW_LINE> <INDENT> os.remove(self._config_file.name)
|
PhantomJS processing.
Args:
exe_path (str): Path of the PhantomJS executable.
extra_args (list): Additional arguments for PhantomJS. Most likely,
you'll want to pass proxy settings for capturing traffic.
params (:class:`PhantomJSDriverParams`): Parameters for controlling
the processing pipeline.
This class launches PhantomJS that scrolls and saves snapshots. It can
only be used once per URL.
|
62599006bf627c535bcb1ff2
|
class ExactMatchingPolicy(NameMatchingPolicy): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def match(self, name): <NEW_LINE> <INDENT> return self.name == name
|
Tests that name exact match to argument.
|
6259900615fb5d323ce7f887
|
class TerrainManager: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._terrains = {} <NEW_LINE> self._point_table = {} <NEW_LINE> <DEDENT> def __contains__(self, code): <NEW_LINE> <INDENT> return code in self._terrains <NEW_LINE> <DEDENT> def __getitem__(self, code): <NEW_LINE> <INDENT> return self._terrains[code] <NEW_LINE> <DEDENT> def register(self, terrain): <NEW_LINE> <INDENT> code = terrain.code <NEW_LINE> if not isinstance(code, str) or len(code) != 3: <NEW_LINE> <INDENT> raise ValueError("terrain code must be 3 letter string") <NEW_LINE> <DEDENT> if code in self._terrains: <NEW_LINE> <INDENT> raise AlreadyExists(code, self._terrains[code], terrain) <NEW_LINE> <DEDENT> if not isinstance(terrain, Terrain): <NEW_LINE> <INDENT> raise TypeError("must be an instance Terrain to register") <NEW_LINE> <DEDENT> self._terrains[code] = terrain <NEW_LINE> <DEDENT> def set_terrain_for_point(self, point_data, terrain): <NEW_LINE> <INDENT> if point_data in self._point_table: <NEW_LINE> <INDENT> raise AlreadyExists(point_data, self._point_table[point_data], terrain) <NEW_LINE> <DEDENT> self._point_table[point_data] = terrain <NEW_LINE> <DEDENT> def get_terrain_for_point(self, elevation, moisture, temperature): <NEW_LINE> <INDENT> elevation = round(elevation, 1) <NEW_LINE> moisture = round(moisture, 1) <NEW_LINE> temperature = round(temperature, 1) <NEW_LINE> return self._point_table.get((elevation, moisture, temperature))
|
A manager for terrain types.
|
62599007d164cc6175821ac3
|
class PackageFactory(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def incoming(cls, format, zip_path=None, metadata_files=None): <NEW_LINE> <INDENT> formats = app.config.get("PACKAGE_HANDLERS", {}) <NEW_LINE> cname = formats.get(format) <NEW_LINE> if cname is None: <NEW_LINE> <INDENT> msg = "No handler for package format {x}".format(x=format) <NEW_LINE> app.logger.debug("Package Factory Incoming - {x}".format(x=msg)) <NEW_LINE> raise PackageException(msg) <NEW_LINE> <DEDENT> klazz = plugin.load_class(cname) <NEW_LINE> return klazz(zip_path=zip_path, metadata_files=metadata_files) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def converter(cls, format): <NEW_LINE> <INDENT> formats = app.config.get("PACKAGE_HANDLERS", {}) <NEW_LINE> cname = formats.get(format) <NEW_LINE> if cname is None: <NEW_LINE> <INDENT> msg = "No handler for package format {x}".format(x=format) <NEW_LINE> app.logger.debug("Package Factory Converter - {x}".format(x=msg)) <NEW_LINE> raise PackageException(msg) <NEW_LINE> <DEDENT> klazz = plugin.load_class(cname) <NEW_LINE> return klazz()
|
Factory which provides methods for accessing specific PackageHandler implementations
|
62599007627d3e7fe0e079e4
|
class LinuxVXLANDataplaneDriver(dp_drivers.DataplaneDriver): <NEW_LINE> <INDENT> dataplane_instance_class = LinuxVXLANEVIDataplane <NEW_LINE> type = consts.EVPN <NEW_LINE> required_kernel = "3.11.0" <NEW_LINE> encaps = [exa.Encapsulation(exa.Encapsulation.Type.VXLAN)] <NEW_LINE> driver_opts = [ cfg.IntOpt("vxlan_dst_port", default="4789", help=("UDP port toward which send VXLAN traffic (defaults " "to standard IANA-allocated port)")), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(LinuxVXLANDataplaneDriver, self).__init__() <NEW_LINE> privileged_utils.modprobe('vxlan') <NEW_LINE> <DEDENT> @log_decorator.log_info <NEW_LINE> def reset_state(self): <NEW_LINE> <INDENT> cmd = "brctl show | tail -n +2 | awk '{print $1}'| grep '%s'" <NEW_LINE> for bridge in self._run_command(cmd % BRIDGE_NAME_PREFIX, run_as_root=True, raise_on_error=False, acceptable_return_codes=[0, 1], shell=True)[0]: <NEW_LINE> <INDENT> self._run_command("ip link set %s down" % bridge, run_as_root=True) <NEW_LINE> self._run_command("brctl delbr %s" % bridge, run_as_root=True) <NEW_LINE> <DEDENT> cmd = "ip link show | awk '{print $2}' | tr -d ':' | grep '%s'" <NEW_LINE> for interface in self._run_command(cmd % VXLAN_INTERFACE_PREFIX, run_as_root=True, raise_on_error=False, acceptable_return_codes=[0, 1], shell=True)[0]: <NEW_LINE> <INDENT> self._run_command("ip link set %s down" % interface, run_as_root=True) <NEW_LINE> self._run_command("ip link delete %s" % interface, run_as_root=True)
|
E-VPN Dataplane driver relying on Linux kernel linuxbridge VXLAN
|
6259900721a7993f00c66ac4
|
class NumpyToPILConvertor(NumpyPILConvertor): <NEW_LINE> <INDENT> def __call__(self, img): <NEW_LINE> <INDENT> return self.numpyToPIL(img)
|
===================
NumpyToPILConvertor
===================
A numpy to PIL image convertor
|
6259900715fb5d323ce7f88b
|
class PlaySound(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def run(self, command): <NEW_LINE> <INDENT> cmd = [ 'aplay', self.path, ] <NEW_LINE> player = subprocess.call(cmd)
|
Play a wav file from the given path.
|
625990070a366e3fb87dd53b
|
class Data(object): <NEW_LINE> <INDENT> KELVIN_NULL = 273.15 <NEW_LINE> KM_MILES = 1.609344 <NEW_LINE> conditions = {'NCT' : 'no clouds detected', 'NSC' : 'nil signifant cloud', 'SKC' : 'clear sky', 'CLR' : 'clear sky', 'FEW' :'few clouds','OVC' : 'overcast clouds', 'BKN' : 'broken clouds', 'CAVOK' : 'ceiling and visibility ok', 'SCT' : 'scattered'} <NEW_LINE> stationtypes = {'1' : 'Airport station', '2' : 'SWOP station', '3' : 'SYNOP station', '4' : '', '5' : 'DIY station'} <NEW_LINE> def getstation(self, nr): <NEW_LINE> <INDENT> return self.stationtypes[str(nr)]
|
absolute zero point
|
62599007462c4b4f79dbc552
|
class TGPlugin: <NEW_LINE> <INDENT> def __init__(self, extra_vars_func=None, options=None, extension="mak"): <NEW_LINE> <INDENT> self.extra_vars_func = extra_vars_func <NEW_LINE> self.extension = extension <NEW_LINE> if not options: <NEW_LINE> <INDENT> options = {} <NEW_LINE> <DEDENT> lookup_options = {} <NEW_LINE> for k, v in options.items(): <NEW_LINE> <INDENT> if k.startswith("mako."): <NEW_LINE> <INDENT> lookup_options[k[5:]] = v <NEW_LINE> <DEDENT> elif k in ["directories", "filesystem_checks", "module_directory"]: <NEW_LINE> <INDENT> lookup_options[k] = v <NEW_LINE> <DEDENT> <DEDENT> self.lookup = TemplateLookup(**lookup_options) <NEW_LINE> self.tmpl_options = {} <NEW_LINE> for kw in compat.inspect_getargspec(Template.__init__)[0]: <NEW_LINE> <INDENT> if kw in lookup_options: <NEW_LINE> <INDENT> self.tmpl_options[kw] = lookup_options[kw] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load_template(self, templatename, template_string=None): <NEW_LINE> <INDENT> if template_string is not None: <NEW_LINE> <INDENT> return Template(template_string, **self.tmpl_options) <NEW_LINE> <DEDENT> if "/" not in templatename: <NEW_LINE> <INDENT> templatename = ( "/" + templatename.replace(".", "/") + "." + self.extension ) <NEW_LINE> <DEDENT> return self.lookup.get_template(templatename) <NEW_LINE> <DEDENT> def render( self, info, format="html", fragment=False, template=None ): <NEW_LINE> <INDENT> if isinstance(template, str): <NEW_LINE> <INDENT> template = self.load_template(template) <NEW_LINE> <DEDENT> if self.extra_vars_func: <NEW_LINE> <INDENT> info.update(self.extra_vars_func()) <NEW_LINE> <DEDENT> return template.render(**info)
|
TurboGears compatible Template Plugin.
|
625990070a366e3fb87dd53d
|
class StaticMessageQueue(MessageQueue): <NEW_LINE> <INDENT> def __init__(self, value, name = 'distortos::StaticMessageQueue'): <NEW_LINE> <INDENT> super().__init__(value, name)
|
Print `distortos::StaticMessageQueue`.
|
62599007462c4b4f79dbc554
|
class FifeAgent(Base): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Base.__init__(self, layer=object, behaviour=object, instance=object) <NEW_LINE> <DEDENT> @property <NEW_LINE> def saveable_fields(self): <NEW_LINE> <INDENT> fields = list(self.fields.keys()) <NEW_LINE> fields.remove("layer") <NEW_LINE> fields.remove("behaviour") <NEW_LINE> fields.remove("instance") <NEW_LINE> return fields <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register(cls, name="FifeAgent", auto_register=True): <NEW_LINE> <INDENT> return super(FifeAgent, cls).register(name, auto_register)
|
Component that stores the values for a fife agent
Fields:
layer: The layer the agent is on
behaviour: The behaviour object of this agent
instance: The FIFE instance of this agent.
|
6259900715fb5d323ce7f88f
|
class bi(Device): <NEW_LINE> <INDENT> attrs = ('INP', 'ZNAM', 'ONAM', 'RVAL', 'VAL', 'EGU', 'HOPR', 'LOPR', 'PREC', 'NAME', 'DESC', 'DTYP') <NEW_LINE> def __init__(self, prefix, **kwargs): <NEW_LINE> <INDENT> if prefix.endswith('.'): <NEW_LINE> <INDENT> prefix = prefix[:-1] <NEW_LINE> <DEDENT> Device.__init__(self, prefix, delim='.', attrs=self.attrs, **kwargs)
|
Simple binary input device
|
62599007d164cc6175821acc
|
class Template(object): <NEW_LINE> <INDENT> def __init__(self, template_string, name="<string>", loader=None, compress_whitespace=None, autoescape=_UNSET): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> if compress_whitespace is None: <NEW_LINE> <INDENT> compress_whitespace = name.endswith(".html") or name.endswith(".js") <NEW_LINE> <DEDENT> if autoescape is not _UNSET: <NEW_LINE> <INDENT> self.autoescape = autoescape <NEW_LINE> <DEDENT> elif loader: <NEW_LINE> <INDENT> self.autoescape = loader.autoescape <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.autoescape = _DEFAULT_AUTOESCAPE <NEW_LINE> <DEDENT> self.namespace = loader.namespace if loader else {} <NEW_LINE> reader = _TemplateReader(name, escape.native_str(template_string)) <NEW_LINE> self.file = _File(self, _parse(reader, self)) <NEW_LINE> self.code = self._generate_python(loader, compress_whitespace) <NEW_LINE> self.loader = loader <NEW_LINE> try: <NEW_LINE> <INDENT> self.compiled = compile( escape.to_unicode(self.code), "%s.generated.py" % self.name.replace('.', '_'), "exec", dont_inherit=True) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> formatted_code = _format_code(self.code).rstrip() <NEW_LINE> logging.error("%s code:\n%s", self.name, formatted_code) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def generate(self, **kwargs): <NEW_LINE> <INDENT> namespace = { "escape": escape.xhtml_escape, "xhtml_escape": escape.xhtml_escape, "url_escape": escape.url_escape, "json_encode": escape.json_encode, "squeeze": escape.squeeze, "linkify": escape.linkify, "datetime": datetime, "_tt_utf8": escape.utf8, "_tt_string_types": (unicode_type, bytes), "__name__": self.name.replace('.', '_'), "__loader__": ObjectDict(get_source=lambda name: self.code), } <NEW_LINE> namespace.update(self.namespace) <NEW_LINE> namespace.update(kwargs) <NEW_LINE> exec_in(self.compiled, namespace) <NEW_LINE> execute = namespace["_tt_execute"] <NEW_LINE> linecache.clearcache() <NEW_LINE> return execute() <NEW_LINE> <DEDENT> 
def _generate_python(self, loader, compress_whitespace): <NEW_LINE> <INDENT> buffer = StringIO() <NEW_LINE> try: <NEW_LINE> <INDENT> named_blocks = {} <NEW_LINE> ancestors = self._get_ancestors(loader) <NEW_LINE> ancestors.reverse() <NEW_LINE> for ancestor in ancestors: <NEW_LINE> <INDENT> ancestor.find_named_blocks(loader, named_blocks) <NEW_LINE> <DEDENT> writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template, compress_whitespace) <NEW_LINE> ancestors[0].generate(writer) <NEW_LINE> return buffer.getvalue() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> buffer.close() <NEW_LINE> <DEDENT> <DEDENT> def _get_ancestors(self, loader): <NEW_LINE> <INDENT> ancestors = [self.file] <NEW_LINE> for chunk in self.file.body.chunks: <NEW_LINE> <INDENT> if isinstance(chunk, _ExtendsBlock): <NEW_LINE> <INDENT> if not loader: <NEW_LINE> <INDENT> raise ParseError("{% extends %} block found, but no " "template loader") <NEW_LINE> <DEDENT> template = loader.load(chunk.name, self.name) <NEW_LINE> ancestors.extend(template._get_ancestors(loader)) <NEW_LINE> <DEDENT> <DEDENT> return ancestors
|
A compiled template.
We compile into Python from the given template_string. You can generate
the template from variables with generate().
|
62599007462c4b4f79dbc558
|
class TripResponse(object): <NEW_LINE> <INDENT> def __init__(self, trips=None): <NEW_LINE> <INDENT> self.swagger_types = { 'trips': 'list[TripResponseTrips]' } <NEW_LINE> self.attribute_map = { 'trips': 'trips' } <NEW_LINE> self._trips = trips <NEW_LINE> <DEDENT> @property <NEW_LINE> def trips(self): <NEW_LINE> <INDENT> return self._trips <NEW_LINE> <DEDENT> @trips.setter <NEW_LINE> def trips(self, trips): <NEW_LINE> <INDENT> self._trips = trips <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TripResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259900715fb5d323ce7f895
|
class LEDNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, nclass, backbone='', aux=False, jpu=False, pretrained_base=True, **kwargs): <NEW_LINE> <INDENT> super(LEDNet, self).__init__() <NEW_LINE> self.encoder = nn.Sequential( Downsampling(3, 32), SSnbt(32, **kwargs), SSnbt(32, **kwargs), SSnbt(32, **kwargs), Downsampling(32, 64), SSnbt(64, **kwargs), SSnbt(64, **kwargs), Downsampling(64, 128), SSnbt(128, **kwargs), SSnbt(128, 2, **kwargs), SSnbt(128, 5, **kwargs), SSnbt(128, 9, **kwargs), SSnbt(128, 2, **kwargs), SSnbt(128, 5, **kwargs), SSnbt(128, 9, **kwargs), SSnbt(128, 17, **kwargs), ) <NEW_LINE> self.decoder = APNModule(128, nclass) <NEW_LINE> self.__setattr__('exclusive', ['encoder', 'decoder']) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> size = x.size()[2:] <NEW_LINE> x = self.encoder(x) <NEW_LINE> x = self.decoder(x) <NEW_LINE> outputs = list() <NEW_LINE> x = F.interpolate(x, size, mode='bilinear', align_corners=True) <NEW_LINE> outputs.append(x) <NEW_LINE> return tuple(outputs)
|
LEDNet
Parameters
----------
nclass : int
Number of categories for the training dataset.
backbone : string
Pre-trained dilated backbone network type (default:'resnet50'; 'resnet50',
'resnet101' or 'resnet152').
norm_layer : object
Normalization layer used in backbone network (default: :class:`nn.BatchNorm`;
for Synchronized Cross-GPU BachNormalization).
aux : bool
Auxiliary loss.
Reference:
Yu Wang, et al. "LEDNet: A Lightweight Encoder-Decoder Network for Real-Time Semantic Segmentation."
arXiv preprint arXiv:1905.02423 (2019).
|
6259900721a7993f00c66ad4
|
class MultiSelect(object): <NEW_LINE> <INDENT> def __call__(self, field, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault('id', field.id) <NEW_LINE> src_list, dst_list = [], [] <NEW_LINE> for val, label, selected in field.iter_choices(): <NEW_LINE> <INDENT> if selected: <NEW_LINE> <INDENT> dst_list.append({'label':label, 'listValue':val}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> src_list.append({'label':label, 'listValue':val}) <NEW_LINE> <DEDENT> <DEDENT> kwargs.update( { 'data-provider-src':json.dumps(src_list), 'data-provider-dst':json.dumps(dst_list) } ) <NEW_LINE> html = ['<div %s>' % html_params(name=field.name, **kwargs)] <NEW_LINE> html.append('</div>') <NEW_LINE> return HTMLString(''.join(html))
|
Renders a megalist-multiselect widget.
The field must provide an `iter_choices()` method which the widget will
call on rendering; this method must yield tuples of
`(value, label, selected)`.
|
62599007d164cc6175821ad4
|
class PageScope(BaseScope): <NEW_LINE> <INDENT> pattern = r'pageid:' <NEW_LINE> def __init__(self, request): <NEW_LINE> <INDENT> self._request = request <NEW_LINE> <DEDENT> def lookup(self, scope_name): <NEW_LINE> <INDENT> pageid_scope = scope_name[len('pageid:'):] <NEW_LINE> scope_segments = self._pageid_to_namespace(pageid_scope) <NEW_LINE> request_segments = self._request_pageid_namespace <NEW_LINE> for pos, name in enumerate(scope_segments): <NEW_LINE> <INDENT> if pos == len(request_segments): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if request_segments[pos] != name: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _pageid_to_namespace(pageid): <NEW_LINE> <INDENT> pageid = pageid.replace('#', ':') <NEW_LINE> return [name for name in pageid.split(':') if name] <NEW_LINE> <DEDENT> @cachedproperty <NEW_LINE> def _request_pageid_namespace(self): <NEW_LINE> <INDENT> return tuple(self._pageid_to_namespace( self._request._orig_env.get('launchpad.pageid', '')))
|
The current page ID.
Pageid scopes are written as 'pageid:' + the pageid to match. Pageids
are treated as a namespace with : and # delimiters.
For example, the scope 'pageid:Foo' will be active on pages with pageids:
Foo
Foo:Bar
Foo#quux
|
62599007462c4b4f79dbc560
|
class RenamedTemporaryFile(object): <NEW_LINE> <INDENT> def __init__(self, final_path, **kwargs): <NEW_LINE> <INDENT> tmpfile_dir = kwargs.pop('dir', None) <NEW_LINE> if tmpfile_dir is None: <NEW_LINE> <INDENT> tmpfile_dir = os.path.dirname(final_path) <NEW_LINE> <DEDENT> self.tmpfile = tempfile.NamedTemporaryFile(dir=tmpfile_dir, **kwargs) <NEW_LINE> self.final_path = final_path <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.tmpfile, attr) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> log.debug('Preparing to write "%s"' % self.tmpfile.name) <NEW_LINE> self.tmpfile.__enter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_traceback): <NEW_LINE> <INDENT> if exc_type is None: <NEW_LINE> <INDENT> self.tmpfile.delete = False <NEW_LINE> result = self.tmpfile.__exit__(exc_type, exc_val, exc_traceback) <NEW_LINE> os.rename(self.tmpfile.name, self.final_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self.tmpfile.__exit__(exc_type, exc_val, exc_traceback) <NEW_LINE> <DEDENT> return result
|
Input/output file handle manager.
RenamedTemporyFile takes care of file handle creation and tear down.
Handy if you just want to write to a file in a particular directory
without having to worry about boring (yawn) file details.
.. note::
Your nominated directory must exist. Otherwise, file will just be
created in the current directory.
When used within context of a ``with`` statement, will return an object
of the file type and pass it to the variable defined by ``as`` clause.
File handle is closed when the ``with`` exits.
.. note::
Although built on the :mod:`tempfile.NamesTemporyFile` module,
the filename has been set to persist after file handle closure.
Example usage as follows ...
Say I want to write to the persistent file, ``banana``:
>>> from itt.utils.files import RenamedTemporaryFile
>>> with RenamedTemporaryFile("banana") as f:
>>> f.write("stuff")
...
|
62599007627d3e7fe0e079f6
|
class NgramTests(unittest.TestCase): <NEW_LINE> <INDENT> items = ['sdafaf','asfwef','asdfawe','adfwe', 'askfjwehiuasdfji'] <NEW_LINE> def test_ngram_search(self): <NEW_LINE> <INDENT> idx = NGram(self.items) <NEW_LINE> self.assertEqual(idx.search('askfjwehiuasdfji'), [('askfjwehiuasdfji', 1.0), ('asdfawe', 0.17391304347826086), ('asfwef', 0.083333333333333329), ('adfwe', 0.041666666666666664), ]) <NEW_LINE> self.assertEqual(idx.search('afadfwe')[:2], [('adfwe', 0.59999999999999998), ('asdfawe', 0.20000000000000001)]) <NEW_LINE> self.assertEqual(NGram.compare('sdfeff', 'sdfeff'), 1.0) <NEW_LINE> self.assertEqual(NGram.compare('sdfeff', 'zzzzzz'), 0.0) <NEW_LINE> <DEDENT> def test_set_operations(self): <NEW_LINE> <INDENT> items1 = set(["abcde", "cdefg", "fghijk", "ijklm"]) <NEW_LINE> items2 = set(["cdefg", "lmnop"]) <NEW_LINE> idx1 = NGram(items1) <NEW_LINE> idx2 = NGram(items2) <NEW_LINE> results = lambda L: sorted(x[0] for x in L) <NEW_LINE> self.assertEqual(results(idx1.search('cde')), ["abcde","cdefg"]) <NEW_LINE> idx1.remove('abcde') <NEW_LINE> self.assertEqual(results(idx1.search('cde')), ["cdefg"]) <NEW_LINE> items1.remove('abcde') <NEW_LINE> idx1.intersection_update(idx2) <NEW_LINE> test = items1.intersection(items2) <NEW_LINE> self.assertEqual(idx1, test) <NEW_LINE> self.assertEqual(results(idx1.search('lmn')), []) <NEW_LINE> self.assertEqual(results(idx1.search('ijk')), []) <NEW_LINE> self.assertEqual(results(idx1.search('def')), ['cdefg'])
|
Tests of the ngram class
|
62599007462c4b4f79dbc562
|
class Parameters: <NEW_LINE> <INDENT> axes = DatasetParameter('reference network shapefile', type='input') <NEW_LINE> length_min = LiteralParameter('minimum axis length') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.axes = dict( key='network-cartography-ready', tiled=False) <NEW_LINE> self.length_min = 50e3
|
Axis selection parameters
|
625990070a366e3fb87dd54f
|
class DeadlineExceededError(BaseException): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return ('The overall deadline for responding to the HTTP request ' 'was exceeded.')
|
Exception raised when the request reaches its overall time limit.
This exception will be thrown by the original thread handling the request,
shortly after the request reaches its deadline. Since the exception is
asynchronously set on the thread by the App Engine runtime, it can appear
to originate from any line of code that happens to be executing at that
time.
If the application catches this exception and does not generate a response
very quickly afterwards, an error will be returned to the user and
the application instance may be terminated.
Not to be confused with runtime.apiproxy_errors.DeadlineExceededError.
That one is raised when individual API calls take too long.
|
6259900721a7993f00c66ada
|
class TableauResource(BasicResource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return tableau_services.get_growth_of_unit()
|
docstring for UserResource
|
62599007d164cc6175821ad9
|
class Base: <NEW_LINE> <INDENT> REFTAG_RESOURCE = RESOURCES_BY_TAG <NEW_LINE> @property <NEW_LINE> def CONCRETE_MAP(self): <NEW_LINE> <INDENT> if not hasattr(self, "_CONCRETE_MAP"): <NEW_LINE> <INDENT> self._CONCRETE_MAP = { concrete: res for (res, concrete) in self.RESOURCE_MAP.items() } <NEW_LINE> <DEDENT> return self._CONCRETE_MAP <NEW_LINE> <DEDENT> def get_concrete(self, res): <NEW_LINE> <INDENT> return self.RESOURCE_MAP[res] <NEW_LINE> <DEDENT> def is_concrete(self, cls): <NEW_LINE> <INDENT> return cls in self.CONCRETE_MAP <NEW_LINE> <DEDENT> def get_resource(self, cls): <NEW_LINE> <INDENT> return self.CONCRETE_MAP[cls]
|
Backend base class.
Do NOT extend this directly when implementing a new backend, instead
extend Interface below.
|
62599007bf627c535bcb200c
|
class TestClef(object): <NEW_LINE> <INDENT> def test_invalid_clef(self): <NEW_LINE> <INDENT> l_time = {'ly_type': '', 'type': ''} <NEW_LINE> m_staffdef = etree.Element(mei.STAFF_DEF) <NEW_LINE> with pytest.raises(exceptions.LilyPondError): <NEW_LINE> <INDENT> lilypond.set_clef(l_time, m_staffdef) <NEW_LINE> <DEDENT> <DEDENT> def test_nonexistent_clef(self): <NEW_LINE> <INDENT> l_time = {'ly_type': 'clef', 'type': 'bullshit'} <NEW_LINE> m_staffdef = etree.Element(mei.STAFF_DEF) <NEW_LINE> lilypond.set_clef(l_time, m_staffdef) <NEW_LINE> assert m_staffdef.get('clef.shape') is None <NEW_LINE> assert m_staffdef.get('clef.line') is None <NEW_LINE> <DEDENT> def test_works(self): <NEW_LINE> <INDENT> l_time = {'ly_type': 'clef', 'type': 'bass'} <NEW_LINE> m_staffdef = etree.Element(mei.STAFF_DEF) <NEW_LINE> lilypond.set_clef(l_time, m_staffdef) <NEW_LINE> assert m_staffdef.get('clef.shape') == 'F' <NEW_LINE> assert m_staffdef.get('clef.line') == '4' <NEW_LINE> <DEDENT> def test_change(self): <NEW_LINE> <INDENT> l_staff = { 'ly_type': 'staff', 'initial_settings': [], 'content': [{'layers': [[ {'dur': '2', 'dots': [], 'ly_type': 'rest'}, {'ly_type': 'clef', 'type': 'treble'}, {'dur': '4', 'dots': [], 'ly_type': 'rest'}, ]]}], } <NEW_LINE> m_staff = etree.Element(mei.STAFF) <NEW_LINE> m_staffdef = etree.Element(mei.STAFF_DEF) <NEW_LINE> m_staffdef.set('n', '888') <NEW_LINE> lilypond.do_staff(l_staff, m_staff, m_staffdef) <NEW_LINE> m_staffdef = m_staff.find('.//{}'.format(mei.STAFF_DEF)) <NEW_LINE> assert m_staffdef.get('n') == '888'
|
Setting the clef.
|
625990073cc13d1c6d4662a9
|
class Solution: <NEW_LINE> <INDENT> def isIdentical(self, a, b): <NEW_LINE> <INDENT> if not (a or b): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if a and b: <NEW_LINE> <INDENT> if a.val == b.val: <NEW_LINE> <INDENT> return self.isIdentical(a.left, b.left) and self.isIdentical(a.right, b.right) <NEW_LINE> <DEDENT> <DEDENT> return False
|
@param a, b, the root of binary trees.
@return true if they are identical, or false.
|
62599007462c4b4f79dbc56a
|
class TestHealthCheckAPI(ApiTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestHealthCheckAPI, self).setUp() <NEW_LINE> self.cluster_id = 1 <NEW_LINE> self.url = '/clusters/%s/healthreports' % self.cluster_id <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(TestHealthCheckAPI, self).tearDown() <NEW_LINE> <DEDENT> def test_add_and_list_reports(self): <NEW_LINE> <INDENT> reports_list = [ {'name': 'rp1', 'category': 'c1'}, {'name': 'rp2', 'category': 'c2'}, {'name': 'rp3', 'category': 'c3'} ] <NEW_LINE> request_data = json.dumps({"report_list": reports_list}) <NEW_LINE> return_value = self.test_client.post(self.url, data=request_data) <NEW_LINE> resp = json.loads(return_value.get_data()) <NEW_LINE> self.assertEqual(200, return_value.status_code) <NEW_LINE> self.assertEqual(3, len(resp)) <NEW_LINE> request_data = json.dumps({'name': 'rp4 test'}) <NEW_LINE> return_value = self.test_client.post(self.url, data=request_data) <NEW_LINE> resp = json.loads(return_value.get_data()) <NEW_LINE> self.assertEqual(200, return_value.status_code) <NEW_LINE> self.assertEqual('rp4-test', resp['name']) <NEW_LINE> return_value = self.test_client.post(self.url, data=request_data) <NEW_LINE> self.assertEqual(409, return_value.status_code) <NEW_LINE> return_value = self.test_client.get(self.url) <NEW_LINE> resp = json.loads(return_value.get_data()) <NEW_LINE> self.assertEqual(200, return_value.status_code) <NEW_LINE> self.assertEqual(4, len(resp)) <NEW_LINE> <DEDENT> def test_update_and_get_health_report(self): <NEW_LINE> <INDENT> report_name = 'test-report' <NEW_LINE> health_check_db.add_report_record(self.cluster_id, name=report_name) <NEW_LINE> url = '/'.join((self.url, report_name)) <NEW_LINE> request_data = json.dumps( {"report": report_sample, "state": "finished"} ) <NEW_LINE> return_value = self.test_client.put(url, data=request_data) <NEW_LINE> resp = json.loads(return_value.get_data()) <NEW_LINE> self.maxDiff = None <NEW_LINE> 
self.assertEqual(200, return_value.status_code) <NEW_LINE> self.assertDictEqual(report_sample, resp['report']) <NEW_LINE> return_value = self.test_client.put(url, data=request_data) <NEW_LINE> self.assertEqual(403, return_value.status_code) <NEW_LINE> return_value = self.test_client.get(url) <NEW_LINE> self.assertEqual(200, return_value.status_code) <NEW_LINE> self.assertDictEqual(report_sample, resp['report']) <NEW_LINE> <DEDENT> def test_action_start_check_health(self): <NEW_LINE> <INDENT> url = '/clusters/%s/action' % self.cluster_id <NEW_LINE> request_data = json.dumps({'check_health': None}) <NEW_LINE> return_value = self.test_client.post(url, data=request_data) <NEW_LINE> self.assertEqual(403, return_value.status_code) <NEW_LINE> user = models.User.query.filter_by(email='admin@huawei.com').first() <NEW_LINE> cluster_db.update_cluster_state( self.cluster_id, user=user, state='SUCCESSFUL' ) <NEW_LINE> return_value = self.test_client.post(url, data=request_data) <NEW_LINE> self.assertEqual(202, return_value.status_code)
|
Test health check api.
|
6259900856b00c62f0fb342c
|
class ScrapeConfig: <NEW_LINE> <INDENT> def __init__(self, review_type, start, end): <NEW_LINE> <INDENT> self.review_type = review_type <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> <DEDENT> @property <NEW_LINE> def ul_selector(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def li_selector(self): <NEW_LINE> <INDENT> return "//li" <NEW_LINE> <DEDENT> @property <NEW_LINE> def per_page(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def review_meta_class(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def page_url(self, book_id, page_num): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def search_url(self, isbn13): <NEW_LINE> <INDENT> raise NotImplementedError
|
리뷰 스크래핑에 관련한 설정을 추상화하는 클래스입니다.
서점의 특정 리뷰, Pagination 을 설정할 수 있고,
서점 리뷰페이지의 url, 파싱을 하기 위한 xpath selector 를 알고 있습니다.
|
62599008d164cc6175821ae7
|
class TestAI(unittest.TestCase): <NEW_LINE> <INDENT> def assertMove(self, player, ideal, board, func='assertEqual'): <NEW_LINE> <INDENT> g = tttg.Game(board) <NEW_LINE> ai = tttai.AI(player) <NEW_LINE> move = ai.choose_move(g) <NEW_LINE> getattr(self, func)(move, ideal) <NEW_LINE> <DEDENT> def testPlacesToWinX(self): <NEW_LINE> <INDENT> self.assertMove(tttg.X, tttg.BOTTOMLEFT, [ [X, E, O], [X, E, O], [E, E, E] ]) <NEW_LINE> <DEDENT> def testPlacesToWinO(self): <NEW_LINE> <INDENT> self.assertMove(tttg.O, tttg.BOTTOMRIGHT, [ [X, E, O], [E, X, O], [X, E, E] ]) <NEW_LINE> <DEDENT> def testPlacesToBlock(self): <NEW_LINE> <INDENT> self.assertMove(O, tttg.LEFTEDGE, [ [X, O, O], [E, E, X], [X, E, E] ]) <NEW_LINE> <DEDENT> def testFork(self): <NEW_LINE> <INDENT> self.assertMove(X, tttg.BOTTOMRIGHT, [ [X, O, E], [O, E, E], [X, E, E] ]) <NEW_LINE> <DEDENT> def testBlockAFork(self): <NEW_LINE> <INDENT> edges = [tttg.TOPEDGE, tttg.RIGHTEDGE, tttg.BOTTOMEDGE, tttg.LEFTEDGE] <NEW_LINE> self.assertMove(O, edges, [ [E, E, X], [E, O, E], [X, E, E] ], 'assertIn') <NEW_LINE> <DEDENT> def testCenterIsPlayedAsFirstMove(self): <NEW_LINE> <INDENT> self.assertMove(X, tttg.CENTER, None) <NEW_LINE> self.assertMove(O, tttg.CENTER, [ [X, E, E], [E, E, E], [E, E, E] ]) <NEW_LINE> <DEDENT> def testPlaysEmptyCorner(self): <NEW_LINE> <INDENT> self.assertMove(O, tttg.TOPLEFT, [ [E, E, E], [E, X, E], [E, E, E] ]) <NEW_LINE> self.assertMove(X, tttg.TOPLEFT, [ [E, E, E], [E, O, E], [E, E, X] ]) <NEW_LINE> <DEDENT> def testPlaysEmptyEdge(self): <NEW_LINE> <INDENT> self.assertMove(O, tttg.LEFTEDGE, [ [X, O, X], [E, O, X], [O, X, O] ]) <NEW_LINE> <DEDENT> def testRaisesForFullBoard(self): <NEW_LINE> <INDENT> with self.assertRaises(tttai.NoMoveAvailable): <NEW_LINE> <INDENT> self.assertMove(X, None, [ [X, O, X], [X, O, X], [O, X, O] ])
|
1. Win: If AI has two in a row, place a third to win.
2. Block. If opponent has two in a row, play the block.
3. Fork: Create two non-blocked lines of 2.
4. Block a fork:
1. Create two in a row to force opponent into defending as long as defense does not create a fork.
2. If the opponent can fork, block the fork.
1. Play the center.
2. If the opponent is in a corner, play the opposite corner.
3. Play an empty corner.
4. Play an empty side.
|
62599008462c4b4f79dbc574
|
class Star: <NEW_LINE> <INDENT> type = "star" <NEW_LINE> m = 0 <NEW_LINE> x = 0 <NEW_LINE> y = 0 <NEW_LINE> Vx = 0 <NEW_LINE> Vy = 0 <NEW_LINE> Fx = 0 <NEW_LINE> Fy = 0 <NEW_LINE> R = 5 <NEW_LINE> color = "red" <NEW_LINE> image = None
|
Тип данных, описывающий звезду.
Содержит массу, координаты, скорость звезды,
а также визуальный радиус звезды в пикселах и её цвет.
|
6259900856b00c62f0fb342e
|
class Logger: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = str(name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> level = settings.log_level <NEW_LINE> return getattr(logging, level.upper()) <NEW_LINE> <DEDENT> def _print(self, msg): <NEW_LINE> <INDENT> print(': '.join([self.name, str(msg)])) <NEW_LINE> <DEDENT> def log(self, level, msg, **kwargs): <NEW_LINE> <INDENT> log = functools.partial(self._log, level, msg, **kwargs) <NEW_LINE> sublime.set_timeout(log, 0) <NEW_LINE> <DEDENT> def _log(self, level, msg, **kwargs): <NEW_LINE> <INDENT> if self.level <= level: <NEW_LINE> <INDENT> self._print(msg) <NEW_LINE> if level == logging.ERROR and kwargs.get('exc_info'): <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def debug(self, msg): <NEW_LINE> <INDENT> self.log(logging.DEBUG, msg) <NEW_LINE> <DEDENT> def info(self, msg): <NEW_LINE> <INDENT> self.log(logging.INFO, msg) <NEW_LINE> <DEDENT> def error(self, msg, exc_info=False): <NEW_LINE> <INDENT> self.log(logging.ERROR, msg, exc_info=exc_info) <NEW_LINE> <DEDENT> def exception(self, msg): <NEW_LINE> <INDENT> self.error(msg, exc_info=True) <NEW_LINE> <DEDENT> def warning(self, msg): <NEW_LINE> <INDENT> self.log(logging.WARN, msg)
|
Sublime Console Logger.
|
625990080a366e3fb87dd560
|
class SiteNameIdVisiteurMapper: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.corrupt_line_counter = 0 <NEW_LINE> self.counter_name = 'CORRUPT_LINES' <NEW_LINE> <DEDENT> def process_line(self, line): <NEW_LINE> <INDENT> kv = line.split(kv_sep) <NEW_LINE> try: <NEW_LINE> <INDENT> print(kv_sep.join([kv[0], kv[1], str(1)])) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.corrupt_line_counter += 1 <NEW_LINE> sys.stderr.write('reporter:counter:{0},{1},{2}\n'.format( counter_group, self.counter_name, self.corrupt_line_counter))
|
Emit "(site <tab> id_visitor <tab> 1>)" tuples. Emit means: print to standard output.
|
62599008d164cc6175821aeb
|
class RawBitProcessor(object): <NEW_LINE> <INDENT> def process_bits(self, data: str) -> str: <NEW_LINE> <INDENT> return data
|
An identity bit processor.
|
6259900856b00c62f0fb3432
|
class Kin_results(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.error = None <NEW_LINE> self.V = 0.0 <NEW_LINE> self.Km = 0.0 <NEW_LINE> self.SS = 0.0 <NEW_LINE> self.v_hat = None
|
Object that holds data from a computation, supporting dot access.
Mandatory members are:
name - Name of the method used (str).
error - None by default, str describing error in computation
V - limiting rate (float)
Km - Michaelis constant (float)
SS - sum of squares of residuals to the Michelis-Menten equation (float)
v_hat - estimated rate values (iterable of floats)
Optional, depending on the method:
SE_V - standard error of the limiting rate
SE_Km - standard error of the Michelis constant
Optional for linearizations:
x - x-values during linearization (iterable of floats)
y - y-values during linearization (iterable of floats)
m - slope of linearization
b - intercept of linearization
|
62599008bf627c535bcb2021
|
class Transport(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from suds.transport.options import Options <NEW_LINE> self.options = Options() <NEW_LINE> <DEDENT> def open(self, request): <NEW_LINE> <INDENT> raise Exception('not-implemented') <NEW_LINE> <DEDENT> def send(self, request): <NEW_LINE> <INDENT> raise Exception('not-implemented')
|
The transport I{interface}.
|
6259900815fb5d323ce7f8b7
|
class MiFloraSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, poller, parameter, name, unit, force_update, median): <NEW_LINE> <INDENT> self.poller = poller <NEW_LINE> self.parameter = parameter <NEW_LINE> self._unit = unit <NEW_LINE> self._name = name <NEW_LINE> self._state = None <NEW_LINE> self.data = [] <NEW_LINE> self._force_update = force_update <NEW_LINE> self.median_count = median <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._unit <NEW_LINE> <DEDENT> @property <NEW_LINE> def force_update(self): <NEW_LINE> <INDENT> return self._force_update <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _LOGGER.debug("Polling data for %s", self.name) <NEW_LINE> data = self.poller.parameter_value(self.parameter) <NEW_LINE> <DEDENT> except IOError as ioerr: <NEW_LINE> <INDENT> _LOGGER.info("Polling error %s", ioerr) <NEW_LINE> data = None <NEW_LINE> return <NEW_LINE> <DEDENT> if data is not None: <NEW_LINE> <INDENT> _LOGGER.debug("%s = %s", self.name, data) <NEW_LINE> self.data.append(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.info("Did not receive any data from Mi Flora sensor %s", self.name) <NEW_LINE> if len(self.data) > 0: <NEW_LINE> <INDENT> self.data = self.data[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._state = None <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> _LOGGER.debug("Data collected: %s", self.data) <NEW_LINE> if len(self.data) > self.median_count: <NEW_LINE> <INDENT> self.data = self.data[1:] <NEW_LINE> <DEDENT> if len(self.data) == self.median_count: <NEW_LINE> <INDENT> median = sorted(self.data)[int((self.median_count - 1) / 2)] <NEW_LINE> _LOGGER.debug("Median is: %s", median) <NEW_LINE> self._state = median <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> _LOGGER.debug("Not yet enough data for median calculation")
|
Implementing the MiFlora sensor.
|
6259900815fb5d323ce7f8bb
|
class AllBucketLists(Resource): <NEW_LINE> <INDENT> @auth.user_is_login <NEW_LINE> def get(self): <NEW_LINE> <INDENT> params = request.args.to_dict() <NEW_LINE> limit = int(params.get('limit', 20)) <NEW_LINE> limit = 100 if int(limit) > 100 else limit <NEW_LINE> search_by = params.get('q', '') <NEW_LINE> page = int(params.get('page', 1)) <NEW_LINE> token = request.headers.get('Token') <NEW_LINE> user_id = get_current_user_id(token) <NEW_LINE> search_results = BucketList.query.filter_by( created_by=user_id).filter(BucketList.name.like('%{}%'.format (search_by))) <NEW_LINE> all_bucketlist = search_results.paginate( page=page, per_page=limit, error_out=False) <NEW_LINE> next_page = str(request.url_root) + 'api/v1/bucketlists?' + 'limit=' + str(limit) + '&page=' + str(page + 1) if all_bucketlist.has_next else None <NEW_LINE> previous_page = request.url_root + 'api/v1/bucketlists?' + 'limit=' + str(limit) + '&page=' + str(page - 1) if all_bucketlist.has_prev else None <NEW_LINE> bucketlist_output = [get_bucketlist( bucketlist) for bucketlist in all_bucketlist.items] <NEW_LINE> return {'data': bucketlist_output, 'pages': all_bucketlist.pages, 'previous_page': previous_page, 'next_page': next_page}, 200 <NEW_LINE> <DEDENT> @auth.user_is_login <NEW_LINE> def post(self): <NEW_LINE> <INDENT> name = request.form.get('name') <NEW_LINE> token = request.headers.get('Token') <NEW_LINE> current_user = get_current_user_id(token) <NEW_LINE> bucketlist = BucketList.query.filter_by( name=name, created_by=current_user).first() <NEW_LINE> if name: <NEW_LINE> <INDENT> if bucketlist: <NEW_LINE> <INDENT> return messages['bucketlist_exist'], 406 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bucketlist = BucketList(name, current_user) <NEW_LINE> return (get_bucketlist(bucketlist), 201) if save_model(bucketlist) else (messages['bucketlist_not_saved'], 400) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return messages['no_bucketlist_name'], 406
|
Manage responses to bucketlists requests by a user.
URL:
/api/v1/bucketlists/
Methods:
GET, POST
|
6259900856b00c62f0fb343a
|
class BacktestingStrategy(BaseStrategy): <NEW_LINE> <INDENT> def __init__(self, barFeed, cash=1000000): <NEW_LINE> <INDENT> broker = pyalgotrade.broker.backtesting.Broker(cash, barFeed) <NEW_LINE> BaseStrategy.__init__(self, barFeed, broker)
|
Base class for backtesting strategies.
:param barFeed: The bar feed to use to backtest the strategy.
:type barFeed: :class:`pyalgotrade.barfeed.BarFeed`.
:param cash: The amount of cash available.
:type cash: int/float.
.. note::
This is a base class and should not be used directly.
|
625990080a366e3fb87dd56c
|
class _Multiprocessor: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _wrapper(func, queue, args, kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> from openff.evaluator.workflow.plugins import registered_workflow_protocols <NEW_LINE> if "registered_workflow_protocols" in kwargs: <NEW_LINE> <INDENT> protocols_to_import = kwargs.pop("registered_workflow_protocols") <NEW_LINE> for protocol_class in protocols_to_import: <NEW_LINE> <INDENT> module_name = ".".join(protocol_class.split(".")[:-1]) <NEW_LINE> class_name = protocol_class.split(".")[-1] <NEW_LINE> imported_module = importlib.import_module(module_name) <NEW_LINE> registered_workflow_protocols[class_name] = getattr( imported_module, class_name ) <NEW_LINE> <DEDENT> <DEDENT> if "logger_path" in kwargs: <NEW_LINE> <INDENT> formatter = timestamp_formatter() <NEW_LINE> logger_path = kwargs.pop("logger_path") <NEW_LINE> worker_logger = logging.getLogger() <NEW_LINE> if not len(worker_logger.handlers): <NEW_LINE> <INDENT> logger_handler = logging.FileHandler(logger_path) <NEW_LINE> logger_handler.setFormatter(formatter) <NEW_LINE> worker_logger.setLevel(logging.INFO) <NEW_LINE> worker_logger.addHandler(logger_handler) <NEW_LINE> if ( not os.path.exists(logger_path) or os.stat(logger_path).st_size == 0 ): <NEW_LINE> <INDENT> worker_logger.info("=========================================") <NEW_LINE> worker_logger.info(f"HOSTNAME: {platform.node()}") <NEW_LINE> if os.environ.get("PBS_JOBID") is not None: <NEW_LINE> <INDENT> worker_logger.info( f"PBSJOBID: {os.environ.get('PBS_JOBID')}" ) <NEW_LINE> <DEDENT> elif os.environ.get("LSB_JOBID") is not None: <NEW_LINE> <INDENT> worker_logger.info( f"LSBJOBID: {os.environ.get('LSB_JOBID')}" ) <NEW_LINE> <DEDENT> elif os.environ.get("SLURM_JOB_ID") is not None: <NEW_LINE> <INDENT> worker_logger.info( f"SLURMJOBID: {os.environ.get('SLURM_JOBID')}" ) <NEW_LINE> <DEDENT> worker_logger.info(f"PLATFORM: {platform.platform()}") <NEW_LINE> 
worker_logger.info("-----------------------------------------") <NEW_LINE> worker_logger.info( "PYTHON VERSION: " f"{platform.python_version()} - " f"{platform.python_implementation()}" ) <NEW_LINE> worker_logger.info("=========================================") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return_value = func(*args, **kwargs) <NEW_LINE> queue.put(return_value) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> queue.put((e, e.__traceback__)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def run(function, *args, **kwargs): <NEW_LINE> <INDENT> if hasattr(evaluator, "_called_from_test"): <NEW_LINE> <INDENT> return function(*args, **kwargs) <NEW_LINE> <DEDENT> manager = multiprocessing.Manager() <NEW_LINE> queue = manager.Queue() <NEW_LINE> target_args = [function, queue, args, kwargs] <NEW_LINE> process = multiprocessing.Process( target=_Multiprocessor._wrapper, args=target_args ) <NEW_LINE> process.start() <NEW_LINE> return_value = queue.get() <NEW_LINE> process.join() <NEW_LINE> if ( isinstance(return_value, tuple) and len(return_value) > 0 and isinstance(return_value[0], Exception) ): <NEW_LINE> <INDENT> formatted_exception = traceback.format_exception( None, return_value[0], return_value[1] ) <NEW_LINE> logger.info(f"{formatted_exception} {return_value[0]} {return_value[1]}") <NEW_LINE> raise return_value[0] <NEW_LINE> <DEDENT> return return_value
|
A temporary utility class which runs a given
function in a separate process.
|
62599008d164cc6175821af5
|
class TestLookasideLegacy(Base): <NEW_LINE> <INDENT> expected_title = "git.lookaside.texlive.new" <NEW_LINE> expected_subti = 'jnovy uploaded pst-diffraction.doc.tar.xz for texlive' <NEW_LINE> expected_icon = "https://apps.fedoraproject.org/img/icons/git-logo.png" <NEW_LINE> expected_secondary_icon = "https://seccdn.libravatar.org/avatar/" + "e0e8e0c4d995109cdac8ae4eb5766a73cf09c7a8d2d8bac57f761e6223ca094b?s=64&" + "d=retro" <NEW_LINE> expected_link = 'https://src.fedoraproject.org/lookaside/pkgs/' + 'texlive/pst-diffraction.doc.tar.xz/' + 'dacad985394b3977f9dcf0c75f51a357/' + 'pst-diffraction.doc.tar.xz' <NEW_LINE> expected_usernames = set(['jnovy']) <NEW_LINE> expected_packages = set(['texlive']) <NEW_LINE> expected_objects = set(['texlive/pst-diffraction.doc.tar.xz']) <NEW_LINE> msg = { "i": 1, "timestamp": 1349197866.215465, "topic": "org.fedoraproject.prod.git.lookaside.texlive.new", "msg": { "agent": "jnovy", "md5sum": "dacad985394b3977f9dcf0c75f51a357", "name": "texlive", "filename": "pst-diffraction.doc.tar.xz" } }
|
Support oldschool lookaside messages. :(
|
6259900821a7993f00c66af8
|
class RingBuffer: <NEW_LINE> <INDENT> def __init__(self, maxBuffSz, pktSz): <NEW_LINE> <INDENT> self.maxBuffSz = maxBuffSz <NEW_LINE> self.Q = deque([], maxBuffSz) <NEW_LINE> assert pktSz >0, "Packet size to be a positive integer" <NEW_LINE> self.pktSize = pktSz <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return len(self.Q) <NEW_LINE> <DEDENT> def space(self): <NEW_LINE> <INDENT> return self.maxBuffSz - len(self.Q)*self.pktSize <NEW_LINE> <DEDENT> def push(self, elem): <NEW_LINE> <INDENT> if len(self.Q) < self.maxBuffSz: <NEW_LINE> <INDENT> self.Q.appendleft(elem) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.Q.popleft() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.Q.clear() <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> if self.space() == self.maxBuffSz: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
|
RingBuffer class implements a buffer that allows push, pop and other basic queue operations
|
62599008d164cc6175821af7
|
class Score(object): <NEW_LINE> <INDENT> def __init__(self, score_name, implementation): <NEW_LINE> <INDENT> self._score_name = score_name <NEW_LINE> self._implementation = implementation <NEW_LINE> <DEDENT> def calculate(self, references, predictions): <NEW_LINE> <INDENT> avg_score, scores = self._implementation.compute_score( references, predictions) <NEW_LINE> if isinstance(avg_score, (list, tuple)): <NEW_LINE> <INDENT> avg_score = map(float, avg_score) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> avg_score = float(avg_score) <NEW_LINE> <DEDENT> return {self._score_name: avg_score}
|
A subclass of this class is an adapter of pycocoevalcap.
|
62599008462c4b4f79dbc584
|
class BorrowedBooks(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'borrowed_books' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> book_borrower = db.Column(db.String, nullable=False) <NEW_LINE> book_name = db.Column(db.String, nullable=False, index=True) <NEW_LINE> borrow_date = db.Column(db.Date, nullable=False, index=True) <NEW_LINE> borrow_days = db.Column(db.String, nullable=False) <NEW_LINE> delay_charge = db.Column(db.String, nullable=True) <NEW_LINE> def __init__(self, borrower, bookName, borrowDate, borrowDays, delayCharge): <NEW_LINE> <INDENT> self.book_borrower = borrower <NEW_LINE> self.book_name = bookName <NEW_LINE> self.borrow_date = borrowDate <NEW_LINE> self.borrow_days = borrowDays <NEW_LINE> self.delay_charge = delayCharge <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Book %r>' % (self.book_name)
|
This class creates borrowed_books database table instance
and sets table field names
|
625990083cc13d1c6d4662c7
|
class Supervisor: <NEW_LINE> <INDENT> def __init__(self, protected): <NEW_LINE> <INDENT> self.protected = list(protected) <NEW_LINE> <DEDENT> def validate(self, fgraph): <NEW_LINE> <INDENT> if not hasattr(fgraph, 'destroyers'): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for r in self.protected + list(fgraph.outputs): <NEW_LINE> <INDENT> if fgraph.destroyers(r): <NEW_LINE> <INDENT> raise gof.InconsistencyError( "Trying to destroy a protected Variable.", r)
|
Listener for FunctionGraph events which makes sure that no
operation overwrites the contents of protected Variables. The
outputs of the FunctionGraph are protected by default.
|
62599008d164cc6175821af9
|
class PostFlairWidget(Widget, BaseList): <NEW_LINE> <INDENT> CHILD_ATTRIBUTE = "order"
|
Class to represent a post flair widget.
Find an existing one:
.. code-block:: python
post_flair_widget = None
widgets = reddit.subreddit("redditdev").widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.PostFlairWidget):
post_flair_widget = widget
break
for flair in post_flair_widget:
print(flair)
print(post_flair_widget.templates[flair])
Create one (requires proper moderator permissions):
.. code-block:: python
subreddit = reddit.subreddit("redditdev")
widgets = subreddit.widgets
flairs = [f["id"] for f in subreddit.flair.link_templates]
styles = {"backgroundColor": "#FFFF66", "headerColor": "#3333EE"}
post_flair = widgets.mod.add_post_flair_widget("Some flairs", "list", flairs, styles)
For more information on creation, see :meth:`.add_post_flair_widget`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {"backgroundColor": "#FFFFFF", "headerColor": "#FF9900"}
post_flair = post_flair.mod.update(shortName="My fav flairs", styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
post_flair.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this class.
Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a guarantee that
these attributes will always be present, nor is this list necessarily complete.
======================= ============================================================
Attribute Description
======================= ============================================================
``display`` The display style of the widget, either ``"cloud"`` or
``"list"``.
``id`` The widget ID.
``kind`` The widget kind (always ``"post-flair"``).
``order`` A list of the flair IDs in this widget. Can be iterated over
by iterating over the :class:`.PostFlairWidget` (e.g. ``for
flair_id in post_flair``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``"backgroundColor"`` and
``"headerColor"``.
``subreddit`` The :class:`.Subreddit` the button widget belongs to.
``templates`` A ``dict`` that maps flair IDs to ``dict``\ s that describe
flairs.
======================= ============================================================
|
62599008462c4b4f79dbc586
|
class UserChangeView(SuccessMessageMixin, UpdateView): <NEW_LINE> <INDENT> template_name = 'accounts/update.html' <NEW_LINE> model = Person <NEW_LINE> form_class = UserCreateForm <NEW_LINE> success_url = '/' <NEW_LINE> success_message = "%(first_name)s %(last_name)s successfully updated." <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> self.object = form.save(commit=False) <NEW_LINE> self.object.save() <NEW_LINE> if self.object.user_type == 'parent': <NEW_LINE> <INDENT> Address.objects.update_or_create( person=self.object, defaults={ 'street': self.request.POST.get('street'), 'city': self.request.POST.get('city'), 'state': self.request.POST.get('state'), 'zip_code': self.request.POST.get('zip_code') } ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Address.objects.filter(person=self.object).delete() <NEW_LINE> <DEDENT> return super().form_valid(form)
|
Author: Farzana Yasmin.
Purpose: Update the specific user.
Pre condition: N/A.
Post condition: N/A.
Library dependency: N/A.
Database Interaction: accounts.models.Person will be hit to update the specified users and
also hit accounts.models.Address for creating/updating address details.
File Location: accounts/views.py.
Args:
SuccessMessageMixin: django.contrib.messages.views
UpdateView: django.views.generic
Returns:
None
|
625990083cc13d1c6d4662c9
|
class MimedFileFactory(log.Loggable): <NEW_LINE> <INDENT> logCategory = LOG_CATEGORY <NEW_LINE> defaultType = "application/octet-stream" <NEW_LINE> def __init__(self, httpauth, mimeToResource=None, rateController=None, requestModifiers=None, metadataProvider=None): <NEW_LINE> <INDENT> self._httpauth = httpauth <NEW_LINE> self._mimeToResource = mimeToResource or {} <NEW_LINE> self._rateController = rateController <NEW_LINE> self._requestModifiers = requestModifiers <NEW_LINE> self._metadataProvider = metadataProvider <NEW_LINE> <DEDENT> def create(self, path): <NEW_LINE> <INDENT> mimeType = path.mimeType or self.defaultType <NEW_LINE> self.debug("Create %s file for %s", mimeType, path) <NEW_LINE> klazz = self._mimeToResource.get(mimeType, File) <NEW_LINE> return klazz(path, self._httpauth, mimeToResource=self._mimeToResource, rateController=self._rateController, requestModifiers=self._requestModifiers, metadataProvider=self._metadataProvider)
|
I create File subclasses based on the mime type of the given path.
|
6259900821a7993f00c66afc
|
class Video(JavaScriptObject): <NEW_LINE> <INDENT> @java.init <NEW_LINE> def __init__(self, *a, **kw): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.protected <NEW_LINE> @__init__.register <NEW_LINE> @java.typed() <NEW_LINE> def __init__(self, ): <NEW_LINE> <INDENT> self.__init__._super() <NEW_LINE> <DEDENT> @java.final <NEW_LINE> @java.native <NEW_LINE> def getDisplayUrl(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.final <NEW_LINE> @java.native <NEW_LINE> def getSourceUrl(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.final <NEW_LINE> @java.native <NEW_LINE> def getOwner(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.final <NEW_LINE> @java.native <NEW_LINE> def getPermalink(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.final <NEW_LINE> @java.native <NEW_LINE> def getSourceType(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.static <NEW_LINE> @java.native <NEW_LINE> @java.typed(String) <NEW_LINE> def fromJson(self, jsonString): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @java.final <NEW_LINE> def stringify(self): <NEW_LINE> <INDENT> return JSONObject(self).toString()
|
Represents a video object in facebook
|
6259900821a7993f00c66afe
|
class PrivateIngredientsApiTests(TestCase):
    """Tests for the ingredients API that require an authenticated user."""

    def setUp(self):
        # Every request in this suite runs as a freshly created, forced-auth user.
        self.user = get_user_model().objects.create_user(
            'do@gmail.com', 'testpass')
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)

    def test_retrieve_ingredient_list(self):
        """Listing returns the stored ingredients, ordered by name descending."""
        Ingredient.objects.create(user=self.user, name='Do')
        Ingredient.objects.create(user=self.user, name='Vuong')
        response = self.client.get(INGREDIENTS_URL)
        expected = IngredientSerializer(
            Ingredient.objects.all().order_by('-name'), many=True)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, expected.data)

    def test_ingredients_limited_to_user(self):
        """Only the authenticated user's ingredients come back."""
        other_user = get_user_model().objects.create_user(
            'do1@gmail.com', 'testpass')
        Ingredient.objects.create(user=other_user, name='Test')
        own_ingredient = Ingredient.objects.create(user=self.user, name='Do')
        response = self.client.get(INGREDIENTS_URL)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['name'], own_ingredient.name)

    def test_create_ingredient_successful(self):
        """POSTing a valid payload persists the ingredient for this user."""
        payload = {'name': 'Test tag'}
        self.client.post(INGREDIENTS_URL, payload)
        self.assertTrue(
            Ingredient.objects.filter(
                user=self.user, name=payload['name']).exists())

    def test_create_ingredient_invalid(self):
        """POSTing an empty name is rejected with HTTP 400."""
        response = self.client.post(INGREDIENTS_URL, {'name': ''})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve_ingredients_assigned_to_recipes(self):
        """assigned_only=1 filters to ingredients attached to a recipe."""
        assigned = Ingredient.objects.create(user=self.user, name="Test 1")
        unassigned = Ingredient.objects.create(user=self.user, name="Test 2")
        recipe = Recipe.objects.create(
            title="Test title", time_minutes=10, price=5.00, user=self.user)
        recipe.ingredients.add(assigned)
        response = self.client.get(INGREDIENTS_URL, {"assigned_only": 1})
        self.assertIn(IngredientSerializer(assigned).data, response.data)
        self.assertNotIn(IngredientSerializer(unassigned).data, response.data)

    def test_retrieve_ingredient_assigned_unique(self):
        """assigned_only results are de-duplicated across recipes."""
        shared = Ingredient.objects.create(user=self.user, name="Test 1")
        Ingredient.objects.create(user=self.user, name="Test 2")
        for recipe_title in ("Test title", "Test title 2"):
            recipe = Recipe.objects.create(
                title=recipe_title, time_minutes=10, price=5.00,
                user=self.user)
            recipe.ingredients.add(shared)
        response = self.client.get(INGREDIENTS_URL, {'assigned_only': 1})
        self.assertEqual(len(response.data), 1)
|
Test the authorized user ingredients API
|
62599008d164cc6175821afd
|
class PageTestCase(BaseTestCase):
    """Checks public and protected page responses."""

    def test_index_page(self):
        # The landing page is accessible without authentication.
        self.assert200(self.client.get("/"))

    def test_secret_page(self):
        # The protected page rejects unauthenticated requests.
        self.assert401(self.client.get("/secret"))
|
A pages test case
|
62599008462c4b4f79dbc58a
|
class MarketEvent(Event):
    """Event signalling that a new market-data update (bars) has arrived."""

    def __init__(self):
        # 'MARKET' is the discriminator consumers use to route this event.
        self.type = 'MARKET'
|
Обрабатывает событие получения нового обновления рыночной информации с соответствующими барами.
|
6259900856b00c62f0fb3444
|
class SyncDataError(Exception):
    """Raised when a data-synchronization operation fails.

    Attributes:
        error: the underlying error message/object supplied by the caller.
        severity: fixed at 'warning'.
        status_code: HTTP 400, so API handlers can map this to a response.
    """

    def __init__(self, error):
        # Fix: forward the message to Exception so str(exc) / logging show it
        # instead of an empty string.
        super().__init__(error)
        self.error = error
        self.severity = 'warning'
        self.status_code = http_code.HTTP_400_BAD_REQUEST
|
Exception raised when a data synchronization operation fails.
|
6259900856b00c62f0fb3446
|
class WeekOfMonth(DateOffset):
    """Describes monthly dates like "the Tuesday of the 2nd week of each month".

    Parameters
    ----------
    n : int
        Number of monthly steps to move (must be nonzero).
    week : {0, 1, 2, 3}
        0 is the 1st week of the month, 1 the 2nd, etc.
    weekday : {0, 1, ..., 6}
        0 is Monday ... 6 is Sunday.
    """

    _adjust_dst = True

    def __init__(self, n=1, normalize=False, **kwds):
        self.n = n
        self.normalize = normalize
        # 'weekday' and 'week' are required keyword arguments (KeyError if absent).
        self.weekday = kwds['weekday']
        self.week = kwds['week']
        if self.n == 0:
            raise ValueError('N cannot be 0')
        if self.weekday < 0 or self.weekday > 6:
            raise ValueError('Day must be 0<=day<=6, got %d' % self.weekday)
        if self.week < 0 or self.week > 3:
            # NOTE(review): the message says 'day' but this validates the week range.
            raise ValueError('Week must be 0<=day<=3, got %d' % self.week)
        self.kwds = kwds

    @apply_wraps
    def apply(self, other):
        """Shift *other* by n week-of-month offsets, preserving its time of day."""
        base = other
        offsetOfMonth = self.getOffsetOfMonth(other)
        # Choose how many whole months to move so that after re-anchoring to the
        # target week/weekday we have advanced exactly n offsets from *other*.
        if offsetOfMonth > other:
            # This month's anchor is still ahead of us.
            if self.n > 0:
                months = self.n - 1
            else:
                months = self.n
        elif offsetOfMonth == other:
            months = self.n
        else:
            # This month's anchor is already behind us.
            if self.n > 0:
                months = self.n
            else:
                months = self.n + 1
        # Re-anchor in the destination month, then restore the original clock time.
        other = self.getOffsetOfMonth(other + relativedelta(months=months, day=1))
        other = datetime(other.year, other.month, other.day, base.hour, base.minute, base.second, base.microsecond)
        return other

    def getOffsetOfMonth(self, dt):
        """Return the self.week-th occurrence of self.weekday in dt's month."""
        w = Week(weekday=self.weekday)
        d = datetime(dt.year, dt.month, 1, tzinfo=dt.tzinfo)
        # First matching weekday on/after the 1st, then step forward week times.
        d = w.rollforward(d)
        for i in range(self.week):
            d = w.apply(d)
        return d

    def onOffset(self, dt):
        """True when *dt* falls exactly on this offset's anchor date."""
        if self.normalize and not _is_normalized(dt):
            return False
        # Compare at midnight so the time-of-day component is ignored.
        d = datetime(dt.year, dt.month, dt.day, tzinfo=dt.tzinfo)
        return d == self.getOffsetOfMonth(dt)

    @property
    def rule_code(self):
        # e.g. 'WOM-2TUE' for week=1, weekday=Tuesday (week is 1-based in codes).
        return '%s-%d%s' % (self._prefix, self.week + 1, _int_to_weekday.get(self.weekday, ''))

    _prefix = 'WOM'

    @classmethod
    def _from_name(cls, suffix=None):
        """Parse a rule suffix like '2TUE' back into a WeekOfMonth offset."""
        if not suffix:
            raise ValueError("Prefix %r requires a suffix." % (cls._prefix))
        week = int(suffix[0]) - 1
        weekday = _weekday_to_int[suffix[1:]]
        return cls(week=week, weekday=weekday)
|
Describes monthly dates like "the Tuesday of the 2nd week of each month"
Parameters
----------
n : int
week : {0, 1, 2, 3}
0 is 1st week of month, 1 2nd week, etc.
weekday : {0, 1, ..., 6}
0: Mondays
1: Tuesdays
2: Wednesdays
3: Thursdays
4: Fridays
5: Saturdays
6: Sundays
|
625990080a366e3fb87dd578
|
class WhitelistRule(CompareRule):
    """A CompareRule whose compare function checks a key against a whitelist.

    An event matches when its ``compare_key`` value is *not* in the whitelist
    (or is missing, unless ``ignore_null`` is set).
    """

    _schema_file = 'schemas/ruletype-whitelist.yaml'

    def __init__(self, locator: str, hash_str: str, conf: dict):
        super().__init__(locator, hash_str, conf)
        # Expand the configured whitelist entries (e.g. file references).
        self.expand_entries('whitelist')

    def prepare(self, es_client: ElasticSearchClient, start_time: str = None) -> None:
        """Push the whitelist into the ES filter so whitelisted terms are
        excluded server-side.

        NOTE(review): ``original_filter`` stores a *reference* to the filter
        list, not a copy, so the append below also mutates it — repeated
        prepare() calls would accumulate terms clauses. Verify whether a copy
        was intended.
        """
        self._conf.setdefault('original_filter', self._conf['filter'])
        self._conf['filter'] = self._conf['original_filter']
        terms_query = {'bool': {'must_not': {'terms': {self._conf['compare_key']: list(self._conf['whitelist'])}}}}
        if es_client.es_version_at_least(6):
            self._conf['filter'].append(terms_query)
        else:
            # Pre-ES6 needs a bool query wrapped to be usable as a filter.
            self._conf['filter'].append({'constant_score': {'filter': terms_query}})

    def compare(self, event: dict) -> bool:
        """Return True (match) when the term is absent or not whitelisted."""
        term = dots_get(event, self._conf['compare_key'])
        if term is None:
            # A missing key counts as a match unless ignore_null is configured.
            return not self._conf['ignore_null']
        else:
            return term not in self._conf['whitelist']

    def generate_match(self, event: dict) -> (dict, dict):
        """Build the (extra metadata, event) pair reported for a match."""
        extra = {'compare_key': self._conf['compare_key'], 'num_events': 1, 'began_at': dots_get(event, self.ts_field), 'ended_at': dots_get(event, self.ts_field)}
        return extra, event
|
A CompareRule where the compare function checks a given key against a whitelist.
|
62599008d164cc6175821b01
|
class StoreSerializers(serializers.ModelSerializer):
    """Store serializer that embeds the owning retailer business.

    ``business`` and ``retailer_id`` are derived from the related
    ``retailer_business`` object via SerializerMethodFields.
    """

    business = serializers.SerializerMethodField()
    retailer_id = serializers.SerializerMethodField()

    class Meta:
        model = Store
        fields = ('id', 'name', 'address1', 'address2', 'town', 'postcode', 'county', 'country', 'fascia', 'wholesaler', 'epos', 'licensing_region', 'fascia_display_name', 'epos_display_name', 'fascia_name', 'wholesaler_name', 'epos_name', 'licensing_region_name', 'latitude', 'longitude', 'average_weekly_turnover', 'average_weekly_footfall', 'average_basket_spend', 'store_size', 'licensing_status_name', 'business', 'retailer_id', 'retailer_business', 'is_postoffice', 'is_lottery', 'is_paypoint', 'is_bakeoff', 'is_hotfood', 'licensing_status', 'admin_notes', 'retailer_notes', 'is_active', 'store_status', 'contact_name', 'contact_number', 'another_store_available')

    # Fix: dropped the dead ``_ = self.__class__.__name__`` no-op statements
    # the original carried in both method-field getters.

    def get_retailer_id(self, obj):
        """Return the id of the retailer owning this store."""
        return obj.retailer_business.retailer_id

    def get_business(self, obj):
        """Return the fully serialized retailer business for this store."""
        return RetailerBusinessFullSerializer(obj.retailer_business, many=False).data
|
Store Serializer
|
6259900915fb5d323ce7f8c9
|
class ScraperFactory(object):
    """Returns a scraper built for the URL you feed it."""

    @staticmethod
    def create_scraper(url, type):
        """Build the scraper matching *type*; unknown types yield None."""
        scraper = None
        if type == ScraperType.ORCID:
            scraper = OrcIdScraper(url, type)
        elif type == ScraperType.RESEARCHID:
            scraper = ResearchIdScraper(url, type)
        elif type == ScraperType.PROFILE:
            scraper = ProfileScraper(url, type)
        elif type == ScraperType.GOOGLESCHOLAR:
            scraper = GoogleScholarScraper(url, type)
        return scraper
|
Returns a scraper made for the URL you feed it
|
6259900956b00c62f0fb3448
|
class Error(Exception):
    """Base class for errors common to the model data interface."""
|
Errors common to model data interface.
|
6259900921a7993f00c66b04
|
class SimplifiedOpenvpnData:
    """SQLite-backed store for client slugs and their share hashes.

    Opens the ``sovpn.sqlite`` database in the configured container
    directory and bootstraps the clients table on construction.
    """

    def __init__(self):
        self._config = SimplifiedOpenvpnConfig()
        self._db = sqlite3.connect(self._config.container + 'sovpn.sqlite')
        # Ensure the clients table exists before any query runs.
        sql = self.read_sql_file('create_table_clients.sql')
        self._db.cursor().execute(sql)
        self._db.commit()

    def read_sql_file(self, sql_file):
        """Return the contents of a SQL file from the container's sql/ dir."""
        return _helper.read_file_as_value(self._config.container + 'sql/' + sql_file)

    def insert_share_hash(self, slug, share_hash):
        """Create a client record; True on success, None on a database error."""
        sql = self.read_sql_file('insert_client_record.sql')
        try:
            self._db.cursor().execute(sql, [slug, share_hash])
            self._db.commit()
            return True
        except sqlite3.Error:
            # Fix: narrowed from a bare ``except:`` that also swallowed
            # programming errors; only database failures are expected here.
            return None

    def rotate_share_hash(self, slug, share_hash):
        """Replace the stored share hash for *slug*."""
        sql = self.read_sql_file('update_client_hash.sql')
        self._db.cursor().execute(sql, [share_hash, slug])
        self._db.commit()

    def find_client_slug_by_share_hash(self, share_hash):
        """Return the slug matching *share_hash*, or None."""
        return self._fetch_single('find_client_slug_by_hash.sql', share_hash)

    def find_client_share_hash_by_slug(self, slug):
        """Return the share hash for *slug*, or None."""
        return self._fetch_single('find_client_hash_by_slug.sql', slug)

    def _fetch_single(self, sql_file, parameter):
        # Shared lookup helper: run a one-parameter query and return the first
        # column of the first row, or None when there is no match.
        cursor = self._db.cursor()
        cursor.execute(self.read_sql_file(sql_file), [parameter])
        result = cursor.fetchone()
        return result[0] if result else None

    def get_all_client_slugs(self):
        """Return a list of every client slug in the database."""
        cursor = self._db.cursor()
        cursor.execute(self.read_sql_file('select_client_slugs.sql'))
        return [record[0] for record in cursor.fetchall()]
|
Class that contains methods that deal with database.
|
62599009627d3e7fe0e07a24
|
class WXUserRegSerializer(serializers.ModelSerializer):
    """WeChat user registration serializer.

    Registers a User from a WeChat profile; ``openId`` must be unique, so a
    duplicate registration is rejected by the UniqueValidator (its message
    presumably reads "user already exists" — the literals are kept verbatim).
    """

    # Required WeChat openid; uniqueness is enforced against existing users.
    openId = serializers.CharField(label="微信openid", help_text="微信openid", required=True, allow_blank=False, validators=[UniqueValidator(queryset=User.objects.all(), message="用户已经存在")])

    class Meta:
        model = User
        fields = ('openId', 'nickName', 'gender', 'city', 'province', 'country', 'avatarUrl')
|
微信用户注册
|
62599009bf627c535bcb2037
|
class _Node: <NEW_LINE> <INDENT> __slots__ = '_element', '_next' <NEW_LINE> def __init__(self, element, next): <NEW_LINE> <INDENT> self._element = element <NEW_LINE> self._next = next
|
Lightweight, nonpublic class for storing a singly linked node.
|
62599009462c4b4f79dbc590
|
class MultipleChoiceField(models.CharField):
    """Field that stores a set of string values in a CharField using a
    comma delimiter.

    Compatible with django-rest-framework's multiple choice field: Python
    values are lists of choice strings; the database value is the picked
    choices joined with ','.
    """

    def clean_choices(self, values):
        """Raise ValidationError unless every value is a configured choice."""
        for value in values:
            exists = False
            for choice, label in self.choices:
                if choice == value:
                    exists = True
                    break
            if not exists:
                raise ValidationError(_("Invalid value: {}").format(value))

    def validate(self, value, model_instance):
        """Validate the whole list against choices, null and blank settings."""
        if not self.editable:
            # Non-editable fields skip validation entirely.
            return
        self.clean_choices(value)
        if value is None and not self.null:
            raise ValidationError(self.error_messages["null"], code="null")
        if not self.blank and value in self.empty_values:
            raise ValidationError(self.error_messages["blank"], code="blank")

    def from_db_value(self, value, expression, connection):
        """Convert the stored comma-joined string back into a list of choices."""
        if value is None:
            return None
        if not value:
            # Empty string maps to an empty selection.
            return []
        values = value.split(",")
        self.clean_choices(values)
        return values

    def get_prep_value(self, value):
        """Serialize the selection to a comma-joined string for the database.

        Iterating self.choices (rather than *value*) keeps the stored order
        canonical and silently drops unknown entries.
        """
        if value is None:
            return ""
        picked = []
        for choice, label in self.choices:
            if choice in value:
                picked.append(choice)
        return ",".join(picked)

    def to_python(self, value):
        """Deserialize a raw value (string or already-split sequence) to a list."""
        if isinstance(value, (list, set, tuple)):
            return value
        if value is None:
            return value
        values = value.split(",")
        self.clean_choices(values)
        return values

    def formfield(self, **kwargs):
        """Use a MultipleChoiceField form field by default."""
        defaults = {"form_class": django.forms.MultipleChoiceField}
        defaults.update(**kwargs)
        return super().formfield(**defaults)
|
Field that can take a set of string values
and store them in a charfield using a delimiter
This needs to be compatible with django-rest-framework's
multiple choice field.
|
625990093cc13d1c6d4662d3
|
class HelmCmd(object):
    """Install kubectl and helm clients locally via the helm playbook."""

    def parse(self, argv):
        """Parse command-line arguments with docopt and log them."""
        parsed = docopt(__doc__, argv=argv)
        logger.debug("opendc helm - args:\n{}".format(parsed))

    def run(self):
        """Run the ansible helm playbook from the current directory."""
        playbook = os.path.join(os.path.curdir, 'playbooks/helm.yaml')
        call_ansible(os.path.realpath(playbook))
|
Install kubectl and helm clients locally.
|
62599009627d3e7fe0e07a26
|
@tf_export(v1=['layers.Conv2D'])
class Conv2D(keras_layers.Conv2D, base.Layer):
    """2D convolution layer (e.g. spatial convolution over images).

    Thin ``tf.layers`` wrapper over ``keras_layers.Conv2D``: every
    constructor argument is forwarded unchanged to the Keras implementation.

    Arguments (all passed through):
      filters: dimensionality of the output space (number of filters).
      kernel_size: int or tuple/list of 2 ints, height/width of the window.
      strides: int or tuple/list of 2 ints; strides != 1 is incompatible
        with dilation_rate != 1.
      padding: 'valid' or 'same' (case-insensitive).
      data_format: 'channels_last' (batch, H, W, C) or 'channels_first'.
      dilation_rate: int or tuple/list of 2 ints for dilated convolution.
      activation: activation function, or None for linear activation.
      use_bias: whether to add a bias vector.
      kernel_initializer / bias_initializer: weight initializers.
      kernel_regularizer / bias_regularizer / activity_regularizer:
        optional regularizers.
      kernel_constraint / bias_constraint: optional projection functions
        applied after the optimizer update.
      trainable: if True, add variables to GraphKeys.TRAINABLE_VARIABLES.
      name: layer name.
    """

    def __init__(self, filters, kernel_size, strides=(1, 1), padding='valid', data_format='channels_last', dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer=None, bias_initializer=init_ops.zeros_initializer(), kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, **kwargs):
        super(Conv2D, self).__init__(filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activation, use_bias=use_bias, kernel_initializer=kernel_initializer, bias_initializer=bias_initializer, kernel_regularizer=kernel_regularizer, bias_regularizer=bias_regularizer, activity_regularizer=activity_regularizer, kernel_constraint=kernel_constraint, bias_constraint=bias_constraint, trainable=trainable, name=name, **kwargs)
|
2D convolution layer (e.g. spatial convolution over images).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, the default
initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
|
6259900921a7993f00c66b0a
|
class ABTransition(ActionBundle):
    """Action bundle that transitions a JIRA issue to a requested status.

    Reads the issue key and target status from the parser's options,
    authenticates against JIRA, and performs the transition when the target
    status is among the issue's currently allowed transitions; otherwise it
    logs the allowed transitions and exits without changing the issue.
    """

    def __init__(self, parser):
        # Fix: removed the original's ``try: ... except: raise`` wrapper — a
        # bare except that only re-raises is a semantic no-op and hides intent.
        self.parser = parser
        log.info(self.__class__.__name__ + " initialized")
        k = parser.options.key
        s = parser.options.status
        jira = jira_authenticate(parser.options.jira_url, parser.options.jira_username, parser.options.jira_password)
        issue = jira.issue(k)
        transitions = jira.transitions(issue)
        log.info("Issue Key : " + k)
        log.info("Current Status : " + str(issue.fields.status))
        log.info("Requested Status: " + str(s))
        if get_string_from_list(transitions, 'name', s):
            jira.transition_issue(issue, s)
            log.info("New Status : " + s)
        else:
            # The requested status is not reachable from the current one.
            log.debug("Ticket can only do the following transitions:")
            for t in transitions:
                log.debug(t['name'])
            log.warn("Exiting without transition")
|
Action bundle that transitions a JIRA issue to a requested workflow status.
|
62599009462c4b4f79dbc596
|
class ContaineranalysisProjectsOccurrencesCreateRequest(_messages.Message):
    """A ContaineranalysisProjectsOccurrencesCreateRequest object.

    Fields:
      name: The name of the project, of the form "projects/{project_id}".
        @deprecated
      occurrence: An Occurrence resource passed as the request body.
      parent: The projectId, e.g. "projects/{project_id}".
    """

    name = _messages.StringField(1)
    occurrence = _messages.MessageField('Occurrence', 2)
    parent = _messages.StringField(3, required=True)
|
A ContaineranalysisProjectsOccurrencesCreateRequest object.
Fields:
name: The name of the project. Should be of the form
"projects/{project_id}". @deprecated
occurrence: A Occurrence resource to be passed as the request body.
parent: This field contains the projectId for example:
"projects/{project_id}"
|
625990090a366e3fb87dd582
|
class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource.

    NOTE(review): defined inside ``BigtableadminV2`` — the qualified
    ``super()`` call below depends on that enclosing class.
    """

    _NAME = u'projects'

    def __init__(self, client):
        super(BigtableadminV2.ProjectsService, self).__init__(client)
        # No media-upload configurations for this resource.
        self._upload_configs = { }
|
Service class for the projects resource.
|
62599009462c4b4f79dbc598
|
class Yaml(ReportService):
    """YAML reporter implementation: prints and returns the report dict."""

    def run(self, selected):
        """Render the selected instances (plus options) as YAML to stdout."""
        opts = self.get_option(selected)
        instances = [inst.to_dict() for inst in selected]
        report = {"instances": instances, "option": opts}
        print(safe_dump(report, indent=2))
        return report
|
YAML reporter implementation.
|
62599009462c4b4f79dbc59c
|
class CTD_ANON (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    NOTE(review): PyXB-generated XML binding code — do not edit by hand;
    regenerate from the schema instead.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/tmp/tmpiTHi4xxsds/PacBioDatasets.xsd', 125, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Repeatable child element <Subset> declared by the schema.
    __Subset = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Subset'), 'Subset', '__httppacificbiosciences_comPacBioDatasets_xsd_CTD_ANON_httppacificbiosciences_comPacBioDatasets_xsdSubset', True, pyxb.utils.utility.Location('/tmp/tmpiTHi4xxsds/PacBioDatasets.xsd', 127, 4), )
    Subset = property(__Subset.value, __Subset.set, None, None)
    _ElementMap.update({ __Subset.name() : __Subset })
    _AttributeMap.update({ })
|
Complex type [anonymous] with content type ELEMENT_ONLY
|
6259900956b00c62f0fb3456
|
class UserAssignForm(FlaskForm):
    """Form for an admin to assign a career and a role to an employee."""

    # Dropdowns populated from the database; options are labelled by name.
    career = QuerySelectField(get_label="name", query_factory=lambda: Career.query.all())
    role = QuerySelectField(get_label="name", query_factory=lambda: Role.query.all())
    submit = SubmitField('Submit')
|
Form for admin to assign departments and roles to employees
|
62599009d164cc6175821b13
|
class Organization(ModelMixin, BaseModel):
    """An organization, under an awardee/HPO, and containing sites."""

    __tablename__ = 'organization'
    # Numeric organization identifier, unique across the table.
    organizationId = Column('organization_id', Integer, unique=True)
    # Identifier from the external source system.
    externalId = Column('external_id', String(80), nullable=False)
    # Human-readable organization name.
    displayName = Column('display_name', String(255), nullable=False)
    # Owning awardee/HPO.
    hpoId = Column('hpo_id', Integer, ForeignKey('hpo.hpo_id'), nullable=False)
    # Child sites, removed with the organization, ordered by google group.
    sites = relationship('Site', cascade='all, delete-orphan', order_by='Site.googleGroup')
    isObsolete = Column('is_obsolete', ModelEnum(ObsoleteStatus))
|
An organization, under an awardee/HPO, and containing sites.
|
62599009507cdc57c63a5938
|
class AttrDict(dict):
    """Dictionary whose keys are also reachable as attributes.

    Pointing ``__dict__`` at the mapping itself makes attribute reads,
    writes and deletes operate directly on the dict entries.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Alias the attribute namespace to the mapping itself.
        self.__dict__ = self
|
A dict subclass that allows keys to be accessed as attributes
|
6259900956b00c62f0fb345a
|
class ConnectToSourceSqlServerTaskOutputAgentJobLevel(ConnectToSourceSqlServerTaskOutput):
    """AgentJob-level output for the task that validates connection to SQL
    Server and source server requirements.

    All fields except ``result_type`` are populated by the server and are
    ignored when sending a request (see ``_validation`` readonly flags).

    :ivar name: AgentJob name.
    :ivar job_category: The type of AgentJob.
    :ivar is_enabled: The state of the original AgentJob.
    :ivar job_owner: The owner of the AgentJob.
    :ivar last_executed_on: UTC date/time the AgentJob last executed.
    :ivar migration_eligibility: Eligibility info for migrating this job.
    :param result_type: Required. Constant filled by server.
    """

    _validation = { 'id': {'readonly': True}, 'result_type': {'required': True}, 'name': {'readonly': True}, 'job_category': {'readonly': True}, 'is_enabled': {'readonly': True}, 'job_owner': {'readonly': True}, 'last_executed_on': {'readonly': True}, 'migration_eligibility': {'readonly': True}, }
    _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'result_type': {'key': 'resultType', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'job_category': {'key': 'jobCategory', 'type': 'str'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, 'job_owner': {'key': 'jobOwner', 'type': 'str'}, 'last_executed_on': {'key': 'lastExecutedOn', 'type': 'iso-8601'}, 'migration_eligibility': {'key': 'migrationEligibility', 'type': 'MigrationEligibilityInfo'}, }

    def __init__(self, **kwargs) -> None:
        super(ConnectToSourceSqlServerTaskOutputAgentJobLevel, self).__init__(**kwargs)
        # Server-populated fields default to None until a response fills them.
        self.name = None
        self.job_category = None
        self.is_enabled = None
        self.job_owner = None
        self.last_executed_on = None
        self.migration_eligibility = None
        # Polymorphic discriminator selecting this output subtype.
        self.result_type = 'AgentJobLevelOutput'
|
AgentJob level output for the task that validates connection to SQL Server
and also validates source server requirements.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Result identifier
:vartype id: str
:param result_type: Required. Constant filled by server.
:type result_type: str
:ivar name: AgentJob name
:vartype name: str
:ivar job_category: The type of AgentJob.
:vartype job_category: str
:ivar is_enabled: The state of the original AgentJob.
:vartype is_enabled: bool
:ivar job_owner: The owner of the AgentJob
:vartype job_owner: str
:ivar last_executed_on: UTC Date and time when the AgentJob was last
executed.
:vartype last_executed_on: datetime
:ivar migration_eligibility: Information about eligibility of agent job for
migration.
:vartype migration_eligibility:
~azure.mgmt.datamigration.models.MigrationEligibilityInfo
|
6259900921a7993f00c66b19
|
class GUI_Application_Feature(ttk.Frame):
    """Main GUI frame for the featured functionality.

    Builds two entries (the second masked like a password field), a styled
    label inside a framed area, and a button that echoes both entry values
    into the label.
    """

    def __init__(self, parent, argument):
        # Attach to the parent widget and fill the available space.
        ttk.Frame.__init__(self, parent)
        self.pack(expand=Y, fill=BOTH)
        self.master.title(TITLE_1)
        self.create_feature_widgets(argument)

    def create_feature_widgets(self, argument):
        """Create, style and lay out all widgets of this frame."""
        self.entry_1_text = StringVar()
        self.entry_2_text = StringVar()
        # Named ttk styles used by the widgets below.
        self.style = ttk.Style()
        self.style.configure('.', font=('FreeSans', 12))
        self.style.configure('blue.TLabel', foreground='white', background='#0000ff', font=('FreeSans', 16), padding=10)
        self.style.configure('cyan.TFrame', borderwidth=5, relief="ridge", background='#00ffff')
        self.style.configure('grey.TEntry', foreground='grey', padding=5)
        self.style.configure('black.TEntry', foreground='black', padding=5)
        self.frame_1 = ttk.Frame(self, style='cyan.TFrame', padding="5 5 5 5")
        self.entry_1 = ttk.Entry(self, textvariable=self.entry_1_text, style='grey.TEntry' )
        # show="*" masks the second entry's characters.
        self.entry_2 = ttk.Entry(self, textvariable=self.entry_2_text, show="*" )
        self.label_1 = ttk.Label(self.frame_1, text="", style='blue.TLabel')
        self.label_1.grid(row=0, column=0, sticky="WE")
        self.button_1 = ttk.Button(self, text=BUTTON_1_TEXT, command=self.button_1_cb)
        # Grid layout: framed label on top, entries below, button last.
        self.entry_1.grid(row=1, column=0, padx=10, pady=5, sticky="w")
        self.entry_2.grid(row=2, column=0, padx=10, pady=5, sticky="w")
        self.frame_1.grid(row=0, column=0, columnspan=3, padx=5, pady=5)
        self.button_1.grid(row=4, column=0, padx=5, pady=5, sticky="w")
        if argument:
            # Pre-fill the first entry when an argument is supplied.
            self.entry_1_text.set("Account")
            pass

    def entry_1_cb(self):
        # Placeholder callback; not wired to any widget yet.
        pass

    def button_1_cb(self):
        """Echo both entry values into the label and restyle the first entry."""
        self.label_1.config(text="{},{}".format( self.entry_1_text.get(), self.entry_2_text.get() ))
        self.entry_1.configure(style='black.TEntry')
|
Main GUI for the featured functionality
|
62599009462c4b4f79dbc5a4
|
class EpsilonGreedyPolicy(BasePolicy):
    """Pick an exploratory action with probability eps, else the best action.

    To decay epsilon during training (usually a good idea), call
    :meth:`set_eps_annealing` before training starts.

    Properties:
        eps  : probability of selecting an exploratory action.
        rand : RNG used when multiple actions are candidates.
    """

    def __init__(self, eps=0.05, rand=None):
        self.eps = eps
        self.rand = rand if rand else random
        self.do_annealing = False

    def choose_action(self, task, value_function, state):
        """Sample an action: greedy with probability 1-eps, uniform otherwise."""
        candidates = task.generate_possible_actions(state)
        greedy = choose_best_action(task, value_function, state, self.rand)
        weights = self.__calc_select_probability(greedy, candidates)
        chosen = self.__roulette(weights)
        return candidates[chosen]

    def set_eps_annealing(self, initial_eps, final_eps, anneal_duration):
        """Enable linear decay of eps from initial_eps down to final_eps."""
        self.do_annealing = True
        self.eps = initial_eps
        self.min_eps = final_eps
        self.anneal_step = (initial_eps - final_eps) / anneal_duration

    def anneal_eps(self):
        """Apply one decay step, never dropping eps below min_eps."""
        self.eps = max(self.min_eps, self.eps - self.anneal_step)

    def __calc_select_probability(self, best_action, actions):
        # Every action gets eps/N; the greedy action additionally gets 1-eps.
        share = self.eps / len(actions)
        greedy_bonus = 1 - self.eps
        return [share + greedy_bonus if action == best_action else share
                for action in actions]

    def __roulette(self, probs):
        # Inverse-CDF sampling; -1 (i.e. the last action) is only reached
        # when floating-point round-off leaves the dart past the total.
        dart = self.rand.random()
        cumulative = 0
        for idx, prob in enumerate(probs):
            cumulative += prob
            if dart < cumulative:
                return idx
        return -1
|
Choose explore action in probability epsilon else choose best action
If you want to decay epsilon (most of the time it's a good idea) during training,
call "set_eps_annealing" before start training.
Properties:
eps : the probability to select explore action
rand : used when multiple actions are candidate of choice
|
6259900956b00c62f0fb345e
|
class BookRepo():
    """In-memory repository of book objects, keyed by their ``Id`` attribute.

    Provides insert, lookup (by id and by name), listing and delete operations.
    """

    def __init__(self):
        # Maps book id -> book object.
        self.books = {}

    def add_book(self, book):
        """Insert or replace *book*, indexed by ``book.Id``."""
        self.books[book.Id] = book

    def get_book(self, book_id):
        """Return the book stored under *book_id* (raises KeyError if absent)."""
        return self.books[book_id]

    def get_book_name(self, bookname):
        """Return the id of the first book whose ``Name`` equals *bookname*,
        or None when no book matches."""
        # BUG FIX: the original iterated over the nonexistent ``self.users``.
        for key, book in self.books.items():
            if book.Name == bookname:
                return key
        return None

    def get_book_names(self):
        """Return the names of all stored books."""
        # BUG FIX: the original iterated over the dict itself (its keys, the
        # ids) and would raise AttributeError; iterate the stored books.
        return [book.Name for book in self.books.values()]

    def delete_book(self, id):
        """Remove the book stored under *id* (raises KeyError if absent)."""
        # NOTE: parameter name ``id`` shadows the builtin but is kept for
        # backward compatibility with keyword callers.
        del self.books[id]

    def list_books(self):
        """Return a Flask JSON response containing all serialized books."""
        return jsonify(books=[b.serialize() for b in self.books.values()])
|
persists the BookModel instances into an in-memory dictionary
with methods to retrieve, insert, update and delete books
|
62599009627d3e7fe0e07a3c
|
class TicketViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Ticket objects.

    retrieve: fetch a ticket by ID.
    list: list all tickets.
    create: create a new ticket.
    update: update a ticket's fields.
    delete: delete a ticket.
    """

    # All tickets are exposed; DRF handles filtering/pagination upstream.
    queryset = Ticket.objects.all()
    serializer_class = TicketSerializer
|
retrieve:
Boleto por ID.
list:
Lista de boletos.
create:
Crea un nuevo boleto.
update:
Actualiza los valores de un boleto
delete:
Borra un boleto
|
62599009bf627c535bcb2051
|
class Mixin(object):
    """Class decorator that copies the attributes of the given mixin classes
    onto the decorated class without direct inheritance (new-style classes
    only).

    Resolution order mimics multiple inheritance: left-most mixins take
    precedence over right-most ones, and the decorated class's own
    attributes always win.  Attributes are copied, so later changes to a
    mixin are not reflected in the produced class.
    """

    def __init__(self, *mixins):
        self.mixins = mixins

    def __call__(self, cls):
        namespace = {}
        metaclass = type(cls)
        class_name = cls.__name__
        bases = cls.__mro__
        # Walk right-to-left so that left-most mixins overwrite right-most.
        for mixin in reversed(self.mixins):
            for ancestor in reversed(mixin.__mro__):
                namespace.update(ancestor.__dict__)
            namespace.update(mixin.__dict__)
        # The decorated class's own attributes override everything copied.
        namespace.update(cls.__dict__)
        return type.__new__(metaclass, class_name, bases, namespace)

    __all__ = ['Mixin']
|
A class decorator which allows you
to mix in the properties of other
classes without inheriting directly.
**ONLY WORKS ON NEW STYLE CLASSES***
Method resolution order is just like
multiple inheritence. Left takes
precedence over right.
Mixins are added to the class directly,
so they take precedence over any regularly
inherited superclasses.
This requires copying of attributes,
so any changes to the precedent classes
will not be reflected in the child class.
|
62599009925a0f43d25e8be2
|
class CapabilityNetworkLimits(ManagedObject):
    """This is CapabilityNetworkLimits class."""

    consts = CapabilityNetworkLimitsConsts()
    naming_props = set([])

    # Managed-object metadata: class id, xml tag, rn, version, access, mask,
    # supported privileges, parents and children.
    mo_meta = MoMeta("CapabilityNetworkLimits", "capabilityNetworkLimits", "network-limits", VersionMeta.Version211a, "InputOutput", 0x1f, [], ["read-only"], [u'topSysDefaults'], [u'capabilityFeatureLimits'], [None])

    # Per-property metadata (name, xml name, type, version, access, mask,
    # min/max length, restriction pattern, value sets).
    prop_meta = {
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
    }

    # XML attribute name -> python property name.
    prop_map = {
        "childAction": "child_action",
        "dn": "dn",
        "rn": "rn",
        "sacl": "sacl",
        "status": "status",
    }

    def __init__(self, parent_mo_or_dn, **kwargs):
        self._dirty_mask = 0
        self.child_action = None
        self.sacl = None
        self.status = None
        ManagedObject.__init__(self, "CapabilityNetworkLimits", parent_mo_or_dn, **kwargs)
|
This is CapabilityNetworkLimits class.
|
6259900956b00c62f0fb3464
|
class DataFrameWrapper:
    """Runs a SQL query through a pandas DataFrame and writes the result to a
    report file (HTML table, PNG plot, or plain text).
    """

    def __init__(self, connection_string: str):
        self.engine = create_engine(connection_string)
        self.file_name_generator = FileNameGenerator()

    @classmethod
    def snowflake(
        cls, username: str, password: str, snowflake_account: str, database: str, warehouse: str
    ):
        """Build a wrapper connected to a Snowflake warehouse."""
        connection_string = f"snowflake://{username}:{password}@{snowflake_account}/{database}?warehouse={warehouse}"
        # BUG FIX: use cls(...) so subclasses construct instances of themselves.
        return cls(connection_string)

    @classmethod
    def postgresql(
        cls, username: str, password: str, hostname: str, database: str, port: int = 5432
    ):
        """Build a wrapper connected to a PostgreSQL database."""
        connection_string = f"postgresql://{username}:{password}@{hostname}:{port}/{database}"
        return cls(connection_string)

    @classmethod
    def mssql(cls, username: str, password: str, hostname: str, database: str, port: int = 1433):
        """Build a wrapper connected to a MS SQL Server database."""
        connection_string = f"mssql+pymssql://{username}:{password}@{hostname}:{port}/{database}"
        return cls(connection_string)

    def create_report(self, query: str, query_type: str, filename=None, additional_parms=None):
        """Run *query* and write the result in the format given by *query_type*.

        :param query: SQL text executed against the wrapped engine.
        :param query_type: one of 'html', 'plot' or 'text'.
        :param filename: target file name; generated when None.
        :param additional_parms: optional dict of kwargs forwarded to the
            pandas writer (``to_html`` / ``plot`` / ``to_string``).
        :returns: tuple of (total row count, written file name).
        :raises TypeError: if *query_type* is not a supported format.
        """
        # Normalize once instead of branching in every handler.
        extra = additional_parms or {}

        def handle_html(file_name):
            file_name = file_name or self.file_name_generator.getFileName('html')
            df.to_html(file_name, **extra)
            return file_name

        def handle_plot(file_name):
            file_name = file_name or self.file_name_generator.getFileName('png')
            fig = df.plot(**extra).get_figure()
            fig.savefig(file_name)
            return file_name

        def handle_text(file_name):
            file_name = file_name or self.file_name_generator.getFileName('txt')
            df.to_string(file_name, **extra)
            return file_name

        handlers = {'html': handle_html, 'plot': handle_plot, 'text': handle_text}
        if query_type not in handlers:
            raise TypeError(f"query_type must be one of {[*handlers]} but was: {query_type}")
        df = pd.read_sql(query, self.engine)
        total_rows = len(df.index)
        return (total_rows, handlers[query_type](filename))
|
Class that facilitates running a query in a database via a Pandas Dataframe and then writing the data to files.
A number of formats are supported including png, html and text files.
|
625990093cc13d1c6d4662ed
|
class ProductEntityBaseParameters(msrest.serialization.Model):
    """Product Entity Base Parameters.

    Shared base attributes of a Product: description, terms of use,
    subscription/approval requirements, per-user subscription limit and
    publish state.
    """

    _validation = {
        'description': {'max_length': 1000, 'min_length': 0},
    }

    # Python attribute -> wire key/type mapping used by msrest serialization.
    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'terms': {'key': 'terms', 'type': 'str'},
        'subscription_required': {'key': 'subscriptionRequired', 'type': 'bool'},
        'approval_required': {'key': 'approvalRequired', 'type': 'bool'},
        'subscriptions_limit': {'key': 'subscriptionsLimit', 'type': 'int'},
        'state': {'key': 'state', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        description: Optional[str] = None,
        terms: Optional[str] = None,
        subscription_required: Optional[bool] = None,
        approval_required: Optional[bool] = None,
        subscriptions_limit: Optional[int] = None,
        state: Optional[Union[str, "ProductState"]] = None,
        **kwargs
    ):
        super(ProductEntityBaseParameters, self).__init__(**kwargs)
        self.description = description
        self.terms = terms
        self.subscription_required = subscription_required
        self.approval_required = approval_required
        self.subscriptions_limit = subscriptions_limit
        self.state = state
|
Product Entity Base Parameters.
:ivar description: Product description. May include HTML formatting tags.
:vartype description: str
:ivar terms: Product terms of use. Developers trying to subscribe to the product will be
presented and required to accept these terms before they can complete the subscription process.
:vartype terms: str
:ivar subscription_required: Whether a product subscription is required for accessing APIs
included in this product. If true, the product is referred to as "protected" and a valid
subscription key is required for a request to an API included in the product to succeed. If
false, the product is referred to as "open" and requests to an API included in the product can
be made without a subscription key. If property is omitted when creating a new product it's
value is assumed to be true.
:vartype subscription_required: bool
:ivar approval_required: whether subscription approval is required. If false, new subscriptions
will be approved automatically enabling developers to call the product’s APIs immediately after
subscribing. If true, administrators must manually approve the subscription before the
developer can use any of the product’s APIs. Can be present only if subscriptionRequired property
is present and has a value of false.
:vartype approval_required: bool
:ivar subscriptions_limit: The maximum number of subscriptions a user can have to this product
at the same time. Set to null or omit to allow unlimited per user subscriptions. Can be present
only if subscriptionRequired property is present and has a value of false.
:vartype subscriptions_limit: int
:ivar state: whether product is published or not. Published products are discoverable by users
of developer portal. Non published products are visible only to administrators. Default state
of Product is notPublished. Possible values include: "notPublished", "published".
:vartype state: str or ~api_management_client.models.ProductState
|
62599009bf627c535bcb2053
|
class IpamsvcUpdateIpamHostResponse(object):
    """Response model carrying the result of an IPAM host update call.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # Attribute name -> swagger type.
    swagger_types = {
        'result': 'IpamsvcIpamHost'
    }

    # Attribute name -> JSON key.
    attribute_map = {
        'result': 'result'
    }

    def __init__(self, result=None):
        self._result = None
        self.discriminator = None
        if result is not None:
            self.result = result

    @property
    def result(self):
        """The IpamsvcIpamHost payload carried by this response."""
        return self._result

    @result.setter
    def result(self, result):
        self._result = result

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        def convert(value):
            # Recursively serialize nested models, lists and dicts.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        result = {attr: convert(getattr(self, attr))
                  for attr in self.swagger_types}
        if issubclass(IpamsvcUpdateIpamHostResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, IpamsvcUpdateIpamHostResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259900915fb5d323ce7f8e7
|
class NetworkCapacity(base.Scenario):
    """Measure network capacity and scale.

    This scenario reads network status from the target host, including the
    number of connections and the number of frames sent/received.
    """

    __scenario_type__ = "NetworkCapacity"

    TARGET_SCRIPT = "networkcapacity.bash"

    def __init__(self, scenario_cfg, context_cfg):
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        self.setup_done = False

    def setup(self):
        """Locate the benchmark script, open an SSH connection to the target
        host and upload the script.

        :raises RuntimeError: if no host is present in the context config.
        """
        self.target_script = pkg_resources.resource_filename(
            "yardstick.benchmark.scenarios.networking",
            NetworkCapacity.TARGET_SCRIPT)
        host = self.context_cfg['host']
        if host is None:
            raise RuntimeError('No right node.please check the configuration')
        host_user = host.get('user', 'ubuntu')
        host_ip = host.get('ip', None)
        host_pwd = host.get('password', None)
        LOG.debug("user:%s, host:%s", host_user, host_ip)
        self.client = ssh.SSH(host_user, host_ip, password=host_pwd)
        self.client.wait(timeout=600)
        # BUG FIX: the original leaked the file handle opened for stdin;
        # close it deterministically once the upload finishes.
        with open(self.target_script, 'rb') as script_file:
            self.client.run("cat > ~/networkcapacity.sh",
                            stdin=script_file)
        self.setup_done = True

    def run(self, result):
        """Execute the benchmark remotely and merge its JSON output into
        *result*.

        :raises RuntimeError: if the remote command exits non-zero.
        """
        if not self.setup_done:
            self.setup()
        cmd = "sudo bash networkcapacity.sh"
        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)
        if status:
            raise RuntimeError(stderr)
        result.update(json.loads(stdout))
|
Measure Network capacity and scale.
This scenario reads network status including number of connections,
number of frames sent/received.
|
62599009d164cc6175821b21
|
class Serializer(object):
    """Abstract serializer base class.

    Subclasses implement the start/end/handle_* hooks; :meth:`serialize`
    drives them over a queryset and returns the accumulated stream contents.
    """

    # Whether this serializer is intended only for internal framework use.
    internal_use_only = False

    def serialize(self, queryset, **options):
        """Serialize *queryset*, honouring the optional ``stream`` and
        ``fields`` options."""
        self.options = options
        self.stream = options.get("stream", StringIO())
        self.selected_fields = options.get("fields")
        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.fields:
                if not field.serialize:
                    continue
                if field.rel is None:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_field(obj, field)
                elif self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                    # Foreign keys: attname ends in "_id"; strip it to match
                    # against the user-selected field names.
                    self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()

    def get_string_value(self, obj, field):
        """Convert *field*'s value on *obj* to a unicode string."""
        if isinstance(field, models.DateTimeField):
            value = getattr(obj, field.name).strftime("%Y-%m-%d %H:%M:%S")
        else:
            value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
        return smart_unicode(value)

    def start_serialization(self):
        """Hook called before the first object is serialized."""
        raise NotImplementedError

    def end_serialization(self):
        """Hook called after the last object is serialized."""
        pass

    def start_object(self, obj):
        """Hook called before each object's fields are serialized."""
        raise NotImplementedError

    def end_object(self, obj):
        """Hook called after each object's fields are serialized."""
        pass

    def handle_field(self, obj, field):
        """Hook for serializing a plain (non-relational) field."""
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """Hook for serializing a ForeignKey field."""
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """Hook for serializing a ManyToMany field."""
        raise NotImplementedError

    def getvalue(self):
        """Return the accumulated stream contents, or None when the stream
        has no ``getvalue`` method."""
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
|
Abstract serializer base class.
|
62599009462c4b4f79dbc5ae
|
class AnchorHealthV4(CDPDataItem):
    """CDP Data Item: Ciholas Data Protocol Anchor Health data item definition."""

    # CDP type code for this data item.
    type = 0x0125

    # Wire layout of the item, in field order.
    definition = [
        DISerialNumberAttr('serial_number'),
        DIUInt8Attr('interface_id'),
        DIUInt32Attr('ticks_reported'),
        DIUInt32Attr('timed_rxs_reported'),
        DIUInt32Attr('beacons_reported'),
        DIUInt32Attr('beacons_discarded'),
        DIUInt16Attr('average_quality'),
        DIUInt8Attr('report_period'),
        DIUInt8Attr('interanchor_comms_error_code'),
        DIListAttr('bad_paired_anchors', FullDeviceID),
    ]

    def add_bad_paired_anchors(self, serial_number=0, interface_identifier=0):
        """Append a FullDeviceID entry to the bad_paired_anchors list."""
        entry = FullDeviceID(serial_number, interface_identifier)
        self.bad_paired_anchors.append(entry)
|
CDP Data Item: Ciholas Data Protocol Anchor Health Data Item Definition
|
6259900a3cc13d1c6d4662f3
|
class WordNode(ABC, Serializable):
    """Base class of a parsed word (one morphological-analysis token)."""

    def __init__(self, surface, part, part_detail1, part_detail2,
                 part_detail3, stem_type, stem_form, word, kana,
                 pronunciation):
        self.surface = surface              # surface form of the word
        self.part = part                    # part of speech
        self.part_detail1 = part_detail1    # part-of-speech detail 1
        self.part_detail2 = part_detail2    # part-of-speech detail 2
        self.part_detail3 = part_detail3    # part-of-speech detail 3
        self.stem_type = stem_type          # stem type
        self.stem_form = stem_form          # stem form
        self.word = word                    # the word itself
        self.kana = kana                    # Japanese kana of the word
        self.pronunciation = pronunciation  # pronunciation of the word

    @classmethod
    @abstractmethod
    def create(cls, surface, features):
        """Build a concrete WordNode from a surface string and analyzer
        features."""
        pass
|
Base class of parsed word
Attributes
----------
surface : str
Surface of word
part : str
Part of the word
part_detail1 : str
Detail1 of part
part_detail2 : str
Detail2 of part
part_detail3 : str
Detail3 of part
stem_type : str
Stem type
stem_form : str
Stem form
word : str
Word itself
kana : str
Japanese kana of the word
pronunciation : str
Pronunciation of the word
|
6259900abf627c535bcb2059
|
class memorize(dict):
    """Cache decorator: remembers each result of the wrapped function,
    keyed by its positional arguments.

    Note: only positional, hashable arguments are supported.
    """

    def __init__(self, function):
        self.function = function

    def __call__(self, *args):
        # Plain dict lookup; __missing__ fills the cache on a miss.
        return self[args]

    def __missing__(self, key):
        value = self.function(*key)
        self[key] = value
        return value
|
cache decorator
|
6259900a627d3e7fe0e07a46
|
class Person(DomainResource):
    """A generic person record.

    Demographics and administrative information about a person independent
    of a specific health-related context.
    """

    resource_type = Field("Person", const=True)

    active: fhirtypes.Boolean = Field(
        None,
        alias="active",
        title="Type `Boolean`",
        description="This person's record is in active use.",
    )

    purpose: fhirtypes.CodeableConceptType = Field(
        None,
        alias="purpose",
        title="Type `CodeableConcept` (represented as `dict` in JSON).",
        description="The type of contact.",
    )

    birthDate: fhirtypes.Date = Field(
        None,
        alias="birthDate",
        title="Type `Date`.",
        description="The date on which the person was born.",
    )

    gender: fhirtypes.Code = Field(
        None,
        alias="gender",
        title="Type `Code`.",
        description="male | female | other | unknown.",
    )

    address: ListType[fhirtypes.AddressType] = Field(
        None,
        alias="address",
        title="List of `Address` items (represented as `dict` in JSON).",
        description="One or more addresses for the person.",
    )

    identifier: ListType[fhirtypes.IdentifierType] = Field(
        None,
        alias="identifier",
        title="List of `Identifier` items (represented as `dict` in JSON).",
        description="A human identifier for this person.",
    )

    link: ListType[fhirtypes.PersonLinkType] = Field(
        None,
        alias="link",
        title="List of `PersonLink` items (represented as `dict` in JSON).",
        description="Link to a resource that concerns the same actual person.",
    )

    managingOrganization: fhirtypes.ReferenceType = Field(
        None,
        alias="managingOrganization",
        title="Type `Reference` referencing `Organization` (represented as `dict` in JSON).",
        description="The organization that is the custodian of the person record.",
    )

    name: ListType[fhirtypes.HumanNameType] = Field(
        None,
        alias="name",
        title="List of `HumanName` items (represented as `dict` in JSON).",
        description="A name associated with the person.",
    )

    telecom: ListType[fhirtypes.ContactPointType] = Field(
        None,
        alias="telecom",
        title="List of `ContactPoint` items (represented as `dict` in JSON).",
        description="A contact detail for the person.",
    )

    photo: fhirtypes.AttachmentType = Field(
        None,
        alias="photo",
        title="Type `Attachment` (represented as `dict` in JSON).",
        description="Image of the person.",
    )
|
A generic person record.
Demographics and administrative information about a person independent of a
specific health-related context.
|
6259900a462c4b4f79dbc5b2
|
class TestEzsignfolderGetListV1ResponseMPayload(unittest.TestCase):
    """EzsignfolderGetListV1ResponseMPayload unit test stubs."""

    def setUp(self):
        """No fixtures required yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testEzsignfolderGetListV1ResponseMPayload(self):
        """Placeholder for the EzsignfolderGetListV1ResponseMPayload test."""
        # TODO: construct the model object and assert on its fields.
        pass
|
EzsignfolderGetListV1ResponseMPayload unit test stubs
|
6259900a3cc13d1c6d4662f5
|
class PyadrTooManyProposedAdrError(PyadrError):
    """Raised when too many proposed ADRs are found."""
|
Too many proposed ADR found
|
6259900a15fb5d323ce7f8ef
|
class BinomialDeviance(ClassificationLossFunction):
    """Binomial deviance loss function for binary classification.

    Binary classification is a special case; here, we only need to fit one
    tree instead of ``n_classes`` trees.
    """

    def __init__(self, n_classes):
        if n_classes != 2:
            raise ValueError("{0:s} requires 2 classes.".format(
                self.__class__.__name__))
        # Only one output dimension is needed for the binary case.
        super(BinomialDeviance, self).__init__(1)

    def init_estimator(self):
        return LogOddsEstimator()

    def __call__(self, y, pred, sample_weight=None):
        """Compute the (optionally weighted) mean deviance
        -2 * (y * f - log(1 + exp(f)))."""
        pred = pred.ravel()
        if sample_weight is None:
            return -2.0 * np.mean((y * pred) - np.logaddexp(0.0, pred))
        return (-2.0 / sample_weight.sum()
                * np.sum(sample_weight
                         * ((y * pred) - np.logaddexp(0.0, pred))))

    def negative_gradient(self, y, pred, **kargs):
        """Residual: y minus the predicted probability sigmoid(pred)."""
        return y - expit(pred.ravel())

    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        """Refine the value of one leaf from the weighted residuals of the
        samples it contains."""
        in_leaf = np.where(terminal_regions == leaf)[0]
        residual = residual.take(in_leaf, axis=0)
        y = y.take(in_leaf, axis=0)
        sample_weight = sample_weight.take(in_leaf, axis=0)
        numerator = np.sum(sample_weight * residual)
        denominator = np.sum(sample_weight * (y - residual) * (1 - y + residual))
        # Guard against a zero denominator (e.g. an empty or pure leaf).
        if denominator == 0.0:
            tree.value[leaf, 0, 0] = 0.0
        else:
            tree.value[leaf, 0, 0] = numerator / denominator

    def _score_to_proba(self, score):
        # Column 1 is sigmoid(score); column 0 is its complement.
        proba = np.ones((score.shape[0], 2), dtype=np.float64)
        proba[:, 1] = expit(score.ravel())
        proba[:, 0] -= proba[:, 1]
        return proba

    def _score_to_decision(self, score):
        proba = self._score_to_proba(score)
        return np.argmax(proba, axis=1)
|
Binomial deviance loss function for binary classification.
Binary classification is a special case; here, we only need to
fit one tree instead of ``n_classes`` trees.
|
6259900ad164cc6175821b2b
|
class PLSpider(CrawlSpider):
    """Crawler for www.parlamentnilisty.cz that downloads the articles
    selected by the rules.

    Server-local data (the date and article formats) is normalized here.
    """

    name = 'PL'
    allowed_domains = ['parlamentnilisty.cz']
    start_urls = ['https://www.parlamentnilisty.cz/zpravy']

    rules = (
        # Follow article links found inside listing pages.
        Rule(
            LinkExtractor(
                allow=('/.*/.*/.*',),
                restrict_css=('.articles-list',)
            ),
            callback='parse_item',
        ),
        # Follow pagination links to reach further listing pages.
        Rule(
            LinkExtractor(
                allow=('zpravy\?p=',),
                restrict_css=('.pagination',)
            )
        ),
    )

    def transform_date(self, date):
        """Parse the site's 'DD. MM. YYYY HH:MM' timestamp into a datetime."""
        return datetime.strptime(date.strip(), '%d. %m. %Y %H:%M')

    def transform_article(self, article):
        """Join the extracted article fragments into a single string."""
        return ' '.join(article)

    def transform_keywords(self, keywords):
        """Join the extracted keywords with commas."""
        return ', '.join(keywords)

    def parse_item(self, response):
        """Extract one article from *response* into a NewsItem."""
        item = NewsItem()
        print(response.url)
        item['title'] = response.css('section.article-header h1::text').extract()[0]
        raw_date = response.css('div.time::text').extract()[0]
        item['date'] = self.transform_date(raw_date)
        paragraphs = response.css('section.article-content p::text').extract()
        item['article'] = self.transform_article(paragraphs)
        tags = response.css('section.article-tags a::text').extract()
        item['keywords'] = self.transform_keywords(tags)
        item['server'] = 'parlamentnilisty.cz'
        return item
|
PLSpider is the crawler that crawls through the www.parlamentnilisty.cz website
and downloads the articles defined by the rules.
The server local data is cleaned (converted) here.
The local data is the date and article format.
|
6259900a507cdc57c63a5950
|
class BoundedExecutor:
    """A ThreadPoolExecutor wrapper whose submit() blocks once ``bound``
    work items are queued for execution.

    :param bound: maximum number of items waiting in the work queue
    :param max_workers: size of the underlying thread pool
    """

    def __init__(self, bound, max_workers):
        self._delegate = ThreadPoolExecutor(max_workers=max_workers)
        # Permit ``bound`` queued items on top of the ``max_workers``
        # currently-running ones.
        self._semaphore = BoundedSemaphore(bound + max_workers)

    def submit(self, fn, *args, **kwargs):
        """Schedule ``fn(*args, **kwargs)``, blocking while the queue is full.

        :returns: the Future for the submitted callable.
        """
        self._semaphore.acquire()
        try:
            future = self._delegate.submit(fn, *args, **kwargs)
        except BaseException:
            # Idiom fix: avoid a bare ``except:``; re-release the permit on
            # any failure, then propagate the original exception.
            self._semaphore.release()
            raise
        else:
            # Release the permit once the work item completes (including
            # cancellation), opening a queue slot for the next submit().
            future.add_done_callback(lambda _: self._semaphore.release())
            return future

    def shutdown(self, wait=True):
        """Shut down the underlying executor."""
        self._delegate.shutdown(wait)
|
BoundedExecutor behaves as a ThreadPoolExecutor which will block on
calls to submit() once the number of queued work items reaches the
limit given as "bound".
:param bound: Integer - the maximum number of items in the work queue
:param max_workers: Integer - the size of the thread pool
|
6259900a462c4b4f79dbc5b8
|
class DummyFakerWithoutReplacers(ModelFaker):
    """A dummy faker used to test behavior when no replacer is provided."""

    FAKER_FOR = models.FakerTestA
|
A dummy faker to test behavior when no replacer is provided
|
6259900a15fb5d323ce7f8f3
|
class IntentConfidenceCheckFailedEvent(Event):
    """Event signalling that an intent confidence check failed."""
|
event
|
6259900a627d3e7fe0e07a4c
|
class EventProcessor(Observable):
    """Processes the events sent by the HammerCloud code, broadcasting each
    event to the subscribed publishers.
    """

    def __init__(self):
        super(EventProcessor, self).__init__()
        self.subscribers = []

    def subscribe_publisher(self, publisher):
        """Register *publisher* as an observer of this processor."""
        self.subscribe(publisher)

    def process_event(self, **event):
        """Broadcast *event* to all observers and return the current status."""
        self.broadcast(**event)
        return self.status()
|
Class that processes the events sent by all the HammerCloud code. Stores
logs and publishes them to the appropriate services.
|
6259900abf627c535bcb2061
|
class Meta(object):
    """Meta information for the database: maps the model to the
    ``ingest_event`` table."""

    db_table = 'ingest_event'
|
meta information for the db
|
6259900a0a366e3fb87dd5a6
|
class TestDeviceModelData(unittest.TestCase):
    """Test that the device model data files are valid."""

    def setUp(self):
        # Execute the data module (Python 2 execfile) and pull
        # DEVICE_MODEL_DATA out of its local namespace.
        module_globals = {}
        module_locals = {}
        execfile("data/model_data.py", module_globals, module_locals)
        self.data = module_locals['DEVICE_MODEL_DATA']

    def test_DeviceModelData(self):
        # The data file must define DEVICE_MODEL_DATA as a plain dict.
        self.assertEqual(type(self.data), dict)
|
Test the device model data files are valid.
|
6259900a56b00c62f0fb3474
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.