code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class CouchTransaction(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.depth = 0 <NEW_LINE> self.docs_to_delete = defaultdict(list) <NEW_LINE> self.docs_to_save = defaultdict(dict) <NEW_LINE> <DEDENT> def delete(self, doc): <NEW_LINE> <INDENT> self.docs_to_delete[doc.__class__].append(doc) <NEW_LINE> <DEDENT> def delete_all(self, docs): <NEW_LINE> <INDENT> for doc in docs: <NEW_LINE> <INDENT> self.delete(doc) <NEW_LINE> <DEDENT> <DEDENT> def save(self, doc): <NEW_LINE> <INDENT> cls = doc.__class__ <NEW_LINE> if not doc.get_id: <NEW_LINE> <INDENT> doc._id = uuid.uuid4().hex <NEW_LINE> <DEDENT> self.docs_to_save[cls][doc.get_id] = doc <NEW_LINE> <DEDENT> def preview_save(self, cls=None): <NEW_LINE> <INDENT> if cls: <NEW_LINE> <INDENT> return list(self.docs_to_save[cls].values()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [doc for _cls in self.docs_to_save for doc in self.preview_save(cls=_cls)] <NEW_LINE> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> for cls, docs in self.docs_to_delete.items(): <NEW_LINE> <INDENT> cls.bulk_delete(docs) <NEW_LINE> <DEDENT> for cls, doc_map in self.docs_to_save.items(): <NEW_LINE> <INDENT> docs = list(doc_map.values()) <NEW_LINE> cls.bulk_save(docs) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.depth += 1 <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.depth -= 1 <NEW_LINE> if self.depth == 0 and not exc_type: <NEW_LINE> <INDENT> self.commit() | Helper for saving up a bunch of saves and deletes of couch docs
and then committing them all at once with a few bulk operations
ex:
with CouchTransaction() as transaction:
for doc in docs:
transaction.save(doc)
other = Other.get(doc.other_id)
other.name = ''
transaction.save(other)
etc. This will do one bulk save per doc type, rather than one save per
call save call.
If an exception is raised during the body of the with statement,
no changes are commited to the db.
If the same transaction is used in multiple embedded with statements,
it will only be commited on successful exit of the outermost one. This lets
you do something like:
def save_stuff(stuff, transaction=None):
with transaction or CouchTransaction() as transaction:
# save all the stuff
and call this function either with no transaction or have it cooperate
with an ongoing transaction that you pass in. | 62598fb24a966d76dd5eef48 |
@dataclass <NEW_LINE> class DeconzNumberEntityDescription( NumberEntityDescription, DeconzNumberEntityDescriptionBase ): <NEW_LINE> <INDENT> entity_category = EntityCategory.CONFIG | Class describing deCONZ number entities. | 62598fb271ff763f4b5e77e3 |
class DaylightHandler(APIItems[Daylight]): <NEW_LINE> <INDENT> resource_type = ResourceTypes.DAYLIGHT <NEW_LINE> path = URL <NEW_LINE> item_cls = Daylight | Handler for daylight sensor. | 62598fb267a9b606de54603e |
@implementer(_IGlobal) <NEW_LINE> class _GlobalObject(object): <NEW_LINE> <INDENT> pass | used for global tool lookup | 62598fb223849d37ff851124 |
class Alarm(resource.Resource, display.Display): <NEW_LINE> <INDENT> list_column_names = [ "id", "name", "desc", "metric namespace", "metric name", "status" ] <NEW_LINE> show_column_names = [ "id", "name", "desc", "metric namespace", "metric name", "metric dimensions", "condition", "enabled", "action enabled", "update time", "status" ] <NEW_LINE> column_2_property = { "id": "alarm_id", "name": "alarm_name", "desc": "alarm_description", "enabled": "alarm_enabled", "action enabled": "alarm_action_enabled", "status": "alarm_state", } <NEW_LINE> @property <NEW_LINE> def metric_namespace(self): <NEW_LINE> <INDENT> return self.metric["namespace"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def metric_name(self): <NEW_LINE> <INDENT> return self.metric["metric_name"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def metric_dimensions(self): <NEW_LINE> <INDENT> dimensions = self.metric['dimensions'] <NEW_LINE> if dimensions and dimensions > 0: <NEW_LINE> <INDENT> return ';'.join([dim['name'] + '=' + dim['value'] for dim in dimensions]) <NEW_LINE> <DEDENT> return '' <NEW_LINE> <DEDENT> formatter = { "condition": condition_formatter, "update time": utils.format_time } | Cloud Eye alarm resource instance. | 62598fb285dfad0860cbfaac |
@base.register_class <NEW_LINE> class SORT_new_material(SORT_new_material_base): <NEW_LINE> <INDENT> bl_idname = "sort_material.new" | Add a new material | 62598fb2adb09d7d5dc0a5fc |
class MGMSG_HW_YES_FLASH_PROGRAMMING(MessageWithoutData): <NEW_LINE> <INDENT> message_id = 0x0017 <NEW_LINE> _params_names = ['message_id'] + [None, None] + ['dest', 'source'] | This message is sent by the server on start up, however, it is a
deprecated message (i.e. has no function) and can be ignored. | 62598fb230bbd722464699b1 |
class StructInputMethod(ContractMethod): <NEW_LINE> <INDENT> def __init__( self, provider: BaseProvider, contract_address: str, contract_function: ContractFunction, validator: Validator = None, ): <NEW_LINE> <INDENT> super().__init__(provider, contract_address, validator) <NEW_LINE> self.underlying_method = contract_function <NEW_LINE> <DEDENT> def validate_and_normalize_inputs(self, s: Tuple0xcf8ad995): <NEW_LINE> <INDENT> self.validator.assert_valid( method_name="structInput", parameter_name="s", argument_value=s ) <NEW_LINE> return s <NEW_LINE> <DEDENT> def call( self, s: Tuple0xcf8ad995, tx_params: Optional[TxParams] = None ) -> None: <NEW_LINE> <INDENT> (s) = self.validate_and_normalize_inputs(s) <NEW_LINE> tx_params = super().normalize_tx_params(tx_params) <NEW_LINE> return self.underlying_method(s).call(tx_params.as_dict()) <NEW_LINE> <DEDENT> def send_transaction( self, s: Tuple0xcf8ad995, tx_params: Optional[TxParams] = None ) -> Union[HexBytes, bytes]: <NEW_LINE> <INDENT> (s) = self.validate_and_normalize_inputs(s) <NEW_LINE> tx_params = super().normalize_tx_params(tx_params) <NEW_LINE> return self.underlying_method(s).transact(tx_params.as_dict()) <NEW_LINE> <DEDENT> def estimate_gas( self, s: Tuple0xcf8ad995, tx_params: Optional[TxParams] = None ) -> int: <NEW_LINE> <INDENT> (s) = self.validate_and_normalize_inputs(s) <NEW_LINE> tx_params = super().normalize_tx_params(tx_params) <NEW_LINE> return self.underlying_method(s).estimateGas(tx_params.as_dict()) | Various interfaces to the structInput method. | 62598fb2b7558d589546369c |
class Operation(object): <NEW_LINE> <INDENT> Types = Enum("Types", "ACTION TRANSFORMATION INSTANT_ACTION") <NEW_LINE> def __init__(self, name, *args, **kwargs): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> self.op_type = self._classify_operation(name) <NEW_LINE> <DEDENT> def _classify_operation(self, name): <NEW_LINE> <INDENT> ops = Operation.Types <NEW_LINE> operations_dict = { 'Define':ops.TRANSFORMATION, 'Filter':ops.TRANSFORMATION, 'Range':ops.TRANSFORMATION, 'Aggregate':ops.ACTION, 'Histo1D':ops.ACTION, 'Histo2D':ops.ACTION, 'Histo3D':ops.ACTION, 'Profile1D':ops.ACTION, 'Profile2D':ops.ACTION, 'Profile3D':ops.ACTION, 'Count':ops.ACTION, 'Min':ops.ACTION, 'Max':ops.ACTION, 'Mean':ops.ACTION, 'Sum':ops.ACTION, 'Fill':ops.ACTION, 'Reduce':ops.ACTION, 'Report':ops.ACTION, 'Take':ops.ACTION, 'Graph':ops.ACTION, 'Snapshot':ops.INSTANT_ACTION, 'Foreach':ops.INSTANT_ACTION } <NEW_LINE> op_type = operations_dict.get(name) <NEW_LINE> if not op_type: <NEW_LINE> <INDENT> raise Exception("Invalid operation \"{}\"".format(name)) <NEW_LINE> <DEDENT> return op_type <NEW_LINE> <DEDENT> def is_action(self): <NEW_LINE> <INDENT> return self.op_type == Operation.Types.ACTION <NEW_LINE> <DEDENT> def is_transformation(self): <NEW_LINE> <INDENT> return self.op_type == Operation.Types.TRANSFORMATION | A Generic representation of an operation. The
operation could be a transformation or an action.
Attributes
----------
Types
A class member that is an Enum of the types
of operations supported. This can be ACTION
or TRANSFORMATION or INSTANT_ACTION.
name
Name of the current operation.
args
Variable length argument list for the current
operation.
kwargs
Arbitrary keyword arguments for the current
operation.
op_type
The type or category of the current
operation (ACTION OR TRANSFORMATION OR INSTANT_ACTION).
For the list of operations that your current
backend supports, try :
import PyRDF
PyRDF.use(...) # Choose a backend
print(PyRDF.current_backend.supported_operations) | 62598fb2f9cc0f698b1c5303 |
class Title(CleanText): <NEW_LINE> <INDENT> @debug() <NEW_LINE> def filter(self, txt): <NEW_LINE> <INDENT> txt = super(Title, self).filter(txt) <NEW_LINE> return txt.title() | Extract text with :class:`CleanText` and apply title() to it. | 62598fb216aa5153ce400575 |
class PeerExpressRouteCircuitConnectionListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[PeerExpressRouteCircuitConnection]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["PeerExpressRouteCircuitConnection"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(PeerExpressRouteCircuitConnectionListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link | Response for ListPeeredConnections API service call retrieves all global reach peer circuit connections that belongs to a Private Peering for an ExpressRouteCircuit.
:param value: The global reach peer circuit connection associated with Private Peering in an
ExpressRoute Circuit.
:type value: list[~azure.mgmt.network.v2020_06_01.models.PeerExpressRouteCircuitConnection]
:param next_link: The URL to get the next set of results.
:type next_link: str | 62598fb28e7ae83300ee9115 |
@final <NEW_LINE> class AssignVisitor(BaseFSTVisitor): <NEW_LINE> <INDENT> def visit_assign(self, node: Assign) -> None: <NEW_LINE> <INDENT> self._check_assign_char(node) <NEW_LINE> self.generic_visit(node) <NEW_LINE> <DEDENT> def _check_assign_char(self, node: Assign) -> None: <NEW_LINE> <INDENT> if node.raw_text.startswith(' '): <NEW_LINE> <INDENT> self._add_violation( SpacedAssignViolation(node, text=node.text), ) | Finds wrong assigns. | 62598fb2fff4ab517ebcd857 |
class Island (object): <NEW_LINE> <INDENT> def __init__ (self, line): <NEW_LINE> <INDENT> self.lines = [line] <NEW_LINE> self.ymax = line.y <NEW_LINE> <DEDENT> def adjoin (self, island): <NEW_LINE> <INDENT> lnew = [] <NEW_LINE> u = self.lines <NEW_LINE> v = island.lines <NEW_LINE> i = 0 <NEW_LINE> ilen = len (u) <NEW_LINE> j = 0 <NEW_LINE> jlen = len (v) <NEW_LINE> while i < ilen and j < jlen: <NEW_LINE> <INDENT> if u[i] < v[j]: <NEW_LINE> <INDENT> lnew.append (u[i]) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lnew.append (v[j]) <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> <DEDENT> if i < ilen: <NEW_LINE> <INDENT> lnew += u[i:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lnew += v[j:] <NEW_LINE> <DEDENT> self.lines = lnew <NEW_LINE> if island.ymax > self.ymax: <NEW_LINE> <INDENT> self.ymax = island.ymax <NEW_LINE> <DEDENT> <DEDENT> def adjacent (self, seg, maxsep=1): <NEW_LINE> <INDENT> yseg = seg.y <NEW_LINE> xmin = seg.xmin <NEW_LINE> xmax = seg.xmax <NEW_LINE> for oseg in self.lines [-1::-1]: <NEW_LINE> <INDENT> if oseg.y == yseg: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if (oseg.y + maxsep < yseg): <NEW_LINE> <INDENT> return False <NEW_LINE> if xmin - maxsep > oseg.xmax: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if xmax + maxsep >= oseg.xmin: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> elif (oseg.y - maxsep < yseg): <NEW_LINE> <INDENT> if xmin - maxsep < oseg.xmax: <NEW_LINE> <INDENT> if xmax + maxsep > oseg.xmin: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def count (self): <NEW_LINE> <INDENT> n = 0 <NEW_LINE> for t in self.lines: <NEW_LINE> <INDENT> n+= t.count () <NEW_LINE> <DEDENT> return n <NEW_LINE> <DEDENT> def setIsland (self): <NEW_LINE> <INDENT> for l in self.lines: <NEW_LINE> <INDENT> l.setIsland (self) | List of directly or indirectly adjacent PixLines.
Takes in list of horizontal line segments | 62598fb21b99ca400228f569 |
class frac(Function): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def eval(cls, arg): <NEW_LINE> <INDENT> from sympy import AccumBounds, im <NEW_LINE> def _eval(arg): <NEW_LINE> <INDENT> if arg is S.Infinity or arg is S.NegativeInfinity: <NEW_LINE> <INDENT> return AccumBounds(0, 1) <NEW_LINE> <DEDENT> if arg.is_integer: <NEW_LINE> <INDENT> return S.Zero <NEW_LINE> <DEDENT> if arg.is_number: <NEW_LINE> <INDENT> if arg is S.NaN: <NEW_LINE> <INDENT> return S.NaN <NEW_LINE> <DEDENT> elif arg is S.ComplexInfinity: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return arg - floor(arg) <NEW_LINE> <DEDENT> <DEDENT> return cls(arg, evaluate=False) <NEW_LINE> <DEDENT> terms = Add.make_args(arg) <NEW_LINE> real, imag = S.Zero, S.Zero <NEW_LINE> for t in terms: <NEW_LINE> <INDENT> if t.is_imaginary or (S.ImaginaryUnit*t).is_real: <NEW_LINE> <INDENT> i = im(t) <NEW_LINE> if not i.has(S.ImaginaryUnit): <NEW_LINE> <INDENT> imag += i <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> real += t <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> real += t <NEW_LINE> <DEDENT> <DEDENT> real = _eval(real) <NEW_LINE> imag = _eval(imag) <NEW_LINE> return real + S.ImaginaryUnit*imag <NEW_LINE> <DEDENT> def _eval_rewrite_as_floor(self, arg): <NEW_LINE> <INDENT> return arg - floor(arg) <NEW_LINE> <DEDENT> def _eval_rewrite_as_ceiling(self, arg): <NEW_LINE> <INDENT> return arg + ceiling(-arg) <NEW_LINE> <DEDENT> def _eval_Eq(self, other): <NEW_LINE> <INDENT> if isinstance(self, frac): <NEW_LINE> <INDENT> if (self.rewrite(floor) == other) or (self.rewrite(ceiling) == other): <NEW_LINE> <INDENT> return S.true | Represents the fractional part of x
For real numbers it is defined [1]_ as
.. math::
x - \lfloor{x}\rfloor
Examples
========
>>> from sympy import Symbol, frac, Rational, floor, ceiling, I
>>> frac(Rational(4, 3))
1/3
>>> frac(-Rational(4, 3))
2/3
returns zero for integer arguments
>>> n = Symbol('n', integer=True)
>>> frac(n)
0
rewrite as floor
>>> x = Symbol('x')
>>> frac(x).rewrite(floor)
x - floor(x)
for complex arguments
>>> r = Symbol('r', real=True)
>>> t = Symbol('t', real=True)
>>> frac(t + I*r)
I*frac(r) + frac(t)
See Also
========
sympy.functions.elementary.integers.floor
sympy.functions.elementary.integers.ceiling
References
===========
.. [1] http://en.wikipedia.org/wiki/Fractional_part
.. [2] http://mathworld.wolfram.com/FractionalPart.html | 62598fb2ec188e330fdf8901 |
class MultiModeAction(qt.QWidgetAction): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> assert isinstance(parent, qt.QWidget) <NEW_LINE> qt.QWidgetAction.__init__(self, parent) <NEW_LINE> button = qt.QToolButton(parent) <NEW_LINE> button.setPopupMode(qt.QToolButton.MenuButtonPopup) <NEW_LINE> self.setDefaultWidget(button) <NEW_LINE> self.__button = button <NEW_LINE> <DEDENT> def getMenu(self): <NEW_LINE> <INDENT> button = self.__button <NEW_LINE> menu = button.menu() <NEW_LINE> if menu is None: <NEW_LINE> <INDENT> menu = qt.QMenu(button) <NEW_LINE> button.setMenu(menu) <NEW_LINE> <DEDENT> return menu <NEW_LINE> <DEDENT> def addAction(self, action): <NEW_LINE> <INDENT> menu = self.getMenu() <NEW_LINE> button = self.__button <NEW_LINE> menu.addAction(action) <NEW_LINE> if button.defaultAction() is None: <NEW_LINE> <INDENT> button.setDefaultAction(action) <NEW_LINE> <DEDENT> if action.isCheckable(): <NEW_LINE> <INDENT> action.toggled.connect(self._toggled) <NEW_LINE> <DEDENT> <DEDENT> def _toggled(self, checked): <NEW_LINE> <INDENT> if checked: <NEW_LINE> <INDENT> action = self.sender() <NEW_LINE> button = self.__button <NEW_LINE> button.setDefaultAction(action) | This action provides a default checkable action from a list of checkable
actions.
The default action can be selected from a drop down list. The last one used
became the default one.
The default action is directly usable without using the drop down list. | 62598fb27047854f4633f44c |
class AttributeSpecification: <NEW_LINE> <INDENT> __slots__ = ("name", "alt_name", "default", "transform", "accessor", "func") <NEW_LINE> def __init__(self, name, default=None, alt_name=None, transform=None, func=None): <NEW_LINE> <INDENT> if isinstance(default, tuple): <NEW_LINE> <INDENT> default, transform = default <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.default = default <NEW_LINE> self.alt_name = alt_name or name <NEW_LINE> self.transform = transform or None <NEW_LINE> self.func = func <NEW_LINE> self.accessor = None <NEW_LINE> if self.transform and not hasattr(self.transform, "__call__"): <NEW_LINE> <INDENT> raise TypeError("transform must be callable") <NEW_LINE> <DEDENT> if self.transform is None and self.default is not None: <NEW_LINE> <INDENT> self.transform = type(self.default) | Class that describes how the value of a given attribute should be
retrieved.
The class contains the following members:
- C{name}: the name of the attribute. This is also used when we
are trying to get its value from a vertex/edge attribute of a
graph.
- C{alt_name}: alternative name of the attribute. This is used
when we are trying to get its value from a Python dict or an
L{igraph.Configuration} object. If omitted at construction time,
it will be equal to C{name}.
- C{default}: the default value of the attribute when none of
the sources we try can provide a meaningful value.
- C{transform}: optional transformation to be performed on the
attribute value. If C{None} or omitted, it defaults to the
type of the default value.
- C{func}: when given, this function will be called with an
index in order to derive the value of the attribute. | 62598fb2a79ad1619776a0da |
class FakeInputs: <NEW_LINE> <INDENT> def __init__(self, inputs): <NEW_LINE> <INDENT> self.inputs = inputs <NEW_LINE> <DEDENT> def __call__(self, prompt=None): <NEW_LINE> <INDENT> if prompt is not None: <NEW_LINE> <INDENT> print(prompt, end='') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.inputs.pop(0) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise EOFError | Simulate multiple user inputs, can be used as input() replacement | 62598fb2be383301e025386c |
class LoginCodeView(DjangoLoginView): <NEW_LINE> <INDENT> form_class = forms.LoginCodeForm <NEW_LINE> template_name = 'registration/login_code.html' <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if 'code' in self.request.GET and getattr(settings, 'NOPASSWORD_LOGIN_ON_GET', False): <NEW_LINE> <INDENT> return super(LoginCodeView, self).post(request, *args, **kwargs) <NEW_LINE> <DEDENT> return super(LoginCodeView, self).get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> form.save() <NEW_LINE> return super(LoginCodeView, self).form_valid(form) <NEW_LINE> <DEDENT> def get_form_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(LoginCodeView, self).get_form_kwargs() <NEW_LINE> if self.request.method == 'GET' and 'code' in self.request.GET: <NEW_LINE> <INDENT> kwargs['data'] = self.request.GET <NEW_LINE> <DEDENT> return kwargs <NEW_LINE> <DEDENT> def get_redirect_url(self): <NEW_LINE> <INDENT> login_code = getattr(self.request.user, 'login_code', None) <NEW_LINE> return login_code.next if login_code else '' | Authenticates a user with a login code. | 62598fb2be7bc26dc9251e95 |
class StitchValue(Generic[T]): <NEW_LINE> <INDENT> def __lt__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.lt) <NEW_LINE> <DEDENT> def __le__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.le) <NEW_LINE> <DEDENT> def __eq__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.eq) <NEW_LINE> <DEDENT> def __ne__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.ne) <NEW_LINE> <DEDENT> def __gt__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.gt) <NEW_LINE> <DEDENT> def __ge__(self,b): <NEW_LINE> <INDENT> return SBinCmp.from_dsl(self,b,CmpType.ge) <NEW_LINE> <DEDENT> def __call__(self,s: 'Stitch') -> T: <NEW_LINE> <INDENT> raise NotImplementedError() | A value dependent on the state of a Stitch object.
This is a lazily evaluated value that can be used directly but also serves
as a DSL for comparisons. | 62598fb27d847024c075c434 |
class ProductDataAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('data_file', 'data_name', 'errors') <NEW_LINE> actions = [cheap_products, avg_prices, products_analogs] <NEW_LINE> form = ProductDataAdminForm | Product data model for admin | 62598fb2ff9c53063f51a6bf |
class TokenizerSplitter(Resource): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> text = request.json["texto"] <NEW_LINE> if text[-1] not in PUNCTUATION: <NEW_LINE> <INDENT> text = text + "." <NEW_LINE> <DEDENT> outputSentences = [] <NEW_LINE> parsedTree = parsetree(text) <NEW_LINE> for sentence in parsedTree: <NEW_LINE> <INDENT> outputTokens = [] <NEW_LINE> for w in sentence.words: <NEW_LINE> <INDENT> outputTokens.append(w.string) <NEW_LINE> <DEDENT> outputSentences.append(dict(oracion=outputTokens)) <NEW_LINE> <DEDENT> return Response(json.dumps(outputSentences), mimetype="application/json") | Splits an input text into tokenized sentences. | 62598fb2cc0a2c111447b085 |
class Median(RMS) : <NEW_LINE> <INDENT> def __init__ ( self , xmin , xmax ) : <NEW_LINE> <INDENT> RMS.__init__ ( self , xmin , xmax , err = False ) <NEW_LINE> <DEDENT> def _median_ ( self , func , xmin , xmax , *args ) : <NEW_LINE> <INDENT> from ostap.math.integral import IntegralCache <NEW_LINE> iint = IntegralCache ( func , xmin , False , *args ) <NEW_LINE> half = 2.0 / iint ( xmax ) <NEW_LINE> from scipy import optimize <NEW_LINE> ifun = lambda x : iint( x ) * half - 1.0 <NEW_LINE> try: <NEW_LINE> <INDENT> meanv = Mean . __call__ ( self , func , *args ) <NEW_LINE> sigma = RMS . __call__ ( self , func , *args ) <NEW_LINE> import math <NEW_LINE> xmn = meanv - 2 * sigma <NEW_LINE> xmx = meanv + 2 * sigma <NEW_LINE> if isinstance ( xmin , float ) : xmn = max ( xmn , xmin ) <NEW_LINE> if isinstance ( xmax , float ) : xmx = min ( xmx , xmax ) <NEW_LINE> result = optimize.brentq ( ifun , xmn , xmx ) <NEW_LINE> <DEDENT> except : <NEW_LINE> <INDENT> result = optimize.brentq ( ifun , xmin , xmax ) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __call__ ( self , func , *args ) : <NEW_LINE> <INDENT> return self._median_ ( func , self._xmin , self._xmax ) <NEW_LINE> <DEDENT> def __str__ ( self ) : <NEW_LINE> <INDENT> return "Median(%s,%s)" % ( self._xmin , self._xmax ) | Calculate median for the distribution or function
>>> xmin,xmax = 0,math.pi
>>> median = Median ( xmin,xmax ) ## specify min/max
>>> value = median ( math.sin )
- scipy.optimize.brentq is used | 62598fb223849d37ff851126 |
class XObject: <NEW_LINE> <INDENT> def __init__(self, data, width, height): <NEW_LINE> <INDENT> self.dictionary = {"Type": "/XObject", "Subtype": "/Image", "BitsPerComponent" : "8", "ColorSpace": "/DeviceRGB", "Width": str(width), "Height": str(height)} <NEW_LINE> self.raw_data = data <NEW_LINE> <DEDENT> def data(self): <NEW_LINE> <INDENT> return "stream\n" + self.raw_data + "\nendstream\n" <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> k = self.dictionary.keys() <NEW_LINE> k.extend(["Length"]) <NEW_LINE> return k <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if item == "Length": <NEW_LINE> <INDENT> return len(self.raw_data) <NEW_LINE> <DEDENT> return self.dictionary[item] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.dictionary.keys()) + 1 | XObject dictionary and stream | 62598fb25fc7496912d482b7 |
class Digraph(CommandBase, Requirer): <NEW_LINE> <INDENT> cmds = { 'digraph': 'Write out package.dot digraph for graphviz', } <NEW_LINE> host_sys_deps = ['graphviz'] <NEW_LINE> @staticmethod <NEW_LINE> def setup_subparser(parser, cmd=None): <NEW_LINE> <INDENT> parser.add_argument( '--all', help="Print dependency tree", action='store_true', ) <NEW_LINE> parser.add_argument( '--dotfile', help="Dotfile to write to", default="digraph.dot", ) <NEW_LINE> <DEDENT> def __init__(self, cmd, args): <NEW_LINE> <INDENT> CommandBase.__init__(self, cmd, args, load_recipes=True, require_prefix=True, ) <NEW_LINE> Requirer.__init__(self) <NEW_LINE> self.packages = [] <NEW_LINE> if self.args.all: <NEW_LINE> <INDENT> self.packages = recipe_manager.recipe_manager.list_all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.packages = self.inventory.get_packages() <NEW_LINE> <DEDENT> self.assert_requirements() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.graphviz( self.packages, self.args.dotfile, "digraph.png" ) <NEW_LINE> <DEDENT> def graphviz(self, packages, dotfile, pngfile): <NEW_LINE> <INDENT> self.log.info("Creating digraph file {0}".format(dotfile)) <NEW_LINE> f = open(dotfile, "w") <NEW_LINE> f.write("digraph g {\n") <NEW_LINE> for pkg in packages: <NEW_LINE> <INDENT> pkg_safe = pkg.replace("-", "_") <NEW_LINE> f.write('{pkg} [label="{pkg}"]\n'.format(pkg=pkg_safe)) <NEW_LINE> rec = recipe.get_recipe(pkg, fail_easy=True) <NEW_LINE> if rec is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for dep in rec.depends: <NEW_LINE> <INDENT> if dep in packages: <NEW_LINE> <INDENT> f.write(" {pkg} -> {dep}\n".format( pkg=pkg_safe, dep=dep.replace("-", "_") )) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> f.write("}\n") <NEW_LINE> f.close() <NEW_LINE> self.log.debug("{0} written".format(dotfile)) <NEW_LINE> if pngfile is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.log.info("Creating png file {0}".format(pngfile)) <NEW_LINE> subproc.monitor_process( ['dot', 
dotfile, '-Tpng', '-o{0}'.format(pngfile)], env=os.environ, ) | Generate dependency graph | 62598fb28a43f66fc4bf21ed |
class HeadlessShell(object): <NEW_LINE> <INDENT> def __init__(self, workflowClass): <NEW_LINE> <INDENT> self._workflowClass = workflowClass <NEW_LINE> self.projectManager = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def workflow(self): <NEW_LINE> <INDENT> return self.projectManager.workflow <NEW_LINE> <DEDENT> def createBlankProjectFile(self, projectFilePath): <NEW_LINE> <INDENT> hdf5File = ProjectManager.createBlankProjectFile(projectFilePath) <NEW_LINE> readOnly = False <NEW_LINE> self.projectManager = ProjectManager( self._workflowClass, hdf5File, projectFilePath, readOnly, headless=True ) <NEW_LINE> <DEDENT> def openProjectPath(self, projectFilePath): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hdf5File, readOnly = ProjectManager.openProjectFile(projectFilePath) <NEW_LINE> self.projectManager = ProjectManager( self._workflowClass, hdf5File, projectFilePath, readOnly, headless=True ) <NEW_LINE> <DEDENT> except ProjectManager.ProjectVersionError: <NEW_LINE> <INDENT> oldProjectFilePath = projectFilePath <NEW_LINE> name, ext = os.path.splitext(oldProjectFilePath) <NEW_LINE> projectFilePath = name + "_imported" + ext <NEW_LINE> logger.info("Importing project as '" + projectFilePath + "'") <NEW_LINE> hdf5File = ProjectManager.createBlankProjectFile(projectFilePath) <NEW_LINE> self.projectManager = ProjectManager( self._workflowClass, hdf5File, projectFilePath, readOnly=False, importFromPath=oldProjectFilePath, headless=True ) | For now, this class is just a stand-in for the GUI shell (used when running from the command line). | 62598fb285dfad0860cbfaad |
class Binary(object): <NEW_LINE> <INDENT> def __init__(self, num = None): <NEW_LINE> <INDENT> if num is None: <NEW_LINE> <INDENT> self.binNum = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.binNum = decToBin(num) <NEW_LINE> <DEDENT> <DEDENT> def add(self, num): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> int(num) <NEW_LINE> self.add(Binary(num)) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> diffSize = len(self.binNum) - len(num.binNum) <NEW_LINE> if diffSize>0: <NEW_LINE> <INDENT> num.zeropad(diffSize) <NEW_LINE> <DEDENT> elif diffSize<0: <NEW_LINE> <INDENT> self.zeropad(-diffSize) <NEW_LINE> <DEDENT> carry = 0 <NEW_LINE> for ind in range(0,len(self.binNum)): <NEW_LINE> <INDENT> tempsum = self.binNum[ind]+num.binNum[ind]+carry <NEW_LINE> self.binNum[ind] = int(tempsum%2) <NEW_LINE> carry = int(tempsum/2) <NEW_LINE> <DEDENT> if carry == 1: <NEW_LINE> <INDENT> self.binNum.append(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def zeropad(self,n): <NEW_LINE> <INDENT> self.binNum.extend([0]*n) <NEW_LINE> <DEDENT> def squeeze(self): <NEW_LINE> <INDENT> while self.binNum[-1]==0: <NEW_LINE> <INDENT> self.binNum.pop() <NEW_LINE> <DEDENT> <DEDENT> def toDec(self): <NEW_LINE> <INDENT> self.squeeze() <NEW_LINE> decNum = 0 <NEW_LINE> for ind in range(len(self.binNum)): <NEW_LINE> <INDENT> if self.binNum[ind] == 1: <NEW_LINE> <INDENT> decNum += pow(2,ind) <NEW_LINE> <DEDENT> <DEDENT> return decNum | classdocs | 62598fb292d797404e388b9d |
class CAP_Add_Stored_Presets(Operator): <NEW_LINE> <INDENT> bl_idname = "cap.create_current_preset" <NEW_LINE> bl_label = "Default Presets" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> user_preferences = context.user_preferences <NEW_LINE> addon_prefs = user_preferences.addons[__package__].preferences <NEW_LINE> exp = bpy.data.objects[addon_prefs.default_datablock].CAPExp <NEW_LINE> if len(addon_prefs.saved_presets) > 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> user_preferences = context.user_preferences <NEW_LINE> addon_prefs = user_preferences.addons[__package__].preferences <NEW_LINE> exp = bpy.data.objects[addon_prefs.default_datablock].CAPExp <NEW_LINE> new_preset = exp.file_presets.add() <NEW_LINE> export_presets.CopyPreset(addon_prefs.saved_presets[addon_prefs.saved_presets_index], new_preset) <NEW_LINE> return {'FINISHED'} | Add the currently selected saved preset into the file presets list, enabling it's use for exports in this .blend file. | 62598fb257b8e32f52508155 |
class Digraph(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.nodes = set([]) <NEW_LINE> self.edges = {} <NEW_LINE> <DEDENT> def addNode(self, node): <NEW_LINE> <INDENT> node = node.getName() <NEW_LINE> if node in self.nodes: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.nodes.add(node) <NEW_LINE> self.edges[node] = [] <NEW_LINE> <DEDENT> <DEDENT> def addEdge(self, edge): <NEW_LINE> <INDENT> src = edge.getSource() <NEW_LINE> dest = edge.getDestination() <NEW_LINE> if not(src in self.nodes and dest in self.nodes): <NEW_LINE> <INDENT> raise ValueError('Node not in graph') <NEW_LINE> <DEDENT> self.edges[src].append(dest) <NEW_LINE> <DEDENT> def childrenOf(self, node): <NEW_LINE> <INDENT> return self.edges[node] <NEW_LINE> <DEDENT> def hasNode(self, node): <NEW_LINE> <INDENT> return node in self.nodes <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> res = '' <NEW_LINE> for k in self.edges: <NEW_LINE> <INDENT> for d in self.edges[k]: <NEW_LINE> <INDENT> res = res + str(k) + '->' + str(d) + '\n' <NEW_LINE> <DEDENT> <DEDENT> return res[:-1] | A directed graph | 62598fb230bbd722464699b2 |
class RestApiException(BiiException): <NEW_LINE> <INDENT> pass | Base class exception of this module | 62598fb256b00c62f0fb292a |
class SchemaNode(colander.SchemaNode): <NEW_LINE> <INDENT> readonly = False <NEW_LINE> def deserialize(self, cstruct=null): <NEW_LINE> <INDENT> if self.readonly and cstruct != null: <NEW_LINE> <INDENT> raise Invalid(self, 'This field is ``readonly``.') <NEW_LINE> <DEDENT> return super().deserialize(cstruct) <NEW_LINE> <DEDENT> def serialize(self, appstruct=null): <NEW_LINE> <INDENT> if appstruct in (None, null) and self.default is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return super().serialize(appstruct) | Subclass of :class: `SchemaNode` with extended keyword support.
The constructor accepts these additional keyword arguments:
readonly:
Disable deserialization. Default: False | 62598fb297e22403b383af82 |
class LogRecorder: <NEW_LINE> <INDENT> _base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) <NEW_LINE> _logpath = os.path.join(_base_path, "testlog") <NEW_LINE> _level = logging.INFO <NEW_LINE> if Path(_logpath).exists(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.mkdir(_logpath) <NEW_LINE> <DEDENT> _path = os.path.join(_logpath, "testcase.log") <NEW_LINE> logger = logging.getLogger(__name__) <NEW_LINE> logger.setLevel(_level) <NEW_LINE> logger.propagate = 0 <NEW_LINE> formatter = logging.Formatter("%(asctime)s - %(levelname)s-%(funcName)s: %(message)s", "%y-%m-%d %H:%M:%S") <NEW_LINE> rollHandler = RotatingFileHandler(_path, maxBytes=30 * 1024, backupCount=10) <NEW_LINE> rollHandler.setLevel(_level) <NEW_LINE> rollHandler.setFormatter(formatter) <NEW_LINE> logger.addHandler(rollHandler) | usage: 记录日志,默认日志等级INFO,每个log最大为30M,最多保留10个日志文件 | 62598fb260cbc95b063643c2 |
class DevConfig(EnvironmentConfig): <NEW_LINE> <INDENT> LOG_LEVEL = logging.DEBUG | Development Environment Config | 62598fb24c3428357761a32c |
class Rotate(Operation): <NEW_LINE> <INDENT> def __init(self, prob, magnitude): <NEW_LINE> <INDENT> super(Rotate, self).__init__(prob, magnitude) <NEW_LINE> <DEDENT> def __call__(self, image): <NEW_LINE> <INDENT> if random.uniform(0, 1) > self.prob: <NEW_LINE> <INDENT> return image <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> magnitude_range = np.linspace(-30, 30, 10) <NEW_LINE> degrees = magnitude_range[self.magnitude] <NEW_LINE> return image.rotate(degrees, expand=False, resample=Image.BICUBIC) | Rotate the image magnitude degrees. | 62598fb266673b3332c30440 |
class HTTPClient(_HTTPClient): <NEW_LINE> <INDENT> def __init__(self, connector=None, *, loop=None): <NEW_LINE> <INDENT> super().__init__(connector, loop=loop) <NEW_LINE> self._user_agent = self.user_agent <NEW_LINE> self.alt_user_agent = self.user_agent <NEW_LINE> <DEDENT> @contextlib.contextmanager <NEW_LINE> def use_alt_agent(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.user_agent = self.alt_user_agent <NEW_LINE> yield <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.user_agent = self._user_agent <NEW_LINE> <DEDENT> <DEDENT> @switch_agent <NEW_LINE> def add_relationship(self, user_id, type=None): <NEW_LINE> <INDENT> r = Route('PUT', '/users/@me/relationships/{user_id}', user_id=user_id) <NEW_LINE> payload = {} <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> payload['type'] = type <NEW_LINE> <DEDENT> return self.request(r, json=payload) <NEW_LINE> <DEDENT> def remove_relationship(self, user_id): <NEW_LINE> <INDENT> return self.request(Route('DELETE', '/users/@me/relationships/{user_id}', user_id=user_id)) <NEW_LINE> <DEDENT> @switch_agent <NEW_LINE> def send_request(self, username, discriminator): <NEW_LINE> <INDENT> r = Route('POST', '/users/@me/relationships') <NEW_LINE> payload = {'username': username, 'discriminator': int(discriminator)} <NEW_LINE> return self.request(r, json=payload) | Subclass of discord.http.HTTPClient adding features.
See HTTPClient.__base__.__doc__ for more info. | 62598fb27d43ff248742743c |
class NotAuthenticated(Exception): <NEW_LINE> <INDENT> pass | User not Authorization for action. | 62598fb21b99ca400228f56a |
class TestTask554(unittest.TestCase): <NEW_LINE> <INDENT> @parameterized.expand( [ ( 20, [ [3, 4, 5], [6, 8, 10], [9, 12, 15], [12, 16, 20], [5, 12, 13], [8, 15, 17], ], ), (5, [[3, 4, 5]]), (10, [[3, 4, 5], [6, 8, 10]]), (1, []), ] ) <NEW_LINE> def test_main_logic(self, number, expected_value): <NEW_LINE> <INDENT> self.assertEqual(algo.Task554.main_logic(number + 1), expected_value) | Test class for task 178b | 62598fb2283ffb24f3cf3900 |
class NesstarHarvester(OaipmhHarvester): <NEW_LINE> <INDENT> md_format = 'oai_ddi' <NEW_LINE> def info(self): <NEW_LINE> <INDENT> return { 'name': 'NESSTAR', 'title': 'NESSTAR', 'description': 'Harvester for NESSTAR data sources' } <NEW_LINE> <DEDENT> def _before_record_fetch(self, harvest_object): <NEW_LINE> <INDENT> if (harvest_object.guid in ignored_datasets): <NEW_LINE> <INDENT> log.debug('Ignore dataset %s' % harvest_object.guid) <NEW_LINE> raise IgnoreDatasetError('Ignore dataset %s' % harvest_object.guid) <NEW_LINE> <DEDENT> <DEDENT> def _extract_license_id(self, content): <NEW_LINE> <INDENT> return 'FORS' <NEW_LINE> <DEDENT> def _get_possible_resource(self, harvest_obj, content): <NEW_LINE> <INDENT> url = super( NesstarHarvester, self )._get_possible_resource(harvest_obj, content) <NEW_LINE> if url: <NEW_LINE> <INDENT> url = ( 'http://fors-getdata.unil.ch/webview/index.jsp?object=%s' % url ) <NEW_LINE> <DEDENT> return url | NESSTAR Harvester | 62598fb2fff4ab517ebcd85a |
class DistributedParameterServerBuilder(DataParallelBuilder, DistributedBuilderBase): <NEW_LINE> <INDENT> def __init__(self, towers, server, caching_device): <NEW_LINE> <INDENT> DataParallelBuilder.__init__(self, towers) <NEW_LINE> DistributedBuilderBase.__init__(self, server) <NEW_LINE> assert caching_device in ['cpu', 'gpu'], caching_device <NEW_LINE> self.caching_device = caching_device <NEW_LINE> self.is_chief = (self.task_index == 0) <NEW_LINE> worker_prefix = '/job:worker/task:%s' % self.task_index <NEW_LINE> self.param_server_device = tf.train.replica_device_setter( worker_device=worker_prefix + '/cpu:0', cluster=self.cluster) <NEW_LINE> self.cpu_device = '%s/cpu:0' % worker_prefix <NEW_LINE> self.raw_devices = ['{}/gpu:{}'.format(worker_prefix, k) for k in self.towers] <NEW_LINE> self.sync_queue_devices = ['/job:ps/task:%s/cpu:0' % i for i in range(self.num_ps)] <NEW_LINE> <DEDENT> def build(self, get_grad_fn, get_opt_fn): <NEW_LINE> <INDENT> ps_strategy = tf.contrib.training.GreedyLoadBalancingStrategy( self.num_ps, tf.contrib.training.byte_size_load_fn) <NEW_LINE> devices = [ tf.train.replica_device_setter( worker_device=d, cluster=self.cluster, ps_strategy=ps_strategy) for d in self.raw_devices] <NEW_LINE> if self.caching_device == 'gpu': <NEW_LINE> <INDENT> caching_devices = self.raw_devices <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> caching_devices = [self.cpu_device] <NEW_LINE> <DEDENT> custom_getter = OverrideCachingDevice( caching_devices, self.cpu_device, 1024 * 64) <NEW_LINE> with tf.variable_scope(tf.get_variable_scope(), custom_getter=custom_getter): <NEW_LINE> <INDENT> grad_list = DataParallelBuilder.build_on_towers(self.towers, get_grad_fn, devices) <NEW_LINE> <DEDENT> DataParallelBuilder._check_grad_list(grad_list) <NEW_LINE> with tf.device(self.param_server_device): <NEW_LINE> <INDENT> grads = average_grads(grad_list, colocation=False) <NEW_LINE> opt = get_opt_fn() <NEW_LINE> train_op = opt.apply_gradients(grads, name='train_op') 
<NEW_LINE> <DEDENT> train_op = self._add_sync_queues_and_barrier('all_workers_sync_barrier', [train_op]) <NEW_LINE> return train_op | Distributed parameter server training.
A single copy of parameters are scattered around PS.
Gradients across GPUs are averaged within the worker, and applied to PS.
Each worker also caches the variables for reading.
It is an equivalent of ``--variable_update=parameter_server`` in
`tensorflow/benchmarks <https://github.com/tensorflow/benchmarks>`_.
Note:
1. Gradients are not averaged across workers, but applied to PS variables
directly (either with or without locking depending on the optimizer). | 62598fb2d268445f26639bbd |
class BaseConfig: <NEW_LINE> <INDENT> TESTING = False <NEW_LINE> DEV = False <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> SECRET_KEY = os.environ.get('SECRET_KEY') or 'some-long-random-key' <NEW_LINE> TMP_FOLDER = os.environ.get('TMP_FOLDER') or os.path.abspath('/tmp') <NEW_LINE> FILE_TYPES = os.environ.get('FILE_TYPES') or ['pdf', 'epub', 'mobi'] | Base configuration | 62598fb23346ee7daa337681 |
class BroLogUtil(object): <NEW_LINE> <INDENT> EXT_EXPR = re.compile(r"[^/].*?\.(.*)$") <NEW_LINE> logtypes = dict() <NEW_LINE> @staticmethod <NEW_LINE> def supports(path): <NEW_LINE> <INDENT> base, fname = os.path.split(path) <NEW_LINE> return BroLogUtil.get_ext(fname) in BroLogUtil.logtypes <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_field_info(path): <NEW_LINE> <INDENT> base, fname = os.path.split(path) <NEW_LINE> return BroLogUtil.logtypes[ BroLogUtil.get_ext(fname) ] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def register_type(file_ext, target): <NEW_LINE> <INDENT> BroLogUtil.logtypes[file_ext] = target <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_ext(path): <NEW_LINE> <INDENT> match = BroLogUtil.EXT_EXPR.search(path) <NEW_LINE> if(match): <NEW_LINE> <INDENT> return match.group(1) <NEW_LINE> <DEDENT> return None | Container class for a few useful file / extension related functions.
Also maintains a registry for file extension / type specification pairs. These pairs
are used to automatically determine how to decode certain files. | 62598fb20c0af96317c563f0 |
class Area_01(area): <NEW_LINE> <INDENT> def __init__(self, player): <NEW_LINE> <INDENT> area.__init__(self, player) <NEW_LINE> self.background = pygame.image.load("background/background_images/forrest_side_scroll_background_2.jpg").convert() <NEW_LINE> self.background.set_colorkey(constants.WHITE) <NEW_LINE> self.area_limit = -2500 <NEW_LINE> area = [[sprite_objects.GRASS_LEFT, 500, 500], [sprite_objects.GRASS_MIDDLE, 570, 500], [sprite_objects.GRASS_RIGHT, 640, 500], [sprite_objects.GRASS_LEFT, 800, 400], [sprite_objects.GRASS_MIDDLE, 870, 400], [sprite_objects.GRASS_RIGHT, 940, 400], [sprite_objects.GRASS_LEFT, 1000, 500], [sprite_objects.GRASS_MIDDLE, 1070, 500], [sprite_objects.GRASS_RIGHT, 1140, 500], [sprite_objects.STONE_PLATFORM_LEFT, 1120, 280], [sprite_objects.STONE_PLATFORM_MIDDLE, 1190, 280], [sprite_objects.STONE_PLATFORM_RIGHT, 1260, 280], ] <NEW_LINE> for wall in area: <NEW_LINE> <INDENT> wall = sprite_objects.Platform(wall[0]) <NEW_LINE> wall.rect.x = wall[1] <NEW_LINE> wall.rect.y = wall[2] <NEW_LINE> wall.player = self.player <NEW_LINE> self.wall_list.add(wall) <NEW_LINE> <DEDENT> wall = sprite_objects.MovingPlatform(sprite_objects.STONE_PLATFORM_MIDDLE) <NEW_LINE> wall.rect.x = 1350 <NEW_LINE> wall.rect.y = 280 <NEW_LINE> wall.boundary_left = 1350 <NEW_LINE> wall.boundary_right = 1600 <NEW_LINE> wall.change_x = 1 <NEW_LINE> wall.player = self.player <NEW_LINE> wall.area = self <NEW_LINE> self.wall_list.add(wall) | Definition for area 1. | 62598fb2be7bc26dc9251e96 |
class QueenCard(PlayingCard): <NEW_LINE> <INDENT> def __init__(self, suit): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.suit = suit <NEW_LINE> self.value = 12 <NEW_LINE> self.uni = self.Uni[suit.value] <NEW_LINE> self.symbol = 'Q' <NEW_LINE> <DEDENT> def give_value(self): <NEW_LINE> <INDENT> return self.value | The QueenCard class represents the queen card. | 62598fb27b180e01f3e4908a |
class Repository(object): <NEW_LINE> <INDENT> def __init__(self, id, display_name=None, description=None, notes=None, working_dir=None, content_unit_counts=None, last_unit_added=None, last_unit_removed=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.display_name = display_name <NEW_LINE> self.description = description <NEW_LINE> self.notes = notes <NEW_LINE> self.working_dir = working_dir <NEW_LINE> self.content_unit_counts = content_unit_counts or {} <NEW_LINE> self.last_unit_added = last_unit_added <NEW_LINE> self.last_unit_removed = last_unit_removed <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Repository [%s]' % self.id | Contains repository data and any additional data relevant for the plugin to
function.
:ivar id: programmatic ID for the repository
:type id: str
:ivar display_name: user-friendly name describing the repository
:type display_name: str or None
:ivar description: user-friendly description of the repository
:type description: str or None
:ivar notes: arbitrary key-value pairs set and used by users to
programmatically describe the repository
:type notes: dict or None
:ivar working_dir: local (to the Pulp server) directory the plugin may use
to store any temporary data required by the plugin; this directory
is unique for each repository and plugin combination
:type working_dir: str
:ivar content_unit_counts: dictionary of unit types and the count of units
of that type associated with the repository.
:type content_unit_counts: dict
:param last_unit_added: UTC datetime of the last time a unit was added to the repository
:type last_unit_added: datetime.datetime with tzinfo
:param last_unit_removed: UTC datetime of the last time a unit was removed from the repository
:param last_unit_removed: datetime.datetime with tzinfo | 62598fb267a9b606de546043 |
class PGHeaderFooter (Directive): <NEW_LINE> <INDENT> required_arguments = 0 <NEW_LINE> optional_arguments = 0 <NEW_LINE> def run (self): <NEW_LINE> <INDENT> settings = self.state.document.settings <NEW_LINE> include_lines = statemachine.string2lines ( settings.get_resource ('mydocutils.gutenberg.parsers', self.resource), settings.tab_width, convert_whitespace = 1) <NEW_LINE> self.state_machine.insert_input (include_lines, '') <NEW_LINE> return [] | Inserts PG header or footer. | 62598fb2d486a94d0ba2c044 |
class FBPFilter(BaseEnum): <NEW_LINE> <INDENT> Ramp: str = "ramp" <NEW_LINE> Hamming: str = "hamming" <NEW_LINE> SheppLogan: str = "shepp-logan" | The enum class of algoritm types. Possible values:
* ``FBPFilter.Ramp``
* ``FBPFilter.Hamming``
* ``FBPFilter.SheppLogan`` | 62598fb2bf627c535bcb1514 |
class Upyun: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _httpdate_rfc1123(dt=None): <NEW_LINE> <INDENT> dt = dt or datetime.utcnow() <NEW_LINE> return dt.strftime('%a, %d %b %Y %H:%M:%S GMT') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _sign(client_key, client_secret, method, uri, date, policy=None, md5=None): <NEW_LINE> <INDENT> signarr = [] <NEW_LINE> for v in [method, uri, date, policy, md5]: <NEW_LINE> <INDENT> if v is not None: <NEW_LINE> <INDENT> signarr.append(v) <NEW_LINE> <DEDENT> <DEDENT> signstr = '&'.join(signarr) <NEW_LINE> signstr = base64.b64encode( hmac.new(client_secret.encode(), signstr.encode(), digestmod=hashlib.sha1).digest() ).decode() <NEW_LINE> return 'UPYUN %s:%s' % (client_key, signstr) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _send_request(method, url, headers, body): <NEW_LINE> <INDENT> s = Session() <NEW_LINE> prepped = Request(method, url).prepare() <NEW_LINE> prepped.headers = headers <NEW_LINE> prepped.body = body <NEW_LINE> r = s.send(prepped) <NEW_LINE> return r.status_code, r.text <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def upload(file_path, target_name, expire=None): <NEW_LINE> <INDENT> with open(file_path, 'rb') as f: <NEW_LINE> <INDENT> file = f.read() <NEW_LINE> <DEDENT> date = Upyun._httpdate_rfc1123() <NEW_LINE> file_md5 = hashlib.md5(file).hexdigest() <NEW_LINE> uri = '/{service}/{target}'.format(service=SERVICE_NAME, target=target_name) <NEW_LINE> method = 'PUT' <NEW_LINE> signature = Upyun._sign(OPERATOR_NAME, OPERATOR_SECRET, method, uri, date, md5=file_md5) <NEW_LINE> headers = { 'Authorization': signature, 'Date': date, 'Content-Length': str(len(file)), 'Content-MD5': file_md5, } <NEW_LINE> if expire is not None: <NEW_LINE> <INDENT> headers['x-upyun-meta-ttl'] = expire <NEW_LINE> <DEDENT> status, content = Upyun._send_request(method, SERVER_URL + uri, headers, file) <NEW_LINE> if status != 200: <NEW_LINE> <INDENT> raise Exception("Can't upload file via API, status=%d, response: %s" % (status, 
content)) | 使用又拍云提供的REST API上传文件
文档地址:https://help.upyun.com/knowledge-base/rest_api/
认证方式采用签名认证:https://help.upyun.com/knowledge-base/object_storage_authorization/#e7adbee5908de8aea4e8af81 | 62598fb28a349b6b436862b2 |
class MST(object): <NEW_LINE> <INDENT> def __init__(self, trace, data, dataquality): <NEW_LINE> <INDENT> self.mst = clibmseed.mst_init(None) <NEW_LINE> sampletype = SAMPLETYPE[data.dtype.type] <NEW_LINE> self.mst.contents.network = trace.stats.network <NEW_LINE> self.mst.contents.station = trace.stats.station <NEW_LINE> self.mst.contents.location = trace.stats.location <NEW_LINE> self.mst.contents.channel = trace.stats.channel <NEW_LINE> self.mst.contents.dataquality = dataquality <NEW_LINE> self.mst.contents.type = '\x00' <NEW_LINE> self.mst.contents.starttime = util._convertDatetimeToMSTime(trace.stats.starttime) <NEW_LINE> self.mst.contents.endtime = util._convertDatetimeToMSTime(trace.stats.endtime) <NEW_LINE> self.mst.contents.samprate = trace.stats.sampling_rate <NEW_LINE> self.mst.contents.samplecnt = trace.stats.npts <NEW_LINE> self.mst.contents.numsamples = trace.stats.npts <NEW_LINE> self.mst.contents.sampletype = sampletype <NEW_LINE> if data.dtype.byteorder != "=": <NEW_LINE> <INDENT> data = data.byteswap() <NEW_LINE> <DEDENT> bytecount = data.itemsize * data.size <NEW_LINE> self.mst.contents.datasamples = clibmseed.allocate_bytes(bytecount) <NEW_LINE> C.memmove(self.mst.contents.datasamples, data.ctypes.get_data(), bytecount) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> clibmseed.mst_free(C.pointer(self.mst)) <NEW_LINE> del self.mst | Class that transforms a ObsPy Trace object to a libmseed internal MSTrace
struct. | 62598fb2796e427e5384e80a |
class Sextant(Client): <NEW_LINE> <INDENT> def __init__(self, access_key: str, url: str = "") -> None: <NEW_LINE> <INDENT> super().__init__(access_key, url) <NEW_LINE> self._open_api = urljoin(self.gateway_url, "apps-sextant/v1/") <NEW_LINE> <DEDENT> def _generate_benmarks( self, offset: int = 0, limit: int = 128 ) -> Generator[Benchmark, None, int]: <NEW_LINE> <INDENT> params: Dict[str, Any] = {"offset": offset, "limit": limit} <NEW_LINE> response = self.open_api_do("GET", "benchmarks", "", params=params).json() <NEW_LINE> for benchmark in response["benchmarks"]: <NEW_LINE> <INDENT> yield Benchmark(benchmark["name"], benchmark["benchmarkId"], self) <NEW_LINE> <DEDENT> return response["totalCount"] <NEW_LINE> <DEDENT> def list_benchmarks(self) -> PagingList[Benchmark]: <NEW_LINE> <INDENT> return PagingList(self._generate_benmarks, 128) <NEW_LINE> <DEDENT> def get_benchmark(self, name: str) -> Benchmark: <NEW_LINE> <INDENT> for benchmark in self.list_benchmarks(): <NEW_LINE> <INDENT> if benchmark.name == name: <NEW_LINE> <INDENT> return benchmark <NEW_LINE> <DEDENT> <DEDENT> raise ResourceNotExistError(resource="benchmark", identification=name) | This class defines :class:`Sextant`.
Arguments:
access_key: User's access key.
url: The URL of the graviti gas website. | 62598fb2442bda511e95c4cc |
class EVEClient(GenericClient): <NEW_LINE> <INDENT> def _get_url_for_timerange(self, timerange, **kwargs): <NEW_LINE> <INDENT> if timerange.start.strftime('%M-%S') != '00-00': <NEW_LINE> <INDENT> timerange = TimeRange(timerange.start.strftime('%Y-%m-%d'), timerange.end) <NEW_LINE> <DEDENT> eve = Scraper(BASEURL) <NEW_LINE> return eve.filelist(timerange) <NEW_LINE> <DEDENT> def _get_time_for_url(self, urls): <NEW_LINE> <INDENT> eve = Scraper(BASEURL) <NEW_LINE> times = list() <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> t0 = eve._extractDateURL(url) <NEW_LINE> times.append(TimeRange(t0, t0 + TimeDelta(1*u.day))) <NEW_LINE> <DEDENT> return times <NEW_LINE> <DEDENT> def _makeimap(self): <NEW_LINE> <INDENT> self.map_['source'] = 'SDO' <NEW_LINE> self.map_['provider'] = 'LASP' <NEW_LINE> self.map_['instrument'] = 'eve' <NEW_LINE> self.map_['physobs'] = 'irradiance' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _can_handle_query(cls, *query): <NEW_LINE> <INDENT> chk_var = 0 <NEW_LINE> for x in query: <NEW_LINE> <INDENT> if x.__class__.__name__ == 'Instrument' and x.value.lower() == 'eve': <NEW_LINE> <INDENT> chk_var += 1 <NEW_LINE> <DEDENT> elif x.__class__.__name__ == 'Level' and x.value == 0: <NEW_LINE> <INDENT> chk_var += 1 <NEW_LINE> <DEDENT> <DEDENT> if chk_var == 2: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Provides access to Level 0C Extreme ultraviolet Variability Experiment (EVE) data
as hosted by `LASP <http://lasp.colorado.edu/home/eve/data/data-access/>`_.
To use this client you must request Level 0 data.
Examples
--------
>>> from sunpy.net import Fido, attrs as a
>>> results = Fido.search(a.Time("2016/1/1", "2016/1/2"),
... a.Instrument('EVE'), a.Level(0)) #doctest: +REMOTE_DATA
>>> results #doctest: +REMOTE_DATA +ELLIPSIS
<sunpy.net.fido_factory.UnifiedResponse object at ...>
Results from 1 Provider:
<BLANKLINE>
2 Results from the EVEClient:
Start Time End Time Source Instrument Wavelength
str19 str19 str3 str3 str3
------------------- ------------------- ------ ---------- ----------
2016-01-01 00:00:00 2016-01-02 00:00:00 SDO eve nan
2016-01-02 00:00:00 2016-01-03 00:00:00 SDO eve nan
<BLANKLINE>
<BLANKLINE> | 62598fb297e22403b383af84 |
class Settings(Gio.Settings): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Gio.Settings.__init__(self) <NEW_LINE> <DEDENT> def new(): <NEW_LINE> <INDENT> settings = Gio.Settings.new('org.gnome.Lollypop') <NEW_LINE> settings.__class__ = Settings <NEW_LINE> return settings <NEW_LINE> <DEDENT> def get_music_paths(self): <NEW_LINE> <INDENT> paths = self.get_value('music-path') <NEW_LINE> if not paths: <NEW_LINE> <INDENT> if GLib.get_user_special_dir(GLib.UserDirectory.DIRECTORY_MUSIC): <NEW_LINE> <INDENT> paths = [GLib.get_user_special_dir( GLib.UserDirectory.DIRECTORY_MUSIC)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("You need to add a music path" " to org.gnome.Lollypop in dconf") <NEW_LINE> <DEDENT> <DEDENT> return paths | Lollypop settings | 62598fb244b2445a339b69ac |
class DataShape(object): <NEW_LINE> <INDENT> __metaclass__ = Type <NEW_LINE> composite = False <NEW_LINE> def __init__(self, parameters=None, name=None): <NEW_LINE> <INDENT> if type(parameters) is DataShape: <NEW_LINE> <INDENT> self.parameters = parameters <NEW_LINE> <DEDENT> elif len(parameters) > 0: <NEW_LINE> <INDENT> self.parameters = tuple(flatten(parameters)) <NEW_LINE> self.composite = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.parameters = tuple() <NEW_LINE> self.composite = False <NEW_LINE> <DEDENT> if name: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.__metaclass__._registry[name] = self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = None <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.parameters[index] <NEW_LINE> <DEDENT> def __rmul__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, (DataShape, Primitive)): <NEW_LINE> <INDENT> other = shape_coerce(other) <NEW_LINE> <DEDENT> return product(other, self) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, (DataShape, Primitive)): <NEW_LINE> <INDENT> other = shape_coerce(other) <NEW_LINE> <DEDENT> return product(other, self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.name: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (', '.join(map(str, self.parameters))) <NEW_LINE> <DEDENT> <DEDENT> def _equal(self, other): <NEW_LINE> <INDENT> return all(a==b for a,b in zip(self, other)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(other) is DataShape: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Cannot compare non-datashape to datashape') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ''.join(["dshape(\"", doublequote(str(self)), "\")"]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def shape(self): <NEW_LINE> <INDENT> return 
self.parameters[:-1] | The Datashape class, implementation for generic composite
datashape objects | 62598fb216aa5153ce400579 |
class ValidationException(MobilemApiException): <NEW_LINE> <INDENT> num = 1200 | Common validation exception | 62598fb255399d3f05626590 |
class ReferenceDefinition(Base): <NEW_LINE> <INDENT> __table_args__ = {'schema': 'groundwater_protection_sites'} <NEW_LINE> __tablename__ = 'reference_definition' <NEW_LINE> id = sa.Column(sa.String, primary_key=True, autoincrement=False) <NEW_LINE> topic = sa.Column(sa.String, nullable=True) <NEW_LINE> canton = sa.Column(sa.String(2), nullable=True) <NEW_LINE> municipality = sa.Column(sa.Integer, nullable=True) <NEW_LINE> office_id = sa.Column(sa.String, sa.ForeignKey( Office.id), nullable=False ) <NEW_LINE> responsible_office = relationship(Office) <NEW_LINE> liefereinheit = sa.Column(sa.Integer, nullable=True) | The meta bucket for definitions which are directly related to a public law restriction in a common way or
to the whole canton or a whole municipality. It is used to have a place to store general documents
which are related to an extract but not directly on a special public law restriction situation.
Attributes:
id (int): The identifier. This is used in the database only and must not be set manually. If
you don't like it - don't care about.
topic (str): The topic which this definition might be related to.
canton (str): The canton this definition is related to.
municipality (int): The municipality this definition is related to.
office_id (int): The foreign key constraint which the definition is related to.
responsible_office (pyramid_oereb.standard.models.groundwater_protection_sites.Office):
The dedicated relation to the office instance from database. | 62598fb263d6d428bbee2822 |
class FireStatsDialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/FireStats/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull()) | Test rerources work. | 62598fb25fcc89381b266187 |
class UnicodeRawConfigParser(RawConfigParser): <NEW_LINE> <INDENT> def write(self, fp): <NEW_LINE> <INDENT> if self._defaults: <NEW_LINE> <INDENT> fp.write("[%s]\n" % DEFAULTSECT) <NEW_LINE> for (key, value) in self._defaults.items(): <NEW_LINE> <INDENT> fp.write("%s = %s\n" % (key, getUnicode(value, "UTF8").replace('\n', '\n\t'))) <NEW_LINE> <DEDENT> fp.write("\n") <NEW_LINE> <DEDENT> for section in self._sections: <NEW_LINE> <INDENT> fp.write("[%s]\n" % section) <NEW_LINE> for (key, value) in self._sections[section].items(): <NEW_LINE> <INDENT> if key != "__name__": <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> fp.write("%s\n" % (key)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fp.write("%s = %s\n" % (key, getUnicode(value, "UTF8").replace('\n', '\n\t'))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> fp.write("\n") | RawConfigParser with unicode writing support | 62598fb2cc0a2c111447b088 |
class OperationInputs(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: str, **kwargs ): <NEW_LINE> <INDENT> super(OperationInputs, self).__init__(**kwargs) <NEW_LINE> self.name = name | Input values for operation results call.
All required parameters must be populated in order to send to Azure.
:ivar name: Required. The name of the Provisioning Service to check.
:vartype name: str | 62598fb23346ee7daa337682 |
class ModelIdentifier(object): <NEW_LINE> <INDENT> def __init__( self, arch_id=0, input_id=0, hypers={}, n_split=10, split=-1, split_seed=252, path='', prefix='model', formatter={}): <NEW_LINE> <INDENT> super(ModelIdentifier, self).__init__() <NEW_LINE> self.arch_id = int(arch_id) <NEW_LINE> self.input_id = int(input_id) <NEW_LINE> self._add_hypers(hypers) <NEW_LINE> self.split_seed = int(split_seed) <NEW_LINE> self.n_split = int(n_split) <NEW_LINE> self.split = int(split) <NEW_LINE> self.path = Path(path) <NEW_LINE> self.prefix = prefix <NEW_LINE> self._add_formatter(formatter) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> split = self.split <NEW_LINE> if split == -1: <NEW_LINE> <INDENT> split = 'x' <NEW_LINE> <DEDENT> hypers_string = '' <NEW_LINE> if len(self.hypers) > 0: <NEW_LINE> <INDENT> for k, v in self.hypers.items(): <NEW_LINE> <INDENT> if isinstance(v, float): <NEW_LINE> <INDENT> k_formatter = self.formatter[k] <NEW_LINE> v_str = k_formatter(v) <NEW_LINE> hypers_string += '-{0}'.format(v_str) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hypers_string += '-{0}'.format(v) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> name = '{0}-{1}-{2}{3}-{4}'.format( self.prefix, self.arch_id, self.input_id, hypers_string, split ) <NEW_LINE> return name <NEW_LINE> <DEDENT> @property <NEW_LINE> def pathname(self): <NEW_LINE> <INDENT> pn = self.path / Path(self.name) <NEW_LINE> return pn <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> d = { 'arch_id': self.arch_id, 'input_id': self.input_id, 'split_seed': self.split_seed, 'n_split': self.n_split, 'split': self.split, } <NEW_LINE> for k, v in self.hypers.items(): <NEW_LINE> <INDENT> d.update( {'hyp_' + k: v} ) <NEW_LINE> <DEDENT> return d <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> d = { 'arch_id': self.arch_id, 'input_id': self.input_id, 'hypers': self.hypers, 'split_seed': self.split_seed, 'n_split': self.n_split, 'split': self.split, 'path': os.fspath(self.path), 
'prefix': self.prefix, 'precision': self.precision } <NEW_LINE> return d <NEW_LINE> <DEDENT> def _add_hypers(self, hypers): <NEW_LINE> <INDENT> self.hypers = hypers <NEW_LINE> for k, v in hypers.items(): <NEW_LINE> <INDENT> if not hasattr(self, k): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Attribute {0} already exists.'.format(k) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _add_formatter(self, formatter): <NEW_LINE> <INDENT> for k, v in self.hypers.items(): <NEW_LINE> <INDENT> if isinstance(v, float): <NEW_LINE> <INDENT> if k not in formatter: <NEW_LINE> <INDENT> formatter[k] = lambda x: str(x) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.formatter = formatter | Object to keep track of a model's identifying information.
Attributes:
arch_id: Integer that identifies the model architecture.
input_id: Integer that identifies the input data.
n_dim: Integer indicating the dimensionality of the embedding.
spit_seed: Integer indicating the split seed.
split: Integer indicating the split (zero-indexed). A value of
'-1' indicates that all of the provided data was used for
training and there was no validation set.
name: A string representation of ID.
path: A Path object representing the complete path.
Methods:
as_dict: Return dictionary representation of ID. | 62598fb27b180e01f3e4908b |
class Solution: <NEW_LINE> <INDENT> def sumKEven(self, k): <NEW_LINE> <INDENT> res = 0 <NEW_LINE> for i in range(1, k + 1): <NEW_LINE> <INDENT> tmp = str(i) + str(i)[::-1] <NEW_LINE> res += int(tmp) <NEW_LINE> <DEDENT> return res | @param k:
@return: the sum of first k even-length palindrome numbers | 62598fb2379a373c97d9908b |
class RenderViewCommand: <NEW_LINE> <INDENT> def GetResources(self): <NEW_LINE> <INDENT> return { "Pixmap": os.path.join(ICONDIR, "RenderView.svg"), "MenuText": QT_TRANSLATE_NOOP( "RenderViewCommand", "Rendering View" ), "ToolTip": QT_TRANSLATE_NOOP( "RenderViewCommand", "Create a Rendering View of the " "selected object(s) in the selected " "project or the default project", ), } <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> selection = Gui.Selection.getSelection() <NEW_LINE> objs, projs = [], [] <NEW_LINE> for item in selection: <NEW_LINE> <INDENT> (projs if RendererHandler.is_project(item) else objs).append(item) <NEW_LINE> <DEDENT> activedoc_projects = filter( RendererHandler.is_project, App.ActiveDocument.Objects ) <NEW_LINE> try: <NEW_LINE> <INDENT> target_project = next(it.chain(projs, activedoc_projects)) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> msg = ( translate( "Render", "[Render] Unable to find a valid project in selection " "or document", ) + "\n" ) <NEW_LINE> App.Console.PrintError(msg) <NEW_LINE> return <NEW_LINE> <DEDENT> QApplication.setOverrideCursor(QCursor(Qt.WaitCursor)) <NEW_LINE> target_project.Proxy.add_views(objs) <NEW_LINE> QApplication.restoreOverrideCursor() | GUI command to create a rendering view of an object in a project.
The command operates on the selected object(s) and the selected project,
or the default project. | 62598fb27d847024c075c438 |
class BgpPeerStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'local_address': {'readonly': True}, 'neighbor': {'readonly': True}, 'asn': {'readonly': True}, 'state': {'readonly': True}, 'connected_duration': {'readonly': True}, 'routes_received': {'readonly': True}, 'messages_sent': {'readonly': True}, 'messages_received': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'local_address': {'key': 'localAddress', 'type': 'str'}, 'neighbor': {'key': 'neighbor', 'type': 'str'}, 'asn': {'key': 'asn', 'type': 'int'}, 'state': {'key': 'state', 'type': 'str'}, 'connected_duration': {'key': 'connectedDuration', 'type': 'str'}, 'routes_received': {'key': 'routesReceived', 'type': 'long'}, 'messages_sent': {'key': 'messagesSent', 'type': 'long'}, 'messages_received': {'key': 'messagesReceived', 'type': 'long'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(BgpPeerStatus, self).__init__(**kwargs) <NEW_LINE> self.local_address = None <NEW_LINE> self.neighbor = None <NEW_LINE> self.asn = None <NEW_LINE> self.state = None <NEW_LINE> self.connected_duration = None <NEW_LINE> self.routes_received = None <NEW_LINE> self.messages_sent = None <NEW_LINE> self.messages_received = None | BGP peer status details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The virtual network gateway's local address.
:vartype local_address: str
:ivar neighbor: The remote BGP peer.
:vartype neighbor: str
:ivar asn: The autonomous system number of the remote BGP peer.
:vartype asn: int
:ivar state: The BGP peer state. Possible values include: "Unknown", "Stopped", "Idle",
"Connecting", "Connected".
:vartype state: str or ~azure.mgmt.network.v2018_08_01.models.BgpPeerState
:ivar connected_duration: For how long the peering has been up.
:vartype connected_duration: str
:ivar routes_received: The number of routes learned from this peer.
:vartype routes_received: long
:ivar messages_sent: The number of BGP messages sent.
:vartype messages_sent: long
:ivar messages_received: The number of BGP messages received.
:vartype messages_received: long | 62598fb23539df3088ecc328 |
class Action(models.Model): <NEW_LINE> <INDENT> actor_content_type = models.ForeignKey(ContentType,related_name='actor') <NEW_LINE> actor_object_id = models.PositiveIntegerField() <NEW_LINE> actor = generic.GenericForeignKey('actor_content_type','actor_object_id') <NEW_LINE> verb = models.CharField(max_length=255) <NEW_LINE> description = models.TextField(blank=True,null=True) <NEW_LINE> target_content_type = models.ForeignKey(ContentType,related_name='target',blank=True,null=True) <NEW_LINE> target_object_id = models.PositiveIntegerField(blank=True,null=True) <NEW_LINE> target = generic.GenericForeignKey('target_content_type','target_object_id') <NEW_LINE> action_object_content_type = models.ForeignKey(ContentType,related_name='action_object',blank=True,null=True) <NEW_LINE> action_object_object_id = models.PositiveIntegerField(blank=True,null=True) <NEW_LINE> action_object = generic.GenericForeignKey('action_object_content_type','action_object_object_id') <NEW_LINE> timestamp = models.DateTimeField(auto_now_add=True, db_index=True) <NEW_LINE> public = models.BooleanField(default=True) <NEW_LINE> objects = ActionManager() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> if self.target: <NEW_LINE> <INDENT> if self.action_object: <NEW_LINE> <INDENT> return u'%s %s %s on %s %s ago' % (self.actor, self.verb, self.action_object, self.target, self.timesince()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return u'%s %s %s %s ago' % (self.actor, self.verb, self.target, self.timesince()) <NEW_LINE> <DEDENT> <DEDENT> return u'%s %s %s ago' % (self.actor, self.verb, self.timesince()) <NEW_LINE> <DEDENT> def actor_url(self): <NEW_LINE> <INDENT> return reverse('actstream_actor', None, (self.actor_content_type.pk, self.actor_object_id)) <NEW_LINE> <DEDENT> def target_url(self): <NEW_LINE> <INDENT> return reverse('actstream_actor', None, (self.target_content_type.pk, self.target_object_id)) <NEW_LINE> <DEDENT> def timesince(self, now=None): <NEW_LINE> <INDENT> return 
timesince_(self.timestamp, now) <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('actstream.views.detail', [self.pk]) | Action model describing the actor acting out a verb (on an optional target).
Nomenclature based on http://martin.atkins.me.uk/specs/activitystreams/atomactivity
Generalized Format::
<actor> <verb> <time>
<actor> <verb> <target> <time>
<actor> <verb> <action_object> <target> <time>
Examples::
<justquick> <reached level 60> <1 minute ago>
<brosner> <commented on> <pinax/pinax> <2 hours ago>
<washingtontimes> <started follow> <justquick> <8 minutes ago>
<mitsuhiko> <closed> <issue 70> on <mitsuhiko/flask> <about 3 hours ago>
Unicode Representation::
justquick reached level 60 1 minute ago
mitsuhiko closed issue 70 on mitsuhiko/flask 3 hours ago
HTML Representation::
<a href="http://oebfare.com/">brosner</a> commented on <a href="http://github.com/pinax/pinax">pinax/pinax</a> 2 hours ago | 62598fb2009cb60464d01598 |
class BaseConversionTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_base(self): <NEW_LINE> <INDENT> self.assertEquals(u'11', calc.base(11, 10)) <NEW_LINE> self.assertEquals(u'12', calc.base(10, 8)) <NEW_LINE> self.assertEquals(u'A', calc.base(10, 16)) <NEW_LINE> self.assertEquals(u'3YW', calc.base(5144, 36)) | Tests for L{eridanusstd.calc.base}. | 62598fb266656f66f7d5a466 |
class DeleteContent(MyUserBaseHandler): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(DeleteContent, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @token_required() <NEW_LINE> @deco_jsonp() <NEW_LINE> async def post(self, *args, **kwargs): <NEW_LINE> <INDENT> post_json = self.get_post_body_dict() <NEW_LINE> content_id = post_json.get('content_id', None) <NEW_LINE> if list_have_none_mem(*[content_id]): <NEW_LINE> <INDENT> return ConstData.msg_args_wrong <NEW_LINE> <DEDENT> mongo_conn = self.get_async_mongo() <NEW_LINE> mycol = mongo_conn['dashboard_content'] <NEW_LINE> mycol.update({'_id': ObjectId(content_id)}, {'$set': {'is_del': True}}, upsert=False) <NEW_LINE> return ConstData.msg_succeed | 删除文本内容 | 62598fb2aad79263cf42e84a |
class Refund(object): <NEW_LINE> <INDENT> def __init__(self, attributes, api_response): <NEW_LINE> <INDENT> self.attributes = attributes <NEW_LINE> self.api_response = api_response <NEW_LINE> <DEDENT> @property <NEW_LINE> def amount(self): <NEW_LINE> <INDENT> return self.attributes.get('amount') <NEW_LINE> <DEDENT> @property <NEW_LINE> def created_at(self): <NEW_LINE> <INDENT> return self.attributes.get('created_at') <NEW_LINE> <DEDENT> @property <NEW_LINE> def currency(self): <NEW_LINE> <INDENT> return self.attributes.get('currency') <NEW_LINE> <DEDENT> @property <NEW_LINE> def fx(self): <NEW_LINE> <INDENT> return self.Fx(self.attributes.get('fx')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self.attributes.get('id') <NEW_LINE> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self.Links(self.attributes.get('links')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self.attributes.get('metadata') <NEW_LINE> <DEDENT> @property <NEW_LINE> def reference(self): <NEW_LINE> <INDENT> return self.attributes.get('reference') <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self.attributes.get('status') <NEW_LINE> <DEDENT> class Fx(object): <NEW_LINE> <INDENT> def __init__(self, attributes): <NEW_LINE> <INDENT> self.attributes = attributes <NEW_LINE> <DEDENT> @property <NEW_LINE> def estimated_exchange_rate(self): <NEW_LINE> <INDENT> return self.attributes.get('estimated_exchange_rate') <NEW_LINE> <DEDENT> @property <NEW_LINE> def exchange_rate(self): <NEW_LINE> <INDENT> return self.attributes.get('exchange_rate') <NEW_LINE> <DEDENT> @property <NEW_LINE> def fx_amount(self): <NEW_LINE> <INDENT> return self.attributes.get('fx_amount') <NEW_LINE> <DEDENT> @property <NEW_LINE> def fx_currency(self): <NEW_LINE> <INDENT> return self.attributes.get('fx_currency') <NEW_LINE> <DEDENT> <DEDENT> class Links(object): <NEW_LINE> <INDENT> def 
__init__(self, attributes): <NEW_LINE> <INDENT> self.attributes = attributes <NEW_LINE> <DEDENT> @property <NEW_LINE> def mandate(self): <NEW_LINE> <INDENT> return self.attributes.get('mandate') <NEW_LINE> <DEDENT> @property <NEW_LINE> def payment(self): <NEW_LINE> <INDENT> return self.attributes.get('payment') | A thin wrapper around a refund, providing easy access to its
attributes.
Example:
refund = client.refunds.get()
refund.id | 62598fb2d486a94d0ba2c046 |
class ArgsPlugin(Plugin): <NEW_LINE> <INDENT> def _build_instance(self, arg_list): <NEW_LINE> <INDENT> instance = {} <NEW_LINE> if 'dimensions' in self.args: <NEW_LINE> <INDENT> instance['dimensions'] = dict(item.strip().split(":") for item in self.args['dimensions'].split(",")) <NEW_LINE> <DEDENT> for arg in arg_list: <NEW_LINE> <INDENT> if arg in self.args: <NEW_LINE> <INDENT> instance[arg] = self.args[arg] <NEW_LINE> <DEDENT> <DEDENT> return instance <NEW_LINE> <DEDENT> def _check_required_args(self, arg_list): <NEW_LINE> <INDENT> if self.args is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for arg in arg_list: <NEW_LINE> <INDENT> if arg not in self.args: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def dependencies_installed(self): <NEW_LINE> <INDENT> return True | Base plugin for detection plugins that take arguments for configuration rather than do detection. | 62598fb2bd1bec0571e150fe |
class QuizAnswers(Document): <NEW_LINE> <INDENT> quiz = ReferenceField(QuestionSet, required=True) <NEW_LINE> question_order = ListField(ReferenceField(WikiQuestion)) <NEW_LINE> workerId = StringField(required=True) <NEW_LINE> assignmentId = StringField(required=True, unique_with=['workerId']) <NEW_LINE> quiz_answer_procedure = ListField(ReferenceField(WikiQuestionAnswer)) <NEW_LINE> quiz_final_answers = ListField(ReferenceField(WikiQuestionAnswer)) <NEW_LINE> quiz_submit_time = DateTimeField() <NEW_LINE> quiz_time_delta = IntField() <NEW_LINE> comment = StringField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.quiz) + str(self.workerId) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.__str__()) | Record a list of answers for a quiz (question set) | 62598fb285dfad0860cbfaaf |
class PhysicsInterface: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._base_update_called = False <NEW_LINE> self._num_update_calls = 0 <NEW_LINE> <DEDENT> def update(self, dt): <NEW_LINE> <INDENT> self._base_update_called = True <NEW_LINE> self._num_update_calls += 1 <NEW_LINE> <DEDENT> def create_physics(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def destroy_physics(self): <NEW_LINE> <INDENT> pass | An interface to apply updates to physics | 62598fb24f6381625f1994fb |
class TranslationSynthesisEventArgs(SessionEventArgs): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> for _s in [SessionEventArgs]: <NEW_LINE> <INDENT> __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) <NEW_LINE> <DEDENT> __setattr__ = lambda self, name, value: _swig_setattr(self, TranslationSynthesisEventArgs, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> for _s in [SessionEventArgs]: <NEW_LINE> <INDENT> __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) <NEW_LINE> <DEDENT> __getattr__ = lambda self, name: _swig_getattr(self, TranslationSynthesisEventArgs, name) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> raise AttributeError("No constructor defined") <NEW_LINE> <DEDENT> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _speech_py_impl.delete_TranslationSynthesisEventArgs <NEW_LINE> __del__ = lambda self: None <NEW_LINE> def _get_result(self) : <NEW_LINE> <INDENT> return _speech_py_impl.TranslationSynthesisEventArgs__get_result(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def result(self) : <NEW_LINE> <INDENT> return self._get_result() | Defines payload that is sent with the event :py:attr:`TranslationRecognizer.synthesizing`. | 62598fb28a349b6b436862b4 |
class HideHudFlags(IntEnum): <NEW_LINE> <INDENT> WEAPONSELECTION = HIDEHUD_WEAPONSELECTION <NEW_LINE> FLASHLIGHT = HIDEHUD_FLASHLIGHT <NEW_LINE> ALL = HIDEHUD_ALL <NEW_LINE> HEALTH = HIDEHUD_HEALTH <NEW_LINE> PLAYERDEAD = HIDEHUD_PLAYERDEAD <NEW_LINE> NEEDSUIT = HIDEHUD_NEEDSUIT <NEW_LINE> MISCSTATUS = HIDEHUD_MISCSTATUS <NEW_LINE> CHAT = HIDEHUD_CHAT <NEW_LINE> CROSSHAIR = HIDEHUD_CROSSHAIR <NEW_LINE> VEHICLE_CROSSHAIR = HIDEHUD_VEHICLE_CROSSHAIR <NEW_LINE> INVEHICLE = HIDEHUD_INVEHICLE <NEW_LINE> BONUS_PROGRESS = HIDEHUD_BONUS_PROGRESS <NEW_LINE> RADAR = HIDEHUD_RADAR | Hide hud flags wrapper enumerator. | 62598fb25fdd1c0f98e5e004 |
class InlineEntityListDescriptor(EntityListDescriptor): <NEW_LINE> <INDENT> def __init__(self, tag, klass, *args): <NEW_LINE> <INDENT> super(EntityListDescriptor, self).__init__(tag, klass) <NEW_LINE> self.rootkeys = args <NEW_LINE> <DEDENT> def __get__(self, instance, cls): <NEW_LINE> <INDENT> instance.get() <NEW_LINE> result = [] <NEW_LINE> rootnode=instance.root <NEW_LINE> for rootkey in self.rootkeys: <NEW_LINE> <INDENT> rootnode=rootnode.find(rootkey) <NEW_LINE> <DEDENT> for node in rootnode.findall(self.tag): <NEW_LINE> <INDENT> entity = self.klass(instance.lims, uri=node.attrib['uri']) <NEW_LINE> entity.root = node <NEW_LINE> result.append(entity) <NEW_LINE> <DEDENT> return result | EntityListDescriptor which saves the XML tags in the parent entity as the
root elements of the referenced entities. Useful when the full body of the
referenced entity is enclosed in the parent. | 62598fb2be8e80087fbbf0dd |
class Privileges(object): <NEW_LINE> <INDENT> stmts = None <NEW_LINE> roles = None <NEW_LINE> aws_api_list = None <NEW_LINE> def __init__(self, aws_api_list): <NEW_LINE> <INDENT> self.stmts = [] <NEW_LINE> self.roles = [] <NEW_LINE> self.aws_api_list = aws_api_list <NEW_LINE> <DEDENT> def add_stmt(self, stmt): <NEW_LINE> <INDENT> if 'Action' not in stmt: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.stmts.append(stmt) <NEW_LINE> <DEDENT> def get_actions_from_statement(self, stmt): <NEW_LINE> <INDENT> actions = {} <NEW_LINE> for action in make_list(stmt['Action']): <NEW_LINE> <INDENT> action = action.lower() <NEW_LINE> action = '^' + action.replace('*', '.*') + '$' <NEW_LINE> for possible_action in self.aws_api_list: <NEW_LINE> <INDENT> for iam_name, cloudtrail_name in EVENT_RENAMES.items(): <NEW_LINE> <INDENT> if possible_action == cloudtrail_name: <NEW_LINE> <INDENT> possible_action = iam_name <NEW_LINE> <DEDENT> <DEDENT> if re.match(action, possible_action): <NEW_LINE> <INDENT> actions[possible_action] = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return actions <NEW_LINE> <DEDENT> def determine_allowed(self): <NEW_LINE> <INDENT> actions = {} <NEW_LINE> for stmt in self.stmts: <NEW_LINE> <INDENT> if stmt['Effect'] == 'Allow': <NEW_LINE> <INDENT> stmt_actions = self.get_actions_from_statement(stmt) <NEW_LINE> for action in stmt_actions: <NEW_LINE> <INDENT> if action not in actions: <NEW_LINE> <INDENT> actions[action] = [stmt] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> actions[action].append(stmt) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for stmt in self.stmts: <NEW_LINE> <INDENT> if (stmt['Effect'] == 'Deny' and '*' in make_list(stmt.get('Resource', None)) and stmt.get('Condition', None) is None): <NEW_LINE> <INDENT> stmt_actions = self.get_actions_from_statement(stmt) <NEW_LINE> for action in stmt_actions: <NEW_LINE> <INDENT> if action in actions: <NEW_LINE> <INDENT> del actions[action] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return 
list(actions) | Keep track of privileges an actor has been granted | 62598fb27d43ff248742743e |
class DUMP3res(BaseObj): <NEW_LINE> <INDENT> _strfmt1 = "" <NEW_LINE> _attrlist = ("mountlist",) <NEW_LINE> def __init__(self, unpack): <NEW_LINE> <INDENT> self.mountlist = unpack.unpack_list(mountentry) | struct DUMP3res {
mountentry *mountlist;
}; | 62598fb2be383301e0253872 |
class SummaryStatisticsNumba(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @jit <NEW_LINE> def calculate_number_observation(self, one_dimensional_array): <NEW_LINE> <INDENT> number_observation = one_dimensional_array.size <NEW_LINE> return number_observation <NEW_LINE> <DEDENT> @jit <NEW_LINE> def calcuate_arithmetic_mean(self, one_dimensional_array, number_observation): <NEW_LINE> <INDENT> sum_result = 0.0 <NEW_LINE> for i in range(number_observation): <NEW_LINE> <INDENT> sum_result += one_dimensional_array[i] <NEW_LINE> <DEDENT> arithmetic_mean = sum_result / number_observation <NEW_LINE> return arithmetic_mean <NEW_LINE> <DEDENT> @jit <NEW_LINE> def calculate_median(self, one_dimensional_array, number_observation): <NEW_LINE> <INDENT> one_dimensional_array.sort() <NEW_LINE> half_position = number_observation // 2 <NEW_LINE> if not number_observation % 2: <NEW_LINE> <INDENT> median = (one_dimensional_array[half_position - 1] + one_dimensional_array[half_position]) / 2.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> median = one_dimensional_array[half_position] <NEW_LINE> <DEDENT> return median <NEW_LINE> <DEDENT> @jit <NEW_LINE> def calculate_sample_standard_deviation(self, one_dimensional_array, number_observation, arithmetic_mean): <NEW_LINE> <INDENT> sum_result = 0.0 <NEW_LINE> for i in range(number_observation): <NEW_LINE> <INDENT> sum_result += pow((one_dimensional_array[i] - arithmetic_mean), 2) <NEW_LINE> <DEDENT> sample_variance = sum_result / (number_observation - 1) <NEW_LINE> sample_standard_deviation = sqrt(sample_variance) <NEW_LINE> return sample_standard_deviation | calculate number of observations, arithmetic mean, median
and sample standard deviation using numba library | 62598fb23539df3088ecc32a |
@pytest.mark.usefixtures('versioning_manager', 'table_creator') <NEW_LINE> class TestColumnExclusion(object): <NEW_LINE> <INDENT> @pytest.fixture <NEW_LINE> def textitem_cls(self, base): <NEW_LINE> <INDENT> class TextItem(base): <NEW_LINE> <INDENT> __tablename__ = 'textitem' <NEW_LINE> __versioned__ = {'exclude': ['_created_at']} <NEW_LINE> id = sa.Column(sa.Integer, primary_key=True) <NEW_LINE> title = sa.Column(sa.String) <NEW_LINE> created_at = sa.Column('_created_at', sa.DateTime) <NEW_LINE> type = sa.Column(sa.String) <NEW_LINE> __mapper_args__ = {'polymorphic_on': type} <NEW_LINE> <DEDENT> return TextItem <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def article_cls(self, textitem_cls): <NEW_LINE> <INDENT> class Article(textitem_cls): <NEW_LINE> <INDENT> __tablename__ = 'article' <NEW_LINE> __versioned__ = {'exclude': ['_updated_at']} <NEW_LINE> id = sa.Column( sa.Integer, sa.ForeignKey(textitem_cls.id), primary_key=True ) <NEW_LINE> updated_at = sa.Column('_updated_at', sa.DateTime) <NEW_LINE> content = sa.Column('_content', sa.String) <NEW_LINE> __mapper_args__ = {'polymorphic_identity': 'article'} <NEW_LINE> <DEDENT> return Article <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def models(self, article_cls, textitem_cls): <NEW_LINE> <INDENT> return [article_cls, textitem_cls] <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def article(self, article_cls, session): <NEW_LINE> <INDENT> article = article_cls( updated_at=datetime(2001, 1, 1), created_at=datetime(2000, 1, 1), title='Some title', content='Some content' ) <NEW_LINE> session.add(article) <NEW_LINE> session.commit() <NEW_LINE> return article <NEW_LINE> <DEDENT> def test_updating_excluded_child_attr_does_not_add_activity( self, article, session, activity_cls ): <NEW_LINE> <INDENT> article.updated_at = datetime(2002, 1, 1) <NEW_LINE> session.commit() <NEW_LINE> assert session.query(activity_cls).count() == 2 | Test column exclusion with polymorphic inheritance and column aliases to
cover as many edge cases as possible. | 62598fb2379a373c97d9908d |
class MiddlewarePipeline(HTTPAdapter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._current_middleware = None <NEW_LINE> self._first_middleware = None <NEW_LINE> self.poolmanager = PoolManager(ssl_version=ssl.PROTOCOL_TLSv1_2) <NEW_LINE> <DEDENT> def add_middleware(self, middleware): <NEW_LINE> <INDENT> if self._middleware_present(): <NEW_LINE> <INDENT> self._current_middleware.next = middleware <NEW_LINE> self._current_middleware = middleware <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._first_middleware = middleware <NEW_LINE> self._current_middleware = self._first_middleware <NEW_LINE> <DEDENT> <DEDENT> def send(self, request, **kwargs): <NEW_LINE> <INDENT> middleware_control_json = request.headers.pop('middleware_control', None) <NEW_LINE> if middleware_control_json: <NEW_LINE> <INDENT> middleware_control = json.loads(middleware_control_json) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> middleware_control = dict() <NEW_LINE> <DEDENT> request.context = RequestContext(middleware_control, request.headers) <NEW_LINE> if self._middleware_present(): <NEW_LINE> <INDENT> return self._first_middleware.send(request, **kwargs) <NEW_LINE> <DEDENT> return super().send(request, **kwargs) <NEW_LINE> <DEDENT> def _middleware_present(self): <NEW_LINE> <INDENT> return self._current_middleware | MiddlewarePipeline, entry point of middleware
The pipeline is implemented as a linked-list, read more about
it here https://buffered.dev/middleware-python-requests/ | 62598fb276e4537e8c3ef61e |
class Person(object): <NEW_LINE> <INDENT> def __init__(self, name, slug=None, **kwargs): <NEW_LINE> <INDENT> super(Person, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.biography = self.birthplace = self.tmdb_id = self.birthday = None <NEW_LINE> self.job = self.character = self._images = self._movie_credits = None <NEW_LINE> self._tv_credits = None <NEW_LINE> self.slug = slug or slugify(self.name) <NEW_LINE> if len(kwargs) > 0: <NEW_LINE> <INDENT> self._build(kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._get() <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def search(cls, name, year=None): <NEW_LINE> <INDENT> return search(name, search_type='person', year=year) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ext(self): <NEW_LINE> <INDENT> return 'people/{id}'.format(id=self.slug) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ext_full(self): <NEW_LINE> <INDENT> return self.ext + '?extended=full' <NEW_LINE> <DEDENT> @property <NEW_LINE> def images_ext(self): <NEW_LINE> <INDENT> return self.ext + '?extended=images' <NEW_LINE> <DEDENT> @property <NEW_LINE> def ext_movie_credits(self): <NEW_LINE> <INDENT> return self.ext + '/movies' <NEW_LINE> <DEDENT> @property <NEW_LINE> def ext_tv_credits(self): <NEW_LINE> <INDENT> return self.ext + '/shows' <NEW_LINE> <DEDENT> @get <NEW_LINE> def _get(self): <NEW_LINE> <INDENT> data = yield self.ext_full <NEW_LINE> self._build(data) <NEW_LINE> <DEDENT> def _build(self, data): <NEW_LINE> <INDENT> extract_ids(data) <NEW_LINE> for key, val in data.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> except AttributeError as ae: <NEW_LINE> <INDENT> if not hasattr(self, '_' + key): <NEW_LINE> <INDENT> raise ae <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def ids(self): <NEW_LINE> <INDENT> return {'ids': {'trakt': self.trakt, 'slug': self.slug, 'imdb': self.imdb, 'tmdb': self.tmdb}} <NEW_LINE> <DEDENT> @property <NEW_LINE> @get <NEW_LINE> 
def images(self): <NEW_LINE> <INDENT> if self._images is None: <NEW_LINE> <INDENT> data = yield self.images_ext <NEW_LINE> self._images = data.get('images', {}) <NEW_LINE> <DEDENT> yield self._images <NEW_LINE> <DEDENT> @property <NEW_LINE> @get <NEW_LINE> def movie_credits(self): <NEW_LINE> <INDENT> if self._movie_credits is None: <NEW_LINE> <INDENT> data = yield self.ext_movie_credits <NEW_LINE> self._movie_credits = MovieCredits(**data) <NEW_LINE> <DEDENT> yield self._movie_credits <NEW_LINE> <DEDENT> @property <NEW_LINE> @get <NEW_LINE> def tv_credits(self): <NEW_LINE> <INDENT> if self._tv_credits is None: <NEW_LINE> <INDENT> data = yield self.ext_tv_credits <NEW_LINE> self._tv_credits = TVCredits(**data) <NEW_LINE> <DEDENT> yield self._tv_credits <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<Person>: {0}'.format(self.name) <NEW_LINE> <DEDENT> __repr__ = __str__ | A Class representing a trakt.tv Person such as an Actor or Director | 62598fb2ff9c53063f51a6c5 |
class KMP: <NEW_LINE> <INDENT> def __init__(self, pat): <NEW_LINE> <INDENT> self.pat = pat <NEW_LINE> M = len(pat) <NEW_LINE> self.dfa = [[0 for i in range(M)] for j in range(256)] <NEW_LINE> self.dfa[pat[0]][0] = 1 <NEW_LINE> X = 0 <NEW_LINE> for j in range(M): <NEW_LINE> <INDENT> for c in range(256): <NEW_LINE> <INDENT> self.dfa[c][j] = self.dfa[c][X] <NEW_LINE> <DEDENT> self.dfa[pat[j]][j] = j+1 <NEW_LINE> X = self.dfa[pat[j]][X] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, txt): <NEW_LINE> <INDENT> N = len(txt) <NEW_LINE> M = len(self.pat) <NEW_LINE> j = 0 <NEW_LINE> for i in range(N): <NEW_LINE> <INDENT> j = self.dfa[txt[i]][j] <NEW_LINE> if j == M: <NEW_LINE> <INDENT> return i - M <NEW_LINE> <DEDENT> <DEDENT> return N | Knuth-Morris-Pratt algorithm | 62598fb266656f66f7d5a468 |
class AssessmentFeedbackAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('id', 'submission_uuid',) <NEW_LINE> search_fields = ('id', 'submission_uuid',) <NEW_LINE> readonly_fields = ( 'submission_uuid', 'assessments_by', 'options', 'feedback_text' ) <NEW_LINE> exclude = ('assessments',) <NEW_LINE> def assessments_by(self, assessment_feedback): <NEW_LINE> <INDENT> return format_html_join(", ", '<a href="{}">{}</a>', (( reverse_lazy('admin:assessment_assessment_change', args=[asmt.id]), asmt.scorer_id) for asmt in assessment_feedback.assessments.all() )) | Django admin model for AssessmentFeedbacks. | 62598fb28a349b6b436862b6 |
class ListSnapshot(command.Lister): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(ListSnapshot, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( '--all-projects', action='store_true', default=False, help='Include all projects (admin only)', ) <NEW_LINE> parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> def _format_volume_id(volume_id): <NEW_LINE> <INDENT> volume = volume_id <NEW_LINE> if volume_id in volume_cache.keys(): <NEW_LINE> <INDENT> volume = volume_cache[volume_id].display_name <NEW_LINE> <DEDENT> return volume <NEW_LINE> <DEDENT> if parsed_args.long: <NEW_LINE> <INDENT> columns = ['ID', 'Display Name', 'Display Description', 'Status', 'Size', 'Created At', 'Volume ID', 'Metadata'] <NEW_LINE> column_headers = copy.deepcopy(columns) <NEW_LINE> column_headers[6] = 'Volume' <NEW_LINE> column_headers[7] = 'Properties' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = ['ID', 'Display Name', 'Display Description', 'Status', 'Size'] <NEW_LINE> column_headers = copy.deepcopy(columns) <NEW_LINE> <DEDENT> column_headers[1] = 'Name' <NEW_LINE> column_headers[2] = 'Description' <NEW_LINE> volume_cache = {} <NEW_LINE> try: <NEW_LINE> <INDENT> for s in self.app.client_manager.volume.volumes.list(): <NEW_LINE> <INDENT> volume_cache[s.id] = s <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> search_opts = { 'all_tenants': parsed_args.all_projects, } <NEW_LINE> data = self.app.client_manager.volume.volume_snapshots.list( search_opts=search_opts) <NEW_LINE> return (column_headers, (utils.get_item_properties( s, columns, formatters={'Metadata': utils.format_dict, 'Volume ID': _format_volume_id}, ) for s in data)) | List snapshots | 62598fb2b7558d58954636a4 |
class IncludeRole(TaskInclude): <NEW_LINE> <INDENT> _allow_duplicates = FieldAttribute(isa='bool', default=True, private=True) <NEW_LINE> _private = FieldAttribute(isa='bool', default=None, private=True) <NEW_LINE> _static = FieldAttribute(isa='bool', default=None) <NEW_LINE> def __init__(self, block=None, role=None, task_include=None): <NEW_LINE> <INDENT> super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include) <NEW_LINE> self._from_files = {} <NEW_LINE> self._parent_role = role <NEW_LINE> self._role_name = None <NEW_LINE> <DEDENT> def get_block_list(self, play=None, variable_manager=None, loader=None): <NEW_LINE> <INDENT> if play is None: <NEW_LINE> <INDENT> myplay = self._parent._play <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> myplay = play <NEW_LINE> <DEDENT> ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader) <NEW_LINE> ri.vars.update(self.vars) <NEW_LINE> actual_role = Role.load(ri, myplay, parent_role=self._parent_role, from_files=self._from_files) <NEW_LINE> actual_role._metadata.allow_duplicates = self.allow_duplicates <NEW_LINE> if not self._parent_role: <NEW_LINE> <INDENT> dep_chain = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dep_chain = list(self._parent_role._parents) <NEW_LINE> dep_chain.append(self._parent_role) <NEW_LINE> <DEDENT> blocks = actual_role.compile(play=myplay, dep_chain=dep_chain) <NEW_LINE> for b in blocks: <NEW_LINE> <INDENT> b._parent = self <NEW_LINE> <DEDENT> myplay.handlers = myplay.handlers + actual_role.get_handler_blocks(play=myplay) <NEW_LINE> return blocks <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None): <NEW_LINE> <INDENT> ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader) <NEW_LINE> ir._role_name = ir.args.get('name', ir.args.get('role')) <NEW_LINE> if ir._role_name is None: 
<NEW_LINE> <INDENT> raise AnsibleParserError("'name' is a required field for include_role.") <NEW_LINE> <DEDENT> for key in ['tasks', 'vars', 'defaults']: <NEW_LINE> <INDENT> from_key = '%s_from' % key <NEW_LINE> if ir.args.get(from_key): <NEW_LINE> <INDENT> ir._from_files[key] = basename(ir.args.get(from_key)) <NEW_LINE> <DEDENT> <DEDENT> for option in ['private', 'allow_duplicates']: <NEW_LINE> <INDENT> if option in ir.args: <NEW_LINE> <INDENT> setattr(ir, option, ir.args.get(option)) <NEW_LINE> <DEDENT> <DEDENT> return ir <NEW_LINE> <DEDENT> def copy(self, exclude_parent=False, exclude_tasks=False): <NEW_LINE> <INDENT> new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks) <NEW_LINE> new_me.statically_loaded = self.statically_loaded <NEW_LINE> new_me._from_files = self._from_files.copy() <NEW_LINE> new_me._parent_role = self._parent_role <NEW_LINE> new_me._role_name = self._role_name <NEW_LINE> return new_me <NEW_LINE> <DEDENT> def get_include_params(self): <NEW_LINE> <INDENT> v = super(IncludeRole, self).get_include_params() <NEW_LINE> if self._parent_role: <NEW_LINE> <INDENT> v.update(self._parent_role.get_role_params()) <NEW_LINE> <DEDENT> return v | A Role include is derived from a regular role to handle the special
circumstances related to the `- include_role: ...` | 62598fb2796e427e5384e80d |
class ExporterSetting(dict): <NEW_LINE> <INDENT> def __init__(self, enabled, initial_convert_time=None, tuple_key=None, extra_schema=None, parsing_format=None, base_schema=None, update_time=None): <NEW_LINE> <INDENT> assert enabled is not None <NEW_LINE> if initial_convert_time is None: <NEW_LINE> <INDENT> initial_convert_time = long(time.mktime(time.localtime())*1000) <NEW_LINE> <DEDENT> if not tuple_key: <NEW_LINE> <INDENT> tuple_key = 'ptuple' <NEW_LINE> <DEDENT> if not extra_schema: <NEW_LINE> <INDENT> extra_schema = [] <NEW_LINE> <DEDENT> if not parsing_format: <NEW_LINE> <INDENT> parsing_format = 'NginxPlusLogParserDriver' <NEW_LINE> <DEDENT> if not base_schema: <NEW_LINE> <INDENT> base_schema = [] <NEW_LINE> <DEDENT> extra_schema_list = [x if isinstance(x, ExporterExtraSchema) else ExporterExtraSchema.from_dict(x) for x in extra_schema] <NEW_LINE> extra_schema_str = json.dumps(extra_schema_list) <NEW_LINE> base_schema_list = [x if isinstance(x, ExporterExtraSchema) else ExporterExtraSchema.from_dict(x) for x in base_schema] <NEW_LINE> base_schema_str = json.dumps(base_schema_list) <NEW_LINE> super(ExporterSetting, self).__init__({ 'enabled': enabled, 'initalConvetTime': long(initial_convert_time), 'tupleKey': tuple_key, 'extraSchema': extra_schema_str, 'parsingFormat': parsing_format, 'baseSchema': base_schema_str, 'updateTime': update_time }) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dict_obj): <NEW_LINE> <INDENT> extra_schema = None <NEW_LINE> if 'extraSchema' in dict_obj: <NEW_LINE> <INDENT> extra_schema = json.loads(dict_obj['extraSchema']) <NEW_LINE> <DEDENT> base_schema = None <NEW_LINE> if 'baseSchema' in dict_obj: <NEW_LINE> <INDENT> base_schema = json.loads(dict_obj['baseSchema']) <NEW_LINE> <DEDENT> return cls(dict_obj['enabled'], dict_obj.get('initalConvetTime'), dict_obj.get('tupleKey'), extra_schema, dict_obj.get('parsingFormat'), base_schema, dict_obj.get('updateTime')) | Structure for Exporter setting.
Fields:
enabled (bool): The exporter is enabled or not.
initalConvetTime (long): The initial conversion time in Epoch (milliseconds). Default: now.
tupleKey (str): The tuple key. Default: ptuple.
extraSchema (object): The extra schema.
parsingFormat (str): The input data format. Default: NginxPlusLogParserDriver.
baseSchema (obj): The base/default schema.
updateTime (long): The latest data export time in Epoch (milliseconds).
NOTE:
initalConvetTime is the legacy typo of "initialConversionTime". | 62598fb255399d3f05626593 |
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> username = db.Column(db.String(20), unique=True) <NEW_LINE> name = db.Column(db.String(20)) <NEW_LINE> password = db.Column(db.String(20)) <NEW_LINE> admin = db.Column(db.Boolean) <NEW_LINE> def organizations(self): <NEW_LINE> <INDENT> return [om.organization for om in OrganizationMember.query.filter_by( user_id=self.id, accepted=True ).all() ] <NEW_LINE> <DEDENT> def is_authenticated(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_anonymous(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_active(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return unicode(self.id) <NEW_LINE> <DEDENT> def check_password(self, password): <NEW_LINE> <INDENT> return check_password_hash(self.password, password) <NEW_LINE> <DEDENT> def __init__(self, username, name, password, admin=False): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.name = name <NEW_LINE> self.password = generate_password_hash(password) <NEW_LINE> self.admin = admin | Represents a standard user. (Login, own objects, etc) | 62598fb2442bda511e95c4d0 |
class LRPSequentialPresetBFlat(LRPSequentialPresetB): <NEW_LINE> <INDENT> def __init__(self, model, *args, **kwargs): <NEW_LINE> <INDENT> super(LRPSequentialPresetBFlat, self).__init__(model, *args, input_layer_rule="Flat", **kwargs) | Special LRP-configuration for ConvNets | 62598fb2baa26c4b54d4f32f |
class Policy(object): <NEW_LINE> <INDENT> def __init__(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def choose(self, agent): <NEW_LINE> <INDENT> return 0 | Policy prescribes action to be taken given the agent's parameter
estimates | 62598fb297e22403b383af88 |
class update_account_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None, e1=None, e2=None, e3=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e1 = e1 <NEW_LINE> self.e2 = e2 <NEW_LINE> self.e3 = e3 <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = TAccount() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e1 = TAccountNotAuthorizedException() <NEW_LINE> self.e1.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e2 = TAccountInvalidAttributesException() <NEW_LINE> self.e2.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e3 = TAccountNotFoundException() <NEW_LINE> self.e3.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> 
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('update_account_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e1 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e1', TType.STRUCT, 1) <NEW_LINE> self.e1.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e2 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e2', TType.STRUCT, 2) <NEW_LINE> self.e2.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e3 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e3', TType.STRUCT, 3) <NEW_LINE> self.e3.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e1
- e2
- e3 | 62598fb260cbc95b063643c5 |
@total_ordering <NEW_LINE> class Contig: <NEW_LINE> <INDENT> def __init__(self, contig_name, contig_size): <NEW_LINE> <INDENT> self.name = contig_name <NEW_LINE> self.size = contig_size <NEW_LINE> self.total_reads = 0 <NEW_LINE> self.reads = 0 <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.reads / self.size == other.reads / other.size <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.reads / self.size < other.reads / other.size | Stores name, size, reads, and total reads for a contig
and implements ordering based on reads per unit of contig size (read density) | 62598fb2460517430c43209b |
class LocalFileAdapter(requests.adapters.BaseAdapter): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _chkpath(method, path): <NEW_LINE> <INDENT> if method.lower() in ('put', 'delete'): <NEW_LINE> <INDENT> return 501, "Not Implemented" <NEW_LINE> <DEDENT> elif method.lower() not in ('get', 'head'): <NEW_LINE> <INDENT> return 405, "Method Not Allowed" <NEW_LINE> <DEDENT> elif os.path.isdir(path): <NEW_LINE> <INDENT> return 400, "Path Not A File" <NEW_LINE> <DEDENT> elif not os.path.isfile(path): <NEW_LINE> <INDENT> return 404, "File Not Found" <NEW_LINE> <DEDENT> elif not os.access(path, os.R_OK): <NEW_LINE> <INDENT> return 403, "Access Denied" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 200, "OK" <NEW_LINE> <DEDENT> <DEDENT> def send(self, req, **kwargs): <NEW_LINE> <INDENT> path = os.path.normcase(os.path.normpath(url2pathname(req.path_url))) <NEW_LINE> response = requests.Response() <NEW_LINE> response.status_code, response.reason = self._chkpath(req.method, path) <NEW_LINE> if response.status_code == 200 and req.method.lower() != 'head': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response.raw = open(path, 'rb') <NEW_LINE> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> response.status_code = 500 <NEW_LINE> response.reason = "" <NEW_LINE> <DEDENT> <DEDENT> if isinstance(req.url, bytes): <NEW_LINE> <INDENT> response.url = req.url.decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response.url = req.url <NEW_LINE> <DEDENT> response.request = req <NEW_LINE> response.connection = self <NEW_LINE> return response <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass | Protocol Adapter to allow Requests to GET file:// URLs
@todo: Properly handle non-empty hostname portions. | 62598fb291f36d47f2230ee4 |
class OrangeOpType(Enum): <NEW_LINE> <INDENT> def __init__(self, number): <NEW_LINE> <INDENT> self._as_parameter__ = number <NEW_LINE> <DEDENT> TextCommand = 1 <NEW_LINE> BatteryPercent = 2 <NEW_LINE> LastSpeechHeard = 3 <NEW_LINE> LastSpeechSpoken = 4 <NEW_LINE> IpAddress = 5 <NEW_LINE> GoogleSpeech = 6 <NEW_LINE> ToggleGoogleSpeech = 7 <NEW_LINE> InternetStatus = 8 <NEW_LINE> BatteryIsCharging = 9 <NEW_LINE> BoardTemperature = 10 <NEW_LINE> LocalizationQuality = 11 <NEW_LINE> WifiSsidAndStrength = 12 <NEW_LINE> SpeechEnergyThreshold = 13 | Enumerated type for operation type | 62598fb266673b3332c30446 |
class Solution: <NEW_LINE> <INDENT> def findMin(self, nums): <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> target = nums[-1] <NEW_LINE> start, end = 0, len(nums) - 1 <NEW_LINE> while start + 1 < end: <NEW_LINE> <INDENT> mid = start + (end - start) // 2 <NEW_LINE> if nums[mid] <= target: <NEW_LINE> <INDENT> end = mid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = mid <NEW_LINE> <DEDENT> <DEDENT> return min(nums[start], nums[end]) | @param nums: a rotated sorted array
@return: the minimum number in the array | 62598fb2a79ad1619776a0e2 |
class MCVersionsList(object): <NEW_LINE> <INDENT> def __init__(self, mc_dir): <NEW_LINE> <INDENT> self._dict = {} <NEW_LINE> os.chdir(os.path.join(mc_dir, 'versions')) <NEW_LINE> for version in os.listdir(): <NEW_LINE> <INDENT> if os.path.isdir(version): <NEW_LINE> <INDENT> json_file = os.path.join(version, version + '.json') <NEW_LINE> if os.path.exists(json_file): <NEW_LINE> <INDENT> with open(json_file) as fp: <NEW_LINE> <INDENT> self._dict[version] = MCVersion(json.load(fp)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> os.chdir('..') <NEW_LINE> self.list = sorted(self._dict) <NEW_LINE> <DEDENT> def get(self, version_id): <NEW_LINE> <INDENT> this = self._dict.get(version_id) <NEW_LINE> if this is None: <NEW_LINE> <INDENT> raise ValueError('Invalid version id {0}. Please check if {0}.json exists.'.format(version_id)) <NEW_LINE> <DEDENT> parent_id = this.inherits_from <NEW_LINE> if parent_id: <NEW_LINE> <INDENT> parent = self.get(parent_id) <NEW_LINE> this.inherit(parent) <NEW_LINE> <DEDENT> return this | An object to handle a list of valid Minecraft version files. | 62598fb27047854f4633f455 |
@ejit <NEW_LINE> class Exception(BaseException): <NEW_LINE> <INDENT> pass | Common base class for all non-exit exceptions. | 62598fb2a8370b77170f0456 |
class Connection(_http.JSONConnection): <NEW_LINE> <INDENT> API_BASE_URL = 'https://logging.googleapis.com' <NEW_LINE> API_VERSION = 'v2' <NEW_LINE> API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' <NEW_LINE> SCOPE = ('https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/cloud-platform') | A connection to Google Stackdriver Logging via the JSON REST API.
:type credentials: :class:`oauth2client.client.OAuth2Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
connection.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: (Optional) HTTP object to make requests.
:type api_base_url: str
:param api_base_url: The base of the API call URL. Defaults to the value
:attr:`Connection.API_BASE_URL`. | 62598fb2cc0a2c111447b08c |
class TestImagesIPv6(functional.FunctionalTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> test_utils.get_unused_port_ipv4 = test_utils.get_unused_port <NEW_LINE> test_utils.get_unused_port_and_socket_ipv4 = ( test_utils.get_unused_port_and_socket) <NEW_LINE> test_utils.get_unused_port = test_utils.get_unused_port_ipv6 <NEW_LINE> test_utils.get_unused_port_and_socket = ( test_utils.get_unused_port_and_socket_ipv6) <NEW_LINE> super(TestImagesIPv6, self).setUp() <NEW_LINE> self.cleanup() <NEW_LINE> self.ping_server_ipv4 = self.ping_server <NEW_LINE> self.ping_server = self.ping_server_ipv6 <NEW_LINE> self.include_scrubber = False <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.ping_server = self.ping_server_ipv4 <NEW_LINE> super(TestImagesIPv6, self).tearDown() <NEW_LINE> test_utils.get_unused_port = test_utils.get_unused_port_ipv4 <NEW_LINE> test_utils.get_unused_port_and_socket = ( test_utils.get_unused_port_and_socket_ipv4) <NEW_LINE> <DEDENT> def _headers(self, custom_headers=None): <NEW_LINE> <INDENT> base_headers = { 'X-Identity-Status': 'Confirmed', 'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96', 'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e', 'X-Tenant-Id': TENANT1, 'X-Roles': 'reader,member', } <NEW_LINE> base_headers.update(custom_headers or {}) <NEW_LINE> return base_headers <NEW_LINE> <DEDENT> def test_image_list_ipv6(self): <NEW_LINE> <INDENT> self.api_server.deployment_flavor = "caching" <NEW_LINE> self.api_server.send_identity_credentials = True <NEW_LINE> self.start_servers(**self.__dict__.copy()) <NEW_LINE> url = f'http://[::1]:{self.api_port}' <NEW_LINE> path = '/' <NEW_LINE> requests.get(url + path, headers=self._headers()) <NEW_LINE> path = '/v2/images' <NEW_LINE> response = requests.get(url + path, headers=self._headers()) <NEW_LINE> self.assertEqual(200, response.status_code) <NEW_LINE> images = jsonutils.loads(response.text)['images'] <NEW_LINE> self.assertEqual(0, len(images)) | Verify that API and 
REG servers running IPv6 can communicate | 62598fb24f88993c371f0548 |
class Cheshire2QueryStream(QueryStream): <NEW_LINE> <INDENT> booleans = {'AND': 'and', '.AND.': 'and', '&&': 'and', 'OR': 'or', '.OR.': 'or', '||': 'or', 'NOT': 'not', '.NOT.': 'not', 'ANDNOT': 'not', '.ANDNOT.': 'not', '!!': 'not' } <NEW_LINE> relations = {'<': '<', 'LT': '<', '.LT.': '<', '<=': '<=', 'LE': '<=', '.LE.': '<=', '=': '=', '>=': '>=', 'GE': '>=', '.GE.': '>=', '>': '>', 'GT': '>', '.GT.': '>', '<=>': 'within', 'WITHIN': 'within', '.WITHIN.': 'within', '%': 'all/stem', 'STEM': 'all/stem', '.STEM.': 'all/stem', '?': 'all/phonetic', '??': 'all/phonetic', 'PHON': 'all/phonetic', '.PHON.': 'all/phonetic', '@': 'all/relevant', 'REL': 'all/relevant', '.REL.': 'all/relevant', '@@': 'all/rel.algorithm=trec2', '.TREC2.': 'all/rel.algorithm=trec2', '.TREC3.': 'all/rel.algorithm=trec3', '@*': 'all/rel.algorithm=trec2/rel.feedback', '.TREC2FBK.': 'all/rel.algorithm=trec2/rel.feedback', '@+': 'all/rel.algorithm=okapi', '.OKAPI.': 'all/rel.algorithm=okapi', '@/': 'all/rel.algorithm=tfidf', '.TFIDF.': 'all/rel.algorithm=tfidf', '@&': 'all/rel.algorithm=lucene', '.LUCENE.': 'all/rel.algorithm=lucene', '@#': 'all/rel.algorithm=cori', '.CORI.': 'all/rel.algorithm=cori' } <NEW_LINE> geoRelations = {'>#<': 'within', '.FULLY_ENCLOSED_WITHIN.': 'within', '<#>': 'encloses', '.ENCLOSES.': 'encloses', } <NEW_LINE> proxBooleans = {'!PROX': (2, 0, 2), '!ADJ': (2, 0, 2), '!NEAR': (20, 0, 2), '!FAR': (20, 0, 4), '!OPROX': (2, 1, 2), '!OADJ': (2, 1, 2), '!ONEAR': (20, 1, 2), '!OFAR': (20, 1, 4) } <NEW_LINE> def parse(self, session, data, codec, db): <NEW_LINE> <INDENT> pass | A QueryStream to process queries in the Cheshire 2 Query Syntax.
http://cheshire.berkeley.edu/cheshire2.html#zfind
top ::= query ['resultsetid' name]
query ::= query boolean clause | clause
clause ::= '(' query ')'
| attributes [relation] term
| resultset
attributes ::= '[' { [set] type '=' value } ']' | name
boolean ::= 'and' | 'or' | 'not' | (synonyms)
prox ::= ('!PROX' | (synonyms)) {'/' name}
relation ::= '>' | '<' | ...
[bib1 1=5, bib1 3=6] > term and title @ fish
| 62598fb27d847024c075c43c |
class SystemPolicyV1Beta1ClientMeta(type): <NEW_LINE> <INDENT> _transport_registry = ( OrderedDict() ) <NEW_LINE> _transport_registry["grpc"] = SystemPolicyV1Beta1GrpcTransport <NEW_LINE> _transport_registry["grpc_asyncio"] = SystemPolicyV1Beta1GrpcAsyncIOTransport <NEW_LINE> def get_transport_class( cls, label: str = None, ) -> Type[SystemPolicyV1Beta1Transport]: <NEW_LINE> <INDENT> if label: <NEW_LINE> <INDENT> return cls._transport_registry[label] <NEW_LINE> <DEDENT> return next(iter(cls._transport_registry.values())) | Metaclass for the SystemPolicyV1Beta1 client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects. | 62598fb23539df3088ecc32c |
class SwitchField(BooleanField): <NEW_LINE> <INDENT> def __init__(self, label=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(label, **kwargs) | A wrapper field for ``BooleanField`` that renders as a Bootstrap switch.
.. versionadded:: 2.0.0 | 62598fb27b180e01f3e4908d |
class StreamCounterStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.Count = channel.stream_unary( '/streamcount.StreamCounter/Count', request_serializer=streamcount__pb2.CountRequest.SerializeToString, response_deserializer=streamcount__pb2.CountReply.FromString, ) | The greeting service definition.
| 62598fb2cc0a2c111447b08d |
class AggregateTask(models.Model): <NEW_LINE> <INDENT> aggregate = models.ForeignKey('Aggregate') <NEW_LINE> task_id = models.CharField(max_length=36) <NEW_LINE> timestamp = models.DateTimeField(default=datetime.now) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('aggregate', 'task_id') | A Celery task in the process of recalculating an Aggregate | 62598fb2851cf427c66b8336 |
class Marker(models.Model): <NEW_LINE> <INDENT> name = models.CharField(unique=True, max_length=255, verbose_name=_(u"name"), help_text=_("Name of the marker.")) <NEW_LINE> slug = models.SlugField(unique=True, verbose_name=_(u"name in URL"), null=True) <NEW_LINE> layer = models.ForeignKey(Layer, verbose_name=_("layer"), on_delete=models.PROTECT) <NEW_LINE> status = models.ForeignKey(Status, verbose_name=_("status"), on_delete=models.PROTECT) <NEW_LINE> desc = models.TextField(null=True, blank=True, verbose_name=_("description"), help_text=_(u"Detailed marker descrption.")) <NEW_LINE> remark = models.TextField(null=True, blank=True, help_text=_(u"Internal information about layer."), verbose_name=_("internal remark")) <NEW_LINE> default_icon_height = models.IntegerField(default=20) <NEW_LINE> default_icon_width = models.IntegerField(default=20) <NEW_LINE> default_icon = models.ImageField( null=True, blank=True, upload_to='icons', storage=SlugifyFileSystemStorage(), verbose_name=_("default icon"), height_field='default_icon_height', width_field='default_icon_width', ) <NEW_LINE> menu_icon_height = models.IntegerField(default=20) <NEW_LINE> menu_icon_width = models.IntegerField(default=20) <NEW_LINE> menu_icon = models.ImageField( null=True, blank=True, upload_to='icons/marker/menu', storage=SlugifyFileSystemStorage(), verbose_name=_("menu icon"), height_field='menu_icon_height', width_field='menu_icon_width', ) <NEW_LINE> minzoom = models.PositiveIntegerField(default=1, verbose_name=_("Minimal zoom"), help_text=_(u"Minimal zoom in which the POIs of this marker will be shown on the map.")) <NEW_LINE> maxzoom = models.PositiveIntegerField(default=10, verbose_name=_("Maximal zoom"), help_text=_(u"Maximal zoom in which the POIs of this marker will be shown on the map.")) <NEW_LINE> line_width = models.FloatField(verbose_name=_(u"line width"), default=2,) <NEW_LINE> line_color = RGBColorField(default="#ffc90e", verbose_name=_("line color")) <NEW_LINE> created_at = 
models.DateTimeField(auto_now_add=True, verbose_name=_("created at")) <NEW_LINE> last_modification = models.DateTimeField(auto_now=True, verbose_name=_("last modification at")) <NEW_LINE> order = models.IntegerField(verbose_name=_("order"), default=0, blank=False, null=False) <NEW_LINE> def line_color_kml(this): <NEW_LINE> <INDENT> color = this.line_color[1:] <NEW_LINE> return "88" + color[4:6] + color[2:4] + color[0:2] <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._meta.get_field('status').default = get_default_status() <NEW_LINE> <DEDENT> except django.db.utils.ProgrammingError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return super(Marker, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> permissions = [ ("can_only_view", "Can only view"), ] <NEW_LINE> verbose_name = _(u"marker") <NEW_LINE> verbose_name_plural = _(u"markers") <NEW_LINE> ordering = ['order', ] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Map markers with display style definition. | 62598fb2dd821e528d6d8fa9 |
class Worker(object): <NEW_LINE> <INDENT> def __init__(self, name, sess, ac_parms, globalAC, game_name, ): <NEW_LINE> <INDENT> super(Worker, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.sess = sess <NEW_LINE> self.ac_parms =ac_parms <NEW_LINE> self.globalAC = globalAC <NEW_LINE> self.env = gym.make(game_name).unwrapped <NEW_LINE> self.AC = ACNet4CartPole( n_actions = self.ac_parms['n_actions'], n_features=self.ac_parms['n_features'], sess =self.sess, globalAC=globalAC, scope = self.name, OPT_A = self.ac_parms['OPT_A'], OPT_C =self.ac_parms['OPT_C'], ) <NEW_LINE> self.memory = A3CMemory() <NEW_LINE> <DEDENT> def work(self): <NEW_LINE> <INDENT> total_step = 1 <NEW_LINE> global GLOBAL_EP <NEW_LINE> while not COORD.should_stop() and GLOBAL_EP < MAX_GLOBAL_EP: <NEW_LINE> <INDENT> s = self.env.reset() <NEW_LINE> ep_r = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> self.env.render() <NEW_LINE> a = self.AC.choose_action(s) <NEW_LINE> s_, r, done, info = self.env.step(a) <NEW_LINE> if done: r= -5 <NEW_LINE> ep_r+=r <NEW_LINE> self.memory.store_transition(s,a,r) <NEW_LINE> if total_step % UPDATE_GLOBAL_ITER == 0 or done : <NEW_LINE> <INDENT> if done: <NEW_LINE> <INDENT> v_s_ =0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v_s_ = self.sess.run(self.AC.v,{self.AC.s:s_[np.newaxis,:]})[0,0] <NEW_LINE> <DEDENT> for r in self.memory.buffer_r[::-1]: <NEW_LINE> <INDENT> v_s_ = r + GAMMA * v_s_ <NEW_LINE> self.memory.buffer_v_target.append(v_s_) <NEW_LINE> <DEDENT> self.memory.buffer_v_target.reverse() <NEW_LINE> buffer_s, buffer_a, buffer_v_target = self.memory.get_data() <NEW_LINE> self.AC.update_global({ self.AC.s: buffer_s, self.AC.a: buffer_a, self.AC.v_target: buffer_v_target, }) <NEW_LINE> self.memory.clean() <NEW_LINE> self.AC.pull_global() <NEW_LINE> <DEDENT> s=s_ <NEW_LINE> total_step+=1 <NEW_LINE> if done: <NEW_LINE> <INDENT> if len(GLOBAL_RUNNING_R)==0: <NEW_LINE> <INDENT> GLOBAL_RUNNING_R.append(ep_r) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
GLOBAL_RUNNING_R.append(0.99 * GLOBAL_RUNNING_R[-1] + 0.01 * ep_r) <NEW_LINE> <DEDENT> print( self.name, "Ep:", GLOBAL_EP, "| Ep_r: %i" % GLOBAL_RUNNING_R[-1], ) <NEW_LINE> GLOBAL_EP += 1 <NEW_LINE> break | docstring for Worker | 62598fb23d592f4c4edbaf3b |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.