code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class GridStateActionSpace2D(StateActionSpace): <NEW_LINE> <INDENT> def __init__(self, dimensions = (3,1), allow_diag_actions = True): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> half_dimensions = (np.array(dimensions)/2).astype(int) <NEW_LINE> self.min_indices = -half_dimensions <NEW_LINE> self.max_indices = self.min_indices + dimensions - (1, 1) <NEW_LINE> """All States""" <NEW_LINE> self.states = [(i-half_dimensions[0], j-half_dimensions[1]) for i, j in np.ndindex(dimensions)] <NEW_LINE> self.states.append((10000, 10000)) <NEW_LINE> def __generate_possible_actions(dims, state_index, allow_diag=True): <NEW_LINE> <INDENT> if state_index == (0, 0) or state_index == (10000, 10000): <NEW_LINE> <INDENT> return [(0, 0)] <NEW_LINE> <DEDENT> min_indices = -(np.array(dims)/2).astype(int) <NEW_LINE> max_indices = (min_indices + dims) - (1, 1) <NEW_LINE> min_action = -(np.less(min_indices, (0, 0))).astype(int) <NEW_LINE> max_action = np.less((0, 0), max_indices).astype(int) <NEW_LINE> actions = [] <NEW_LINE> for i in range(min_action[0], max_action[0] + 1): <NEW_LINE> <INDENT> for j in range(min_action[1], max_action[1] + 1): <NEW_LINE> <INDENT> if (i*j == 0 or allow_diag) and (i, j) != (0, 0): <NEW_LINE> <INDENT> actions.append((i, j)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return actions <NEW_LINE> <DEDENT> self.eligible_actions = dict() <NEW_LINE> for state in self.states: <NEW_LINE> <INDENT> self.eligible_actions[state] = __generate_possible_actions( dimensions, state_index=state, allow_diag=allow_diag_actions) <NEW_LINE> <DEDENT> self.actions = self.eligible_actions[(1, 0)] <NEW_LINE> <DEDENT> def get_list_of_states(self): <NEW_LINE> <INDENT> return self.states <NEW_LINE> <DEDENT> def get_list_of_actions(self): <NEW_LINE> <INDENT> return self.actions <NEW_LINE> <DEDENT> def get_eligible_actions(self, state): <NEW_LINE> <INDENT> assert isinstance(state, tuple) <NEW_LINE> return self.eligible_actions[state] <NEW_LINE> <DEDENT> def is_terminal_state(self, state): 
<NEW_LINE> <INDENT> import numpy as np <NEW_LINE> if np.less(state, self.min_indices).any() or np.greater(state, self.max_indices).any(): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> elif state == (0, 0): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | Grid-based state-action space
Grid based state-action space. Each state and action is a tuple of two
integers. Constructed with the dimensions of the thing. | 62598fb599fddb7c1ca62e5e |
class TestScoringService(object): <NEW_LINE> <INDENT> def test_do_inferenceで推論値yが入力サンプル数分反映されていればTrue(self, do): <NEW_LINE> <INDENT> request_path = sd.joinpath('data', 'request_sample.json') <NEW_LINE> with open(request_path, 'r') as f: <NEW_LINE> <INDENT> request_data = json.load(f) <NEW_LINE> <DEDENT> from scoring_service import ScoringService <NEW_LINE> response_body = ScoringService.do_inference(request_data) <NEW_LINE> samples = len(request_data['PassengerId']) <NEW_LINE> expected = (samples, ) <NEW_LINE> assert response_body['Survived'].shape == expected | 推論パイプラインの結合テスト
| 62598fb5167d2b6e312b705a |
class Model(AbstractModel, StaticChildrenMixin, StaticActionsMixin): <NEW_LINE> <INDENT> __slots__ = () | Static model with a known set of sub-models and actions. | 62598fb5379a373c97d990fd |
class HTTPException(BaseException): <NEW_LINE> <INDENT> pass | This class signals an error during the processing of a HTTP request | 62598fb54f6381625f199534 |
class SublayerConnection(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size: int, dropout: float) -> None: <NEW_LINE> <INDENT> super(SublayerConnection, self).__init__() <NEW_LINE> self.norm = LayerNorm(size) <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> <DEDENT> def forward(self, x: Tensor, sublayer: nn.Module) -> Tensor: <NEW_LINE> <INDENT> return x + self.dropout(sublayer(self.norm(x))) | A residual connection followed by a layer norm.
Notes:
for code simplicity the norm is first as opposed to last.
Attributes:
norm (LayerNorm): A normalization layer defined with `size`.
dropout (nn.Dropout): A dropout module used after the sublayer and before the residual connection. | 62598fb592d797404e388bd7 |
@add_start_docstrings( XLM_ROBERTA_START_DOCSTRING, ) <NEW_LINE> class XLMRobertaForMaskedLM(RobertaForMaskedLM): <NEW_LINE> <INDENT> config_class = XLMRobertaConfig | This class overrides [`RobertaForMaskedLM`]. Please check the superclass for the appropriate documentation
alongside usage examples. | 62598fb5e5267d203ee6b9e6 |
class CommentDetailsForm(CommentSecurityForm): <NEW_LINE> <INDENT> comment = forms.CharField(widget=forms.Textarea, max_length=COMMENT_MAX_LENGTH) <NEW_LINE> def get_comment_object(self): <NEW_LINE> <INDENT> if not self.is_valid(): <NEW_LINE> <INDENT> raise ValueError("get_comment_object may only be called on valid forms") <NEW_LINE> <DEDENT> CommentModel = self.get_comment_model() <NEW_LINE> new = CommentModel(**self.get_comment_create_data()) <NEW_LINE> new = self.check_for_duplicate_comment(new) <NEW_LINE> return new <NEW_LINE> <DEDENT> def get_comment_model(self): <NEW_LINE> <INDENT> return Comment <NEW_LINE> <DEDENT> def get_comment_create_data(self): <NEW_LINE> <INDENT> return dict( content_type = ContentType.objects.get_for_model(self.target_object), object_pk = force_text(self.target_object._get_pk_val()), comment = self.cleaned_data["comment"], submit_date = timezone.now(), site_id = settings.SITE_ID, is_public = True, is_removed = False, ) <NEW_LINE> <DEDENT> def check_for_duplicate_comment(self, new): <NEW_LINE> <INDENT> possible_duplicates = self.get_comment_model()._default_manager.using( self.target_object._state.db ).filter( content_type = new.content_type, object_pk = new.object_pk, ) <NEW_LINE> for old in possible_duplicates: <NEW_LINE> <INDENT> if old.submit_date.date() == new.submit_date.date() and old.comment == new.comment: <NEW_LINE> <INDENT> return old <NEW_LINE> <DEDENT> <DEDENT> return new <NEW_LINE> <DEDENT> def clean_comment(self): <NEW_LINE> <INDENT> comment = self.cleaned_data["comment"] <NEW_LINE> if settings.COMMENTS_ALLOW_PROFANITIES == False: <NEW_LINE> <INDENT> bad_words = [w for w in settings.PROFANITIES_LIST if w in comment.lower()] <NEW_LINE> if bad_words: <NEW_LINE> <INDENT> raise forms.ValidationError(ungettext( "Watch your mouth! The word %s is not allowed here.", "Watch your mouth! 
The words %s are not allowed here.", len(bad_words)) % get_text_list( ['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1]) for i in bad_words], ugettext('and'))) <NEW_LINE> <DEDENT> <DEDENT> return comment | Handles the specific details of the comment (name, comment, etc.). | 62598fb5ec188e330fdf8978 |
class PBSProResourceDefinition: <NEW_LINE> <INDENT> def __init__( self, name: str, resource_type: ResourceType, flag: ResourceFlag ) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.type = resource_type <NEW_LINE> self.flag = "".join(sorted(flag)) <NEW_LINE> self.__flag_simplified = self.flag.replace("m", "") <NEW_LINE> self.read_only = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_consumable(self) -> bool: <NEW_LINE> <INDENT> return self.__flag_simplified in ["fh", "hn", "q", "hnq"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_host(self) -> bool: <NEW_LINE> <INDENT> return "h" in self.flag <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "ResourceDef(name={}, type={}, flag={})".format( self.name, self.type.name, self.flag ) | Resource slot_type
type = string
flag = h | 62598fb5dc8b845886d5369f |
class RuleStorage: <NEW_LINE> <INDENT> def __init__(self, module_name, f_name, annotator_info): <NEW_LINE> <INDENT> self.module_name = module_name <NEW_LINE> self.f_name = f_name <NEW_LINE> self.annotator_info = annotator_info <NEW_LINE> self.target_name = f"{module_name}:{f_name}" <NEW_LINE> self.rule_name = f"{module_name}::{f_name}" <NEW_LINE> self.full_name = f"{module_name}:{f_name}" <NEW_LINE> self.inputs = [] <NEW_LINE> self.outputs = [] <NEW_LINE> self.parameters = {} <NEW_LINE> self.docs = [] <NEW_LINE> self.doc_annotations = [] <NEW_LINE> self.wildcard_annotations = [] <NEW_LINE> self.configs = set() <NEW_LINE> self.classes = set() <NEW_LINE> self.missing_config = set() <NEW_LINE> self.missing_binaries = set() <NEW_LINE> self.export_dirs = None <NEW_LINE> self.has_preloader = bool(annotator_info["preloader"]) <NEW_LINE> self.use_preloader = False <NEW_LINE> self.type = annotator_info["type"].name <NEW_LINE> self.annotator = annotator_info["type"] is registry.Annotator.annotator <NEW_LINE> self.importer = annotator_info["type"] is registry.Annotator.importer <NEW_LINE> self.exporter = annotator_info["type"] is registry.Annotator.exporter <NEW_LINE> self.installer = annotator_info["type"] is registry.Annotator.installer <NEW_LINE> self.modelbuilder = annotator_info["type"] is registry.Annotator.modelbuilder <NEW_LINE> self.description = annotator_info["description"] <NEW_LINE> self.file_extension = annotator_info["file_extension"] <NEW_LINE> self.import_outputs = annotator_info["outputs"] <NEW_LINE> self.order = annotator_info["order"] <NEW_LINE> self.abstract = annotator_info["abstract"] <NEW_LINE> self.wildcards = annotator_info["wildcards"] | Object to store parameters for a snake rule. | 62598fb54c3428357761a3a2 |
class AppConfig() : <NEW_LINE> <INDENT> def __init__(self, appName) : <NEW_LINE> <INDENT> self._systemSettings = None <NEW_LINE> self._userPrefs = None <NEW_LINE> self.project = appName <NEW_LINE> self.variable = appName.upper() + "_CONF" <NEW_LINE> self._fileName = appName + ".yaml" <NEW_LINE> self.systemDir = "/etc/" + appName <NEW_LINE> pathdir = os.path.expanduser("~") + "/.config/" <NEW_LINE> self._userPrfesPathName = os.path.join(pathdir, self._fileName) <NEW_LINE> <DEDENT> def _load(self) : <NEW_LINE> <INDENT> if self._systemSettings is None and self._userPrefs is None : <NEW_LINE> <INDENT> self._userPrefs = {} <NEW_LINE> pathdir = [] <NEW_LINE> if os.environ.get(self.variable) : <NEW_LINE> <INDENT> pathdir.append(os.environ.get(self.variable)) <NEW_LINE> <DEDENT> pathdir.extend([os.curdir, self.systemDir]) <NEW_LINE> for loc in pathdir : <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = os.path.join(loc, self._fileName) <NEW_LINE> with open(f, 'r') as ymlfile: <NEW_LINE> <INDENT> self._systemSettings = yaml.safe_load(ymlfile) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> print ("Skipped exception: <%s> " % str(e)) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> with open(self._userPrfesPathName, 'r') as ymlfile: <NEW_LINE> <INDENT> self._userPrefs = yaml.safe_load(ymlfile) <NEW_LINE> <DEDENT> if not self._userPrefs: <NEW_LINE> <INDENT> self._userPrefs = {} <NEW_LINE> <DEDENT> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> print ("Skipped exception: <%s> " % str(e)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def systemSettings(self) : <NEW_LINE> <INDENT> self._load() <NEW_LINE> return self._systemSettings <NEW_LINE> <DEDENT> @property <NEW_LINE> def userPreferences(self) : <NEW_LINE> <INDENT> self._load() <NEW_LINE> return self._userPrefs <NEW_LINE> <DEDENT> def userPrefSet(self, category, key, value): <NEW_LINE> <INDENT> self._load() <NEW_LINE> if category in 
self._userPrefs.keys(): <NEW_LINE> <INDENT> self._userPrefs[category][key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._userPrefs[category] = {key : value} <NEW_LINE> <DEDENT> <DEDENT> def saveUserPreferences(self) : <NEW_LINE> <INDENT> with open(self._userPrfesPathName, 'w') as outfile: <NEW_LINE> <INDENT> yaml.dump(self._userPrefs, outfile, default_flow_style=False) | AppConfig is an application configuration file management
appName is the application name
configuration file name is appName + ".yaml"
which is searched in these places and order:
1. from environment variable $'AppNme'
2. current directory
3. /etc/'AppName'/
Application user preferences are loaded and saved into
~/.config/'appName'.yaml | 62598fb55fcc89381b2661c0 |
class Paralleliser(object): <NEW_LINE> <INDENT> def __init__(self, inputs, paralleliserInfo): <NEW_LINE> <INDENT> self.doneCheckers = []; <NEW_LINE> self.paralleliserState = ParalleliserState.NOT_STARTED; <NEW_LINE> self.inputs = inputs; <NEW_LINE> self.paralleliserInfo = paralleliserInfo; <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> if (self.paralleliserState != ParalleliserState.NOT_STARTED): <NEW_LINE> <INDENT> raise RuntimeError("Paralleliser was already started!"); <NEW_LINE> <DEDENT> self.paralleliserState = ParalleliserState.STARTED <NEW_LINE> for anInput in self.inputs: <NEW_LINE> <INDENT> self.doneCheckers.append(self.paralleliserInfo.parallelProcessKickerOffer.execute(*anInput.args, **anInput.kwargs)) <NEW_LINE> <DEDENT> isDone = False; <NEW_LINE> numRunningJobs = self._numRunningJobs(); <NEW_LINE> self.paralleliserState = ParalleliserState.DONE; <NEW_LINE> return self.paralleliserInfo.queue; <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> for doneChecker in self.doneCheckers: <NEW_LINE> <INDENT> doneChecker.finish(); <NEW_LINE> <DEDENT> <DEDENT> def _numRunningJobs(self): <NEW_LINE> <INDENT> numRunningJobs = 0; <NEW_LINE> for doneChecker in self.doneCheckers: <NEW_LINE> <INDENT> if (doneChecker.isDone() != True): <NEW_LINE> <INDENT> numRunningJobs += 1; <NEW_LINE> <DEDENT> <DEDENT> return numRunningJobs; | takes an instance of paralleliserInfo (which contains info on how to kick off the jobs) and
a series of inputs, and executes the jobs in parallel. | 62598fb521bff66bcd722d50 |
class CurveDirection(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "curve.switch_direction_obm" <NEW_LINE> bl_label = "Curve Direction" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.curve.switch_direction() <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.curve.origin_start_point() <NEW_LINE> return {'FINISHED'} | switch curve direction | 62598fb5a8370b77170f04c5 |
class Minc1Image(SpatialImage): <NEW_LINE> <INDENT> header_class = Minc1Header <NEW_LINE> _meta_sniff_len = 4 <NEW_LINE> valid_exts = ('.mnc',) <NEW_LINE> files_types = (('image', '.mnc'),) <NEW_LINE> _compressed_suffixes = ('.gz', '.bz2') <NEW_LINE> makeable = True <NEW_LINE> rw = False <NEW_LINE> ImageArrayProxy = MincImageArrayProxy <NEW_LINE> @classmethod <NEW_LINE> def from_file_map(klass, file_map): <NEW_LINE> <INDENT> with file_map['image'].get_prepare_fileobj() as fobj: <NEW_LINE> <INDENT> minc_file = Minc1File(netcdf_file(fobj)) <NEW_LINE> affine = minc_file.get_affine() <NEW_LINE> if affine.shape != (4, 4): <NEW_LINE> <INDENT> raise MincError('Image does not have 3 spatial dimensions') <NEW_LINE> <DEDENT> data_dtype = minc_file.get_data_dtype() <NEW_LINE> shape = minc_file.get_data_shape() <NEW_LINE> zooms = minc_file.get_zooms() <NEW_LINE> header = klass.header_class(data_dtype, shape, zooms) <NEW_LINE> data = klass.ImageArrayProxy(minc_file) <NEW_LINE> <DEDENT> return klass(data, affine, header, extra=None, file_map=file_map) | Class for MINC1 format images
The MINC1 image class uses the default header type, rather than a specific
MINC header type - and reads the relevant information from the MINC file on
load. | 62598fb501c39578d7f12e62 |
class RandomAgent(AbstractAgent): <NEW_LINE> <INDENT> def __init__(self, player): <NEW_LINE> <INDENT> self._player = player <NEW_LINE> <DEDENT> @property <NEW_LINE> def player(self) -> Player: <NEW_LINE> <INDENT> return self._player <NEW_LINE> <DEDENT> def choose_move(self, game: GameModel, opponent_move: Optional[Move]) -> Move: <NEW_LINE> <INDENT> options = get_legal_move_coords(game) <NEW_LINE> assert options, 'Player invoked with no legal moves' <NEW_LINE> return Move(player=self.player, coords=random.choice(options)) | An agent which picks a random move | 62598fb563d6d428bbee2896 |
@add_metaclass(type) <NEW_LINE> class HostData(object): <NEW_LINE> <INDENT> def __init__(self, uuid, name, status, result): <NEW_LINE> <INDENT> self.uuid = uuid <NEW_LINE> self.name = name <NEW_LINE> self.status = status <NEW_LINE> self.result = result <NEW_LINE> self.finish = time.time() | Data about an individual host. | 62598fb530bbd722464699ed |
class PolarPoint: <NEW_LINE> <INDENT> COORDINATE_ORDER = ('NEZ', 'ENZ') <NEW_LINE> def __init__(self, dist, angle, z_angle, th, angle_type, base_point, pid, text, coordorder): <NEW_LINE> <INDENT> self.dist = float(dist) <NEW_LINE> angle = float(angle) <NEW_LINE> z_angle = float(z_angle) <NEW_LINE> self.th = float(th) <NEW_LINE> self.angle_type = angle_type <NEW_LINE> if angle_type == 'deg': <NEW_LINE> <INDENT> self.angle = radians(angle) <NEW_LINE> self.z_angle = radians(z_angle) <NEW_LINE> <DEDENT> if angle_type == 'gon': <NEW_LINE> <INDENT> self.angle = radians(angle * 0.9) <NEW_LINE> self.z_angle = radians(z_angle * 0.9) <NEW_LINE> <DEDENT> self.pid = pid <NEW_LINE> self.text = text <NEW_LINE> if any((coordorder == v for v in PolarPoint.COORDINATE_ORDER)): <NEW_LINE> <INDENT> self.coordorder = coordorder <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid coordinate order') <NEW_LINE> <DEDENT> self.base_x = base_point.x <NEW_LINE> self.base_y = base_point.y <NEW_LINE> self.base_z = base_point.z <NEW_LINE> self.ih = base_point.ih <NEW_LINE> <DEDENT> def to_point(self): <NEW_LINE> <INDENT> cart_coords = polar_to_cartesian(self.base_x, self.base_y, self.base_z, self.dist, self.angle, self.z_angle, self.ih, self.th) <NEW_LINE> if self.coordorder == 'NEZ': <NEW_LINE> <INDENT> cart_coords['x'], cart_coords['y'] = cart_coords['y'], cart_coords['x'] <NEW_LINE> <DEDENT> cart_point = Point(self.pid, cart_coords['x'], cart_coords['y'], cart_coords['z'], self.text) <NEW_LINE> return cart_point | A point geometry defined by polar coordinates. | 62598fb571ff763f4b5e785d |
class PercentFraction(types.TypeDecorator): <NEW_LINE> <INDENT> impl = types.Numeric(11, 10) <NEW_LINE> def load_dialect_impl(self, dialect): <NEW_LINE> <INDENT> if _is_mysql(dialect): <NEW_LINE> <INDENT> return mysql.DECIMAL(precision=11, scale=10, unsigned=True) <NEW_LINE> <DEDENT> return self.impl <NEW_LINE> <DEDENT> @property <NEW_LINE> def python_type(self): <NEW_LINE> <INDENT> return int | Highly accurate percent fraction. | 62598fb57b25080760ed759b |
class MyException(Exception): <NEW_LINE> <INDENT> pass | Base class for custom exceptions. | 62598fb5a79ad1619776a154 |
class HGHBogusNumbersError(ValueError): <NEW_LINE> <INDENT> pass | Error which is raised when the HGH parameters contain f-type
or higher projectors. The HGH article only defines atomic Hamiltonian
matrices up to l=2, so these are meaningless. | 62598fb530dc7b766599f936 |
class TestUpperBound(GPflowTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.X = np.random.rand(100, 1) <NEW_LINE> self.Y = np.sin(1.5 * 2 * np.pi * self.X) + np.random.randn(*self.X.shape) * 0.1 <NEW_LINE> <DEDENT> def test_few_inducing_points(self): <NEW_LINE> <INDENT> with self.test_context() as session: <NEW_LINE> <INDENT> vfe = gpflow.models.SGPR(self.X, self.Y, gpflow.kernels.RBF(1), self.X[:10, :].copy()) <NEW_LINE> opt = gpflow.train.ScipyOptimizer() <NEW_LINE> opt.minimize(vfe) <NEW_LINE> full = gpflow.models.GPR(self.X, self.Y, gpflow.kernels.RBF(1)) <NEW_LINE> full.kern.lengthscales = vfe.kern.lengthscales.read_value() <NEW_LINE> full.kern.variance = vfe.kern.variance.read_value() <NEW_LINE> full.likelihood.variance = vfe.likelihood.variance.read_value() <NEW_LINE> lml_upper = vfe.compute_upper_bound() <NEW_LINE> lml_vfe = - session.run(vfe.objective) <NEW_LINE> lml_full = - session.run(full.objective) <NEW_LINE> self.assertTrue(lml_upper > lml_full > lml_vfe) | Test for upper bound for regression marginal likelihood | 62598fb5460517430c4320d1 |
class TestInlineResponse20081Devices(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testInlineResponse20081Devices(self): <NEW_LINE> <INDENT> pass | InlineResponse20081Devices unit test stubs | 62598fb567a9b606de5460b8 |
class raw_bdf: <NEW_LINE> <INDENT> def __init__(self, participant_id): <NEW_LINE> <INDENT> self.participant_id = participant_id <NEW_LINE> self.bdf_fname = data_dir + '/RAWEEG/%d_EmoWorM.bdf' % participant_id <NEW_LINE> <DEDENT> def plot_event_channel(self, show=False, save=True, plot_breaks=False, figsize=(20, 10)): <NEW_LINE> <INDENT> raw = mne.io.read_raw_edf(self.bdf_fname, preload=False, stim_channel='Status') <NEW_LINE> x1, t1 = raw[-1] <NEW_LINE> x1 = x1[0] <NEW_LINE> fig = plt.figure(figsize=figsize) <NEW_LINE> ax = fig.add_subplot(111) <NEW_LINE> ax.plot(t1, x1, 'b') <NEW_LINE> ax.set_xticks(np.arange(0, t1.max(), 100)) <NEW_LINE> ax.set_ylim((0, 6e4)) <NEW_LINE> ax.set_xlabel('Time (seconds)') <NEW_LINE> xlim = ax.get_xlim() <NEW_LINE> ax.plot(xlim, [4096, 4096], 'r-') <NEW_LINE> ax.plot(xlim, [8192, 8192], 'r-') <NEW_LINE> ax.plot(xlim, [16384, 16384], 'r-') <NEW_LINE> ax.plot(xlim, [32768, 32768], 'r-') <NEW_LINE> if plot_breaks: <NEW_LINE> <INDENT> ylim = ax.get_ylim() <NEW_LINE> breaks = self._get_breaks() <NEW_LINE> for b in breaks[1:]: <NEW_LINE> <INDENT> ax.plot([b, b], ylim, 'g-') <NEW_LINE> <DEDENT> <DEDENT> if save: <NEW_LINE> <INDENT> png_fname = self.bdf_fname.replace('.bdf', '.png') <NEW_LINE> fig.savefig(png_fname) <NEW_LINE> <DEDENT> if show: <NEW_LINE> <INDENT> plt.show() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plt.close(fig) <NEW_LINE> <DEDENT> <DEDENT> def crop_file(self): <NEW_LINE> <INDENT> breaks = self._get_breaks() <NEW_LINE> raw = mne.io.read_raw_edf(self.bdf_fname, preload=True, stim_channel='Status') <NEW_LINE> pv_counter = 0 <NEW_LINE> wm_counter = 0 <NEW_LINE> for b in range(2, 6): <NEW_LINE> <INDENT> this_raw = raw.crop(tmin=breaks[b-1], tmax=breaks[b], copy=True) <NEW_LINE> eve = mne.find_events(this_raw, mask=255) <NEW_LINE> assert np.in1d(len(eve), [48, 144]).all() <NEW_LINE> if len(eve) == 48: <NEW_LINE> <INDENT> fname = data_dir + '/RAWFIF/%d_PV%d-raw.fif' % (self.participant_id, pv_counter) <NEW_LINE> pv_counter += 
1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fname = data_dir + '/RAWFIF/%d_WM%d-raw.fif' % (self.participant_id, wm_counter) <NEW_LINE> wm_counter += 1 <NEW_LINE> <DEDENT> this_raw.save(fname, proj=False, overwrite=True) <NEW_LINE> <DEDENT> <DEDENT> def _get_breaks(self): <NEW_LINE> <INDENT> breaks = np.genfromtxt(code_dir + '/bdf_breaks.txt', delimiter=',') <NEW_LINE> breaks, = breaks[breaks[:, 0] == self.participant_id, :] <NEW_LINE> return breaks | Class to handle the raw eeg data
Parameters
----------
participant_id : int
The id number for this participant | 62598fb57cff6e4e811b5b08 |
class AsyncRemotePillar(object): <NEW_LINE> <INDENT> def __init__(self, opts, grains, minion_id, saltenv, ext=None, functions=None, pillar=None, pillarenv=None): <NEW_LINE> <INDENT> self.opts = opts <NEW_LINE> self.opts['environment'] = saltenv <NEW_LINE> self.ext = ext <NEW_LINE> self.grains = grains <NEW_LINE> self.minion_id = minion_id <NEW_LINE> self.channel = salt.transport.client.AsyncReqChannel.factory(opts) <NEW_LINE> if pillarenv is not None or 'pillarenv' not in self.opts: <NEW_LINE> <INDENT> self.opts['pillarenv'] = pillarenv <NEW_LINE> <DEDENT> self.pillar_override = {} <NEW_LINE> if pillar is not None: <NEW_LINE> <INDENT> if isinstance(pillar, dict): <NEW_LINE> <INDENT> self.pillar_override = pillar <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.error('Pillar data must be a dictionary') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @tornado.gen.coroutine <NEW_LINE> def compile_pillar(self): <NEW_LINE> <INDENT> load = {'id': self.minion_id, 'grains': self.grains, 'saltenv': self.opts['environment'], 'pillarenv': self.opts['pillarenv'], 'pillar_override': self.pillar_override, 'ver': '2', 'cmd': '_pillar'} <NEW_LINE> if self.ext: <NEW_LINE> <INDENT> load['ext'] = self.ext <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ret_pillar = yield self.channel.crypted_transfer_decode_dictentry( load, dictkey='pillar', ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> log.exception('Exception getting pillar:') <NEW_LINE> raise SaltClientError('Exception getting pillar.') <NEW_LINE> <DEDENT> if not isinstance(ret_pillar, dict): <NEW_LINE> <INDENT> msg = ('Got a bad pillar from master, type {0}, expecting dict: ' '{1}').format(type(ret_pillar).__name__, ret_pillar) <NEW_LINE> log.error(msg) <NEW_LINE> raise SaltClientError(msg) <NEW_LINE> <DEDENT> raise tornado.gen.Return(ret_pillar) | Get the pillar from the master | 62598fb5009cb60464d0160b |
class WithingsAttribute: <NEW_LINE> <INDENT> def __init__( self, measurement: str, measure_type, friendly_name: str, unit_of_measurement: str, icon: str, ) -> None: <NEW_LINE> <INDENT> self.measurement = measurement <NEW_LINE> self.measure_type = measure_type <NEW_LINE> self.friendly_name = friendly_name <NEW_LINE> self.unit_of_measurement = unit_of_measurement <NEW_LINE> self.icon = icon | Base class for modeling withing data. | 62598fb5283ffb24f3cf3976 |
class CountSet(BaseCount.CountSet): <NEW_LINE> <INDENT> def day_totals(self): <NEW_LINE> <INDENT> return list(self.counts.values_list("day").annotate(total=Sum("count")).order_by("day")) <NEW_LINE> <DEDENT> def month_totals(self): <NEW_LINE> <INDENT> counts = self.counts.extra(select={"month": 'EXTRACT(month FROM "day")'}) <NEW_LINE> return list(counts.values_list("month").annotate(replies=Sum("count")).order_by("month")) | A queryset of counts which can be aggregated in different ways | 62598fb5d486a94d0ba2c0bb |
class BedFile: <NEW_LINE> <INDENT> def __init__(self, filename, referenceGenome, flankLength): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.peakBoundaries = {} <NEW_LINE> self.raw = BedTool(self.filename) <NEW_LINE> self.merged = self.raw.sort().merge() <NEW_LINE> self.slopped = None <NEW_LINE> self.chromosomes = [x[0] for x in self.merged] <NEW_LINE> self.startvals = None <NEW_LINE> self.endvals = None <NEW_LINE> self.referenceGenome = referenceGenome <NEW_LINE> self.flankLength = flankLength <NEW_LINE> <DEDENT> def extractIntervals(self): <NEW_LINE> <INDENT> midpointlist = [] <NEW_LINE> for peak in self.merged: <NEW_LINE> <INDENT> midpoint = round((int(peak[1]) + int(peak[2]))/2) <NEW_LINE> midpointlist.append((peak[0], midpoint, midpoint+1)) <NEW_LINE> <DEDENT> midpoints = BedTool(midpointlist) <NEW_LINE> chrom = pybedtools.chromsizes(self.referenceGenome) <NEW_LINE> self.slopped = midpoints.slop(b=self.flankLength, g=chrom) <NEW_LINE> self.startvals = [int(x[1]) for x in self.slopped] <NEW_LINE> self.endvals = [int(x[2]) for x in self.slopped] <NEW_LINE> return self.chromosomes, self.startvals, self.endvals | API for processing BedFiles | 62598fb5fff4ab517ebcd8d0 |
class DetailView(generic.DetailView): <NEW_LINE> <INDENT> model=Question <NEW_LINE> template_name='polls/detail.html' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Question.objects.filter(pub_date__lte=timezone.now()) | 显示明细 model也是django内置的属性-模型 | 62598fb599fddb7c1ca62e5f |
class Fringe: <NEW_LINE> <INDENT> def __init__(self, s): <NEW_LINE> <INDENT> self.structure = s() <NEW_LINE> assert ('push' in dir(s) and 'pop' in dir(s) and 'isEmpty' in dir(s)) <NEW_LINE> <DEDENT> def push(self, item, base, cost): <NEW_LINE> <INDENT> self.structure.push(item) if cost == 0 else self.structure.push( item, base + cost) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.structure.pop() <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> return self.structure.isEmpty() | Allows for a very (very) pretty abstraction, wherein the implementation
of graph search doesn't change, the user just decides which fringe type to
use. Stack will run DFS, Queue will run BFS, priority queues will run either UCS
or A*, depending on the cost function. | 62598fb5167d2b6e312b705c |
class MovieListViewTest(TransactionTestCase): <NEW_LINE> <INDENT> reset_sequences = True <NEW_LINE> @freeze_time("2012-01-14") <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> self.user = User.objects.create_user('hiren', 'a@b.com', 'bunny') <NEW_LINE> self.client.force_authenticate(user=self.user) <NEW_LINE> Movie.objects.create( name="Schindler's List", imdb_rating=9, movie_type='His', ) <NEW_LINE> <DEDENT> def test_movie_listing_works(self): <NEW_LINE> <INDENT> response = self.client.get('/api/movielist/') <NEW_LINE> self.assertEqual(response.json(), [{'updated_at': '2012-01-14T00:00:00Z', 'created_at': '2012-01-14T00:00:00Z', 'id': 1, 'name': "Schindler's List", 'imdb_rating': 9, 'movie_type': 'His'}]) | Test movie list view | 62598fb555399d3f056265fe |
class BeamType(Container): <NEW_LINE> <INDENT> allowed_enclosed_commands = ['Correlation'] <NEW_LINE> command_params = { 'partnum': { 'desc': 'Particle number', 'doc': '', 'type': 'Integer', 'req': True, 'default': None}, 'bmtype': { 'desc': 'beam type {magnitude = mass code; sign = charge}: 1: e, 2: μ, 3: π, 4: K, 5: p. ' '6: d, 7: He3, 8: Li7', 'doc': '', 'out_dict': { 'e': 1, 'mu': 2, 'pi': 3, 'k': 4, 'p': 5, 'd': 6, 'he3': 7, 'li7': 8}, 'type': 'Integer', 'req': True, 'default': None}, 'fractbt': { 'desc': 'Fraction of beam of this type {0-1} The sum of all fracbt(i) should =1.0', 'doc': '', 'type': 'Real', 'req': True, 'default': None}, 'distribution': { 'desc': 'Beam distribution object', 'doc': '', 'type': 'Distribution', 'req': True, 'default': None}, 'nbcorr': { 'desc': '# of beam correlations {0-10}', 'doc': '', 'type': 'Integer', 'req': True, 'default': 0, 'min': 0, 'max': 10}} <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> ICoolObject.check_command_params_init(self, BeamType.command_params, **kwargs) <NEW_LINE> Container.__init__(self) <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> self.__icool_setattr__(name, value) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'BeamType: \n' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[BeamType: ]' <NEW_LINE> <DEDENT> def gen_for001(self, file): <NEW_LINE> <INDENT> file.write(str(self.partnum)) <NEW_LINE> file.write(' ') <NEW_LINE> file.write(str(self.bmtype)) <NEW_LINE> file.write(' ') <NEW_LINE> file.write(str(self.fractbt)) <NEW_LINE> file.write('\n') <NEW_LINE> self.distribution.gen_for001(file) <NEW_LINE> file.write('\n') <NEW_LINE> file.write(str(self.nbcorr)) <NEW_LINE> file.write('\n') <NEW_LINE> for c in self.enclosed_commands: <NEW_LINE> <INDENT> c.gen_for001(file) | A BeamType is a:
(1) PARTNUM (I) particle number
(2) BMTYPE (I) beam type {magnitude = mass code; sign = charge}
1: e
2: μ
3: π
4: K
5: p
6: d
7: He3
8: Li7
(3) FRACBT (R) fraction of beam of this type {0-1} The sum of all fracbt(i) should =1.0
(4) Distribution
(5) NBCORR # of beam correlations {0-10}
(6) From 0-10 enclosed Correlation objects as specified by NBCORR (5) | 62598fb55fc7496912d482f0 |
class Annotator(db.Model): <NEW_LINE> <INDENT> __tablename__ = "annotator" <NEW_LINE> id = db.Column(db.Integer, autoincrement=True, primary_key=True) <NEW_LINE> name = db.Column(db.Text) <NEW_LINE> task_id = db.Column(db.Integer, db.ForeignKey('annotation_task.id'), nullable=False) <NEW_LINE> token = db.Column(db.Text) <NEW_LINE> results = db.relationship('Result', lazy='select', cascade='all', backref='annotator') <NEW_LINE> entries = db.relationship('Entry', lazy='subquery', secondary='annotator_entries') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(Annotator, self).__init__() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<ID: {}, Name: {}}>".format(self.id, self.name) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> return {c.name: getattr(self, c.name) for c in self.__table__.columns} | Annotator Class | 62598fb57047854f4633f4c5 |
class EchoReader():
    """Essentially an initialization class.

    Tails the files named in *infile* (a ';'-separated list of
    ``filename:channel`` pairs, with '\\'-escaped separators) via pyinotify,
    or — when no infile is given — relays stdin lines to IRC.
    """

    def __init__(self, infile='', associatedchannel=''):
        self.infile = infile
        self.associatedchannel = associatedchannel
        # Unique placeholder tokens used to protect escaped separators while
        # splitting on ';' and ':' below.
        self.uniques = {';': 'UNIQ_' + self.get_unique_string() + '_QINU',
                        ':': 'UNIQ_' + self.get_unique_string() + '_QINU',
                        ',': 'UNIQ_' + self.get_unique_string() + '_QINU'}
        if self.infile:
            print('Using infile')
            self.notifiers = []
            self.associations = {}
            self.files = {}
            infiles = self.escape(self.infile)
            for filechan in infiles.split(';'):
                temparr = filechan.split(':')
                filename = self.unescape(temparr[0])
                try:
                    print('Opening: ' + filename)
                    f = open(filename)
                    f.seek(0, 2)  # seek to EOF so we only report new lines
                    self.files[filename] = f
                except IOError:
                    # Best effort: remember the file so it can be retried by
                    # the watcher, but mark it as not currently open.
                    print('Failed to open file: ' + filename)
                    self.files[filename] = None
                wm = pyinotify.WatchManager()
                mask = pyinotify.IN_MODIFY | pyinotify.IN_CREATE
                wm.watch_transient_file(filename, mask, EventHandler)
                notifier = EchoNotifier(pyinotify.Notifier(wm))
                self.notifiers.append(notifier)
                if len(temparr) > 1:
                    chans = self.unescape(temparr[1])
                    self.associations[filename] = chans
            for notifier in self.notifiers:
                print('Starting notifier loop')
                notifier.start()
        else:
            # Interactive mode: relay stdin to IRC until EOF.
            while True:
                try:
                    # Fixed: raw_input() does not exist on Python 3.
                    s = input()
                    s = beautify_message(s)
                    # NOTE(review): self.bot and self.chans are never assigned
                    # in this class — presumably injected by the caller; confirm.
                    self.bot.connection.privmsg(self.chans, s.replace('\n', ''))
                except EOFError:
                    break
                except Exception:
                    pass

    def get_unique_string(self):
        """Return a random 15-letter token used as an escape placeholder."""
        # Fixed: string.letters is Python 2 only; ascii_letters is portable.
        return ''.join(random.choice(string.ascii_letters) for _ in range(15))

    def escape(self, string):
        """Replace backslash-escaped separators with unique placeholders."""
        escaped_string = re.sub(r'\\;', self.uniques[';'], string)
        escaped_string = re.sub(r'\\:', self.uniques[':'], escaped_string)
        escaped_string = re.sub(r'\\,', self.uniques[','], escaped_string)
        return escaped_string

    def unescape(self, string):
        """Restore placeholders produced by :meth:`escape` to literal separators."""
        unescaped_string = re.sub(self.uniques[';'], ';', string)
        unescaped_string = re.sub(self.uniques[':'], ':', unescaped_string)
        unescaped_string = re.sub(self.uniques[','], ',', unescaped_string)
        return unescaped_string

    def readfile(self, filename):
        """Return new content from *filename*, or None if it never opened."""
        if self.files[filename]:
            return self.files[filename].read()
        else:
            return

    def getchannels(self, filename):
        """Return the channels associated with *filename*, else the default."""
        if filename in self.associations:
            return self.associations[filename]
        else:
            # NOTE(review): ``bot`` is not defined in this class or module
            # chunk — this branch will raise NameError unless a module-level
            # ``bot`` exists; confirm against the full file.
            return bot.chans
class Log(models.Model):
    """Model for a GPS log entry."""

    team = models.ForeignKey(Team)
    lat = models.FloatField()
    lon = models.FloatField()

    def get_owner_object(self):
        # No per-user ownership for log entries.
        return None

    def __unicode__(self):
        # Fixed: the old body returned ``self.name``, but Log has no ``name``
        # field, so str() of a Log raised AttributeError. Describe the entry
        # by its actual fields instead.
        return u'%s (%s, %s)' % (self.team, self.lat, self.lon)
class MetadataBlock:
    """List of Metadata corresponding to an AnimationBlock.

    Args:
        metadata (Metadata[]): Metadata for the animations of the block.
        action_pairs (AlgoSceneActionPairs[]): action pairs in the block.
        start_time (float): block start time in seconds.
        runtime (float): total runtime of all animations in the block.
    """

    def __init__(self, metadata, action_pairs, start_time, runtime):
        self.metadata = metadata
        self.action_pairs = action_pairs
        self.start_time = start_time
        self.runtime = runtime

    def desc(self, sep='\n'):
        """Delegate to the metadata's description, joined with *sep*."""
        return self.metadata.desc(sep=sep)

    def start_position(self):
        # Positions are expressed in milliseconds.
        return self.start_time * 1000

    def end_position(self):
        return (self.start_time + self.runtime) * 1000

    def start_index(self):
        """Index of the first action pair; requires a non-empty block."""
        assert self.action_pairs
        return self.action_pairs[0].get_index()

    def end_index(self):
        """Index of the last action pair; requires a non-empty block."""
        assert self.action_pairs
        return self.action_pairs[-1].get_index()

    def can_set_runtime(self):
        """True if any contained action pair accepts a runtime override."""
        return any(pair.can_set_runtime() for pair in self.action_pairs)
class AnswerBase(object):
    """Abstract interface for answer classes of different types.

    .. attribute:: type
    .. attribute:: form_field_class

    NOTE(review): subclasses are expected to provide ``weight``, ``width``
    and ``get_width_str`` — none are defined here; confirm against subclasses.
    """

    def __init__(self, vctx, location, name, answers_desc):
        self.name = name
        self.answers_desc = answers_desc
        # Whether this answer must be supplied; defaults to optional.
        self.required = getattr(answers_desc, "required", False)

    def get_correct_answer_text(self, page_context):
        raise NotImplementedError()

    def get_correctness(self, answer):
        raise NotImplementedError()

    def get_weight(self, answer):
        """Weighted score for *answer*; unanswered scores zero."""
        if answer is None:
            return 0
        return self.weight * self.get_correctness(answer)

    def get_field_layout(self, correctness=None):
        """Build the crispy-forms Field, optionally annotated with correctness."""
        popover = dict(
            use_popover="true",
            popover_title=getattr(self.answers_desc, "hint_title", ""),
            popover_content=getattr(self.answers_desc, "hint", ""),
        )
        if correctness is None:
            return Field(self.name, style=self.get_width_str(), **popover)
        # Widen the field slightly to make room for the correctness marker.
        return Field(self.name,
                     style=self.get_width_str(self.width + 2),
                     correctness=correctness,
                     **popover)

    def get_form_field(self, page_context):
        raise NotImplementedError()
class GsInlineDiffStageOrResetBase(TextCommand, GitCommand):
    """Base class for any stage or reset operation in the inline-diff view.

    Determine the line number of the current cursor location, and use that
    to determine what diff to apply to the file (implemented in subclass).
    """

    def run(self, edit, **kwargs):
        # Defer the real work to the async worker thread.
        sublime.set_timeout_async(lambda: self.run_async(**kwargs), 0)

    def run_async(self, reset=False):
        in_cached_mode = self.view.settings().get("git_savvy.inline_diff.cached")
        savvy_settings = sublime.load_settings("GitSavvy.sublime-settings")
        ignore_ws = (
            "--ignore-whitespace"
            if savvy_settings.get("inline_diff_ignore_eol_whitespaces", True)
            else None
        )

        # Only act on a single, empty cursor (no selections).
        selections = self.view.sel()
        region = selections[0]
        if len(selections) > 1 or not region.empty():
            return

        line_number = self.view.rowcol(region.begin())[0] + 1
        diff_lines = self.get_diff_from_line(line_number, reset)

        rel_path = self.get_rel_path()
        if os.name == "nt":
            # Git expects forward slashes even on Windows.
            rel_path = rel_path.replace("\\", "/")

        header = DIFF_HEADER.format(path=rel_path)
        full_diff = header + diff_lines + "\n"

        # Reverse when un-staging or resetting; target the index unless
        # resetting the working tree from a non-cached view.
        args = [
            "apply",
            "--unidiff-zero",
            "--reverse" if (reset or in_cached_mode) else None,
            "--cached" if (not reset or in_cached_mode) else None,
            ignore_ws,
            "-",
        ]
        self.git(*args, stdin=full_diff)
        self.save_to_history(args, full_diff)
        self.view.run_command("gs_inline_diff_refresh")

    def save_to_history(self, args, full_diff):
        """Append the applied diff to the view-local undo history."""
        history = self.view.settings().get("git_savvy.inline_diff.history") or []
        history.append((args, full_diff))
        self.view.settings().set("git_savvy.inline_diff.history", history)
class ApiGetFlowResultsExportCommandHandlerRegressionTest(
        api_regression_test_lib.ApiRegressionTest):
    """Regression test for ApiGetFlowResultsExportCommandHandler."""

    api_method = "GetFlowResultsExportCommand"
    handler = flow_plugin.ApiGetFlowResultsExportCommandHandler

    def Run(self):
        client_id = self.SetupClient(0)
        flow_urn = "F:ABCDEF"
        self.Check(
            "GetFlowResultsExportCommand",
            args=flow_plugin.ApiGetFlowResultsExportCommandArgs(
                client_id=client_id, flow_id=flow_urn))
class LineCollection(Collection):
    """A collection of line segments.

    All parameters must be sequences or scalars; if scalars, they will
    be converted to sequences. The property of the ith line segment is::

        prop[i % len(props)]

    i.e., the properties cycle if the ``len`` of props is less than the
    number of segments.
    """

    _edge_default = True

    def __init__(self, segments,
                 linewidths=None,
                 colors=None,
                 antialiaseds=None,
                 linestyles='solid',
                 offsets=None,
                 transOffset=None,
                 norm=None,
                 cmap=None,
                 pickradius=5,
                 zorder=2,
                 facecolors='none',
                 **kwargs
                 ):
        # Fall back to rc defaults for any unset line property.
        if colors is None:
            colors = mpl.rcParams['lines.color']
        if linewidths is None:
            linewidths = (mpl.rcParams['lines.linewidth'],)
        if antialiaseds is None:
            antialiaseds = (mpl.rcParams['lines.antialiased'],)

        colors = mcolors.colorConverter.to_rgba_array(colors)
        Collection.__init__(
            self,
            edgecolors=colors,
            facecolors=facecolors,
            linewidths=linewidths,
            linestyles=linestyles,
            antialiaseds=antialiaseds,
            offsets=offsets,
            transOffset=transOffset,
            norm=norm,
            cmap=cmap,
            pickradius=pickradius,
            zorder=zorder,
            **kwargs)
        self.set_segments(segments)

    def set_segments(self, segments):
        """Set the vertices of the line segments; each entry is (N, 2)."""
        if segments is None:
            return
        _segments = []
        for seg in segments:
            if not np.ma.isMaskedArray(seg):
                # Fixed: np.float_ was removed in NumPy 2.0; np.float64 is
                # the dtype it aliased, so behavior is unchanged.
                seg = np.asarray(seg, np.float64)
            _segments.append(seg)

        if self._uniform_offsets is not None:
            _segments = self._add_offsets(_segments)

        self._paths = [mpath.Path(_seg) for _seg in _segments]
        self.stale = True

    set_verts = set_segments  # for compatibility with PolyCollection
    set_paths = set_segments

    def get_segments(self):
        """Return the segment vertices as a list of (N, 2) arrays."""
        segments = []
        for path in self._paths:
            vertices = [vertex for vertex, _ in path.iter_segments()]
            vertices = np.asarray(vertices)
            segments.append(vertices)
        return segments

    def _add_offsets(self, segs):
        # Apply the uniform offsets to each segment, cycling through the
        # offsets if there are fewer offsets than segments.
        offsets = self._uniform_offsets
        Nsegs = len(segs)
        Noffs = offsets.shape[0]
        if Noffs == 1:
            for i in range(Nsegs):
                segs[i] = segs[i] + i * offsets
        else:
            for i in range(Nsegs):
                io = i % Noffs
                segs[i] = segs[i] + offsets[io:io + 1]
        return segs

    def set_color(self, c):
        """Set the edgecolor(s) of the LineCollection."""
        self.set_edgecolor(c)
        self.stale = True

    def get_color(self):
        return self._edgecolors

    get_colors = get_color  # for compatibility with old versions
class UserPrivacySettingRuleRestrictChatMembers(Object):
    """A rule to restrict all members of specified basic groups and
    supergroups from doing something.

    Attributes:
        ID (:obj:`str`): ``UserPrivacySettingRuleRestrictChatMembers``

    Args:
        chat_ids (List of :obj:`int`):
            The chat identifiers; the total number of chats in all rules
            must not exceed 20.

    Returns:
        UserPrivacySettingRule

    Raises:
        :class:`telegram.Error`
    """

    ID = "userPrivacySettingRuleRestrictChatMembers"

    def __init__(self, chat_ids, **kwargs):
        self.chat_ids = chat_ids

    @staticmethod
    def read(q: dict, *args) -> "UserPrivacySettingRuleRestrictChatMembers":
        """Deserialize an instance from a TDLib response dict."""
        return UserPrivacySettingRuleRestrictChatMembers(q.get('chat_ids'))
class Bootstrap(object):
    """Defines the Marconi bootstrapper.

    The bootstrap loads up drivers per a given configuration, and
    manages their lifetimes.
    """

    def __init__(self, conf):
        self.conf = conf
        self.conf.register_opts(_GENERAL_OPTIONS)
        self.conf.register_opts(_DRIVER_OPTIONS, group=_DRIVER_GROUP)
        self.driver_conf = self.conf[_DRIVER_GROUP]

        log.setup('marconi')

        # Admin mode exposes the admin transport namespace instead of public.
        mode = 'admin' if conf.admin_mode else 'public'
        self._transport_type = 'marconi.queues.{0}.transport'.format(mode)

    @decorators.lazy_property(write=False)
    def storage(self):
        """Lazily build the (possibly sharded) storage pipeline."""
        LOG.debug(_(u'Loading storage driver'))
        if self.conf.sharding:
            LOG.debug(_(u'Storage sharding enabled'))
            storage_driver = sharding.DataDriver(self.conf)
        else:
            storage_driver = storage_utils.load_storage_driver(self.conf)
        LOG.debug(_(u'Loading storage pipeline'))
        return pipeline.DataDriver(self.conf, storage_driver)

    @decorators.lazy_property(write=False)
    def cache(self):
        """Lazily load the proxy cache driver."""
        LOG.debug(_(u'Loading Proxy Cache Driver'))
        try:
            return oslo_cache.get_cache(self.conf)
        except RuntimeError as exc:
            LOG.exception(exc)
            raise exceptions.InvalidDriver(exc)

    @decorators.lazy_property(write=False)
    def transport(self):
        """Lazily load the configured transport driver via stevedore."""
        transport_name = self.driver_conf.transport
        LOG.debug(_(u'Loading transport driver: %s'), transport_name)
        try:
            mgr = driver.DriverManager(self._transport_type,
                                       transport_name,
                                       invoke_on_load=True,
                                       invoke_args=[self.conf,
                                                    self.storage,
                                                    self.cache])
            return mgr.driver
        except RuntimeError as exc:
            LOG.exception(exc)
            raise exceptions.InvalidDriver(exc)

    def run(self):
        """Start listening on the configured transport."""
        self.transport.listen()
class SignUpCfsServiceRequest(AbstractModel):
    """SignUpCfsService request structure.

    This request carries no parameters of its own.
    """

    pass
class ExponentStyle(Style):
    """Exponent color scheme, based on the Tomorrow theme."""

    default_style = ''
    background_color = BACKGROUND
    highlight_color = SELECTION

    # Token -> color mapping; empty string means "inherit the default style".
    styles = {
        Comment: COMMENT,
        Text: FOREGROUND,
        Keyword: BLUE,
        Keyword.Type: YELLOW,
        Operator.Word: '',
        String: GREEN,
        String.Char: FOREGROUND,
        Name.Builtin: RED,
        Name.Variable: '',
        Name.Variable.Instance: RED,
        Name.Constant: GREEN,
        Name.Class: YELLOW,
        Name.Function: BLUE,
        Name.Namespace: YELLOW,
        Name.Exception: RED,
        Name.Tag: PURPLE,
        Name.Other: FOREGROUND,
        Name.Decorator: AQUA,
        Generic.Deleted: RED,
        Generic.Inserted: GREEN,
        Generic.Heading: "bold " + FOREGROUND,
        Generic.Subheading: "bold " + AQUA,
        Generic.Prompt: "bold " + COMMENT,
    }
class getFilePath_result(object):
    """Thrift result struct for ``getFilePath``.

    Attributes:
     - success
     - rnfEx
     - svEx
    """

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),
        (1, TType.STRUCT, 'rnfEx', (ResourceNotFoundException, ResourceNotFoundException.thrift_spec), None, ),
        (2, TType.STRUCT, 'svEx', (ServerLogicException, ServerLogicException.thrift_spec), None, ),
    )

    def __init__(self, success=None, rnfEx=None, svEx=None,):
        self.success = success
        self.rnfEx = rnfEx
        self.svEx = svEx

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = (iprot.readString().decode('utf-8')
                                    if sys.version_info[0] == 2
                                    else iprot.readString())
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.rnfEx = ResourceNotFoundException()
                    self.rnfEx.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.svEx = ServerLogicException()
                    self.svEx.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(
                oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFilePath_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8')
                              if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.rnfEx is not None:
            oprot.writeFieldBegin('rnfEx', TType.STRUCT, 1)
            self.rnfEx.write(oprot)
            oprot.writeFieldEnd()
        if self.svEx is not None:
            oprot.writeFieldBegin('svEx', TType.STRUCT, 2)
            self.svEx.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class Game(GameBase):
    """A representation of a Boys Basketball game."""

    home_team = models.ForeignKey(School, related_name="boys_basketball_home_team", null=True)
    away_team = models.ForeignKey(School, related_name="boys_basketball_away_team", null=True)
    season = models.ForeignKey(Season, related_name="boys_basketball_game_season")

    # Per-period scoring for the home team; totals are recomputed on save().
    home_quarter_1_score = models.IntegerField(default=0, blank=True)
    home_quarter_2_score = models.IntegerField(default=0, blank=True)
    home_quarter_3_score = models.IntegerField(default=0, blank=True)
    home_quarter_4_score = models.IntegerField(default=0, blank=True)
    home_overtime_1_score = models.IntegerField(default=0, blank=True)
    home_overtime_2_score = models.IntegerField(default=0, blank=True)
    home_overtime_3_score = models.IntegerField(default=0, blank=True)
    home_total_score = models.IntegerField(default=0, blank=True)

    # Per-period scoring for the away team.
    away_quarter_1_score = models.IntegerField(default=0, blank=True)
    away_quarter_2_score = models.IntegerField(default=0, blank=True)
    away_quarter_3_score = models.IntegerField(default=0, blank=True)
    away_quarter_4_score = models.IntegerField(default=0, blank=True)
    away_overtime_1_score = models.IntegerField(default=0, blank=True)
    away_overtime_2_score = models.IntegerField(default=0, blank=True)
    away_overtime_3_score = models.IntegerField(default=0, blank=True)
    away_total_score = models.IntegerField(default=0, blank=True)

    # Fixed: ``max_length`` is not a valid IntegerField option (it only
    # applies to CharField) and recent Django versions reject it.
    week = models.IntegerField(default=0)
    game_of_the_week = models.BooleanField(default=False)

    def __unicode__(self):
        return u'Week %s: %s at %s' % (self.week, self.away_team, self.home_team)

    def save(self, *args, **kwargs):
        """Populate the slug if missing and recompute totals before saving."""
        if not self.slug:
            self.slug = slugify(self.__unicode__())
        self.home_total_score = sum((
            self.home_quarter_1_score, self.home_quarter_2_score,
            self.home_quarter_3_score, self.home_quarter_4_score,
            self.home_overtime_1_score, self.home_overtime_2_score,
            self.home_overtime_3_score))
        self.away_total_score = sum((
            self.away_quarter_1_score, self.away_quarter_2_score,
            self.away_quarter_3_score, self.away_quarter_4_score,
            self.away_overtime_1_score, self.away_overtime_2_score,
            self.away_overtime_3_score))
        super(Game, self).save(*args, **kwargs)
class MultipleFormView(TemplateView):
    """View mixin that handles multiple forms / formsets.

    After the successful data is inserted ``self.process_forms`` is called.
    """

    # NOTE(review): class-level mutable dicts are shared across subclasses
    # unless overridden; form_instances is never read in this class.
    form_classes = {}
    form_instances = {}

    def get_context_data(self, **kwargs):
        context = super(MultipleFormView, self).get_context_data(**kwargs)
        forms_initialized = {"forms": {}}
        for name, obj in self.form_classes.items():
            obj.setdefault("kwargs", {})
            args = obj["args"] if "args" in obj else None
            if "instance" in obj:
                forms_initialized["forms"][name] = obj["form"](
                    args, prefix=name, instance=obj["instance"], **obj["kwargs"])
            else:
                forms_initialized["forms"][name] = obj["form"](
                    args, prefix=name, **obj["kwargs"])
        return merge_dicts(context, forms_initialized)

    def post(self, request, **kwargs):
        # Re-bind every form with the POST payload.
        forms_initialized = {"forms": {}}
        for name, obj in self.form_classes.items():
            obj.setdefault("kwargs", {})
            if "args" in obj:
                # When positional args are given, instance is passed
                # explicitly (possibly None) to keep the original semantics.
                forms_initialized["forms"][name] = obj["form"](
                    obj["args"], prefix=name, data=request.POST,
                    instance=obj["instance"] if "instance" in obj else None,
                    **obj["kwargs"])
            elif "instance" in obj:
                forms_initialized["forms"][name] = obj["form"](
                    prefix=name, data=request.POST,
                    instance=obj["instance"], **obj["kwargs"])
            else:
                forms_initialized["forms"][name] = obj["form"](
                    prefix=name, data=request.POST, **obj["kwargs"])

        # All forms must validate for the submission to be accepted.
        valid = True
        for form_group in forms_initialized.values():
            if not valid:
                break
            if type(form_group) is dict:
                valid = all(form_group[key].is_valid() for key in form_group)
            else:
                valid = form_group.is_valid()

        if valid:
            return self.process_forms(forms_initialized)
        context = merge_dicts(self.get_context_data(), forms_initialized)
        return self.render_to_response(context)

    def process_forms(self, form_instances):
        """Hook called with the validated forms; must be overridden."""
        raise NotImplementedError
class MetricPlugin(metaclass=ABCMeta):
    """This class provides a basic structure for all metric plugins.

    Fixed: the Python 2 idiom ``__metaclass__ = ABCMeta`` is inert on
    Python 3, so ``@abstractmethod`` was never enforced; the class now
    declares its metaclass with the Python 3 syntax.
    """

    # Subclasses override NAME to identify the metric they collect.
    NAME = ''

    def __init__(self):
        self.name = self.NAME
        self._value = 0

    @property
    def value(self):
        """Collect and return the current metric value."""
        self._value = self._collect_metric()
        return self._value

    @abstractmethod
    def _collect_metric(self):
        """Return the metric's current value; implemented by subclasses."""
        pass
class TEST25(PMEM2_INTEGRATION):
    """test deep flush with range out of map"""

    # Runs the pmem2 example binary case exercising a range before the map.
    test_case = "test_deep_flush_e_range_before"
class Zone:
    """A zone dictionary describes a successful zone fetch."""

    # Defaults used until a JSON payload is parsed.
    zone_id = None
    sync_token = None
    atomic = False

    def __init__(self, json=None):
        if json is None:
            return
        self.zone_id = ZoneID(parse(json, 'zoneID'))
        self.sync_token = parse(json, 'syncToken')
        self.atomic = parse(json, 'atomic')

    def json(self):
        """Serialize back to a JSON-compatible dict, or None if incomplete.

        NOTE(review): calling this before zone_id is populated raises
        AttributeError (None has no ``json``) — confirm callers always parse
        a payload first.
        """
        zone_id = self.zone_id.json()
        if not all([zone_id]):
            return None
        payload = {'zoneID': zone_id}
        if self.sync_token is not None:
            payload['syncToken'] = self.sync_token
        payload['atomic'] = self.atomic if self.atomic is not None else False
        return payload
class TaskRerun(BaseClass):
    """rerun

    Re-queues tasks flagged in the ``task_rerun`` collection and
    re-initializes them (and, optionally, their follow-up tasks).
    """

    def __init__(self):
        super(TaskRerun, self).__init__()

    def do(self):
        """Process all pending rerun requests; returns False when suspended."""
        # Only operate inside the 07:00-21:59 window.
        hour = int(time.strftime("%H", time.localtime()))
        if hour <= 6 or hour >= 22:
            self.log.error("The current time process suspended")
            return False
        for doc in self.mgdb.task_rerun.find({"status": 1}):
            # Mark the request as consumed before re-running it.
            self.mgdb.task_rerun.update({"_id": ObjectId(doc.get("_id"))},
                                        {"$set": {"status": 0}})
            self.__rerun(doc)
            self.log.info("rerun task:%s, task_day:%s"
                          % (doc.get("task_key"), doc.get("task_day")))
        return True

    def __rerun(self, d):
        """Re-initialize a single rerun request document."""
        if not d:
            return False
        # Default to yesterday when no task_day was recorded.
        ddate = time.strftime("%Y%m%d",
                              time.localtime(int(time.time()) - 86400))
        tdate = d.get("task_day", ddate)
        self.__upFailure(d.get("task_key"), tdate)
        obj = TaskInit(tdate, d.get("task_key"), tdate)
        obj.initTasks()
        if int(d.get("follow")) == 1:
            self.__initTask(d.get("task_key"), tdate)
        return True

    def __initTask(self, tkey, tday):
        """Recursively re-initialize every follow-up task of *tkey*."""
        inited = {}
        for task in TaskLibrary().getFollows(tkey):
            self.log.info("recursion rerun task:%s, task_day:%s"
                          % (task.get("task_key"), tday))
            if task.get("task_key") in inited:
                continue
            self.__upFailure(task.get("task_key"), tday)
            TaskInit(tday, task.get("task_key"), tday).initTasks()
            self.__initTask(task.get("task_key"), tday)
            inited[task.get("task_key")] = 1
        return True

    def __upFailure(self, tkey, tday):
        # Demote any previously "finished" history rows so they re-run.
        return self.mgdb.task_history.update_many(
            {"task_key": tkey, 'task_day': tday, 'status': "finished"},
            {"$set": {"status": "bad"}})
class BatchedCalls(object):
    """Wrap a sequence of (func, args, kwargs) tuples as a single callable."""

    def __init__(self, iterator_slice, backend_and_jobs, reducer_callback=None,
                 pickle_cache=None):
        self.items = list(iterator_slice)
        self._size = len(self.items)
        self._reducer_callback = reducer_callback
        # backend_and_jobs may be a (backend, n_jobs) pair or a bare backend.
        if isinstance(backend_and_jobs, tuple):
            self._backend, self._n_jobs = backend_and_jobs
        else:
            self._backend = backend_and_jobs
            self._n_jobs = None
        self._pickle_cache = {} if pickle_cache is None else pickle_cache

    def __call__(self):
        """Run every wrapped call under the configured backend."""
        with parallel_backend(self._backend, n_jobs=self._n_jobs):
            results = []
            for func, call_args, call_kwargs in self.items:
                results.append(func(*call_args, **call_kwargs))
            return results

    def __reduce__(self):
        # Give the reducer a chance to run before pickling (e.g. memmapping).
        if self._reducer_callback is not None:
            self._reducer_callback()
        return (
            BatchedCalls,
            (self.items, (self._backend, self._n_jobs), None, self._pickle_cache),
        )

    def __len__(self):
        return self._size
class FlipTest(TestCase):
    """TestCase class for testing flip.py"""

    def setUp(self):
        # Fake API Gateway domain plus the stage rotation used by flip.
        self.api_base_domain = 'api.fake-host.com'
        self.rotation = ['red', 'black', 'turquoise']
        self.current_stage = 'black'
        self.next_stage = 'turquoise'

    @pytest.mark.skip(reason="moto does not yet support AWS:ApiGateway:GetBasePathMapping")
    def test_get_live_stage(self):
        pass

    @pytest.mark.skip(reason="moto does not yet support AWS:ApiGateway:UpdateBasePathMapping")
    def test_update_base_path_mapping(self):
        pass
class ItemData(RESTPayload, ImageData):
    """Data class for :class:`auraxium.ps2.Item`.

    This class mirrors the payload data returned by the API, you may
    use its attributes as keys in filters or queries.
    """

    # Identity
    item_id: int
    item_type_id: Optional[int] = None
    item_category_id: Optional[int] = None

    # Abilities granted by the item
    activatable_ability_id: Optional[int] = None
    passive_ability_id: Optional[int] = None

    is_vehicle_weapon: bool

    # Localized display strings
    name: LocaleData
    description: Optional[LocaleData] = None

    faction_id: Optional[int] = None
    max_stack_size: int
    skill_set_id: Optional[int] = None
    is_default_attachment: bool
class MockDropTable:
    """Mocking a drop table MySQL query."""

    def __init__(self, name):
        # Pre-render the statement once; calling the mock just returns it.
        self.query = f'DROP TABLE IF EXISTS {name};'

    def __call__(self):
        return self.query
class PayByTxnError(Exception):
    """Custom error class: thrown when the 'PAY BY' transaction
    cannot be found."""
class Input(Placeholder): <NEW_LINE> <INDENT> def __init__(self, function_handle, index=0, order=0, exponent=1): <NEW_LINE> <INDENT> if not isinstance(function_handle, collections.Callable): <NEW_LINE> <INDENT> raise TypeError("callable object has to be provided.") <NEW_LINE> <DEDENT> if not isinstance(index, int) or index < 0: <NEW_LINE> <INDENT> raise TypeError("index must be a positive integer.") <NEW_LINE> <DEDENT> if not isinstance(exponent, Number): <NEW_LINE> <INDENT> raise TypeError("exponent must be a number") <NEW_LINE> <DEDENT> if exponent != 1: <NEW_LINE> <INDENT> raise ValueError("Providing exponents that differ from 1 is no " "longer supported.") <NEW_LINE> <DEDENT> super().__init__(dict(input=function_handle, index=index, exponent=exponent), order=(order, 0)) | Class that works as a placeholder for an input of the system.
Args:
function_handle (callable): Handle that will be called by the simulation
unit.
index (int): If the system's input is vectorial, specify the element to
be used.
order (int): temporal derivative order of this term
(See :py:class:`.Placeholder`).
exponent (numbers.Number): See :py:class:`.FieldVariable`.
Note:
if *order* is nonzero, the callable is expected to return the temporal
derivatives of the input signal by returning an array of
``len(order)+1``. | 62598fb65fdd1c0f98e5e07b |
class DatabaseAppsRouter(object): <NEW_LINE> <INDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label in DATABASE_MAPPING: <NEW_LINE> <INDENT> return DATABASE_MAPPING[model._meta.app_label] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label in DATABASE_MAPPING: <NEW_LINE> <INDENT> return DATABASE_MAPPING[model._meta.app_label] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> db_obj1 = DATABASE_MAPPING.get(obj1._meta.app_label) <NEW_LINE> db_obj2 = DATABASE_MAPPING.get(obj2._meta.app_label) <NEW_LINE> if db_obj1 and db_obj2: <NEW_LINE> <INDENT> if db_obj1 == db_obj2: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def allow_syncdb(self, db, model): <NEW_LINE> <INDENT> if db in DATABASE_MAPPING.values(): <NEW_LINE> <INDENT> return DATABASE_MAPPING.get(model._meta.app_label) == db <NEW_LINE> <DEDENT> elif model._meta.app_label in DATABASE_MAPPING: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def allow_migrate(self, db, app_label, model=None, **hints): <NEW_LINE> <INDENT> if db in DATABASE_MAPPING.values(): <NEW_LINE> <INDENT> return DATABASE_MAPPING.get(app_label) == db <NEW_LINE> <DEDENT> elif app_label in DATABASE_MAPPING: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return None | A router to control all database operations on models for different
databases.
In case an app is not set in settings.APP2DATABASE_MAPPING, the router
will fallback to the `default` database.
Settings example:
APP2DATABASE_MAPPING = {'app1': 'db1', 'app2': 'db2'} | 62598fb6e1aae11d1e7ce89a |
class Question: <NEW_LINE> <INDENT> def __init__(self, answer: str, question: str, options: list, init_text: str): <NEW_LINE> <INDENT> self.answer = answer <NEW_LINE> self.question = question <NEW_LINE> self.options = options <NEW_LINE> self.init_text = init_text <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{}'.format(self.__dict__) | This class represents the Question object.
Args:
answer: str, answer for the question
question: str, question (init_text with replaced similar words
options: list, list of 4 possible options for a given question
init_text: str, original text | 62598fb6097d151d1a2c111c |
class Solution: <NEW_LINE> <INDENT> def countCornerRectangles(self, grid): <NEW_LINE> <INDENT> if not grid: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> m,n = len(grid),len(grid[0]) <NEW_LINE> res = 0 <NEW_LINE> for i in range(m-1): <NEW_LINE> <INDENT> for j in range(n-1): <NEW_LINE> <INDENT> if grid[i][j] == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for x in range(i+1,m): <NEW_LINE> <INDENT> for y in range(j+1,n): <NEW_LINE> <INDENT> if not grid[x][y]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if grid[i][y] and grid[x][j]: <NEW_LINE> <INDENT> res += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return res | @param grid: the grid
@return: the number of corner rectangles | 62598fb601c39578d7f12e66 |
class Status: <NEW_LINE> <INDENT> SUCCESS = 'SUCCESS' <NEW_LINE> FAILED = 'FAILED' | CloudFormation custom resource status constants
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/crpg-ref-responses.html | 62598fb6442bda511e95c546 |
class Node: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.left = self.right = self.parent = None <NEW_LINE> self.data = data <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'{self.data}' | Node class with a pointer to its parent | 62598fb64f6381625f199537 |
class Generator(object): <NEW_LINE> <INDENT> def __init__(self, base_path): <NEW_LINE> <INDENT> self.base_path = base_path <NEW_LINE> if not os.path.exists(os.path.join(self.base_path, "slide01.svg")): <NEW_LINE> <INDENT> raise ValueError("Directory does not appear to contain slides") <NEW_LINE> <DEDENT> self.type_paths = { "pdf": os.path.join(self.base_path, "pdfs"), "png": os.path.join(self.base_path, "pngs"), } <NEW_LINE> <DEDENT> def make_singles(self, type): <NEW_LINE> <INDENT> assert type in ["png", "pdf"] <NEW_LINE> if not os.path.isdir(self.type_paths[type]): <NEW_LINE> <INDENT> os.mkdir(self.type_paths[type]) <NEW_LINE> <DEDENT> seen = set() <NEW_LINE> for filename in os.listdir(self.base_path): <NEW_LINE> <INDENT> path = os.path.join(self.base_path, filename) <NEW_LINE> if path.endswith(".svg"): <NEW_LINE> <INDENT> seen.add(filename) <NEW_LINE> destination = os.path.join( self.type_paths[type], "%s.%s" % (filename, type), ) <NEW_LINE> if not os.path.exists(destination) or os.stat(destination).st_mtime <= os.stat(path).st_mtime: <NEW_LINE> <INDENT> print("Rendering %s to %s" % (filename, type.upper())) <NEW_LINE> if type == "png": <NEW_LINE> <INDENT> subprocess.call(["inkscape", "-z", filename, "-w", "1280", "-e", destination]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subprocess.call(["inkscape", "-z", filename, "-A", destination]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for filename in os.listdir(self.type_paths[type]): <NEW_LINE> <INDENT> original_name = filename[:-4] <NEW_LINE> path = os.path.join(self.type_paths[type], filename) <NEW_LINE> if original_name not in seen: <NEW_LINE> <INDENT> print("Removing outdated %s" % filename) <NEW_LINE> os.unlink(path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def make_combined_pdf(self): <NEW_LINE> <INDENT> print("Making combined PDF") <NEW_LINE> destination = os.path.join(self.base_path, "slides.pdf") <NEW_LINE> subprocess.call([ "pdftk", ] + glob.glob("%s/*.svg.pdf" % self.type_paths["pdf"]) + [ "cat", 
"output", destination, ]) <NEW_LINE> <DEDENT> def build(self, png=True, pdf=True): <NEW_LINE> <INDENT> if png: <NEW_LINE> <INDENT> self.make_singles("png") <NEW_LINE> <DEDENT> if pdf: <NEW_LINE> <INDENT> self.make_singles("pdf") <NEW_LINE> self.make_combined_pdf() | Generates PDF and PNG output from a set of slide SVGs. | 62598fb67047854f4633f4c8 |
class exists_args(object): <NEW_LINE> <INDENT> def __init__(self, id=None,): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.id = iprot.readI64() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('exists_args') <NEW_LINE> if self.id is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('id', TType.I64, 1) <NEW_LINE> oprot.writeI64(self.id) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | 
Attributes:
- id | 62598fb6796e427e5384e881 |
class VirtualMotorCenterAndGap(Device): <NEW_LINE> <INDENT> xc = Cpt(VirtualCenter, '-Ax:X}') <NEW_LINE> yc = Cpt(VirtualCenter, '-Ax:Y}') <NEW_LINE> xg = Cpt(VirtualGap, '-Ax:X}') <NEW_LINE> yg = Cpt(VirtualGap, '-Ax:Y}') | Center and gap with virtual motors | 62598fb692d797404e388bda |
class Section(MutableMapping): <NEW_LINE> <INDENT> def __init__(self, namespace, *args, **kwargs): <NEW_LINE> <INDENT> super(Section, self).__init__(*args, **kwargs) <NEW_LINE> self.namespace = namespace <NEW_LINE> self.__storage__ = dict() <NEW_LINE> <DEDENT> def __setitem__(self, name, value): <NEW_LINE> <INDENT> self.__storage__[name] = str(value) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return self.__storage__[name] <NEW_LINE> <DEDENT> def __delitem__(self, name): <NEW_LINE> <INDENT> del self.__storage__[name] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.__storage__) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.__storage__) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{0} {1}>".format(self.__class__.__name__, str(dict(self))) <NEW_LINE> <DEDENT> def iteritems(self): <NEW_LINE> <INDENT> for key in self.__storage__.keys(): <NEW_LINE> <INDENT> yield key, self[key] <NEW_LINE> <DEDENT> <DEDENT> items = lambda s: list(s.iteritems()) | Representation of INI section. | 62598fb6bd1bec0571e15139 |
class StandardTableaux(SemistandardTableaux): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def __classcall_private__(cls, *args, **kwargs): <NEW_LINE> <INDENT> from sage.combinat.partition import _Partitions, Partition <NEW_LINE> from sage.combinat.skew_partition import SkewPartitions <NEW_LINE> if args: <NEW_LINE> <INDENT> n = args[0] <NEW_LINE> <DEDENT> elif 'n' in kwargs: <NEW_LINE> <INDENT> n = kwargs[n] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n = None <NEW_LINE> <DEDENT> if n is None: <NEW_LINE> <INDENT> return StandardTableaux_all() <NEW_LINE> <DEDENT> elif n in _Partitions: <NEW_LINE> <INDENT> return StandardTableaux_shape(Partition(n)) <NEW_LINE> <DEDENT> elif n in SkewPartitions(): <NEW_LINE> <INDENT> from sage.combinat.skew_tableau import StandardSkewTableaux <NEW_LINE> return StandardSkewTableaux(n) <NEW_LINE> <DEDENT> if not isinstance(n, (int, Integer)) or n < 0: <NEW_LINE> <INDENT> raise ValueError( "The argument must be a non-negative integer or a partition." ) <NEW_LINE> <DEDENT> return StandardTableaux_size(n) <NEW_LINE> <DEDENT> Element = StandardTableau <NEW_LINE> def __contains__(self, x): <NEW_LINE> <INDENT> if isinstance(x, StandardTableau): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif Tableaux.__contains__(self, x): <NEW_LINE> <INDENT> flatx = sorted(sum((list(row) for row in x),[])) <NEW_LINE> return flatx == list(range(1,len(flatx)+1)) and (len(x)==0 or (all(row[i]<row[i+1] for row in x for i in range(len(row)-1)) and all(x[r][c]<x[r+1][c] for r in range(len(x)-1) for c in range(len(x[r+1])) ) )) <NEW_LINE> <DEDENT> return False | A factory for the various classes of standard tableaux.
INPUT:
- Either a non-negative integer (possibly specified with the keyword ``n``)
or a partition.
OUTPUT:
- With no argument, the class of all standard tableaux
- With a non-negative integer argument, ``n``, the class of all standard
tableaux of size ``n``
- With a partition argument, the class of all standard tableaux of that
shape.
A standard tableau is a semistandard tableaux which contains each of the
entries from 1 to ``n`` exactly once.
All classes of standard tableaux are iterable.
EXAMPLES::
sage: ST = StandardTableaux(3); ST
Standard tableaux of size 3
sage: ST.first()
[[1, 2, 3]]
sage: ST.last()
[[1], [2], [3]]
sage: ST.cardinality()
4
sage: ST.list()
[[[1, 2, 3]], [[1, 3], [2]], [[1, 2], [3]], [[1], [2], [3]]]
.. SEEALSO::
- :class:`Tableaux`
- :class:`Tableau`
- :class:`SemistandardTableaux`
- :class:`SemistandardTableau`
- :class:`StandardTableau`
- :class:`StandardSkewTableaux`
TESTS::
sage: StandardTableaux()([])
[]
sage: ST = StandardTableaux([2,2]); ST
Standard tableaux of shape [2, 2]
sage: ST.first()
[[1, 3], [2, 4]]
sage: ST.last()
[[1, 2], [3, 4]]
sage: ST.cardinality()
2
sage: ST.list()
[[[1, 3], [2, 4]], [[1, 2], [3, 4]]]
sage: StandardTableau([[1,2,3],[4,5]]).residue_sequence(3).standard_tableaux()
Standard tableaux with 3-residue sequence (0,1,2,2,0) and multicharge (0) | 62598fb6aad79263cf42e8c2 |
class WidthOneDense(keras.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, input_dim, units, activation=None, trainable=True): <NEW_LINE> <INDENT> super(WidthOneDense, self).__init__() <NEW_LINE> dim = int(np.sqrt(input_dim)) <NEW_LINE> mask = tf.cast(tf.linalg.band_part(tf.ones([dim, dim]),0,-1), dtype=tf.bool) <NEW_LINE> upper_tri = tf.boolean_mask(tf.eye(dim), mask) <NEW_LINE> w_init = tf.reshape(tf.concat([upper_tri, tf.zeros(input_dim - len(upper_tri))], axis=0), [-1, 1]) <NEW_LINE> self.w = tf.Variable( initial_value=w_init, trainable=trainable, ) <NEW_LINE> self.activation = activations.get(activation) <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> return self.activation(tf.matmul(inputs, self.w)) | Usage: layer = WidthOneDense(n**2, 1)
where n is the number of sections for different ks
n = 5 for k = 1
n = 15 for k = 2
n = 35 for k = 3
This layer is used directly after Bihomogeneous_k layers to sum over all
the terms in the previous layer. The weights are initialized so that the h
matrix is a real identity matrix. The training does not work if they are randomly
initialized. | 62598fb65fc7496912d482f2 |
class Ensemble(object): <NEW_LINE> <INDENT> def __init__(self, name, verbose=False): <NEW_LINE> <INDENT> self.Name = name <NEW_LINE> self.Type = 'custom' <NEW_LINE> self.Run = self.init_run(verbose) <NEW_LINE> self.TCString = self.Run.TCString <NEW_LINE> self.Data = self.load_data() <NEW_LINE> self.Runs = self.init_runs() <NEW_LINE> self.N = len(self.Runs) <NEW_LINE> self.DUT = self.init_dut() <NEW_LINE> self.DUTType = self.Runs[0].Type <NEW_LINE> self.PBar = PBar(counter=True, t='min') <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self.Runs[item] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.Name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'{self.__class__.__name__} {self.Name} with {self.N} runs' <NEW_LINE> <DEDENT> def init_run(self, verbose): <NEW_LINE> <INDENT> return Run(load_tree=False, verbose=verbose) <NEW_LINE> <DEDENT> def init_runs(self): <NEW_LINE> <INDENT> return [Run(data[0], data[2], load_tree=False, verbose=False) for data in self.Data] <NEW_LINE> <DEDENT> def load_data(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def save_dir(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def res_dir(self): <NEW_LINE> <INDENT> return self.TCString <NEW_LINE> <DEDENT> @property <NEW_LINE> def tcs(self): <NEW_LINE> <INDENT> return [d[2] for d in self.Data] <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw_files_exist(self): <NEW_LINE> <INDENT> return all([file_exists(run.Converter.RawFilePath) for run in self.Runs]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def final_files_exist(self): <NEW_LINE> <INDENT> return all([file_exists(run.RootFilePath) for run in self.Runs]) <NEW_LINE> <DEDENT> def init_dut(self): <NEW_LINE> <INDENT> return self.Runs[0].DUTs[self.Data[0][1] - 1] <NEW_LINE> <DEDENT> def get_runs(self): <NEW_LINE> <INDENT> return array([run.Number for run in self.Runs]) <NEW_LINE> <DEDENT> def get_fluxes(self): <NEW_LINE> <INDENT> return 
array([run.Flux for run in self.Runs]) <NEW_LINE> <DEDENT> def get_biases(self): <NEW_LINE> <INDENT> return array([self.Runs[i].load_biases()[d[1] - 1] for i, d in enumerate(self.Data)]) <NEW_LINE> <DEDENT> def get_durations(self): <NEW_LINE> <INDENT> return array([run.Duration.total_seconds() for run in self.Runs]) <NEW_LINE> <DEDENT> def get_start_times(self): <NEW_LINE> <INDENT> return array([run.LogStart for run in self.Runs]) <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.Name <NEW_LINE> <DEDENT> def get_irradiation(self): <NEW_LINE> <INDENT> return self.DUT.get_irradiation(self.TCString) <NEW_LINE> <DEDENT> def get_dut_nrs(self): <NEW_LINE> <INDENT> return array([d[1] for d in self.Data]) <NEW_LINE> <DEDENT> @update_pbar <NEW_LINE> def copy_raw_file(self, run: Run): <NEW_LINE> <INDENT> run.Converter.copy_raw_file(out=False) <NEW_LINE> <DEDENT> def copy_raw_files(self): <NEW_LINE> <INDENT> self.PBar.start(self.N) <NEW_LINE> [self.copy_raw_file(run) for run in self.Runs] | General enseble class for runs. | 62598fb64a966d76dd5eefc5 |
class FeedTagsTestCase(TestCase): <NEW_LINE> <INDENT> @patch.object(TagsFeeder, "feed") <NEW_LINE> @patch("dakara_feeder.__main__.load_feeder_securely") <NEW_LINE> @patch("dakara_feeder.__main__.load_config_securely") <NEW_LINE> def test_feed( self, mocked_load_config, mocked_load_feeder, mocked_feed, ): <NEW_LINE> <INDENT> config = { "kara_folder": Path("path") / "to" / "folder", "server": { "url": "www.example.com", "login": "login", "password": "password", }, } <NEW_LINE> mocked_load_config.return_value = config <NEW_LINE> feed_tags( Namespace( debug=False, file=Path("path") / "to" / "tags" / "file", progress=True ) ) <NEW_LINE> mocked_load_config.assert_called_with(False) <NEW_LINE> mocked_load_feeder.assert_called_with(ANY) <NEW_LINE> mocked_feed.assert_called_with() | Test the feed tags subcommand. | 62598fb6a79ad1619776a15a |
class EntityAvailabilityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> AVAILABLE = "Available" <NEW_LINE> LIMITED = "Limited" <NEW_LINE> RENAMING = "Renaming" <NEW_LINE> RESTORING = "Restoring" <NEW_LINE> UNKNOWN = "Unknown" | Entity availability status.
| 62598fb6460517430c4320d4 |
class LoggingMixin(): <NEW_LINE> <INDENT> def __init__(self, message, project_name): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.goggingfile_location = BASE_DIR + "logs/" + project_name + ".log" <NEW_LINE> self.logger = logging.getLogger('django-helper') <NEW_LINE> self.logger.setLevel(logging.DEBUG) <NEW_LINE> self.django_helper = logging.FileHandler(self.goggingfile_location) <NEW_LINE> self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') <NEW_LINE> self.django_helper.setFormatter(self.formatter) <NEW_LINE> self.logger.addHandler(self.django_helper) <NEW_LINE> <DEDENT> @property <NEW_LINE> def addlog(self): <NEW_LINE> <INDENT> self.logger = logging.getLogger('django-helper') <NEW_LINE> try: <NEW_LINE> <INDENT> self.logger.debug(self.message) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.debug(self.message) <NEW_LINE> <DEDENT> return True | Provides full logging of requests and responses | 62598fb6a05bb46b3848a959 |
class MainMenuController(ConsoleController): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> entries = [TextMenuEntry("Start", self.startGame), TextMenuEntry("Options", self.runOptions), TextMenuEntry("Exit", self.stopRunning)] <NEW_LINE> self.menu = Menu(entries) <NEW_LINE> screen = MainMenuScreen(self.menu) <NEW_LINE> cmds = {KAO_UP:self.menu.up, KAO_DOWN:self.menu.down, ENDL:self.menu.enter} <NEW_LINE> ConsoleController.__init__(self, screen, commands=cmds) <NEW_LINE> <DEDENT> def startGame(self, entry): <NEW_LINE> <INDENT> self.runController(TrainerMenuController()) <NEW_LINE> <DEDENT> def runOptions(self, entry): <NEW_LINE> <INDENT> self.runController(OptionsMenuController()) | Controller for the main menu | 62598fb6bf627c535bcb158f |
class DatasetProcessor(object): <NEW_LINE> <INDENT> def __init__(self, input_directory, output_file=None, max_size=None, parallel=True): <NEW_LINE> <INDENT> self.file_queue = self.paths_to_process(input_directory, max_size) <NEW_LINE> self.output = self.output_filename(input_directory, output_file) <NEW_LINE> self.parallel = parallel <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def paths_to_process(input_directory, max_size): <NEW_LINE> <INDENT> search_string = os.path.join(input_directory, '*.pcap') <NEW_LINE> paths = list(sorted(glob.glob(search_string))) <NEW_LINE> if max_size: <NEW_LINE> <INDENT> size = int(max_size) * 1024 ** 2 <NEW_LINE> paths = list(filter(lambda p: os.path.getsize(p) < max_size, paths)) <NEW_LINE> <DEDENT> return paths <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def output_filename(input_directory, output_file): <NEW_LINE> <INDENT> if output_file: <NEW_LINE> <INDENT> return output_file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> directory_name = os.path.basename(input_directory) <NEW_LINE> if not directory_name: <NEW_LINE> <INDENT> directory_name = os.path.dirname(input_directory) <NEW_LINE> directory_name = os.path.basename(directory_name) <NEW_LINE> <DEDENT> return '{0}.csv'.format(directory_name) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def process_pcap(path, path_index, files_count): <NEW_LINE> <INDENT> print('[{1}/{2}] Processing: {0}'.format(path, path_index, files_count)) <NEW_LINE> try: <NEW_LINE> <INDENT> f = Flow.from_path(path) <NEW_LINE> if not f.data.empty: <NEW_LINE> <INDENT> return f.features <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Warning: Flow '{0}' is empty".format(path)) <NEW_LINE> <DEDENT> <DEDENT> except (Exception, AttributeError) as e: <NEW_LINE> <INDENT> print("Data extraction from '{0}' failed: {1}".format(path, str(e))) <NEW_LINE> <DEDENT> <DEDENT> def process(self): <NEW_LINE> <INDENT> futures = [] <NEW_LINE> queue_length = len(self.file_queue) <NEW_LINE> with 
pebble.ProcessPool(max_workers=config.NUM_JOBS) as pool: <NEW_LINE> <INDENT> for counter, path in enumerate(self.file_queue): <NEW_LINE> <INDENT> future = pool.schedule( self.process_pcap, (path, counter + 1, queue_length), timeout=1800, ) <NEW_LINE> futures.append(future) <NEW_LINE> <DEDENT> <DEDENT> raw_data = [] <NEW_LINE> for future in futures: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = future.result() <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> raw_data.append(result) <NEW_LINE> <DEDENT> <DEDENT> except TimeoutError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> data = pandas.DataFrame(raw_data) <NEW_LINE> data.to_csv(self.output, header=True, index=False, mode='w') | A tool to for creation of data matrix from the annotated PCAP files. | 62598fb6be8e80087fbbf156 |
class Port(NetworkNotificationBase, plugin_base.NonMetricNotificationBase): <NEW_LINE> <INDENT> resource_name = 'port' | Listen for Neutron notifications.
Handle port.{create.end|update.*|exists} notifications from neutron. | 62598fb6a8370b77170f04cc |
class MI_Platform(object): <NEW_LINE> <INDENT> def __init__(self, plt=None): <NEW_LINE> <INDENT> self.instruments = [] <NEW_LINE> if plt is None: <NEW_LINE> <INDENT> self.identifier = None <NEW_LINE> self.description = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val = plt.find(util.nspath_eval('gmi:identifier', namespaces)) <NEW_LINE> self.identifier = util.testXMLValue(val) <NEW_LINE> val = plt.find(util.nspath_eval('gmi:description', namespaces)) <NEW_LINE> self.description = util.testXMLValue(val) <NEW_LINE> for i in plt.findall(util.nspath_eval('gmi:instrument/gmi:MI_Instrument', namespaces)): <NEW_LINE> <INDENT> self.instruments.append(MI_Instrument(i)) | Process gmi:MI_Platform | 62598fb64a966d76dd5eefc6 |
class Radio(BasePageWidget): <NEW_LINE> <INDENT> def __init__(self, owner, locatordict): <NEW_LINE> <INDENT> self._values = locatordict <NEW_LINE> super(Radio,self).__init__(owner, None) <NEW_LINE> <DEDENT> def _updateLocators(self): <NEW_LINE> <INDENT> self._locvalues = {} <NEW_LINE> for (textkey,locatorid) in self._values.items(): <NEW_LINE> <INDENT> self._locvalues[textkey] = self.owner.locators[locatorid] <NEW_LINE> <DEDENT> <DEDENT> def _checkLocators(self,widgets=None,cltype=''): <NEW_LINE> <INDENT> header = "\n===== missing locators in %s =====" % (self.owner.__class__.__name__) <NEW_LINE> missing = '' <NEW_LINE> for (key,loctext) in self._locvalues.items(): <NEW_LINE> <INDENT> if self.owner.is_present(locator=loctext) == False: <NEW_LINE> <INDENT> locatorid = self._values[key] <NEW_LINE> missing += "\n%s: \"%s\"" % (locatorid,loctext) <NEW_LINE> <DEDENT> <DEDENT> if missing != '': <NEW_LINE> <INDENT> raise LocatorException(header+missing) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> for (key,loctext) in self._locvalues.items(): <NEW_LINE> <INDENT> e = self.wait_until_present(locator=loctext) <NEW_LINE> try: <NEW_LINE> <INDENT> c = e.get_attribute('checked') <NEW_LINE> if c == 'checked': <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> <DEDENT> except NoSuchAttributeException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, val): <NEW_LINE> <INDENT> loctext = self._locvalues[val] <NEW_LINE> self.logger.info("clicking %s" % (loctext)) <NEW_LINE> e = self.wait_until_visible(locator=loctext) <NEW_LINE> e.click() | this object has no locatorid or locator,
all the information is in locatordict | 62598fb6cc0a2c111447b0fe |
class Reg8Bit(VarReg): <NEW_LINE> <INDENT> def __init__(self, reg): <NEW_LINE> <INDENT> super(Reg8Bit, self).__init__(reg, 1) | An 8-bit register | 62598fb6fff4ab517ebcd8d5 |
class TaskflowAPIAuthentication(BaseAuthentication): <NEW_LINE> <INDENT> def authenticate(self, request): <NEW_LINE> <INDENT> taskflow_secret = None <NEW_LINE> if request.method == 'POST' and 'sodar_secret' in request.POST: <NEW_LINE> <INDENT> taskflow_secret = request.POST['sodar_secret'] <NEW_LINE> <DEDENT> elif request.method == 'GET': <NEW_LINE> <INDENT> taskflow_secret = request.GET.get('sodar_secret', None) <NEW_LINE> <DEDENT> if ( not hasattr(settings, 'TASKFLOW_SODAR_SECRET') or taskflow_secret != settings.TASKFLOW_SODAR_SECRET ): <NEW_LINE> <INDENT> raise PermissionDenied('Not authorized') | Taskflow API authentication handling | 62598fb623849d37ff8511a1 |
class MucRoomUser: <NEW_LINE> <INDENT> def __init__(self,presence_or_user_or_jid): <NEW_LINE> <INDENT> if isinstance(presence_or_user_or_jid,MucRoomUser): <NEW_LINE> <INDENT> self.presence=presence_or_user_or_jid.presence <NEW_LINE> self.role=presence_or_user_or_jid.role <NEW_LINE> self.affiliation=presence_or_user_or_jid.affiliation <NEW_LINE> self.room_jid=presence_or_user_or_jid.room_jid <NEW_LINE> self.real_jid=presence_or_user_or_jid.real_jid <NEW_LINE> self.nick=presence_or_user_or_jid.nick <NEW_LINE> self.new_nick=None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.affiliation="none" <NEW_LINE> self.presence=None <NEW_LINE> self.real_jid=None <NEW_LINE> self.new_nick=None <NEW_LINE> if isinstance(presence_or_user_or_jid,JID): <NEW_LINE> <INDENT> self.nick=presence_or_user_or_jid.resource <NEW_LINE> self.room_jid=presence_or_user_or_jid <NEW_LINE> self.role="none" <NEW_LINE> <DEDENT> elif isinstance(presence_or_user_or_jid,Presence): <NEW_LINE> <INDENT> self.nick=None <NEW_LINE> self.room_jid=None <NEW_LINE> self.role="participant" <NEW_LINE> self.update_presence(presence_or_user_or_jid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Bad argument type for MucRoomUser constructor") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_presence(self,presence): <NEW_LINE> <INDENT> self.presence=MucPresence(presence) <NEW_LINE> t=presence.get_type() <NEW_LINE> if t=="unavailable": <NEW_LINE> <INDENT> self.role="none" <NEW_LINE> self.affiliation="none" <NEW_LINE> <DEDENT> self.room_jid=self.presence.get_from() <NEW_LINE> self.nick=self.room_jid.resource <NEW_LINE> mc=self.presence.get_muc_child() <NEW_LINE> if isinstance(mc,MucUserX): <NEW_LINE> <INDENT> items=mc.get_items() <NEW_LINE> for item in items: <NEW_LINE> <INDENT> if not isinstance(item,MucItem): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if item.role: <NEW_LINE> <INDENT> self.role=item.role <NEW_LINE> <DEDENT> if item.affiliation: <NEW_LINE> <INDENT> 
self.affiliation=item.affiliation <NEW_LINE> <DEDENT> if item.jid: <NEW_LINE> <INDENT> self.real_jid=item.jid <NEW_LINE> <DEDENT> if item.nick: <NEW_LINE> <INDENT> self.new_nick=item.nick <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def same_as(self,other): <NEW_LINE> <INDENT> return self.room_jid==other.room_jid | Describes a user of a MUC room.
The attributes of this object should not be changed directly.
:Ivariables:
- `presence`: last presence stanza received for the user.
- `role`: user's role.
- `affiliation`: user's affiliation.
- `room_jid`: user's room jid.
- `real_jid`: user's real jid or None if not available.
- `nick`: user's nick (resource part of `room_jid`)
:Types:
- `presence`: `MucPresence`
- `role`: `str`
- `affiliation`: `str`
- `room_jid`: `JID`
- `real_jid`: `JID`
- `nick`: `unicode` | 62598fb6f548e778e596b693 |
class Admin(object): <NEW_LINE> <INDENT> def __init__(self, app=None, name=None, url=None, subdomain=None, index_view=None, translations_path=None, endpoint=None, static_url_path=None, base_template=None, template_mode=None): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.translations_path = translations_path <NEW_LINE> self._views = [] <NEW_LINE> self._menu = [] <NEW_LINE> self._menu_categories = dict() <NEW_LINE> self._menu_links = [] <NEW_LINE> if name is None: <NEW_LINE> <INDENT> name = 'Admin' <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.index_view = index_view or AdminIndexView(endpoint=endpoint, url=url) <NEW_LINE> self.endpoint = endpoint or self.index_view.endpoint <NEW_LINE> self.url = url or self.index_view.url <NEW_LINE> self.static_url_path = static_url_path <NEW_LINE> self.subdomain = subdomain <NEW_LINE> self.base_template = base_template or 'admin/base.html' <NEW_LINE> self.template_mode = template_mode or 'bootstrap2' <NEW_LINE> self.add_view(self.index_view) <NEW_LINE> if app is not None: <NEW_LINE> <INDENT> self._init_extension() <NEW_LINE> <DEDENT> <DEDENT> def add_view(self, view): <NEW_LINE> <INDENT> self._views.append(view) <NEW_LINE> if self.app is not None: <NEW_LINE> <INDENT> self.app.register_blueprint(view.create_blueprint(self)) <NEW_LINE> <DEDENT> self._add_view_to_menu(view) <NEW_LINE> <DEDENT> def add_link(self, link): <NEW_LINE> <INDENT> if link.category: <NEW_LINE> <INDENT> self._add_menu_item(link, link.category) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._menu_links.append(link) <NEW_LINE> <DEDENT> <DEDENT> def _add_menu_item(self, menu_item, target_category): <NEW_LINE> <INDENT> if target_category: <NEW_LINE> <INDENT> category = self._menu_categories.get(target_category) <NEW_LINE> if category is None: <NEW_LINE> <INDENT> category = MenuCategory(target_category) <NEW_LINE> self._menu_categories[target_category] = category <NEW_LINE> self._menu.append(category) <NEW_LINE> <DEDENT> category.add_child(menu_item) 
<NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._menu.append(menu_item) <NEW_LINE> <DEDENT> <DEDENT> def _add_view_to_menu(self, view): <NEW_LINE> <INDENT> self._add_menu_item(MenuView(view.name, view), view.category) <NEW_LINE> <DEDENT> def init_app(self, app): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self._init_extension() <NEW_LINE> for view in self._views: <NEW_LINE> <INDENT> app.register_blueprint(view.create_blueprint(self)) <NEW_LINE> <DEDENT> <DEDENT> def _init_extension(self): <NEW_LINE> <INDENT> if not hasattr(self.app, 'extensions'): <NEW_LINE> <INDENT> self.app.extensions = dict() <NEW_LINE> <DEDENT> admins = self.app.extensions.get('admin', []) <NEW_LINE> for p in admins: <NEW_LINE> <INDENT> if p.endpoint == self.endpoint: <NEW_LINE> <INDENT> raise Exception(u'Cannot have two Admin() instances with same' u' endpoint name.') <NEW_LINE> <DEDENT> if p.url == self.url and p.subdomain == self.subdomain: <NEW_LINE> <INDENT> raise Exception(u'Cannot assign two Admin() instances with same' u' URL and subdomain to the same application.') <NEW_LINE> <DEDENT> <DEDENT> admins.append(self) <NEW_LINE> self.app.extensions['admin'] = admins <NEW_LINE> <DEDENT> def menu(self): <NEW_LINE> <INDENT> return self._menu <NEW_LINE> <DEDENT> def menu_links(self): <NEW_LINE> <INDENT> return self._menu_links | Collection of the admin views. Also manages menu structure. | 62598fb6097d151d1a2c111e |
class ListUsersViewSet(viewsets.GenericViewSet, mixins.ListModelMixin): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return self.queryset | List users | 62598fb67d43ff248742747a |
class HelperTests(TestCase): <NEW_LINE> <INDENT> def _load_fixture(self, fixture_name): <NEW_LINE> <INDENT> self._ids = {} <NEW_LINE> for col in ['Project', 'VCSSystem', 'File', 'Commit', 'FileAction', 'CodeEntityState', 'Hunk']: <NEW_LINE> <INDENT> module = importlib.import_module('visualSHARK.models') <NEW_LINE> obj = getattr(module, col) <NEW_LINE> obj.drop_collection() <NEW_LINE> <DEDENT> fixture = json.load(open('visualSHARK/fixtures/{}.json'.format(fixture_name), 'r')) <NEW_LINE> for col in fixture['collections']: <NEW_LINE> <INDENT> module = importlib.import_module('visualSHARK.models') <NEW_LINE> obj = getattr(module, col['model']) <NEW_LINE> for document in col['documents']: <NEW_LINE> <INDENT> tosave = document.copy() <NEW_LINE> had_id_mapping = False <NEW_LINE> for k, v in document.items(): <NEW_LINE> <INDENT> if k == 'id': <NEW_LINE> <INDENT> self._ids[document['id']] = None <NEW_LINE> del tosave['id'] <NEW_LINE> had_id_mapping = True <NEW_LINE> <DEDENT> if type(v) not in [int] and v.startswith('{') and v.endswith('}'): <NEW_LINE> <INDENT> tosave[k] = self._ids[v.replace('{', '').replace('}', '')] <NEW_LINE> <DEDENT> <DEDENT> r = obj(**tosave) <NEW_LINE> r.save() <NEW_LINE> if had_id_mapping: <NEW_LINE> <INDENT> self._ids[document['id']] = r.id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def test_entity_matching(self): <NEW_LINE> <INDENT> self._load_fixture('entity_matching') <NEW_LINE> l = Label() <NEW_LINE> entities = l.generate_affected_entities(Commit.objects.get(id=self._ids['commit2']), self._ids['fileaction2']) <NEW_LINE> self.assertEqual(len(entities), 0) | This tests the helpers. | 62598fb699cbb53fe6830fc5 |
class ScansTSV: <NEW_LINE> <INDENT> def __init__(self, scans_tsv_file, acquisition_file, verbose): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> self.scans_tsv_file = scans_tsv_file <NEW_LINE> self.acquisition_file = acquisition_file <NEW_LINE> self.tsv_entries = utilities.read_tsv_file(self.scans_tsv_file) <NEW_LINE> self.tsv_headers = self.tsv_entries[0] <NEW_LINE> self.acquisition_data = self.find_acquisition_data() <NEW_LINE> <DEDENT> def find_acquisition_data(self): <NEW_LINE> <INDENT> for entry in self.tsv_entries: <NEW_LINE> <INDENT> if os.path.basename(self.acquisition_file) in entry['filename']: <NEW_LINE> <INDENT> return entry <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_acquisition_time(self): <NEW_LINE> <INDENT> if not self.acquisition_data: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if 'acq_time' in self.acquisition_data: <NEW_LINE> <INDENT> if isinstance(self.tsv_entries, list): <NEW_LINE> <INDENT> acq_time_list = [ele for ele in self.tsv_entries if ele['filename'] in self.acquisition_file] <NEW_LINE> if len(acq_time_list) == 1: <NEW_LINE> <INDENT> eeg_acq_time = acq_time_list[0]['acq_time'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('More than one or no acquisition time has been found for ', self.acquisition_file) <NEW_LINE> exit() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> eeg_acq_time = self.acquisition_data['acq_time'] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> eeg_acq_time = parse(eeg_acq_time) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> message = "ERROR: could not convert acquisition time '" + eeg_acq_time + "' to datetime: " + str(e) <NEW_LINE> print(message) <NEW_LINE> exit(lib.exitcode.PROGRAM_EXECUTION_FAILURE) <NEW_LINE> <DEDENT> return eeg_acq_time <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_age_at_scan(self): <NEW_LINE> <INDENT> age_header_list = ['age', 'age_at_scan', 'age_acq_time'] <NEW_LINE> for header_name in age_header_list: <NEW_LINE> <INDENT> if 
header_name in self.tsv_headers and self.acquisition_data: <NEW_LINE> <INDENT> return self.acquisition_data[header_name].strip() <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def copy_scans_tsv_file_to_loris_bids_dir(self, bids_sub_id, loris_bids_root_dir, data_dir): <NEW_LINE> <INDENT> file = self.scans_tsv_file <NEW_LINE> copy = loris_bids_root_dir + '/sub-' + bids_sub_id + '/' + os.path.basename(self.scans_tsv_file) <NEW_LINE> utilities.copy_file(file, copy, self.verbose) <NEW_LINE> relative_path = copy.replace(data_dir, "") <NEW_LINE> return relative_path | This class reads the BIDS sub-XXX_scans.tsv file that includes acquisition level information
such as scan date or age at scan...
:Example:
from lib.scanstsv import ScansTSV
scan_info = ScansTSV(scans_tsv_file, acquisition_file)
acquisition_time = scan_info.get_acquisition_time()
age_at_scan = scan_info.get_age_at_scan | 62598fb67b180e01f3e490c8 |
class Seasson(models.Model): <NEW_LINE> <INDENT> name = models.CharField(u'Name', max_length=50) <NEW_LINE> from_date = models.DateField(u'From date') <NEW_LINE> to_date = models.DateField(u'To date', null=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name | A seasson for the plate. | 62598fb667a9b606de5460bf |
class Fim(AFNICommand): <NEW_LINE> <INDENT> _cmd = '3dfim+' <NEW_LINE> input_spec = FimInputSpec <NEW_LINE> output_spec = AFNICommandOutputSpec | Program to calculate the cross-correlation of an ideal reference
waveform with the measured FMRI time series for each voxel.
For complete details, see the `3dfim+ Documentation.
<https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dfim+.html>`_
Examples
========
>>> from nipype.interfaces import afni
>>> fim = afni.Fim()
>>> fim.inputs.in_file = 'functional.nii'
>>> fim.inputs.ideal_file= 'seed.1D'
>>> fim.inputs.out_file = 'functional_corr.nii'
>>> fim.inputs.out = 'Correlation'
>>> fim.inputs.fim_thr = 0.0009
>>> fim.cmdline # doctest: +ALLOW_UNICODE
'3dfim+ -input functional.nii -ideal_file seed.1D -fim_thr 0.000900 -out Correlation -bucket functional_corr.nii'
>>> res = fim.run() # doctest: +SKIP | 62598fb67047854f4633f4ca |
class UserCreationForm(forms.ModelForm): <NEW_LINE> <INDENT> password1 = forms.CharField(label='Password', widget=forms.PasswordInput) <NEW_LINE> password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = ClanUser <NEW_LINE> fields = ('discord_id', 'discord_nickname') <NEW_LINE> <DEDENT> def clean_password2(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get("password1") <NEW_LINE> password2 = self.cleaned_data.get("password2") <NEW_LINE> if password1 and password2 and password1 != password2: <NEW_LINE> <INDENT> raise forms.ValidationError("Passwords don't match") <NEW_LINE> <DEDENT> return password2 <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(UserCreationForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password1"]) <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user | A form for creating new users. Includes all the required
fields, plus a repeated password. | 62598fb6a219f33f346c68f4 |
class CompositeOutputDevice(SourceMixin, CompositeDevice): <NEW_LINE> <INDENT> def on(self): <NEW_LINE> <INDENT> for device in self: <NEW_LINE> <INDENT> if isinstance(device, (OutputDevice, CompositeOutputDevice)): <NEW_LINE> <INDENT> device.on() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def off(self): <NEW_LINE> <INDENT> for device in self: <NEW_LINE> <INDENT> if isinstance(device, (OutputDevice, CompositeOutputDevice)): <NEW_LINE> <INDENT> device.off() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def toggle(self): <NEW_LINE> <INDENT> for device in self: <NEW_LINE> <INDENT> if isinstance(device, (OutputDevice, CompositeOutputDevice)): <NEW_LINE> <INDENT> device.toggle() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return super(CompositeOutputDevice, self).value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> for device, v in zip(self, value): <NEW_LINE> <INDENT> if isinstance(device, (OutputDevice, CompositeOutputDevice)): <NEW_LINE> <INDENT> device.value = v | Extends :class:`CompositeDevice` with :meth:`on`, :meth:`off`, and
:meth:`toggle` methods for controlling subordinate output devices. Also
extends :attr:`value` to be writeable.
:param list _order:
If specified, this is the order of named items specified by keyword
arguments (to ensure that the :attr:`value` tuple is constructed with a
specific order). All keyword arguments *must* be included in the
collection. If omitted, an alphabetically sorted order will be selected
for keyword arguments. | 62598fb6ff9c53063f51a73d |
class ChargePreviewMetricsCmrr(object): <NEW_LINE> <INDENT> swagger_types = { 'discount': 'float', 'discount_delta': 'float', 'regular': 'float', 'regular_delta': 'float' } <NEW_LINE> attribute_map = { 'discount': 'discount', 'discount_delta': 'discountDelta', 'regular': 'regular', 'regular_delta': 'regularDelta' } <NEW_LINE> def __init__(self, discount=None, discount_delta=None, regular=None, regular_delta=None): <NEW_LINE> <INDENT> self._discount = None <NEW_LINE> self._discount_delta = None <NEW_LINE> self._regular = None <NEW_LINE> self._regular_delta = None <NEW_LINE> self.discriminator = None <NEW_LINE> if discount is not None: <NEW_LINE> <INDENT> self.discount = discount <NEW_LINE> <DEDENT> if discount_delta is not None: <NEW_LINE> <INDENT> self.discount_delta = discount_delta <NEW_LINE> <DEDENT> if regular is not None: <NEW_LINE> <INDENT> self.regular = regular <NEW_LINE> <DEDENT> if regular_delta is not None: <NEW_LINE> <INDENT> self.regular_delta = regular_delta <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def discount(self): <NEW_LINE> <INDENT> return self._discount <NEW_LINE> <DEDENT> @discount.setter <NEW_LINE> def discount(self, discount): <NEW_LINE> <INDENT> self._discount = discount <NEW_LINE> <DEDENT> @property <NEW_LINE> def discount_delta(self): <NEW_LINE> <INDENT> return self._discount_delta <NEW_LINE> <DEDENT> @discount_delta.setter <NEW_LINE> def discount_delta(self, discount_delta): <NEW_LINE> <INDENT> self._discount_delta = discount_delta <NEW_LINE> <DEDENT> @property <NEW_LINE> def regular(self): <NEW_LINE> <INDENT> return self._regular <NEW_LINE> <DEDENT> @regular.setter <NEW_LINE> def regular(self, regular): <NEW_LINE> <INDENT> self._regular = regular <NEW_LINE> <DEDENT> @property <NEW_LINE> def regular_delta(self): <NEW_LINE> <INDENT> return self._regular_delta <NEW_LINE> <DEDENT> @regular_delta.setter <NEW_LINE> def regular_delta(self, regular_delta): <NEW_LINE> <INDENT> self._regular_delta = regular_delta <NEW_LINE> <DEDENT> def 
to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ChargePreviewMetricsCmrr): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb68e7ae83300ee918f |
class Xlsatoms(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "http://cgit.freedesktop.org/xorg/app/xlsatoms" <NEW_LINE> url = "https://www.x.org/archive/individual/app/xlsatoms-1.1.2.tar.gz" <NEW_LINE> version('1.1.2', '1f32e2b8c2135b5867291517848cb396') <NEW_LINE> depends_on('libxcb', when='@1.1:') <NEW_LINE> depends_on('libx11', when='@:1.0') <NEW_LINE> depends_on('pkg-config@0.9.0:', type='build') <NEW_LINE> depends_on('util-macros', type='build') | xlsatoms lists the interned atoms defined on an X11 server. | 62598fb6dc8b845886d536a7 |
class Undeletable(models.Model): <NEW_LINE> <INDENT> date_deleted = models.DateTimeField(blank=True) <NEW_LINE> objects = gw_managers.UndeletableManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> base_manager_name = 'objects' <NEW_LINE> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> self.date_deleted = timezone.now() <NEW_LINE> return | Replaces deletion of this model with updating of date_deleted.
NOTE: The instances can be normally deleted via Managers. | 62598fb65fc7496912d482f3 |
class FakeDoer(Doer): <NEW_LINE> <INDENT> def run(self, file_name): <NEW_LINE> <INDENT> return 'Hello' | A fake doer to test with as we can't directly instantiate the
:class:`.Doer` class. | 62598fb691f36d47f2230f20 |
class MQTTPublisher(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def publish_temperature(serial): <NEW_LINE> <INDENT> if is_blank(serial): <NEW_LINE> <INDENT> raise IllegalArgumentException("serial is required") <NEW_LINE> <DEDENT> mqtt_id = ini_config.get("MQTT", "MQTT_CLIENT_ID") <NEW_LINE> user = ini_config.get("MQTT", "MQTT_USERNAME") <NEW_LINE> pw = ini_config.get("MQTT", "MQTT_PASSWORD") <NEW_LINE> host = ini_config.get("MQTT", "MQTT_HOST") <NEW_LINE> port = ini_config.getint("MQTT", "MQTT_PORT") <NEW_LINE> topic = ini_config.get("MQTT", "MQTT_TOPIC") <NEW_LINE> mqttc = mqtt.Client(client_id=mqtt_id, clean_session=True, protocol=mqtt.MQTTv31) <NEW_LINE> mqttc.username_pw_set(user, password=pw) <NEW_LINE> sensor_temperature = DS18B20Sensor(serial) <NEW_LINE> readings = sensor_temperature.get_measurement() <NEW_LINE> message = OrderedDict() <NEW_LINE> message["value"] = readings.get_value() <NEW_LINE> message["unit"] = readings.get_unit() <NEW_LINE> message["utc"] = readings.get_utc() <NEW_LINE> json_message = json.dumps(message, indent=2, sort_keys=True) <NEW_LINE> auth = OrderedDict() <NEW_LINE> auth["username"] = user <NEW_LINE> auth["password"] = pw <NEW_LINE> logging.debug("Publishing to MQTT Broker: " "host [{0}], port [{1}], client id [{2}], " "user [{3}], sensor serial [{4}]" .format(host, port, mqtt_id, user, serial)) <NEW_LINE> publish.single(topic, payload=json_message, qos=0, retain=False, hostname=host, port=port, client_id=mqtt_id, keepalive=20, auth=auth) <NEW_LINE> logging.debug("Message [{0}] was published correctly: " "host [{1}], port [{2}], client id [{3}], " "user [{4}], sensor serial [{5}]" .format(message, host, port, mqtt_id, user, serial)) <NEW_LINE> return | MQTT message publisher.
| 62598fb64c3428357761a3aa |
class BadParentTracker(AxonException): <NEW_LINE> <INDENT> pass | Parent tracker is bad (not actually a tracker?)
Possible causes:
- creating a coordinatingassistanttracker specifying a parent that is not
also a coordinatingassistanttracker? | 62598fb6377c676e912f6de6 |
class DatatypeException(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> message = "Error in common.datatypes: " + message <NEW_LINE> super().__init__(message) | Exception for errors relating to the datatypes in common.datatypes | 62598fb6d7e4931a7ef3c184 |
class ZhihuspiderItem(Item): <NEW_LINE> <INDENT> huati = Field() <NEW_LINE> question = Field() <NEW_LINE> author = Field() <NEW_LINE> content = Field() <NEW_LINE> voteup_count = Field() <NEW_LINE> answer_id = Field() <NEW_LINE> comment = Field() | son_href = Field()
son_name = Field()
son_content = Field()
topic_name = Field()
title = Field()
name = Field()
question_id = Field()
topic = Field() | 62598fb67d847024c075c4ab |
class OperationDefinition(Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplayDefinition'}, } <NEW_LINE> def __init__(self, *, name: str=None, display=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(OperationDefinition, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.display = display | The definition of a container registry operation.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: The display information for the container registry
operation.
:type display:
~azure.mgmt.containerregistry.v2017_03_01.models.OperationDisplayDefinition | 62598fb65fcc89381b2661c4 |
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> if isinstance(size, int) is False: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> if size < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> self.__size = size | . | 62598fb61f5feb6acb162d0f |
class AutosysAny(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for key, val in kwargs.items(): <NEW_LINE> <INDENT> exec("self." + key + '=val') | Creates an Autosys Job object with any and only the parameters specified by the user.
Args:
Any number of parameters that togeather constitute an AutosysJob object
Returns:
Instance of AutosysAny object | 62598fb61b99ca400228f5a8 |
class TestWithCallOnMatches(GetRunbooksToExecTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> self.target['runbooks']['book1']['actions'].update({ 'flop': {'call_on': ['OK', 'WARNING', 'CRITICAL'], 'trigger': 1, 'frequency': 0} }) <NEW_LINE> self.target['runbooks']['book1']['status']['OK'] = 1 <NEW_LINE> run_these = get_runbooks_to_exec(self.item, self.target, self.logger) <NEW_LINE> self.assertTrue(run_these == {'book1': Set(['flop'])}) | Test when there are matching actions | 62598fb6a79ad1619776a15c |
class percent_not_minority_households_within_walking_distance(Variable): <NEW_LINE> <INDENT> _return_type="float32" <NEW_LINE> number_of_not_minority_households_within_walking_distance = "number_of_not_minority_households_within_walking_distance" <NEW_LINE> number_of_households_within_walking_distance ="number_of_households_within_walking_distance" <NEW_LINE> def dependencies(self): <NEW_LINE> <INDENT> return [my_attribute_label(self.number_of_not_minority_households_within_walking_distance), my_attribute_label(self.number_of_households_within_walking_distance)] <NEW_LINE> <DEDENT> def compute(self, dataset_pool): <NEW_LINE> <INDENT> hh_wwd = self.get_dataset().get_attribute(self.number_of_households_within_walking_distance) <NEW_LINE> return 100.0*ma.filled(self.get_dataset().get_attribute(self.number_of_not_minority_households_within_walking_distance)/ ma.masked_where(hh_wwd == 0, hh_wwd.astype(float32)),0.0) | Percent of households within the walking radius that are designated as not minority.
[100 * (sum (over c in cell.entity_within_walking_radius) of (count of households hh placed in c such that is not minority)) /
(sum (over c in cell.entity_within_walking_radius) of (count of households hh placed in c))] | 62598fb616aa5153ce4005f4 |
class Cdecimal(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://www.bytereef.org/mpdecimal/" <NEW_LINE> url = "https://www.bytereef.org/software/mpdecimal/releases/cdecimal-2.3.tar.gz" <NEW_LINE> version('2.3', sha256='d737cbe43ed1f6ad9874fb86c3db1e9bbe20c0c750868fde5be3f379ade83d8b') <NEW_LINE> patch('darwin_install_name.patch', when='platform=darwin') | cdecimal is a fast drop-in replacement for the decimal module in
Python's standard library. | 62598fb6be383301e02538eb |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.