code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class MessageProxyModel(QSortFilterProxyModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(MessageProxyModel, self).__init__() <NEW_LINE> self.setDynamicSortFilter(True) <NEW_LINE> self.setFilterRole(Qt.UserRole) <NEW_LINE> self.setSortCaseSensitivity(Qt.CaseInsensitive) <NEW_LINE> self.setSortRole(Qt.UserRole) <NEW_LINE> self._exclude_filters = FilterCollection() <NEW_LINE> self._highlight_filters = FilterCollection() <NEW_LINE> self._show_highlighted_only = False <NEW_LINE> self._source_model = None <NEW_LINE> <DEDENT> def setSourceModel(self, source_model): <NEW_LINE> <INDENT> super(MessageProxyModel, self).setSourceModel(source_model) <NEW_LINE> self._source_model = self.sourceModel() <NEW_LINE> <DEDENT> def filterAcceptsRow(self, sourcerow, sourceparent): <NEW_LINE> <INDENT> msg = self._source_model._messages[sourcerow] <NEW_LINE> if self._exclude_filters.test_message(msg): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> highlighted = True <NEW_LINE> if self._highlight_filters.count_enabled_filters() > 0: <NEW_LINE> <INDENT> highlighted = self._highlight_filters.test_message(msg, default=True) <NEW_LINE> <DEDENT> if self._show_highlighted_only and not highlighted: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> msg.highlighted = highlighted <NEW_LINE> return True <NEW_LINE> <DEDENT> def data(self, proxy_index, role=None): <NEW_LINE> <INDENT> index = self.mapToSource(proxy_index) <NEW_LINE> if role == Qt.ForegroundRole: <NEW_LINE> <INDENT> msg = self._source_model._messages[index.row()] <NEW_LINE> if not msg.highlighted: <NEW_LINE> <INDENT> return QBrush(Qt.gray) <NEW_LINE> <DEDENT> <DEDENT> return self._source_model.data(index, role) <NEW_LINE> <DEDENT> def handle_exclude_filters_changed(self): <NEW_LINE> <INDENT> self.invalidateFilter() <NEW_LINE> <DEDENT> def handle_highlight_filters_changed(self): <NEW_LINE> <INDENT> if self._show_highlighted_only: <NEW_LINE> <INDENT> self.invalidateFilter() <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> self.invalidateFilter() <NEW_LINE> self.dataChanged.emit(self.index(0, 0), self.index(self.rowCount() - 1, self.columnCount() - 1), []) <NEW_LINE> <DEDENT> <DEDENT> def add_exclude_filter(self, newfilter): <NEW_LINE> <INDENT> self._exclude_filters.append(newfilter) <NEW_LINE> <DEDENT> def add_highlight_filter(self, newfilter): <NEW_LINE> <INDENT> self._highlight_filters.append(newfilter) <NEW_LINE> <DEDENT> def delete_exclude_filter(self, index): <NEW_LINE> <INDENT> del self._exclude_filters[index] <NEW_LINE> self.handle_exclude_filters_changed() <NEW_LINE> <DEDENT> def delete_highlight_filter(self, index): <NEW_LINE> <INDENT> del self._highlight_filters[index] <NEW_LINE> self.handle_highlight_filters_changed() <NEW_LINE> <DEDENT> def set_show_highlighted_only(self, show_highlighted_only): <NEW_LINE> <INDENT> self._show_highlighted_only = show_highlighted_only <NEW_LINE> self.invalidateFilter() | Provides sorting and filtering capabilities for the MessageDataModel.
Filtering is based on a collection of exclude and highlight filters. | 62598fb7cc0a2c111447b128 |
class SimulationSpace(goos.Model): <NEW_LINE> <INDENT> type = goos.ModelNameType("simulation_space") <NEW_LINE> mesh = goos.types.PolyModelType(MeshModel) <NEW_LINE> sim_region = goos.types.ModelType(goos.Box3d) <NEW_LINE> pml_thickness = goos.types.ListType(goos.types.IntType(), min_size=6, max_size=6) <NEW_LINE> reflection_symmetry = goos.types.ListType(goos.types.IntType(), min_size=3, max_size=3) | Defines a simulation space.
A simulation space contains information regarding the permittivity
distributions but not the fields, i.e. no information regarding sources
and wavelengths.
Attributes:
name: Name to identify the simulation space. Must be unique.
mesh: Meshing information. This describes how the simulation region
should be meshed.
sim_region: Rectangular prism simulation domain.
reflection_symmetry: Three element list with symmetry values in every axis
- 0: no symmetry
- 1: electric anti-symmetry around the center
- 2: electric symmetry around the center | 62598fb797e22403b383b022 |
class ParsingContext: <NEW_LINE> <INDENT> parenthesis_count = 0 <NEW_LINE> curly_bracket_count = 0 <NEW_LINE> square_bracket_count = 0 <NEW_LINE> in_single_quote = False <NEW_LINE> in_double_quote = False <NEW_LINE> def __init__(self, line): <NEW_LINE> <INDENT> self.line = line <NEW_LINE> <DEDENT> def in_global_expression(self): <NEW_LINE> <INDENT> return (self.parenthesis_count == 0 and self.curly_bracket_count == 0 and self.square_bracket_count == 0 and not self.in_single_quote and not self.in_double_quote) <NEW_LINE> <DEDENT> def count_special_chars(self, char, prev_char): <NEW_LINE> <INDENT> if char == '(': <NEW_LINE> <INDENT> self.parenthesis_count += 1 <NEW_LINE> <DEDENT> elif char == ')': <NEW_LINE> <INDENT> self.parenthesis_count -= 1 <NEW_LINE> if self.parenthesis_count < 0: <NEW_LINE> <INDENT> raise RMarkdownOptionParsingError( 'Option line "{}" has too many ' 'closing parentheses'.format(self.line)) <NEW_LINE> <DEDENT> <DEDENT> elif char == '{': <NEW_LINE> <INDENT> self.curly_bracket_count += 1 <NEW_LINE> <DEDENT> elif char == '}': <NEW_LINE> <INDENT> self.curly_bracket_count -= 1 <NEW_LINE> if self.curly_bracket_count < 0: <NEW_LINE> <INDENT> raise RMarkdownOptionParsingError( 'Option line "{}" has too many ' 'closing curly brackets'.format(self.line)) <NEW_LINE> <DEDENT> <DEDENT> elif char == '[': <NEW_LINE> <INDENT> self.square_bracket_count += 1 <NEW_LINE> <DEDENT> elif char == ']': <NEW_LINE> <INDENT> self.square_bracket_count -= 1 <NEW_LINE> if self.square_bracket_count < 0: <NEW_LINE> <INDENT> raise RMarkdownOptionParsingError( 'Option line "{}" has too many ' 'closing square brackets'.format(self.line)) <NEW_LINE> <DEDENT> <DEDENT> elif char == "'" and prev_char != '\\': <NEW_LINE> <INDENT> self.in_single_quote = not self.in_single_quote <NEW_LINE> <DEDENT> elif char == '"' and prev_char != '\\': <NEW_LINE> <INDENT> self.in_double_quote = not self.in_double_quote | Class for determining where to split rmd options | 62598fb7ec188e330fdf89ac |
class Rename(Command): <NEW_LINE> <INDENT> def __init__(self, callback, uid, script, directories, new_word, refactor): <NEW_LINE> <INDENT> self.script = script <NEW_LINE> self.new_word = new_word <NEW_LINE> self.jedi_refactor = refactor <NEW_LINE> self.directories = directories <NEW_LINE> super(Rename, self).__init__(callback, uid) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> renames = {} <NEW_LINE> try: <NEW_LINE> <INDENT> usages = self.script.usages() <NEW_LINE> proposals = self.jedi_refactor.rename(self.script, self.new_word) <NEW_LINE> for u in usages: <NEW_LINE> <INDENT> path = u.module_path.rsplit('/{0}.py'.format(u.module_name))[0] <NEW_LINE> if path in self.directories: <NEW_LINE> <INDENT> if u.module_path not in renames: <NEW_LINE> <INDENT> renames[u.module_path] = [] <NEW_LINE> <DEDENT> thefile = proposals.new_files().get(u.module_path) <NEW_LINE> if thefile is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> lineno = u.line - 1 <NEW_LINE> line = thefile.splitlines()[lineno] <NEW_LINE> renames[u.module_path].append({ 'lineno': lineno, 'line': line }) <NEW_LINE> <DEDENT> <DEDENT> success = True <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> logging.error(error) <NEW_LINE> logging.debug(traceback.format_exc().splitlines()) <NEW_LINE> success = False <NEW_LINE> <DEDENT> self.callback({ 'success': success, 'renames': renames, 'uid': self.uid }) | Get back a python definition where to go
| 62598fb79f288636728188e0 |
class EventType(object): <NEW_LINE> <INDENT> def __init__(self, gateway): <NEW_LINE> <INDENT> self.INCOMING_CALL = gateway.jvm.pctelelog.events.EventType.INCOMING_CALL <NEW_LINE> self.MISSED_CALL = gateway.jvm.pctelelog.events.EventType.MISSED_CALL <NEW_LINE> self.CALL_ENDED = gateway.jvm.pctelelog.events.EventType.CALL_ENDED <NEW_LINE> self.CLIENT_CONNECT = gateway.jvm.pctelelog.events.EventType.CLIENT_CONNECT <NEW_LINE> self.SHUTDOWN = gateway.jvm.pctelelog.events.EventType.SHUTDOWN | The counterpart for EventType.java enum | 62598fb7009cb60464d0163f |
class Polynomial(object): <NEW_LINE> <INDENT> def __init__(self, polynomial): <NEW_LINE> <INDENT> self.polynomial = tuple(polynomial) <NEW_LINE> <DEDENT> def get_polynomial(self): <NEW_LINE> <INDENT> return self.polynomial <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> return Polynomial(map(lambda x: (-x[0], x[1]), self.polynomial)) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Polynomial(self.polynomial + other.get_polynomial()) <NEW_LINE> <DEDENT> def __sub__(self, other): <NEW_LINE> <INDENT> return Polynomial(self.polynomial + (-other).get_polynomial()) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> return Polynomial([(pair1[0] * pair2[0], pair1[1] + pair2[1]) for pair1 in self.polynomial for pair2 in other.get_polynomial()]) <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> return sum((pair[0] * x ** pair[1]) for pair in self.polynomial) <NEW_LINE> <DEDENT> def simplify(self): <NEW_LINE> <INDENT> new_poly = [] <NEW_LINE> for pair in self.polynomial: <NEW_LINE> <INDENT> for x in range(len(new_poly)): <NEW_LINE> <INDENT> if pair[1] == new_poly[x][1]: <NEW_LINE> <INDENT> new_poly[x] = (pair[0] + new_poly[x][0], pair[1]) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> new_poly.append(pair) <NEW_LINE> <DEDENT> <DEDENT> for pair in new_poly: <NEW_LINE> <INDENT> if pair[0] == 0: <NEW_LINE> <INDENT> new_poly.remove(pair) <NEW_LINE> <DEDENT> <DEDENT> new_poly.sort(key=lambda x: x[1], reverse=True) <NEW_LINE> self.polynomial = tuple(new_poly) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> poly_str = [] <NEW_LINE> first = True <NEW_LINE> for pair in self.polynomial: <NEW_LINE> <INDENT> if not first: <NEW_LINE> <INDENT> if pair[0] < 0: <NEW_LINE> <INDENT> poly_str.append("-") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> poly_str.append("+") <NEW_LINE> <DEDENT> pair = (abs(pair[0]), pair[1]) <NEW_LINE> <DEDENT> if pair[0] == 1 or pair[0] == -1: <NEW_LINE> <INDENT> if 
pair[0] == 1: <NEW_LINE> <INDENT> sign = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sign = "-" <NEW_LINE> <DEDENT> if pair[1] == 0: <NEW_LINE> <INDENT> poly_str.append(str(pair[0])) <NEW_LINE> <DEDENT> elif pair[1] == 1: <NEW_LINE> <INDENT> poly_str.append("%sx" % sign) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> poly_str.append("%sx^%d" % (sign, pair[1])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if pair[1] == 0: <NEW_LINE> <INDENT> poly_str.append(str(pair[0])) <NEW_LINE> <DEDENT> elif pair[1] == 1: <NEW_LINE> <INDENT> poly_str.append("%dx" % pair[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> poly_str.append("%dx^%d" % (pair[0], pair[1])) <NEW_LINE> <DEDENT> <DEDENT> first = False <NEW_LINE> <DEDENT> return " ".join(poly_str) | Class supporting basic arithmetic, simplification, evaluation, and
pretty-printing of polynomials. | 62598fb7379a373c97d99132 |
class RequestCoordinatorWrapperException(DeltaException): <NEW_LINE> <INDENT> pass | Is raised when request coordinator wrapper encounters error. | 62598fb74a966d76dd5eeff4 |
class SchemaCache(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._loaded = {} <NEW_LINE> <DEDENT> def _load_base_schema(self, schema_filename): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._loaded[schema_filename] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> with open(schema_filename, "r") as sfile: <NEW_LINE> <INDENT> self._loaded[schema_filename] = yaml.load(sfile.read()) <NEW_LINE> <DEDENT> logger.debug("Loaded schema from %s", schema_filename) <NEW_LINE> return self._loaded[schema_filename] <NEW_LINE> <DEDENT> <DEDENT> def _load_schema_with_plugins(self, schema_filename): <NEW_LINE> <INDENT> mangled = "{}-plugins".format(schema_filename) <NEW_LINE> try: <NEW_LINE> <INDENT> return self._loaded[mangled] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> plugins = load_plugins() <NEW_LINE> base_schema = copy.deepcopy(self._load_base_schema(schema_filename)) <NEW_LINE> logger.debug("Adding plugins to schema: %s", plugins) <NEW_LINE> for p in plugins: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> plugin_schema = p.plugin.schema <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> logger.debug("No schema defined for %s", p.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> base_schema["mapping"].update(plugin_schema.get("initialisation", {})) <NEW_LINE> <DEDENT> <DEDENT> self._loaded[mangled] = base_schema <NEW_LINE> return self._loaded[mangled] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, schema_filename, with_plugins): <NEW_LINE> <INDENT> if with_plugins: <NEW_LINE> <INDENT> schema = self._load_schema_with_plugins(schema_filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> schema = self._load_base_schema(schema_filename) <NEW_LINE> <DEDENT> return schema | Caches loaded schemas | 62598fb7a219f33f346c6922 |
class BaseHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> __TOKEN_LIST = {} <NEW_LINE> def __init__(self, application, request, **kwargs): <NEW_LINE> <INDENT> super(BaseHandler, self).__init__(application, request, **kwargs) <NEW_LINE> <DEDENT> def generate_token(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> new_token = binascii.hexlify(os.urandom(16)).decode("utf8") <NEW_LINE> if new_token not in self.__TOKEN_LIST: <NEW_LINE> <INDENT> return new_token <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_login_success(self, new_token, user_id): <NEW_LINE> <INDENT> self.set_cookie('_token', new_token) <NEW_LINE> self.__TOKEN_LIST[new_token] = user_id <NEW_LINE> <DEDENT> def get_current_user(self): <NEW_LINE> <INDENT> token = self.get_cookie('_token') <NEW_LINE> if token and token in self.__TOKEN_LIST: <NEW_LINE> <INDENT> user_id = self.__TOKEN_LIST[token] <NEW_LINE> user = self.application.user_dict[user_id] <NEW_LINE> user.update({'id': user_id}) <NEW_LINE> return user <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.render('404.html') | 检测用户登陆 | 62598fb7f9cc0f698b1c535b |
class Wedge(Patch): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> pars = (self.center[0], self.center[1], self.r, self.theta1, self.theta2, self.width) <NEW_LINE> fmt = "Wedge(center=(%g, %g), r=%g, theta1=%g, theta2=%g, width=%s)" <NEW_LINE> return fmt % pars <NEW_LINE> <DEDENT> @docstring.dedent_interpd <NEW_LINE> def __init__(self, center, r, theta1, theta2, width=None, **kwargs): <NEW_LINE> <INDENT> Patch.__init__(self, **kwargs) <NEW_LINE> self.center = center <NEW_LINE> self.r, self.width = r, width <NEW_LINE> self.theta1, self.theta2 = theta1, theta2 <NEW_LINE> self._patch_transform = transforms.IdentityTransform() <NEW_LINE> self._recompute_path() <NEW_LINE> <DEDENT> def _recompute_path(self): <NEW_LINE> <INDENT> if abs((self.theta2 - self.theta1) - 360) <= 1e-12: <NEW_LINE> <INDENT> theta1, theta2 = 0, 360 <NEW_LINE> connector = Path.MOVETO <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> theta1, theta2 = self.theta1, self.theta2 <NEW_LINE> connector = Path.LINETO <NEW_LINE> <DEDENT> arc = Path.arc(theta1, theta2) <NEW_LINE> if self.width is not None: <NEW_LINE> <INDENT> v1 = arc.vertices <NEW_LINE> v2 = arc.vertices[::-1] * float(self.r - self.width) / self.r <NEW_LINE> v = np.vstack([v1, v2, v1[0, :], (0, 0)]) <NEW_LINE> c = np.hstack([arc.codes, arc.codes, connector, Path.CLOSEPOLY]) <NEW_LINE> c[len(arc.codes)] = connector <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = np.vstack([arc.vertices, [(0, 0), arc.vertices[0, :], (0, 0)]]) <NEW_LINE> c = np.hstack([arc.codes, [connector, connector, Path.CLOSEPOLY]]) <NEW_LINE> <DEDENT> v *= self.r <NEW_LINE> v += np.asarray(self.center) <NEW_LINE> self._path = Path(v, c) <NEW_LINE> <DEDENT> def set_center(self, center): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self.center = center <NEW_LINE> self.stale = True <NEW_LINE> <DEDENT> def set_radius(self, radius): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self.r = radius <NEW_LINE> self.stale = True <NEW_LINE> <DEDENT> def 
set_theta1(self, theta1): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self.theta1 = theta1 <NEW_LINE> self.stale = True <NEW_LINE> <DEDENT> def set_theta2(self, theta2): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self.theta2 = theta2 <NEW_LINE> self.stale = True <NEW_LINE> <DEDENT> def set_width(self, width): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self.width = width <NEW_LINE> self.stale = True <NEW_LINE> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> if self._path is None: <NEW_LINE> <INDENT> self._recompute_path() <NEW_LINE> <DEDENT> return self._path | Wedge shaped patch. | 62598fb7091ae35668704d3d |
class HDF4FileHandler(BaseFileHandler): <NEW_LINE> <INDENT> def __init__(self, filename, filename_info, filetype_info): <NEW_LINE> <INDENT> super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) <NEW_LINE> self.file_content = {} <NEW_LINE> file_handle = SD(self.filename, SDC.READ) <NEW_LINE> self._collect_attrs('', file_handle.attributes()) <NEW_LINE> for k, v in file_handle.datasets().items(): <NEW_LINE> <INDENT> self.collect_metadata(k, file_handle.select(k)) <NEW_LINE> <DEDENT> del file_handle <NEW_LINE> <DEDENT> def _collect_attrs(self, name, attrs): <NEW_LINE> <INDENT> for key, value in six.iteritems(attrs): <NEW_LINE> <INDENT> value = np.squeeze(value) <NEW_LINE> if issubclass(value.dtype.type, np.string_) and not value.shape: <NEW_LINE> <INDENT> value = np.asscalar(value) <NEW_LINE> if not isinstance(value, str): <NEW_LINE> <INDENT> value = value.decode() <NEW_LINE> <DEDENT> self.file_content["{}/attr/{}".format(name, key)] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file_content["{}/attr/{}".format(name, key)] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def collect_metadata(self, name, obj): <NEW_LINE> <INDENT> if isinstance(obj, SDS): <NEW_LINE> <INDENT> self.file_content[name] = obj <NEW_LINE> info = obj.info() <NEW_LINE> self.file_content[name + "/dtype"] = HTYPE_TO_DTYPE.get(info[3]) <NEW_LINE> self.file_content[name + "/shape"] = info[2] if isinstance(info[2], (int, float)) else tuple(info[2]) <NEW_LINE> <DEDENT> <DEDENT> def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): <NEW_LINE> <INDENT> dask_arr = from_sds(val, chunks=chunks) <NEW_LINE> attrs = val.attributes() <NEW_LINE> return xr.DataArray(dask_arr, dims=('y', 'x'), attrs=attrs) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> val = self.file_content[key] <NEW_LINE> if isinstance(val, SDS): <NEW_LINE> <INDENT> return self._open_xarray_dataset(val) <NEW_LINE> <DEDENT> return val <NEW_LINE> <DEDENT> def __contains__(self, item): 
<NEW_LINE> <INDENT> return item in self.file_content <NEW_LINE> <DEDENT> def get(self, item, default=None): <NEW_LINE> <INDENT> if item in self: <NEW_LINE> <INDENT> return self[item] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return default | Small class for inspecting a HDF5 file and retrieve its metadata/header data.
| 62598fb7460517430c4320ec |
class TestUploadSessionDetail(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testUploadSessionDetail(self): <NEW_LINE> <INDENT> pass | UploadSessionDetail unit test stubs | 62598fb72ae34c7f260ab1fa |
class CodeMessageException(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, code, msg): <NEW_LINE> <INDENT> super(CodeMessageException, self).__init__("%d: %s" % (code, msg)) <NEW_LINE> self.code = code <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def error_dict(self): <NEW_LINE> <INDENT> return cs_error(self.msg) | An exception with integer code and message string attributes.
Attributes:
code (int): HTTP error code
msg (str): string describing the error | 62598fb7aad79263cf42e8f1 |
class PlotWindow(PlotWidget): <NEW_LINE> <INDENT> def __init__(self, title=None, **kargs): <NEW_LINE> <INDENT> mkQApp() <NEW_LINE> self.win = QtGui.QMainWindow() <NEW_LINE> PlotWidget.__init__(self, **kargs) <NEW_LINE> self.win.setCentralWidget(self) <NEW_LINE> for m in ['resize']: <NEW_LINE> <INDENT> setattr(self, m, getattr(self.win, m)) <NEW_LINE> <DEDENT> if title is not None: <NEW_LINE> <INDENT> self.win.setWindowTitle(title) <NEW_LINE> <DEDENT> self.win.show() | (deprecated; use PlotWidget instead) | 62598fb7bd1bec0571e15151 |
class AdaDelta(Optimizer): <NEW_LINE> <INDENT> def __init__(self, beta= 0.9, epsilon=1e-7, **kwargs) -> None: <NEW_LINE> <INDENT> super(AdaDelta, self).__init__(**kwargs) <NEW_LINE> self.D = [] <NEW_LINE> self.beta = beta <NEW_LINE> self.cache = [] <NEW_LINE> self.delta = [] <NEW_LINE> self.eps = epsilon <NEW_LINE> self.init = True <NEW_LINE> <DEDENT> def step(self, layers)->None: <NEW_LINE> <INDENT> if self.init: <NEW_LINE> <INDENT> for layer in layers: <NEW_LINE> <INDENT> self.cache.append(np.zeros_like(layer.trainable)) <NEW_LINE> self.delta.append(np.zeros_like(layer.trainable)) <NEW_LINE> self.D.append(np.zeros_like(layer.trainable)) <NEW_LINE> <DEDENT> self.init = False <NEW_LINE> <DEDENT> for ind, layer in enumerate(layers): <NEW_LINE> <INDENT> for ind_tra, trainable in enumerate(layer.trainable): <NEW_LINE> <INDENT> self.cache[ind][ind_tra] = self.beta * self.cache[ind][ind_tra] + (1. - self.beta) * trainable.grad.value ** 2 <NEW_LINE> self.delta[ind][ind_tra] = (np.sqrt(self.D[ind][ind_tra] + self.eps)) / (np.sqrt(self.cache[ind][ind_tra] + self.eps)) * trainable.grad.value <NEW_LINE> self.D[ind][ind_tra] = self.beta * self.D[ind][ind_tra] + (1. - self.beta) * self.delta[ind][ind_tra] ** 2 <NEW_LINE> trainable.value -= self.delta[ind][ind_tra] | Implementation of AdaDelta (Adaptive Delta).
AdaDelta has no learning rate.
It uses different from of Adagrad and RMSprop.
Beta recomended as a default.
Arguments :
-----------
beta : Update coefficient.
>>> type : float
>>> Default : 0.9
epsilon : Clip value to get rid of 0 division error.
>>> type : float
>>> Default : 1e-7. | 62598fb77c178a314d78d5bc |
class TaskForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Task <NEW_LINE> fields = ['task_title', 'task_description', 'task_due'] <NEW_LINE> widgets={ 'task_title': forms.TextInput(attrs = {'placeholder': 'Give the task a name here.'}), 'task_description': forms.Textarea(attrs={'placeholder': 'Describe the task here.'}), 'task_due': DateTimePicker(options={"format": "YYYY-MM-DD HH:mm"}) } <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(TaskForm, self).__init__(*args, **kwargs) <NEW_LINE> for field_name, field in self.fields.items(): <NEW_LINE> <INDENT> field.widget.attrs['class'] = 'form-control' | A form for a Task | 62598fb7fff4ab517ebcd906 |
class StateManager(models.Manager): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> return super(StateManager, self).get_queryset().filter( state=self.state_filter) <NEW_LINE> <DEDENT> def get_or_create(self, **kwargs): <NEW_LINE> <INDENT> return self.get_queryset().get_or_create( state=self.state_filter, **kwargs) | For searching/creating State Chats only. | 62598fb7be383301e025391a |
class Post: <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def _rawtext(self): <NEW_LINE> <INDENT> with open(self.path, 'r') as f: <NEW_LINE> <INDENT> rt = f.read() <NEW_LINE> self._rawtext = lambda: rt <NEW_LINE> return rt <NEW_LINE> <DEDENT> <DEDENT> def _md(self): <NEW_LINE> <INDENT> md = markdown.Markdown(extensions=['markdown.extensions.meta']) <NEW_LINE> self._md = lambda: md <NEW_LINE> return md <NEW_LINE> <DEDENT> def content(self): <NEW_LINE> <INDENT> content = Markup(self._md().convert(self._rawtext())) <NEW_LINE> self.content = lambda: content <NEW_LINE> return content <NEW_LINE> <DEDENT> def title(self): <NEW_LINE> <INDENT> return self.meta('title') <NEW_LINE> <DEDENT> def template(self): <NEW_LINE> <INDENT> if self.meta('template'): <NEW_LINE> <INDENT> return self.meta('template')+'.html' <NEW_LINE> <DEDENT> <DEDENT> def meta(self, key): <NEW_LINE> <INDENT> self.content() <NEW_LINE> return self._md().Meta[key][0] if key in self._md().Meta else False <NEW_LINE> <DEDENT> def get_compiled_path(self, new_ext='.html'): <NEW_LINE> <INDENT> fname, _ = os.path.splitext(self.path) <NEW_LINE> return fname + new_ext <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.path <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.path == other.path | Abstracts the concept of Post. Efficiency-wise it is terrible and needs
lazy-loading. | 62598fb79f288636728188e2 |
class MelodyAutocompleteEngine: <NEW_LINE> <INDENT> autocompleter: Autocompleter <NEW_LINE> def __init__(self, config: Dict[str, Any]) -> None: <NEW_LINE> <INDENT> self.autocompleter = SimplePrefixTree(config['weight_type']) if config['autocompleter'] == 'simple' else CompressedPrefixTree(config['weight_type']) <NEW_LINE> with open(config['file']) as f: <NEW_LINE> <INDENT> lines = csv.reader(f) <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> if line: <NEW_LINE> <INDENT> pairs = [] <NEW_LINE> for i in range(1, len(line) - 1, 2): <NEW_LINE> <INDENT> pairs.append((int(line[i]), int(line[i+1]))) <NEW_LINE> <DEDENT> interval = [] <NEW_LINE> for i in range(len(pairs) - 1): <NEW_LINE> <INDENT> interval.append(int(pairs[i+1][0]) - int(pairs[i][0])) <NEW_LINE> <DEDENT> value = Melody(line[0], pairs) <NEW_LINE> self.autocompleter.insert(value, 1, interval) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def autocomplete(self, prefix: List[int], limit: Optional[int] = None) -> List[Tuple[Melody, float]]: <NEW_LINE> <INDENT> return self.autocompleter.autocomplete(prefix, limit) <NEW_LINE> <DEDENT> def remove(self, prefix: List[int]) -> None: <NEW_LINE> <INDENT> self.autocompleter.remove(prefix) | An autocomplete engine that suggests melodies based on a few intervals.
The values stored are Melody objects, and the corresponding
prefix sequence for a Melody is its interval sequence.
Because the prefix is based only on interval sequence and not the
starting pitch or duration of the notes, it is possible for different
melodies to have the same prefix.
# === Private Attributes ===
autocompleter: An Autocompleter used by this engine. | 62598fb7ff9c53063f51a76d |
class Trainer(object): <NEW_LINE> <INDENT> def __init__(self, storage, **kwargs): <NEW_LINE> <INDENT> self.storage = storage <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> def train(self, *args, **kwargs): <NEW_LINE> <INDENT> raise self.TrainerInitializationException() <NEW_LINE> <DEDENT> def get_or_create(self, statement_text): <NEW_LINE> <INDENT> statement = self.storage.find(statement_text) <NEW_LINE> if not statement: <NEW_LINE> <INDENT> statement = Statement(statement_text) <NEW_LINE> <DEDENT> return statement <NEW_LINE> <DEDENT> class TrainerInitializationException(Exception): <NEW_LINE> <INDENT> def __init__(self, value=None): <NEW_LINE> <INDENT> default = ( 'A training class must specified before calling train(). ' + 'See http://chatterbot.readthedocs.io/en/stable/training.html' ) <NEW_LINE> self.value = value or default <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self.value) <NEW_LINE> <DEDENT> <DEDENT> def _generate_export_data(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for statement in self.storage.filter(): <NEW_LINE> <INDENT> for response in statement.in_response_to: <NEW_LINE> <INDENT> result.append([response.text, statement.text]) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def export_for_training(self, file_path='./export.json'): <NEW_LINE> <INDENT> from jsondb.db import Database <NEW_LINE> database = Database(file_path) <NEW_LINE> export = {'export': self._generate_export_data()} <NEW_LINE> database.data(dictionary=export) | Base class for all other trainer classes. | 62598fb7a219f33f346c6924 |
class RunMethodGUI(RunMethodBase): <NEW_LINE> <INDENT> def __init__(self, frame): <NEW_LINE> <INDENT> self.frame = frame <NEW_LINE> self.current_dmu = None <NEW_LINE> self.increment = 0 <NEW_LINE> <DEDENT> def get_categories(self): <NEW_LINE> <INDENT> return self.frame.construct_categories() <NEW_LINE> <DEDENT> def get_coefficients(self): <NEW_LINE> <INDENT> return self.frame.data_frame.data_tab.read_coefficients() <NEW_LINE> <DEDENT> def show_error(self, message): <NEW_LINE> <INDENT> logger = get_logger() <NEW_LINE> logger.error(message) <NEW_LINE> showerror('Error', message) <NEW_LINE> <DEDENT> def validate_weights_if_needed(self): <NEW_LINE> <INDENT> self.frame.params_frame.weight_tab.on_validate_weights() <NEW_LINE> <DEDENT> def init_before_run(self, nb_models, coefficients): <NEW_LINE> <INDENT> current_dmu = StringVar() <NEW_LINE> current_dmu.trace('w', self.frame.on_dmu_change) <NEW_LINE> self.current_dmu = current_dmu <NEW_LINE> self.increment = 100 / (len(coefficients) * nb_models) <NEW_LINE> self.frame.progress_bar['value'] = 0 <NEW_LINE> <DEDENT> def decorate_model(self, model_obj): <NEW_LINE> <INDENT> model = ProgressBarDecorator(model_obj, self.current_dmu) <NEW_LINE> self.frame.increment = self.increment <NEW_LINE> return model <NEW_LINE> <DEDENT> def post_process_solutions(self, solutions, params, param_strs, all_ranks, run_date, total_seconds): <NEW_LINE> <INDENT> categorical = params.get_parameter_value('CATEGORICAL_CATEGORY') <NEW_LINE> if not categorical.strip(): <NEW_LINE> <INDENT> categorical = None <NEW_LINE> <DEDENT> data_file = self.frame.data_frame.data_tab.get_data_file_name() <NEW_LINE> self.frame.data_frame.solution_tab.update_data_file_name(data_file) <NEW_LINE> if '*' not in data_file: <NEW_LINE> <INDENT> params.update_parameter('DATA_FILE', data_file) <NEW_LINE> <DEDENT> self.frame.progress_bar['value'] = 100 <NEW_LINE> self.frame.data_frame.select(1) <NEW_LINE> self.frame.data_frame.solution_tab.show_solution(solutions, params, 
param_strs, run_date, total_seconds, ranks=all_ranks, categorical= categorical) | This class implements running routing from GUI.
Attributes:
frame (Tk Frame): main GUI frame.
current_dmu (StringVar): StringVar object that tracks when
when DMU changes during solution process.
increment (double): progress bar increment.
Args:
frame (Tk Frame): main GUI frame. | 62598fb74428ac0f6e658641 |
class TR(WebElement): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> tagName = "tr" | Defines a table row | 62598fb744b2445a339b6a03 |
class CreateNewClientTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client.defaults['HTTP_AUTHORIZATION'] = get_token() <NEW_LINE> <DEDENT> def test_create_valid_client(self): <NEW_LINE> <INDENT> valid_payload = { "name": "Ian Marcos", "surname": "Carvalho", "email": "ianmarcoscarvalho@gmail.com.br", "phone": "9137946863", "cpf": "51281103898", } <NEW_LINE> response = self.client.post( reverse('clients'), data=json.dumps(valid_payload), content_type="application/json", ) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_201_CREATED) <NEW_LINE> self.assertEqual(response.data, {"client_id": "1"}) <NEW_LINE> <DEDENT> def test_create_invalid_client(self): <NEW_LINE> <INDENT> invalid_payload = { "name": "Ian Marcos", "surname": "Carvalho", "email": "ianmarcoscarvalho@gmail.com.br", "phone": "9137946863", } <NEW_LINE> response = self.client.post( reverse('clients'), data=json.dumps(invalid_payload), content_type="application/json", ) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) | Test module for inserting a new Client | 62598fb78e7ae83300ee91bf |
class UriSpec(PipelineSpec): <NEW_LINE> <INDENT> pass | Data spec for URI string.
| 62598fb77047854f4633f4f5 |
class ModifyJidujilu(ModifyYuedujilu): <NEW_LINE> <INDENT> grok.context(Interface) <NEW_LINE> grok.name('modify_jidujilu') <NEW_LINE> grok.require('zope2.View') | AJAX action for jidu jilu.
| 62598fb7f548e778e596b6c4 |
class ComputeNodeDeleteUserOptions(Model): <NEW_LINE> <INDENT> def __init__(self, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None): <NEW_LINE> <INDENT> self.timeout = timeout <NEW_LINE> self.client_request_id = client_request_id <NEW_LINE> self.return_client_request_id = return_client_request_id <NEW_LINE> self.ocp_date = ocp_date | Additional parameters for delete_user operation.
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime | 62598fb73317a56b869be5dd |
class ODSWriter(BookWriter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> BookWriter.__init__(self) <NEW_LINE> self._native_book = None <NEW_LINE> <DEDENT> def open(self, file_name, **keywords): <NEW_LINE> <INDENT> BookWriter.open(self, file_name, **keywords) <NEW_LINE> self._native_book = ezodf.newdoc( doctype="ods", filename=self._file_alike_object) <NEW_LINE> skip_backup_flag = self._keywords.get('skip_backup', True) <NEW_LINE> if skip_backup_flag: <NEW_LINE> <INDENT> self._native_book.backup = False <NEW_LINE> <DEDENT> <DEDENT> def create_sheet(self, name): <NEW_LINE> <INDENT> return ODSSheetWriter(self._native_book, None, name) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._native_book.save() <NEW_LINE> self._native_book = None | open document spreadsheet writer | 62598fb72ae34c7f260ab1fc |
class Solution: <NEW_LINE> <INDENT> def validWordSquare(self, words): <NEW_LINE> <INDENT> if len(words) == 0 or len(words) == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for i in range(1, len(words)): <NEW_LINE> <INDENT> for j in range(i, len(words)): <NEW_LINE> <INDENT> if words[i][j] != words[j][i]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True | @param words: a list of string
@return: a boolean | 62598fb721bff66bcd722d88 |
class insert(ignore): <NEW_LINE> <INDENT> def __init__(self, label=None): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> <DEDENT> def apply(self, runner, obj): <NEW_LINE> <INDENT> runner[self.label or obj.__name__].add(obj) | A decorator to explicitly mark that a method of a :class:`~mush.Plug` should
be added to a runner by :meth:`~mush.Plug.add_to`. The `label` parameter
can be used to indicate a different label at which to add the method,
instead of using the name of the method. | 62598fb7283ffb24f3cf39a4 |
class UpdateOwnStatus(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.user_profile.id == request.user.id | allow user to update their own status | 62598fb79c8ee82313040203 |
@python_2_unicode_compatible <NEW_LINE> class Essay(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=20) <NEW_LINE> sub_title = models.CharField(max_length=30, blank=True) <NEW_LINE> body = MDTextField() <NEW_LINE> created_time = models.DateTimeField() <NEW_LINE> modified_time = models.DateTimeField() <NEW_LINE> excerpt = models.CharField(max_length=200, blank=True) <NEW_LINE> category = models.ForeignKey(Category, on_delete=models.CASCADE) <NEW_LINE> tags = models.ManyToManyField(Tag, blank=True) <NEW_LINE> author = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('essay:essay', kwargs={'pk': self.pk}) | 文章的数据库表稍微复杂一点,主要是涉及的字段更多。 | 62598fb7cc40096d6161a269 |
class overlappedGenesH3K4Profile(TrackerImages): <NEW_LINE> <INDENT> pass | Chromatin profile per gene | 62598fb730dc7b766599f96d |
class IdentityNode(ActivationNode): <NEW_LINE> <INDENT> def f(self, x): <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> def derivative(self, x): <NEW_LINE> <INDENT> if isinstance(x, np.ndarray): <NEW_LINE> <INDENT> return np.ones(x.shape) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1.0 | A node where the activation function is the identity:
f(x) = x. | 62598fb710dbd63aa1c70cd8 |
class ItemSimilarity(BaseSimilarity): <NEW_LINE> <INDENT> def __init__(self, model, distance, num_best=None): <NEW_LINE> <INDENT> BaseSimilarity.__init__(self, model, distance, num_best) <NEW_LINE> <DEDENT> def get_similarity(self, source_id, target_id): <NEW_LINE> <INDENT> source_preferences = self.model.preferences_for_item(source_id) <NEW_LINE> target_preferences = self.model.preferences_for_item(target_id) <NEW_LINE> if self.model.has_preference_values(): <NEW_LINE> <INDENT> source_preferences, target_preferences = find_common_elements(source_preferences, target_preferences) <NEW_LINE> <DEDENT> if source_preferences.ndim == 1 and target_preferences.ndim == 1: <NEW_LINE> <INDENT> source_preferences = np.asarray([source_preferences]) <NEW_LINE> target_preferences = np.asarray([target_preferences]) <NEW_LINE> <DEDENT> if self.distance == loglikehood_coefficient: <NEW_LINE> <INDENT> if not source_preferences.shape[1] == 0 and not target_preferences.shape[1] == 0: <NEW_LINE> <INDENT> return self.distance(self.model.items_count(), source_preferences, target_preferences) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.array([[np.nan]]) <NEW_LINE> <DEDENT> <DEDENT> if not source_preferences.shape[1] == 0 and not target_preferences.shape[1] == 0: <NEW_LINE> <INDENT> return self.distance(source_preferences, target_preferences) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.array([[np.nan]]) <NEW_LINE> <DEDENT> <DEDENT> def get_similarities(self, source_id): <NEW_LINE> <INDENT> return [(other_id, self.get_similarity(source_id, other_id)) for other_id in self.model.item_ids()] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for item_id in self.model.item_ids(): <NEW_LINE> <INDENT> yield item_id, self[item_id] | Returns the degree of similarity, of two items, based on its preferences by the users.
Implementations of this class define a notion of similarity between two items.
Implementations should return values in the range 0.0 to 1.0, with 1.0 representing
perfect similarity.
Parameters
----------
`model`: DataModel
Defines the data model where data is fetched.
`distance`: Function
Pairwise Function between two vectors.
`num_best`: int
If it is left unspecified, similarity queries return a full list (one
float for every item in the model, including the query item).
If `num_best` is set, queries return `num_best` most similar items, as a
sorted list.
Methods
---------
get_similarity()
Return similarity of the `source_id` to a specific `target_id` in the model.
get_similarities()
Return similarity of the `source_id` to all sources in the model.
Examples
--------- | 62598fb726068e7796d4ca78 |
class CreateLiveCallbackRuleResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | CreateLiveCallbackRule返回参数结构体
| 62598fb7167d2b6e312b7094 |
class MockedGoogleAnalyticsSource(GoogleAnalyticsSource): <NEW_LINE> <INDENT> def __init__(self, mocked_value): <NEW_LINE> <INDENT> super(MockedGoogleAnalyticsSource, self).__init__('', 0) <NEW_LINE> self.mocked_value = mocked_value <NEW_LINE> self.last_query = None <NEW_LINE> <DEDENT> def _query(self, **kwargs): <NEW_LINE> <INDENT> self.last_query = kwargs <NEW_LINE> return { 'reports': [{ 'data': { 'rows': [ { 'metrics': [{'values': [str(self.mocked_value)]}], 'dimensions': ['20181213'], } ] } }] } | This class is used to mock values returned by Google Analytics API | 62598fb77c178a314d78d5be |
class EnzymeFamily(Base): <NEW_LINE> <INDENT> __tablename__ = ENZYME_FAMILY_TABLE_NAME <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> expasy_id = Column(String(16), unique=True, index=True, nullable=False, doc='The ExPASy enzyme code.') <NEW_LINE> parent_id = Column(Integer, ForeignKey(f'{EnzymeSuperFamily.__tablename__}.id'), nullable=True) | Third level entry | 62598fb7fff4ab517ebcd908 |
class TestView(cocos.layer.Layer): <NEW_LINE> <INDENT> def __init__(self, assets): <NEW_LINE> <INDENT> super(TestView, self).__init__() <NEW_LINE> self.assets = assets <NEW_LINE> nwidget.events.clear(cocos.director.director.window) <NEW_LINE> self.is_event_handler = True <NEW_LINE> ui = CocosWidget() <NEW_LINE> self.add(ui, z=1) <NEW_LINE> bg = model.Background(self.assets) <NEW_LINE> self.add(bg.node) <NEW_LINE> marker = model.Marker(self.assets) <NEW_LINE> marker.node.position = 50, 50 <NEW_LINE> marker.node.scale = 0.25 <NEW_LINE> self.add(marker.node) <NEW_LINE> ui.widgets.append( nwidget.Label( text="Press arrow keys to change snake sprite", size=13, color=(255, 255, 255, 255) ).bounds(20, 150, 600, 180) ) <NEW_LINE> snake = model.Snake(self.assets) <NEW_LINE> snake.node.position = 50, 100 <NEW_LINE> snake.node.scale = 0.5 <NEW_LINE> self.add(snake.node) <NEW_LINE> self.snake = snake <NEW_LINE> ui.widgets.append( nwidget.Label( text="Press arrow keys to move path in that direction\nL to increase length of path, R to reset path", size=13, color=(255, 255, 255, 255) ).bounds(20, 300, 600, 425) ) <NEW_LINE> self.path = model.Path() <NEW_LINE> self.reset_path() <NEW_LINE> self.path.move(x=-50) <NEW_LINE> self.path.move(y=-30) <NEW_LINE> self.path.move(x=-100) <NEW_LINE> self.path.move(y=-20) <NEW_LINE> self.path.move(y=-20) <NEW_LINE> self.add(self.path) <NEW_LINE> <DEDENT> def reset_path(self): <NEW_LINE> <INDENT> self.path.reset() <NEW_LINE> self.path.x = 100 <NEW_LINE> self.path.y = 200 <NEW_LINE> self.path.length = 150 <NEW_LINE> self.path.move(x=100) <NEW_LINE> self.path.move(y=100) <NEW_LINE> <DEDENT> def check_path(self): <NEW_LINE> <INDENT> if self.path.intersects(): <NEW_LINE> <INDENT> self.path.color = (127, 0, 0, 127) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.path.color = (127, 127, 127, 127) <NEW_LINE> <DEDENT> <DEDENT> def on_key_press(self, key, modifiers): <NEW_LINE> <INDENT> md = 5 <NEW_LINE> if key == pyglet.window.key.L: <NEW_LINE> 
<INDENT> self.path.length += 5 <NEW_LINE> <DEDENT> if key == pyglet.window.key.R: <NEW_LINE> <INDENT> self.reset_path() <NEW_LINE> <DEDENT> elif key == pyglet.window.key.UP: <NEW_LINE> <INDENT> self.snake.up() <NEW_LINE> self.path.move(y=md) <NEW_LINE> <DEDENT> elif key == pyglet.window.key.DOWN: <NEW_LINE> <INDENT> self.snake.down() <NEW_LINE> self.path.move(y=-md) <NEW_LINE> <DEDENT> elif key == pyglet.window.key.LEFT: <NEW_LINE> <INDENT> self.snake.left() <NEW_LINE> self.path.move(x=-md) <NEW_LINE> <DEDENT> elif key == pyglet.window.key.RIGHT: <NEW_LINE> <INDENT> self.snake.right() <NEW_LINE> self.path.move(x=md) <NEW_LINE> <DEDENT> self.check_path() | Testing class | 62598fb7796e427e5384e8b5 |
class Attachments(ExtractionMode): <NEW_LINE> <INDENT> __mode__ = 'attachments' <NEW_LINE> def __init__(self, identification_json, basedir=''): <NEW_LINE> <INDENT> super(Attachments, self).__init__(identification_json, basedir=basedir) <NEW_LINE> <DEDENT> def specs(self): <NEW_LINE> <INDENT> basedir, attachments = self.basedir_sourcename, [] <NEW_LINE> for attachment in self.identification_json.get(self.__mode__): <NEW_LINE> <INDENT> path = os.path.join(basedir, attachment.get('file_name')) <NEW_LINE> attachments.append(self.spec(attachment.get('id'), path)) <NEW_LINE> <DEDENT> return attachments | Attachments extraction mode | 62598fb79f288636728188e4 |
class AsciiFont: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def primary(arg_printable): <NEW_LINE> <INDENT> return "\x1b[10m{0}\x1b[0m".format(arg_printable) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def alternate(arg_printable, arg_alternate_font_no): <NEW_LINE> <INDENT> inserted_no = "1" <NEW_LINE> if int(arg_alternate_font_no) in range(1, 10): <NEW_LINE> <INDENT> inserted_no = str(arg_alternate_font_no) <NEW_LINE> <DEDENT> return "\x1b[1{0}m{1}\x1b[0m".format(inserted_no, arg_printable) | docstring | 62598fb7aad79263cf42e8f4 |
class PID(object): <NEW_LINE> <INDENT> def __init__(self, pLocalPID, pIdent=0): <NEW_LINE> <INDENT> if not isInteger(pLocalPID) or not isInteger(pIdent): <NEW_LINE> <INDENT> raise InvalidParameter() <NEW_LINE> <DEDENT> self.mLocalPID = pLocalPID <NEW_LINE> self.mIdent = pIdent <NEW_LINE> <DEDENT> def __eq__(self, pOther): <NEW_LINE> <INDENT> if None == pOther: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> lOther = pOther <NEW_LINE> if isinstance(lOther, str): <NEW_LINE> <INDENT> return self.__repr__() == lOther <NEW_LINE> <DEDENT> if isinstance(lOther, PIN): <NEW_LINE> <INDENT> lOther = lOther.mPID <NEW_LINE> <DEDENT> if self.mLocalPID != lOther.mLocalPID: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.mIdent != lOther.mIdent: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "@%x" % self.mLocalPID <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fromPB(cls, pPBPID): <NEW_LINE> <INDENT> return PIN.PID(pPBPID.id, pPBPID.ident) | PID native (non-PB) representation. | 62598fb7379a373c97d99136 |
class ScorecardTemplateItem(object): <NEW_LINE> <INDENT> def __init__(self, scorecard_templates, scorecard): <NEW_LINE> <INDENT> self._scorecard_templates = scorecard_templates <NEW_LINE> self._requestor = self._scorecard_templates._requestor <NEW_LINE> self._data = scorecard <NEW_LINE> self._id = scorecard["id"] <NEW_LINE> self.name = scorecard["name"] <NEW_LINE> self.computed = scorecard["computed"] <NEW_LINE> self.custom = scorecard["custom"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self._scorecard_templates.delete(self._id) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> body = { "name": self.name, "computed": self.computed, "custom": self.custom } <NEW_LINE> _, scorecard = self._requestor.put('/metrics/templates/' + self._id, json=body) <NEW_LINE> self._data = scorecard <NEW_LINE> self.name = scorecard["name"] <NEW_LINE> self.computed = scorecard["computed"] <NEW_LINE> self.custom = scorecard["custom"] | This class represents a scorecard template. It's instantiated by the
:class:`proknow.ScorecardTemplates.ScorecardTemplates` class as a complete representation of the
scorecard.
Attributes:
id (str): The id of the scorecard (readonly).
data (dict): The complete representation of the scorecard as returned from the API
(readonly).
name (str): The name of the scorecard.
computed (list): The computed metrics of the scorecard.
custom (list): The custom metrics of the scorecard. | 62598fb791f36d47f2230f39 |
class JsonHandler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.file_name = "data.json" <NEW_LINE> <DEDENT> def dump_file(self, x): <NEW_LINE> <INDENT> storage = self.load_file() <NEW_LINE> with open(self.file_name, "w+") as f: <NEW_LINE> <INDENT> if storage is None: <NEW_LINE> <INDENT> city_list = [] <NEW_LINE> x.update({"cities": city_list}) <NEW_LINE> json.dump(x, f, indent=2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> storage.update(x) <NEW_LINE> json.dump(storage, f, indent=2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load_file(self): <NEW_LINE> <INDENT> with open(self.file_name, "r+") as f: <NEW_LINE> <INDENT> if f.read() == '': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f.seek(0) <NEW_LINE> return json.load(f) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_event_in_file(self, id, dic): <NEW_LINE> <INDENT> storage = self.load_file() <NEW_LINE> if id in storage: <NEW_LINE> <INDENT> with open(self.file_name, "w+") as f: <NEW_LINE> <INDENT> storage[id] = dic <NEW_LINE> json.dump(storage, f, indent=2) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return "No id" <NEW_LINE> <DEDENT> <DEDENT> def delete_event_in_file(self, event_id): <NEW_LINE> <INDENT> storage = self.load_file() <NEW_LINE> if event_id in storage: <NEW_LINE> <INDENT> with open(self.file_name, "w+") as f: <NEW_LINE> <INDENT> del storage[event_id] <NEW_LINE> json.dump(storage, f, indent=2) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return event_id | Attributes:
file_name: File name to which json objects are stored
Methods:
dump_file(object),
load_file(): Used by all other methods,
delete_event_in_file(event_id),
update_event_in_file(event_id, field_name, field_value). | 62598fb74f88993c371f059d |
class SessionPeekHelper6(SessionPeekHelper5): <NEW_LINE> <INDENT> pass | Helper class for implementing session peek feature
This class works with data constructed by
:class:`~plainbox.impl.session.suspend.SessionSuspendHelper6` which has
been pre-processed by :class:`SessionPeekHelper` (to strip the initial
envelope).
The only goal of this class is to reconstruct session state meta-data. | 62598fb74c3428357761a3dc |
class CeilometerApiPlatformTests(ceilometermanager.CeilometerBaseTest): <NEW_LINE> <INDENT> def test_check_alarm(self): <NEW_LINE> <INDENT> fail_msg = "Creation instance failed" <NEW_LINE> create_kwargs = {} <NEW_LINE> if 'neutron' in self.config.network.network_provider: <NEW_LINE> <INDENT> network = [net.id for net in self.compute_client.networks.list() if net.label == self.private_net] <NEW_LINE> create_kwargs = {'nics': [{'net-id': network[0]}]} <NEW_LINE> <DEDENT> image = nmanager.get_image_from_name() <NEW_LINE> name = rand_name('ost1_test-instance-alarm_actions') <NEW_LINE> self.instance = self.verify(600, self.compute_client.servers.create, 1, fail_msg, "server creation", name=name, flavor=self.flavor, image=image, **create_kwargs) <NEW_LINE> self.set_resource(self.instance.id, self.instance) <NEW_LINE> self.verify(200, self._wait_for_instance_metrics, 2, "instance is not available", "instance becoming 'available'", self.instance, 'ACTIVE') <NEW_LINE> fail_msg = "Creation metrics failed." <NEW_LINE> statistic_meter_resp = self.verify(600, self.wait_for_instance_metrics, 3, fail_msg, "metrics created", self.meter_name) <NEW_LINE> fail_msg = "Creation alarm failed." <NEW_LINE> threshold = statistic_meter_resp[0].avg - 1 <NEW_LINE> create_alarm_resp = self.verify(5, self.create_alarm, 4, fail_msg, "alarm_create", meter_name=self.meter_name, threshold=threshold, name=self.name, period=self.period, statistic=self.statistic, comparison_operator=self.comparison_operator) <NEW_LINE> fail_msg = "Alarm verify state failed." <NEW_LINE> self.verify(1000, self.wait_for_alarm_status, 5, fail_msg, "alarm status becoming 'alarm'", create_alarm_resp.alarm_id) | TestClass contains tests that check basic Ceilometer functionality. | 62598fb78a349b6b4368635d |
class OdmlCsvTable(OdmlTable): <NEW_LINE> <INDENT> def __init__(self, load_from=None): <NEW_LINE> <INDENT> super(OdmlCsvTable, self).__init__(load_from=load_from) <NEW_LINE> <DEDENT> def write2file(self, save_to): <NEW_LINE> <INDENT> self.consistency_check() <NEW_LINE> with open(save_to, 'w') as csvfile: <NEW_LINE> <INDENT> len_docdict = 0 if not self._docdict else len(self._docdict) <NEW_LINE> fieldnames = list(range(max(len(self._header), len_docdict * 2 + 1))) <NEW_LINE> csvwriter = csv.DictWriter(csvfile, fieldnames=fieldnames, dialect='excel', quoting=csv.QUOTE_NONNUMERIC) <NEW_LINE> oldpath = "" <NEW_LINE> oldprop = "" <NEW_LINE> if self._docdict: <NEW_LINE> <INDENT> doc_list = ['Document Information'] <NEW_LINE> for doc_key in sorted(self._docdict): <NEW_LINE> <INDENT> doc_list = doc_list + [doc_key, self._docdict[doc_key]] <NEW_LINE> <DEDENT> csvwriter.writerow(dict(zip(range(len(doc_list)), doc_list))) <NEW_LINE> <DEDENT> header_list = [self._header_titles[h] if h is not None else "" for h in self._header] <NEW_LINE> csvwriter.writerow(dict(zip(range(len(header_list)), header_list))) <NEW_LINE> for dic in self._odmldict: <NEW_LINE> <INDENT> tmp_row = dic.copy() <NEW_LINE> tmp_row['Path'], tmp_row['PropertyName'] = tmp_row['Path'].split(':') <NEW_LINE> tmp_row['SectionName'] = tmp_row['Path'].split('/')[-1] <NEW_LINE> if tmp_row["Path"].split(':')[0] == oldpath: <NEW_LINE> <INDENT> if not self.show_all_sections: <NEW_LINE> <INDENT> for h in self._SECTION_INF + ['SectionName', 'Path']: <NEW_LINE> <INDENT> tmp_row[h] = "" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> oldpath = tmp_row["Path"].split(':')[0] <NEW_LINE> oldprop = "" <NEW_LINE> <DEDENT> if tmp_row['PropertyName'] == oldprop: <NEW_LINE> <INDENT> if not self.show_all_properties: <NEW_LINE> <INDENT> for h in self._PROPERTY_INF + ['PropertyName']: <NEW_LINE> <INDENT> tmp_row[h] = "" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> oldprop = tmp_row['PropertyName'] 
<NEW_LINE> <DEDENT> row = {header_list.index(self._header_titles[h]): tmp_row[h] for h in self._header if h is not None} <NEW_LINE> def write_row(row): <NEW_LINE> <INDENT> if not (list(row.values()) == ['' for r in row]): <NEW_LINE> <INDENT> csvwriter.writerow(row) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if tmp_row['Value'] == []: <NEW_LINE> <INDENT> tmp_row['Value'] = [''] <NEW_LINE> <DEDENT> for v in tmp_row['Value']: <NEW_LINE> <INDENT> if 'Value' in header_list: <NEW_LINE> <INDENT> row[header_list.index('Value')] = v <NEW_LINE> <DEDENT> write_row(row) <NEW_LINE> for h in self._header: <NEW_LINE> <INDENT> if ((not self.show_all_properties and h in self._PROPERTY_INF + ['PropertyName']) or (not self.show_all_sections and h in self._SECTION_INF + ['SectionName', 'Path'])): <NEW_LINE> <INDENT> row[header_list.index(self._header_titles[h])] = '' | Class to create a csv-file from an odml-file | 62598fb7091ae35668704d41 |
class TypeDeclarationFixup(FileMatch): <NEW_LINE> <INDENT> regexp = RE_FILE_BEGIN <NEW_LINE> def gen_patches(self) -> Iterable[Patch]: <NEW_LINE> <INDENT> if self.file.filename_matches('qom/object.h'): <NEW_LINE> <INDENT> self.debug("skipping object.h") <NEW_LINE> return <NEW_LINE> <DEDENT> decl_types: List[Type[TypeDeclaration]] = [DeclareInstanceChecker, DeclareInstanceType, DeclareClassCheckers, DeclareClassType, DeclareObjCheckers] <NEW_LINE> checker_dict: Dict[str, List[TypeDeclaration]] = {} <NEW_LINE> for t in decl_types: <NEW_LINE> <INDENT> for m in self.file.matches_of_type(t): <NEW_LINE> <INDENT> checker_dict.setdefault(m.group('uppercase'), []).append(m) <NEW_LINE> <DEDENT> <DEDENT> self.debug("checker_dict: %r", checker_dict) <NEW_LINE> for uppercase,checkers in checker_dict.items(): <NEW_LINE> <INDENT> fields = ('instancetype', 'classtype', 'uppercase', 'typename') <NEW_LINE> fvalues = dict((field, set(getattr(m, field) for m in checkers if getattr(m, field, None) is not None)) for field in fields) <NEW_LINE> for field,values in fvalues.items(): <NEW_LINE> <INDENT> if len(values) > 1: <NEW_LINE> <INDENT> for c in checkers: <NEW_LINE> <INDENT> c.warn("%s mismatch (%s)", field, ' '.join(values)) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> <DEDENT> field_dict = dict((f, v.pop() if v else None) for f,v in fvalues.items()) <NEW_LINE> yield from self.gen_patches_for_type(uppercase, checkers, field_dict) <NEW_LINE> <DEDENT> <DEDENT> def find_conflicts(self, uppercase: str, checkers: List[TypeDeclaration]) -> bool: <NEW_LINE> <INDENT> conflicting: List[FileMatch] = [] <NEW_LINE> conflicting.extend(chain(self.file.find_matches(DefineDirective, uppercase), self.file.find_matches(DeclareInterfaceChecker, uppercase, 'uppercase'), self.file.find_matches(DeclareClassType, uppercase, 'uppercase'), self.file.find_matches(DeclareInstanceType, uppercase, 'uppercase'))) <NEW_LINE> conflicting.extend(o for o in chain(self.allfiles.find_matches(DeclareInstanceChecker, 
uppercase, 'uppercase'), self.allfiles.find_matches(DeclareClassCheckers, uppercase, 'uppercase'), self.allfiles.find_matches(DeclareInterfaceChecker, uppercase, 'uppercase'), self.allfiles.find_matches(DefineDirective, uppercase)) if o is not None and o.file != self.file and not (o.file.filename.suffix == '.c' and self.file.filename.suffix == '.c')) <NEW_LINE> if conflicting: <NEW_LINE> <INDENT> for c in checkers: <NEW_LINE> <INDENT> c.warn("skipping due to conflicting %s macro", uppercase) <NEW_LINE> <DEDENT> for o in conflicting: <NEW_LINE> <INDENT> if o is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> o.warn("conflicting %s macro is here", uppercase) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def gen_patches_for_type(self, uppercase: str, checkers: List[TypeDeclaration], fields: Dict[str, Optional[str]]) -> Iterable[Patch]: <NEW_LINE> <INDENT> return <NEW_LINE> yield | Common base class for code that will look at a set of type declarations | 62598fb7956e5f7376df570e |
class Pupil_Server(Plugin): <NEW_LINE> <INDENT> def __init__(self, g_pool,address="tcp://127.0.0.1:5000"): <NEW_LINE> <INDENT> super(Pupil_Server, self).__init__(g_pool) <NEW_LINE> self.order = .9 <NEW_LINE> self.context = zmq.Context() <NEW_LINE> self.socket = self.context.socket(zmq.PUB) <NEW_LINE> self.address = address <NEW_LINE> self.set_server(address) <NEW_LINE> self.menu = None <NEW_LINE> <DEDENT> def init_gui(self): <NEW_LINE> <INDENT> if self.g_pool.app == 'capture': <NEW_LINE> <INDENT> self.menu = ui.Growing_Menu("Pupil Broadcast Server") <NEW_LINE> self.g_pool.sidebar.append(self.menu) <NEW_LINE> <DEDENT> elif self.g_pool.app == 'player': <NEW_LINE> <INDENT> self.menu = ui.Scrolling_Menu("Pupil Broadcast Server") <NEW_LINE> self.g_pool.gui.append(self.menu) <NEW_LINE> <DEDENT> self.menu.append(ui.Button('Close',self.close)) <NEW_LINE> help_str = "Pupil Message server: Using ZMQ and the *Publish-Subscribe* scheme" <NEW_LINE> self.menu.append(ui.Info_Text(help_str)) <NEW_LINE> self.menu.append(ui.Text_Input('address',self,setter=self.set_server,label='Address')) <NEW_LINE> <DEDENT> def deinit_gui(self): <NEW_LINE> <INDENT> if self.menu: <NEW_LINE> <INDENT> if self.g_pool.app == 'capture': <NEW_LINE> <INDENT> self.g_pool.sidebar.remove(self.menu) <NEW_LINE> <DEDENT> elif self.g_pool.app == 'player': <NEW_LINE> <INDENT> self.g_pool.gui.remove(self.menu) <NEW_LINE> <DEDENT> self.menu = None <NEW_LINE> <DEDENT> <DEDENT> def set_server(self,new_address): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.socket.unbind(self.address) <NEW_LINE> logger.debug('Detached from %s'%self.address) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.socket.bind(new_address) <NEW_LINE> self.address = new_address <NEW_LINE> logger.debug('Bound to %s'%self.address) <NEW_LINE> <DEDENT> except zmq.ZMQError as e: <NEW_LINE> <INDENT> logger.error("Could not set Socket: %s. 
Reason: %s"%(new_address,e)) <NEW_LINE> <DEDENT> <DEDENT> def update(self,frame,events): <NEW_LINE> <INDENT> for topic,data in events.iteritems(): <NEW_LINE> <INDENT> self.socket.send_multipart((topic, json.dumps(data))) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.alive = False <NEW_LINE> <DEDENT> def get_init_dict(self): <NEW_LINE> <INDENT> return {'address':self.address} <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self.deinit_gui() <NEW_LINE> self.context.destroy() | pupil server plugin | 62598fb721bff66bcd722d8a |
class MacroLoadError(Exception): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return ERROR_LOAD_MACRO.format(self.args[0], self.args[1]) | Raise this error if an exception occurs during macro import. | 62598fb7e5267d203ee6ba20 |
class DefaultNAT(object): <NEW_LINE> <INDENT> def __init__(self, engine): <NEW_LINE> <INDENT> self.engine = engine <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self.engine.data["default_nat"] <NEW_LINE> <DEDENT> def enable(self): <NEW_LINE> <INDENT> self.engine.data["default_nat"] = True <NEW_LINE> <DEDENT> def disable(self): <NEW_LINE> <INDENT> self.engine.data["default_nat"] = False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{0}(enabled={1})".format(self.__class__.__name__, self.status) | Default NAT on the engine is used to automatically create NAT
configurations based on internal routing. This simplifies the
need to create specific NAT rules, primarily for outbound traffic.
.. note:: You must call engine.update() to commit any changes. | 62598fb797e22403b383b027 |
class CRFLoss_gd(nn.Module): <NEW_LINE> <INDENT> def __init__(self, tagset_size, start_tag, end_tag, average_batch=True): <NEW_LINE> <INDENT> super(CRFLoss_gd, self).__init__() <NEW_LINE> self.tagset_size = tagset_size <NEW_LINE> self.average_batch = average_batch <NEW_LINE> self.crit = nn.CrossEntropyLoss(size_average=self.average_batch) <NEW_LINE> <DEDENT> def forward(self, scores, target, current): <NEW_LINE> <INDENT> ins_num = current.size(0) <NEW_LINE> current = current.expand(ins_num, 1, self.tagset_size) <NEW_LINE> scores = scores.view(ins_num, self.tagset_size, self.tagset_size) <NEW_LINE> current_score = torch.gather(scores, 1, current).squeeze() <NEW_LINE> return self.crit(current_score, target) | loss for greedy decode loss, i.e., although its for CRF Layer, we calculate the loss as
.. math::
\sum_{j=1}^n \log (p(\hat{y}_{j+1}|z_{j+1}, \hat{y}_{j}))
instead of
.. math::
\sum_{j=1}^n \log (\phi(\hat{y}_{j-1}, \hat{y}_j, \mathbf{z}_j)) - \log (\sum_{\mathbf{y}' \in \mathbf{Y}(\mathbf{Z})} \prod_{j=1}^n \phi(y'_{j-1}, y'_j, \mathbf{z}_j) )
args:
tagset_size: target_set_size
start_tag: ind for <start>
end_tag: ind for <pad>
average_batch: whether average the loss among batch | 62598fb72c8b7c6e89bd38e7 |
class Line(Curve): <NEW_LINE> <INDENT> _revit_object_class = DB.Line <NEW_LINE> @classmethod <NEW_LINE> def new(cls, pt1, pt2): <NEW_LINE> <INDENT> pt1 = XYZ(pt1) <NEW_LINE> pt2 = XYZ(pt2) <NEW_LINE> line = DB.Line.CreateBound(pt1.unwrap(), pt2.unwrap()) <NEW_LINE> return cls(line) <NEW_LINE> <DEDENT> @property <NEW_LINE> def start_point(self): <NEW_LINE> <INDENT> return XYZ(self._revit_object.GetEndPoint(0)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def end_point(self): <NEW_LINE> <INDENT> return XYZ(self._revit_object.GetEndPoint(1)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def mid_point(self): <NEW_LINE> <INDENT> return XYZ(self._revit_object.GetEndPoint(0.5)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def end_points(self): <NEW_LINE> <INDENT> return (XYZ(self.start_point), XYZ(self.end_point)) | DB.Line Wrapper
>>> line = Line.new([-10,0], [10,0])
>>> # or
>>> line = Line.new(ExistingLineObject)
>>> line.create_detail() | 62598fb7be383301e025391e |
class MedianFilterGpuTools(PluginTools): <NEW_LINE> <INDENT> pass | A plugin to apply 2D/3D median filter on a GPU. The 3D capability is enabled through padding. Note that the kernel_size in 2D will be kernel_size x kernel_size
and in 3D case kernel_size x kernel_size x kernel_size. | 62598fb7dc8b845886d536da |
class Update(ApiNavigator, Operator): <NEW_LINE> <INDENT> bl_idname = "api_navigator.update" <NEW_LINE> bl_label = "API Navigator Update" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> api_update() <NEW_LINE> return {'FINISHED'} | Update the tree structure | 62598fb732920d7e50bc6171 |
class Road(object): <NEW_LINE> <INDENT> def __init__(self, coordinate): <NEW_LINE> <INDENT> self.coordinate=coordinate | Class definition of road.
Each road is considered to be a square. | 62598fb7be7bc26dc9251eed |
class V1ListRunArtifactsResponse(object): <NEW_LINE> <INDENT> openapi_types = { 'count': 'int', 'results': 'list[V1RunArtifact]', 'previous': 'str', 'next': 'str' } <NEW_LINE> attribute_map = { 'count': 'count', 'results': 'results', 'previous': 'previous', 'next': 'next' } <NEW_LINE> def __init__(self, count=None, results=None, previous=None, next=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration.get_default_copy() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._count = None <NEW_LINE> self._results = None <NEW_LINE> self._previous = None <NEW_LINE> self._next = None <NEW_LINE> self.discriminator = None <NEW_LINE> if count is not None: <NEW_LINE> <INDENT> self.count = count <NEW_LINE> <DEDENT> if results is not None: <NEW_LINE> <INDENT> self.results = results <NEW_LINE> <DEDENT> if previous is not None: <NEW_LINE> <INDENT> self.previous = previous <NEW_LINE> <DEDENT> if next is not None: <NEW_LINE> <INDENT> self.next = next <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def count(self): <NEW_LINE> <INDENT> return self._count <NEW_LINE> <DEDENT> @count.setter <NEW_LINE> def count(self, count): <NEW_LINE> <INDENT> self._count = count <NEW_LINE> <DEDENT> @property <NEW_LINE> def results(self): <NEW_LINE> <INDENT> return self._results <NEW_LINE> <DEDENT> @results.setter <NEW_LINE> def results(self, results): <NEW_LINE> <INDENT> self._results = results <NEW_LINE> <DEDENT> @property <NEW_LINE> def previous(self): <NEW_LINE> <INDENT> return self._previous <NEW_LINE> <DEDENT> @previous.setter <NEW_LINE> def previous(self, previous): <NEW_LINE> <INDENT> self._previous = previous <NEW_LINE> <DEDENT> @property <NEW_LINE> def next(self): <NEW_LINE> <INDENT> return self._next <NEW_LINE> <DEDENT> @next.setter <NEW_LINE> def next(self, next): <NEW_LINE> <INDENT> self._next = next <NEW_LINE> <DEDENT> def to_dict(self, 
serialize=False): <NEW_LINE> <INDENT> result = {} <NEW_LINE> def convert(x): <NEW_LINE> <INDENT> if hasattr(x, "to_dict"): <NEW_LINE> <INDENT> args = getfullargspec(x.to_dict).args <NEW_LINE> if len(args) == 1: <NEW_LINE> <INDENT> return x.to_dict() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x.to_dict(serialize) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> <DEDENT> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> attr = self.attribute_map.get(attr, attr) if serialize else attr <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: convert(x), value )) <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], convert(item[1])), value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = convert(value) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1ListRunArtifactsResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1ListRunArtifactsResponse): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fb723849d37ff8511d5 |
class TestShowColormaps(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> plt.close('all') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> plt.close('all') <NEW_LINE> <DEDENT> def test_all(self): <NEW_LINE> <INDENT> fig = psyc.show_colormaps(use_qt=False) <NEW_LINE> self.assertEqual(fig.number, 1) <NEW_LINE> self.assertGreater(len(fig.axes), 15) <NEW_LINE> <DEDENT> def test_some(self): <NEW_LINE> <INDENT> cmap = plt.get_cmap('Reds') <NEW_LINE> fig = psyc.show_colormaps(['jet', cmap, 'red_white_blue'], use_qt=False) <NEW_LINE> self.assertEqual(fig.number, 1) <NEW_LINE> self.assertEqual(len(fig.axes), 3) <NEW_LINE> <DEDENT> @unittest.skipIf( six.PY2 or (bt.sns_version is not None and bt.sns_version < '0.8'), 'Not implemented TestCase method' if six.PY2 else "Crashed by seaborn") <NEW_LINE> def test_warning_similar(self): <NEW_LINE> <INDENT> with self.assertWarnsRegex(UserWarning, 'Similar colormaps'): <NEW_LINE> <INDENT> fig = psyc.show_colormaps('jett', use_qt=False) <NEW_LINE> <DEDENT> self.assertEqual(fig.number, 1) <NEW_LINE> self.assertEqual(len(fig.axes), 0) <NEW_LINE> <DEDENT> @unittest.skipIf( six.PY2 or (bt.sns_version is not None and bt.sns_version < '0.8'), 'Not implemented TestCase method' if six.PY2 else "Crashed by seaborn") <NEW_LINE> def test_warning_unknown(self): <NEW_LINE> <INDENT> with self.assertWarnsRegex(UserWarning, 'Run function without arguments'): <NEW_LINE> <INDENT> fig = psyc.show_colormaps('asdfkj', use_qt=False) <NEW_LINE> <DEDENT> self.assertEqual(fig.number, 1) <NEW_LINE> self.assertEqual(len(fig.axes), 0) | Test the :func:`psy_simple.colors.show_colormaps` function | 62598fb799fddb7c1ca62e7c |
class Database: <NEW_LINE> <INDENT> def __init__(self, connection_string): <NEW_LINE> <INDENT> engine = sa.create_engine(connection_string) <NEW_LINE> Session.configure(bind=engine) <NEW_LINE> Base.metadata.create_all(engine) <NEW_LINE> <DEDENT> def _nomade_model(self): <NEW_LINE> <INDENT> with session_scope() as session: <NEW_LINE> <INDENT> return session.query(NomadeModel).first() or NomadeModel() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def migration_id(self): <NEW_LINE> <INDENT> return self._nomade_model().migration or None <NEW_LINE> <DEDENT> @migration_id.setter <NEW_LINE> def migration_id(self, id): <NEW_LINE> <INDENT> nomade_model = self._nomade_model() <NEW_LINE> nomade_model.migration = id <NEW_LINE> with session_scope() as session: <NEW_LINE> <INDENT> if nomade_model.migration: <NEW_LINE> <INDENT> session.add(nomade_model) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> session.delete(nomade_model) <NEW_LINE> <DEDENT> except sa.exc.InvalidRequestError: <NEW_LINE> <INDENT> pass | Class responsible for dealing with simple database operations.
(e.g. get migration ID, set current migration ID). | 62598fb744b2445a339b6a05 |
class RegistrationForm(forms.Form): <NEW_LINE> <INDENT> username = forms.RegexField(regex=r'^\w+$', max_length=30, widget=forms.TextInput(attrs=attrs_dict), label=_(u'username')) <NEW_LINE> email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)), label=_(u'email address')) <NEW_LINE> password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False), label=_(u'password')) <NEW_LINE> password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False), label=_(u'password (again)')) <NEW_LINE> def clean_username(self): <NEW_LINE> <INDENT> if RegistrationProfile.objects( username__iexact=self.cleaned_data['username']): <NEW_LINE> <INDENT> raise forms.ValidationError( _(u'This username is already taken. Please choose another.')) <NEW_LINE> <DEDENT> return self.cleaned_data['username'] <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data: <NEW_LINE> <INDENT> if self.cleaned_data['password1'] != self.cleaned_data['password2']: <NEW_LINE> <INDENT> raise forms.ValidationError(_(u'You must type the same password each time')) <NEW_LINE> <DEDENT> <DEDENT> return self.cleaned_data <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> new_user = RegistrationProfile.create_inactive_user( username=self.cleaned_data['username'], password=self.cleaned_data['password1'], email=self.cleaned_data['email'], send_email = False) <NEW_LINE> new_user.is_active = True <NEW_LINE> new_user.activation_key = u"ALREADY_ACTIVATED" <NEW_LINE> new_user.save() <NEW_LINE> return new_user | Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should either preserve the base ``save()`` or implement
a ``save()`` which accepts the ``profile_callback`` keyword
argument and passes it through to
``RegistrationProfile.objects.create_inactive_user()``. | 62598fb74a966d76dd5eeffb |
class S3Error(CloudBackupLibError): <NEW_LINE> <INDENT> def __init__(self, tree, status, msg=None): <NEW_LINE> <INDENT> self.src = 's3' <NEW_LINE> self.err_no = status <NEW_LINE> self.tree = tree <NEW_LINE> if tree: <NEW_LINE> <INDENT> self._parse() <NEW_LINE> <DEDENT> elif msg: <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> <DEDENT> def _parse(self, tree=None): <NEW_LINE> <INDENT> if not tree: <NEW_LINE> <INDENT> tree = self.tree <NEW_LINE> <DEDENT> for tag_name in ('Code', 'Message', 'RequestId', 'Resource'): <NEW_LINE> <INDENT> tag = tree.find(tag_name) <NEW_LINE> if hasattr(tag, 'text'): <NEW_LINE> <INDENT> if tag_name == 'Message': <NEW_LINE> <INDENT> self.msg = tag.text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, tag_name.lower(), tag.text) | Amazon S3 error | 62598fb7bf627c535bcb15c6 |
class BootstrapTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.msa = AlignIO.read("TreeConstruction/msa.phy", "phylip") <NEW_LINE> <DEDENT> def test_bootstrap(self): <NEW_LINE> <INDENT> msa_list = list(Consensus.bootstrap(self.msa, 100)) <NEW_LINE> self.assertEqual(len(msa_list), 100) <NEW_LINE> self.assertEqual(len(msa_list[0]), len(self.msa)) <NEW_LINE> self.assertEqual(len(msa_list[0][0]), len(self.msa[0])) <NEW_LINE> <DEDENT> def test_bootstrap_trees(self): <NEW_LINE> <INDENT> calculator = DistanceCalculator("blosum62") <NEW_LINE> constructor = DistanceTreeConstructor(calculator) <NEW_LINE> trees = list(Consensus.bootstrap_trees(self.msa, 100, constructor)) <NEW_LINE> self.assertEqual(len(trees), 100) <NEW_LINE> self.assertIsInstance(trees[0], BaseTree.Tree) <NEW_LINE> <DEDENT> def test_bootstrap_consensus(self): <NEW_LINE> <INDENT> calculator = DistanceCalculator("blosum62") <NEW_LINE> constructor = DistanceTreeConstructor(calculator, "nj") <NEW_LINE> tree = Consensus.bootstrap_consensus( self.msa, 100, constructor, Consensus.majority_consensus ) <NEW_LINE> self.assertIsInstance(tree, BaseTree.Tree) <NEW_LINE> Phylo.write(tree, os.path.join(temp_dir, "bootstrap_consensus.tre"), "newick") | Test for bootstrap methods. | 62598fb73317a56b869be5df |
@projects_api.route('/<int:project_id>/sprints/<int:sprint_id>/report') <NEW_LINE> class SprintReport(Resource): <NEW_LINE> <INDENT> def get(self, project_id, sprint_id): <NEW_LINE> <INDENT> result = get_sprint_report(project_id, sprint_id) <NEW_LINE> return result | Operations related to report generation for sprint | 62598fb72ae34c7f260ab200 |
class ModelFormValidation(FormValidation): <NEW_LINE> <INDENT> resource = ModelResource <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> if 'resource' not in kwargs: <NEW_LINE> <INDENT> raise ImproperlyConfigured("You must provide a 'resource' to 'ModelFormValidation' classes.") <NEW_LINE> <DEDENT> self.resource = kwargs.pop('resource') <NEW_LINE> super(ModelFormValidation, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def _get_pk_from_resource_uri(self, resource_field, resource_uri): <NEW_LINE> <INDENT> base_resource_uri = resource_field.to().get_resource_uri() <NEW_LINE> if not resource_uri.startswith(base_resource_uri): <NEW_LINE> <INDENT> raise Exception("Couldn't match resource_uri {0} with {1}".format(resource_uri, base_resource_uri)) <NEW_LINE> <DEDENT> before, after = resource_uri.split(base_resource_uri) <NEW_LINE> return after[:-1] if after.endswith('/') else after <NEW_LINE> <DEDENT> def form_args(self, bundle): <NEW_LINE> <INDENT> rsc = self.resource() <NEW_LINE> kwargs = super(ModelFormValidation, self).form_args(bundle) <NEW_LINE> for name, rel_field in list(rsc.fields.items()): <NEW_LINE> <INDENT> data = kwargs['data'] <NEW_LINE> if not issubclass(rel_field.__class__, RelatedField): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if name in data and data[name] is not None: <NEW_LINE> <INDENT> resource_uri = (data[name] if rel_field.full is False else data[name]['resource_uri']) <NEW_LINE> pk = self._get_pk_from_resource_uri(rel_field, resource_uri) <NEW_LINE> kwargs['data'][name] = pk <NEW_LINE> <DEDENT> <DEDENT> return kwargs | Override tastypie's standard ``FormValidation`` since this does not care
about URI to PK conversion for ``ToOneField`` or ``ToManyField``. | 62598fb756ac1b37e6302310 |
class Conversion(models.Model): <NEW_LINE> <INDENT> deposit = models.OneToOneField(Deposit, related_name='conversion', on_delete=models.DO_NOTHING) <NEW_LINE> from_coin = models.ForeignKey(Coin, verbose_name="From Coin", on_delete=models.DO_NOTHING, related_name='conversions_from') <NEW_LINE> from_address = models.CharField('From Address / Account (if known)', max_length=255, blank=True, null=True) <NEW_LINE> to_coin = models.ForeignKey(Coin, verbose_name="Converted Into (Symbol)", on_delete=models.DO_NOTHING, related_name='conversions_to') <NEW_LINE> to_address = models.CharField('Destination Address / Account', max_length=255) <NEW_LINE> to_memo = models.CharField('Destination Memo (if applicable)', max_length=1000, blank=True, null=True) <NEW_LINE> to_amount = models.DecimalField('Amount Sent', max_digits=MAX_STORED_DIGITS, decimal_places=MAX_STORED_DP) <NEW_LINE> to_txid = models.CharField('Transaction ID (Destination coin)', max_length=255, null=True, blank=True) <NEW_LINE> tx_fee = models.DecimalField('Blockchain Fee', max_digits=MAX_STORED_DIGITS, decimal_places=MAX_STORED_DP, default=0) <NEW_LINE> ex_fee = models.DecimalField('Exchange Fee', max_digits=MAX_STORED_DIGITS, decimal_places=MAX_STORED_DP, default=0) <NEW_LINE> created_at = models.DateTimeField('Creation Time', auto_now_add=True, db_index=True) <NEW_LINE> updated_at = models.DateTimeField('Last Update', auto_now=True) <NEW_LINE> @property <NEW_LINE> def from_coin_symbol(self): <NEW_LINE> <INDENT> return self.from_coin.symbol <NEW_LINE> <DEDENT> @property <NEW_LINE> def to_coin_symbol(self): <NEW_LINE> <INDENT> return self.to_coin.symbol <NEW_LINE> <DEDENT> @property <NEW_LINE> def from_amount(self): <NEW_LINE> <INDENT> return self.deposit.amount <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'Conversion ID {self.id} - From: {self.from_coin} to {self.to_coin} (Destination: {self.to_address})' | Once a :class:`models.Deposit` has been scanned, assuming it has a valid address or 
account/memo, the
destination cryptocurrency/token will be sent to the user.
Successful conversion attempts are logged here, allowing for reference of where the coins came from, where
they went, and what fees were taken. | 62598fb71b99ca400228f5c2 |
class FDSStorage(Storage): <NEW_LINE> <INDENT> def __init__(self,client_conf=None,base_url=None): <NEW_LINE> <INDENT> if client_conf is None: <NEW_LINE> <INDENT> client_conf = settings.FDFS_CLIENT_CONF <NEW_LINE> <DEDENT> self.client_conf = client_conf <NEW_LINE> if base_url is None: <NEW_LINE> <INDENT> base_url = settings.FDFS_URL <NEW_LINE> <DEDENT> self.base_url = base_url <NEW_LINE> <DEDENT> def _open(self,name,mode='rb'): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _save(self,name,content): <NEW_LINE> <INDENT> client=Fdfs_client(self.client_conf) <NEW_LINE> ret=client.upload_by_buffer(content.read()) <NEW_LINE> if ret.get('Status') !='Upload successed.': <NEW_LINE> <INDENT> raise Exception('上传文件失败') <NEW_LINE> <DEDENT> file_name=ret.get('Remote file_id') <NEW_LINE> return file_name <NEW_LINE> <DEDENT> def exists(self, name): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def url(self, name): <NEW_LINE> <INDENT> return self.base_url+name | 文件存储 | 62598fb7627d3e7fe0e06fd4 |
class CurrentPedalboardObserver(ApplicationObserver): <NEW_LINE> <INDENT> def __init__(self, current_controller): <NEW_LINE> <INDENT> super(CurrentPedalboardObserver, self).__init__() <NEW_LINE> self._current_controller = current_controller <NEW_LINE> <DEDENT> def on_bank_updated(self, bank, update_type, index, origin, **kwargs): <NEW_LINE> <INDENT> if update_type == UpdateType.UPDATED: <NEW_LINE> <INDENT> old_bank = kwargs['old'] <NEW_LINE> if old_bank == self._current_controller.bank: <NEW_LINE> <INDENT> self._current_controller.set_bank(bank, try_preserve_index=True) <NEW_LINE> <DEDENT> <DEDENT> elif update_type == UpdateType.DELETED: <NEW_LINE> <INDENT> if bank == self._current_controller.bank: <NEW_LINE> <INDENT> next_bank_index = self._current_controller.next_bank_index(index-1) <NEW_LINE> new_current_bank = origin.banks[next_bank_index] <NEW_LINE> self._current_controller.set_bank(new_current_bank) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_pedalboard_updated(self, pedalboard, update_type, index, origin, **kwargs): <NEW_LINE> <INDENT> if update_type == UpdateType.UPDATED: <NEW_LINE> <INDENT> old_pedalboard = kwargs['old'] <NEW_LINE> if pedalboard == self._current_controller.pedalboard or old_pedalboard == self._current_controller.pedalboard: <NEW_LINE> <INDENT> self._current_controller.set_pedalboard(pedalboard, notify=False, force=True) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_effect_status_toggled(self, effect, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_connection_updated(self, connection, update_type, pedalboard, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_param_value_changed(self, param, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_effect_updated(self, effect, update_type, index, origin, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_current_pedalboard_changed(self, pedalboard, **kwargs): <NEW_LINE> <INDENT> pass | This viewer allows change the current pedalboard
if it is updated or removed or if your bank is updated or removed. | 62598fb7283ffb24f3cf39a8 |
class AuthTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = app.test_client() <NEW_LINE> self.user = { 'username': 'haddie', 'email': 'test@example.com', 'password': 'test_password'} <NEW_LINE> with app.app_context(): <NEW_LINE> <INDENT> db.create_all() <NEW_LINE> <DEDENT> <DEDENT> def test_successful_registration(self): <NEW_LINE> <INDENT> res = self.client.post('auth/register', data=self.user) <NEW_LINE> self.assertEqual(res.status_code, 201) <NEW_LINE> <DEDENT> def test_already_registered_user(self): <NEW_LINE> <INDENT> res = self.client.post('auth/register',data=self.user) <NEW_LINE> second_res = self.client.post('auth/register', data=self.user) <NEW_LINE> self.assertEqual(second_res.status_code, 409) <NEW_LINE> result = json.loads(second_res.data.decode()) <NEW_LINE> self.assertEqual( result['message'], "User already exists please login") <NEW_LINE> <DEDENT> def test_user_login(self): <NEW_LINE> <INDENT> res = self.client.post('/auth/register',data=self.user) <NEW_LINE> login_res = self.client.post('/auth/login', data=self.user) <NEW_LINE> result = json.loads(login_res.data.decode()) <NEW_LINE> self.assertEqual(result['message'], "You logged in successfully.") <NEW_LINE> self.assertEqual(login_res.status_code, 200) <NEW_LINE> self.assertTrue(result['access_token']) <NEW_LINE> <DEDENT> def test_user_with_wrong_credentials_cannot_login(self): <NEW_LINE> <INDENT> res = self.client.post('/auth/register',data=self.user) <NEW_LINE> user_data={'email':'zsh@gmail.com', 'password':'000000000'} <NEW_LINE> login_res = self.client.post('/auth/login',data=user_data) <NEW_LINE> result = json.loads(login_res.data.decode()) <NEW_LINE> self.assertEqual(result['message'], "Invalid email or password, Please try again") <NEW_LINE> self.assertEqual(login_res.status_code, 401) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> with app.app_context(): <NEW_LINE> <INDENT> db.session.remove() <NEW_LINE> db.drop_all() | This 
represents the authentication testcase | 62598fb7cc40096d6161a26b |
class WeekendShift(Shift): <NEW_LINE> <INDENT> def __init__(self, time, config): <NEW_LINE> <INDENT> super().__init__(time, config) <NEW_LINE> <DEDENT> def check_between(self, day, worker): <NEW_LINE> <INDENT> holidays = self.conf.holidays <NEW_LINE> before_day = holidays[holidays.index(day) - self.conf.day_between_weekend: holidays.index(day)] <NEW_LINE> after_day = holidays[holidays.index(day) + 1: holidays.index(day) + self.conf.day_between_weekend + 1] <NEW_LINE> return not sum([1 for day in before_day + after_day if worker.worker_days[day] in self.conf.all_shifts]) <NEW_LINE> <DEDENT> def arrange_shifts(self, workers): <NEW_LINE> <INDENT> for day in self.conf.holidays: <NEW_LINE> <INDENT> for count in range(self.count_shifts[day]): <NEW_LINE> <INDENT> random.shuffle(workers) <NEW_LINE> for index, worker in enumerate(workers): <NEW_LINE> <INDENT> if all([self.check_between(day, worker), worker.shift_in_month[self.time] > 0, self.conf.weekday_shifts_on_team[day] > teams[worker.team].count_workers_in_day[day], self.conf.max_weekend > worker.weekend_days, self.next_last_shift(day, worker), self.check_more_five(day, worker.worker_days, worker.count_last_month_shifts) if day < 29 else True, self.check_vacation(day, worker.worker_days)]): <NEW_LINE> <INDENT> self.install_shift(day, worker) <NEW_LINE> break <NEW_LINE> <DEDENT> if index + 1 == len(work_at_weekend): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return False | max_weekend - максимум смен в выходные за месяц | 62598fb73d592f4c4edbafe4 |
class Day(db.Model): <NEW_LINE> <INDENT> __tablename__ = "sp_days" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> day = db.Column(db.Integer) <NEW_LINE> points = db.Column(db.Integer) <NEW_LINE> player_id = db.Column(db.Integer, db.ForeignKey("sp_players.id")) <NEW_LINE> player = db.relationship("Player", backref=db.backref("days", lazy="dynamic")) <NEW_LINE> game_id = db.Column(db.Integer, db.ForeignKey("sp_games.id")) <NEW_LINE> game = db.relationship("Game", backref=db.backref("days", lazy="dynamic")) <NEW_LINE> coalition_id = db.Column(db.Integer, db.ForeignKey("sp_coalitions.id")) <NEW_LINE> coalition = db.relationship("Coalition", backref=db.backref("days")) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Day(%s)>" % (self.id) | Model for Day | 62598fb732920d7e50bc6173 |
class MetricsExampleModel(BaseModel): <NEW_LINE> <INDENT> summary: Optional[Dict[str, Any]] <NEW_LINE> output: Optional[Dict[str, Any]] <NEW_LINE> data: Optional[List[Any]] <NEW_LINE> @classmethod <NEW_LINE> def from_metrics(cls, metrics: MetricsExample) -> "MetricsExampleModel": <NEW_LINE> <INDENT> return cls.parse_obj(metrics.exportAsDict()) | A variant of `MetricsExample` based on model. | 62598fb7ff9c53063f51a773 |
class UpdateUEcFirewallResponseSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = {} | UpdateUEcFirewall - 更新防火墙信息,新增和删除规则 | 62598fb7a8370b77170f0503 |
class ResourceRequirements(object): <NEW_LINE> <INDENT> def __init__(self, requirement_dict=None): <NEW_LINE> <INDENT> self._requirements = requirement_dict if requirement_dict else {} <NEW_LINE> <DEDENT> def set(self, key, value): <NEW_LINE> <INDENT> if key == "tags": <NEW_LINE> <INDENT> self._set_tag(tags=value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(value, dict) and key in self._requirements and isinstance( self._requirements[key], dict): <NEW_LINE> <INDENT> self._requirements[key] = merge(self._requirements[key], value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._requirements[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> if "." in key and len(key) > 1: <NEW_LINE> <INDENT> return self._recursive_get(key) <NEW_LINE> <DEDENT> return self._requirements.get(key, None) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self._requirements[item] <NEW_LINE> <DEDENT> def _recursive_get(self, key, dic=None): <NEW_LINE> <INDENT> return recursive_search(key, dic) if dic else recursive_search(key, self._requirements) <NEW_LINE> <DEDENT> def get_requirements(self): <NEW_LINE> <INDENT> return self._requirements <NEW_LINE> <DEDENT> def _set_tag(self, tag=None, tags=None, value=True): <NEW_LINE> <INDENT> existing_tags = self._requirements.get("tags") <NEW_LINE> if tags and not tag: <NEW_LINE> <INDENT> existing_tags = merge(existing_tags, tags) <NEW_LINE> self._requirements["tags"] = existing_tags <NEW_LINE> <DEDENT> elif tag and not tags: <NEW_LINE> <INDENT> existing_tags[tag] = value <NEW_LINE> self._requirements["tags"] = existing_tags <NEW_LINE> <DEDENT> <DEDENT> def remove_empty_tags(self, tags=None): <NEW_LINE> <INDENT> new_tags = {} <NEW_LINE> old_tags = tags if tags else self.get("tags") <NEW_LINE> for tag in old_tags.keys(): <NEW_LINE> <INDENT> if old_tags[tag] is not None: <NEW_LINE> <INDENT> new_tags[tag] = old_tags[tag] <NEW_LINE> <DEDENT> <DEDENT> if not tags: 
<NEW_LINE> <INDENT> self._requirements["tags"] = new_tags <NEW_LINE> return None <NEW_LINE> <DEDENT> return new_tags | ResourceRequirements class. Contains methods for getting and setting requirement values as
well as processing requirements into formats supported by allocators. | 62598fb7091ae35668704d45 |
class SubscriptionNotFound(UserError): <NEW_LINE> <INDENT> def __init__(self, msg='Subscription not found'): <NEW_LINE> <INDENT> UserError.__init__(self, msg) | Raised when a subscription is not found. | 62598fb77047854f4633f4fb |
class Spectrum(object): <NEW_LINE> <INDENT> def __init__(self,filepath, name): <NEW_LINE> <INDENT> self.filepath = filepath <NEW_LINE> self.name = name <NEW_LINE> self._data, self.spec_dict = self.get_spec() <NEW_LINE> self.x = self._data['x'] <NEW_LINE> self.y = self._data['y'] <NEW_LINE> self.xy_measure = np.array([self._data['x'],self._data['y']], dtype='float64') <NEW_LINE> self.spd = self.to_spd() <NEW_LINE> self.XYZ = self.spd_to_XYZ() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return (self.name, str(self._data)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_spec(self, start=380, end=780): <NEW_LINE> <INDENT> former = r'(.*data=")([,\.\dE-]*)(".*mfp=")(.*)(" \/>)' <NEW_LINE> with open(self.filepath) as f: <NEW_LINE> <INDENT> raw_data = f.read() <NEW_LINE> matched = re.match(former, raw_data) <NEW_LINE> data = list(map(float,matched.group(2).split(','))) <NEW_LINE> data = dict(zip(keys, data)) <NEW_LINE> raw_spec = list(zip(range(350, 1001),map(float,matched.group(4).split(',')))) <NEW_LINE> for x in raw_spec: <NEW_LINE> <INDENT> if x[0] == start: <NEW_LINE> <INDENT> start_index = raw_spec.index(x) <NEW_LINE> <DEDENT> elif x[0] == end: <NEW_LINE> <INDENT> end_index = raw_spec.index(x) <NEW_LINE> <DEDENT> <DEDENT> spec_dict= dict(raw_spec[start_index:end_index+1]) <NEW_LINE> return (data, spec_dict) <NEW_LINE> <DEDENT> <DEDENT> def to_spd(self): <NEW_LINE> <INDENT> self.spd = colour.SpectralPowerDistribution(self.spec_dict, name=self.name) <NEW_LINE> return self.spd <NEW_LINE> <DEDENT> def get(self, attr): <NEW_LINE> <INDENT> return self._data.get(attr,0) <NEW_LINE> <DEDENT> cmfs = colour.STANDARD_OBSERVERS_CMFS['CIE 1931 2 Degree Standard Observer'] <NEW_LINE> illuminant = colour.ILLUMINANTS_RELATIVE_SPDS['D65'] <NEW_LINE> def spd_to_XYZ(self, cmfs=cmfs, illuminant=illuminant): <NEW_LINE> <INDENT> XYZ = colour.spectral_to_XYZ(self.spd, cmfs, illuminant) <NEW_LINE> return XYZ <NEW_LINE> 
<DEDENT> def XYZ_to_xy(self): <NEW_LINE> <INDENT> return colour.XYZ_to_xy(self.XYZ) <NEW_LINE> <DEDENT> def spd_plot(self): <NEW_LINE> <INDENT> cpt.single_spd_plot(self.spd) | 导入仪器数据,将其数据定义成一个光谱对象,主要属性有`data`和`spec_dict` | 62598fb7fff4ab517ebcd90d |
class GameSprit(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, image_name, speed = 1): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.image = pygame.image.load(image_name) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.speed = speed <NEW_LINE> self.speedy = 0 <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.rect.y += self.speed | 飞机大战游戏精灵 | 62598fb7aad79263cf42e8f9 |
class Keys(DbEntityKeys, BuiltFormKeys, TemplateKeys, SRSKeys): <NEW_LINE> <INDENT> class Fab(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def ricate(cls, key): <NEW_LINE> <INDENT> return "__".join(cls.prefixes() + [key]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def prefixes(cls): <NEW_LINE> <INDENT> prefix = cls.prefix() <NEW_LINE> return super(Keys.Fab, cls).prefixes() if hasattr(super(Keys.Fab, cls), 'prefixes') else [] + [ prefix] if prefix else [] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def prefix(cls): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def prefix(cls): <NEW_LINE> <INDENT> return cls.Fab.prefix() <NEW_LINE> <DEDENT> POLICY_TRANSIT = 'transit' <NEW_LINE> CONTENT_TYPE_XML = 'xml', <NEW_LINE> CONTENT_TYPE_SLD = 'sld' <NEW_LINE> CONTENT_TYPE_PNG = 'png' <NEW_LINE> CONTENT_TYPE_PYTHON = 'python' <NEW_LINE> CONTENT_TYPE_CSS = 'css' <NEW_LINE> CONTENT_TYPE_JSON = 'json' <NEW_LINE> INTEREST_OWNER = 'owner' <NEW_LINE> INTEREST_DEPENDENT = 'dependent' <NEW_LINE> INTEREST_FOLLOWER = 'follower' <NEW_LINE> SORT_TYPE_KEY = 'key' <NEW_LINE> SORT_TYPE_NAME = 'name' <NEW_LINE> SORT_TYPE_PRESENTATION_MEDIA_DB_ENTITY_KEY = 'presentation_media_db_entity_key' <NEW_LINE> SORT_TYPE_PRESENTATION_MEDIA_DB_ENTITY_NAME = 'presentation_media_db_entity_name' <NEW_LINE> SORT_TYPE_PRESENTATION_MEDIA_MEDIUM_KEY = 'presentation_media_medium_key' <NEW_LINE> SORT_TYPE_PRESENTATION_MEDIA_MEDIUM_NAME = 'presentation_media_medium_name' <NEW_LINE> GLOBAL_CONFIG_KEY = 'global' <NEW_LINE> GLOBAL_CONFIG_NAME = 'Global Config' <NEW_LINE> PRESENTATION_GEOSERVER = 'presentation_geoserver' <NEW_LINE> LAYER_LIBRARY_DEFAULT = 'library_default' <NEW_LINE> MEDIUM_DEFAULT = 'medium_default' <NEW_LINE> STYLE_BUILT_FORM = 'built_form_cartoCSS' | Keys representing names by which to identity Policies, DBEntities, DbEntityInterests, Media, and Media content types.
These values could be represented as instantiations of a reference table, but for now this is adequate. Some of these will also be represented as classes, which might cause the key to be embedded in the class definition instead. | 62598fb767a9b606de5460f7 |
class GroupsV2GroupV2ClanInfo(object): <NEW_LINE> <INDENT> swagger_types = { 'clan_callsign': 'str', 'clan_banner_data': 'ComponentsschemasGroupsV2ClanBanner' } <NEW_LINE> attribute_map = { 'clan_callsign': 'clanCallsign', 'clan_banner_data': 'clanBannerData' } <NEW_LINE> def __init__(self, clan_callsign=None, clan_banner_data=None): <NEW_LINE> <INDENT> self._clan_callsign = None <NEW_LINE> self._clan_banner_data = None <NEW_LINE> self.discriminator = None <NEW_LINE> if clan_callsign is not None: <NEW_LINE> <INDENT> self.clan_callsign = clan_callsign <NEW_LINE> <DEDENT> if clan_banner_data is not None: <NEW_LINE> <INDENT> self.clan_banner_data = clan_banner_data <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def clan_callsign(self): <NEW_LINE> <INDENT> return self._clan_callsign <NEW_LINE> <DEDENT> @clan_callsign.setter <NEW_LINE> def clan_callsign(self, clan_callsign): <NEW_LINE> <INDENT> self._clan_callsign = clan_callsign <NEW_LINE> <DEDENT> @property <NEW_LINE> def clan_banner_data(self): <NEW_LINE> <INDENT> return self._clan_banner_data <NEW_LINE> <DEDENT> @clan_banner_data.setter <NEW_LINE> def clan_banner_data(self, clan_banner_data): <NEW_LINE> <INDENT> self._clan_banner_data = clan_banner_data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): 
<NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, GroupsV2GroupV2ClanInfo): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb799cbb53fe6830ffc |
class SubordinateComponents(Section): <NEW_LINE> <INDENT> ROOT_NAME = 'dsc' <NEW_LINE> type = xmlmap.StringField("@type") <NEW_LINE> c = xmlmap.NodeListField("e:c01", Component) <NEW_LINE> def hasSeries(self): <NEW_LINE> <INDENT> if len(self.c) and (self.c[0].level == 'series' or (self.c[0].c and self.c[0].c[0])): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Description of Subordinate Components (dsc element); container lists and series.
Expected node element passed to constructor: `ead/archdesc/dsc`. | 62598fb71b99ca400228f5c3 |
class MarriageItem: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I16, 'pos', None, -1, ), (2, TType.I32, 'itemCfgId', None, 0, ), ) <NEW_LINE> def __init__(self, pos=thrift_spec[1][4], itemCfgId=thrift_spec[2][4],): <NEW_LINE> <INDENT> self.pos = pos <NEW_LINE> self.itemCfgId = itemCfgId <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I16: <NEW_LINE> <INDENT> self.pos = iprot.readI16() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.itemCfgId = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('MarriageItem') <NEW_LINE> if self.pos is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('pos', TType.I16, 1) <NEW_LINE> oprot.writeI16(self.pos) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.itemCfgId is not None: 
<NEW_LINE> <INDENT> oprot.writeFieldBegin('itemCfgId', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.itemCfgId) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.pos) <NEW_LINE> value = (value * 31) ^ hash(self.itemCfgId) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- pos
- itemCfgId | 62598fb7283ffb24f3cf39a9 |
class Cmd(AlfOperator): <NEW_LINE> <INDENT> __slots__ = ( 'appname', 'args', ) <NEW_LINE> alf_schema = schema.Cmd <NEW_LINE> def _parse_mandatory_args(self, args, kwargs): <NEW_LINE> <INDENT> super(Cmd, self)._parse_mandatory_args(args, kwargs) <NEW_LINE> self.args = list(args[:]) <NEW_LINE> del(args[:]) <NEW_LINE> <DEDENT> def executable_property(): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return self.appname <NEW_LINE> <DEDENT> def set(self, val): <NEW_LINE> <INDENT> self.appname = val <NEW_LINE> <DEDENT> return property(get, set) <NEW_LINE> <DEDENT> executable = executable_property() <NEW_LINE> del(executable_property) | A Command representation, see http://renderman.pixar.com/resources/current/tractor/scriptingOperators.html#cmd
Examples
--------
@snippet test_examples.py alf_cmd | 62598fb7498bea3a75a57c48 |
class ClusterListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[Cluster]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["Cluster"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ClusterListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link | The response of the List Event Hubs Clusters operation.
:param value: The Event Hubs Clusters present in the List Event Hubs operation results.
:type value: list[~azure.mgmt.eventhub.v2021_11_01.models.Cluster]
:param next_link: Link to the next set of results. Empty unless the value parameter contains an
incomplete list of Event Hubs Clusters.
:type next_link: str | 62598fb77c178a314d78d5c4 |
class ViewCheckerBlock(XBlock): <NEW_LINE> <INDENT> has_children = True <NEW_LINE> state = String(scope=Scope.user_state) <NEW_LINE> position = 0 <NEW_LINE> def student_view(self, context): <NEW_LINE> <INDENT> msg = "{} != {}".format(self.state, self.scope_ids.usage_id) <NEW_LINE> assert self.state == unicode(self.scope_ids.usage_id), msg <NEW_LINE> fragments = self.runtime.render_children(self) <NEW_LINE> result = Fragment( content=u"<p>ViewCheckerPassed: {}</p>\n{}".format( unicode(self.scope_ids.usage_id), "\n".join(fragment.content for fragment in fragments), ) ) <NEW_LINE> return result | XBlock for testing user state in views. | 62598fb77d847024c075c4e2 |
class SSA: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = np.array(data) <NEW_LINE> try: <NEW_LINE> <INDENT> self.index = list(data.index) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.index = [i for i in range(self.data.shape[0])] <NEW_LINE> <DEDENT> self.M = None <NEW_LINE> self.N2 = None <NEW_LINE> self.X = None <NEW_LINE> self.covmat = None <NEW_LINE> self.E = None <NEW_LINE> self.values = None <NEW_LINE> self.RC = None <NEW_LINE> self.algo = None <NEW_LINE> self.freqs = None <NEW_LINE> self.freq_rank = None <NEW_LINE> self.ismc = False <NEW_LINE> <DEDENT> def _embed(self, M): <NEW_LINE> <INDENT> self.M = M <NEW_LINE> N = self.data.shape[0] <NEW_LINE> self.N2 = N - self.M + 1 <NEW_LINE> if (self.N2 < self.M): <NEW_LINE> <INDENT> raise ValueError('Window length is too big') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.X = utils.embedded(self.data, self.M) <NEW_LINE> <DEDENT> <DEDENT> def _compute_cov(self, algo='BK'): <NEW_LINE> <INDENT> if algo == 'BK': <NEW_LINE> <INDENT> self.covmat = utils.covmat_bk(self.X, self.N2) <NEW_LINE> <DEDENT> elif algo == 'VG': <NEW_LINE> <INDENT> self.covmat = utils.covmat_vg(self.data, self.M) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Incorrect algorithm name') <NEW_LINE> <DEDENT> self.algo = algo <NEW_LINE> <DEDENT> def run_ssa(self, M, algo='BK'): <NEW_LINE> <INDENT> self._embed(M) <NEW_LINE> self._compute_cov(algo=algo) <NEW_LINE> self.values, self.E = utils.eigen_decomp(self.covmat) <NEW_LINE> self.freqs = utils.dominant_freqs(self.E) <NEW_LINE> self.freq_rank = self.freqs.argsort() <NEW_LINE> self.RC = utils.RC_table(self) <NEW_LINE> <DEDENT> def plot(self, freq_rank=True): <NEW_LINE> <INDENT> return utils.plot(self, freq_rank) <NEW_LINE> <DEDENT> def show_f(self): <NEW_LINE> <INDENT> return utils.freq_table(self) <NEW_LINE> <DEDENT> def reconstruct(self, components): <NEW_LINE> <INDENT> if len(components) == 2: <NEW_LINE> <INDENT> name = 'RC 
{}-{}'.format(components[0] + 1, components[1] + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = 'Reconstruction' <NEW_LINE> <DEDENT> components = [i - 1 for i in components] <NEW_LINE> res = self.RC.iloc[:, components].sum(axis=1) <NEW_LINE> res.name = name <NEW_LINE> res.index = self.index <NEW_LINE> return res | Generic instance of a SSA analysis
Args:
data: array like, input time series, must be one dimensional
Attributes:
data (array): input time series
index (index): index of the time series
M (int): Window length
N2 (int): Reduced length
X (numpy matrix): Trajectory matrix
covmat (numpy matrix): Covariance matrix
E (numpy matrix): EOF matrix
values (list): Eigenvalues
RC (DataFrame): Reconstructed Components
algo (str): Algorithm used to compute the covariance matrix
freqs (list): Dominant frequencies of the EOFs
freq_rank (index): Frequency ranked index
ismc (bool): Monte Carlo test | 62598fb7f548e778e596b6cb |
@attr(shard=10) <NEW_LINE> class StudioHelpTest(StudioCourseTest): <NEW_LINE> <INDENT> def test_studio_help_links(self): <NEW_LINE> <INDENT> page = DashboardPage(self.browser) <NEW_LINE> page.visit() <NEW_LINE> click_studio_help(page) <NEW_LINE> links = studio_help_links(page) <NEW_LINE> expected_links = [{ 'href': u'http://docs.edx.org/', 'text': u'edX Documentation', 'sr_text': u'Access documentation on http://docs.edx.org' }, { 'href': u'https://open.edx.org/', 'text': u'Open edX Portal', 'sr_text': u'Access the Open edX Portal' }, { 'href': u'https://www.edx.org/course/overview-creating-edx-course-edx-edx101#.VO4eaLPF-n1', 'text': u'Enroll in edX101', 'sr_text': u'Enroll in edX101: Overview of Creating an edX Course' }, { 'href': u'https://www.edx.org/course/creating-course-edx-studio-edx-studiox', 'text': u'Enroll in StudioX', 'sr_text': u'Enroll in StudioX: Creating a Course with edX Studio' }, { 'href': u'mailto:partner-support@example.com', 'text': u'Contact Us', 'sr_text': 'Send an email to partner-support@example.com' }] <NEW_LINE> for expected, actual in zip(expected_links, links): <NEW_LINE> <INDENT> self.assertEqual(expected['href'], actual.get_attribute('href')) <NEW_LINE> self.assertEqual(expected['text'], actual.text) <NEW_LINE> self.assertEqual( expected['sr_text'], actual.find_element_by_xpath('following-sibling::span').text ) | Tests for Studio help. | 62598fb73346ee7daa3376db |
class UserReceivingFormMixin(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.user = kwargs.pop("user", None) <NEW_LINE> super(UserReceivingFormMixin, self).__init__(*args, **kwargs) | Mixin for user receiving in forms.
It should be used with UserPassingViewMixin | 62598fb7009cb60464d01649 |
class THSGN(THSHQ): <NEW_LINE> <INDENT> path = 'gn' <NEW_LINE> url = f"{base_url}{path}/" <NEW_LINE> api_name = "同花顺概念" <NEW_LINE> def _parse_head_in_page(self): <NEW_LINE> <INDENT> tr_css = '.m-table tbody tr' <NEW_LINE> trs = self.driver.find_elements_by_css_selector(tr_css) <NEW_LINE> dates = [tr.find_element_by_css_selector( ':first-child').text for tr in trs] <NEW_LINE> events = [tr.find_element_by_css_selector( ':nth-child(3)').text for tr in trs] <NEW_LINE> dates = pd.to_datetime(dates) <NEW_LINE> urls = self._parse_href_in_table() <NEW_LINE> res = [] <NEW_LINE> for date, event, key in zip(dates, events, urls): <NEW_LINE> <INDENT> res.append( GNINFO( url=urls[key], 日期=date, 名称=key, 编码=urls[key].split('/')[-2], 驱动事件=event, 概念定义='', 股票列表=[], ) ) <NEW_LINE> <DEDENT> del self.driver.requests <NEW_LINE> return res <NEW_LINE> <DEDENT> def _add_update(self, info): <NEW_LINE> <INDENT> definition_css = '.board-txt > p:nth-child(2)' <NEW_LINE> info.概念定义 = self.driver.find_element_by_css_selector( definition_css).text <NEW_LINE> return info | 同花顺概念 | 62598fb74a966d76dd5eeffe |
class Outlier_StandardDev(OutlierMethod): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute(self, data): <NEW_LINE> <INDENT> self.mean = mean([value[1] for value in data]) <NEW_LINE> self.std = std([value[1] for value in data]) <NEW_LINE> <DEDENT> def get_scores(self, data): <NEW_LINE> <INDENT> self.execute(data) <NEW_LINE> if self.std == 0: <NEW_LINE> <INDENT> return [0]*len(data) <NEW_LINE> <DEDENT> return [(value[1] - self.mean)/self.std for value in data] | Statistical outlier detection method using the standard deviation.
Usually, items with score > 2 are considered as outliers. | 62598fb74527f215b58e9ffc |
class ComputeCauseEffectStructure(MapReduce): <NEW_LINE> <INDENT> description = "Computing concepts" <NEW_LINE> @property <NEW_LINE> def subsystem(self): <NEW_LINE> <INDENT> return self.context[0] <NEW_LINE> <DEDENT> def empty_result(self, *args): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def compute(mechanism, subsystem, purviews, cause_purviews, effect_purviews): <NEW_LINE> <INDENT> concept = subsystem.concept( mechanism, purviews=purviews, cause_purviews=cause_purviews, effect_purviews=effect_purviews, ) <NEW_LINE> concept.subsystem = None <NEW_LINE> return concept <NEW_LINE> <DEDENT> def process_result(self, new_concept, concepts): <NEW_LINE> <INDENT> if new_concept.phi > 0: <NEW_LINE> <INDENT> new_concept.subsystem = self.subsystem <NEW_LINE> concepts.append(new_concept) <NEW_LINE> <DEDENT> return concepts | Engine for computing a |CauseEffectStructure|. | 62598fb7baa26c4b54d4f3e0 |
class syncthreads_count(Stub): <NEW_LINE> <INDENT> _description_ = '<syncthreads_count()>' | syncthreads_count(predictate)
An extension to numba.cuda.syncthreads where the return value is a count
of the threads where predicate is true. | 62598fb7a8370b77170f0505 |
class Aug30(Version): <NEW_LINE> <INDENT> def __init__(self, options=None): <NEW_LINE> <INDENT> import parameters_new as parameters <NEW_LINE> self.name = "Aug30" <NEW_LINE> self.Nmin = 280 <NEW_LINE> self.Nmax = 560 <NEW_LINE> self.buried_cutoff = 15.00; self.buried_cutoff_sqr = self.buried_cutoff*self.buried_cutoff <NEW_LINE> self.desolv_cutoff = 20.00; self.desolv_cutoff_sqr = self.desolv_cutoff*self.desolv_cutoff <NEW_LINE> self.setDesolvation("VolumeModel", prefactor=-14.00, surface=0.00) <NEW_LINE> self.setCoulomb("Linear", cutoff=[4.0, 7.0], diel=80.0, scaled=True) <NEW_LINE> self.doingBackBoneReorganization = True <NEW_LINE> self.BackBoneParameters = parameters.getHydrogenBondParameters(type='back-bone') <NEW_LINE> self.SideChainParameters = parameters.getHydrogenBondParameters(type='side-chain') <NEW_LINE> self.interaction = getSideChainInteractionMatrix(side_chain=parameters.getInteraction()) <NEW_LINE> self.setBackBoneMaxPKA(resType=None, dpka=-0.70) <NEW_LINE> self.setSideChainMaxPKA(resType=None, dpka=-0.70) <NEW_LINE> self.angularDependentSideChainInteractions = ["HIS", "ARG", "AMD", "TRP"] <NEW_LINE> self.exclude_sidechain_interactions = [] | This is a test to set up rules for different propka versions | 62598fb799fddb7c1ca62e7e |
class SiteObject(): <NEW_LINE> <INDENT> def __init__(self, filename, md5, site_prefix='', dir_prefix=''): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.md5 = md5 <NEW_LINE> self.dir_prefix = dir_prefix <NEW_LINE> self.site_prefix = site_prefix <NEW_LINE> <DEDENT> @property <NEW_LINE> def s3_key(self): <NEW_LINE> <INDENT> filename = self.filename <NEW_LINE> if self.dir_prefix: <NEW_LINE> <INDENT> filename = remove_prefix(filename, path.join(self.dir_prefix, '')) <NEW_LINE> <DEDENT> return f'{self.site_prefix}/{filename}' <NEW_LINE> <DEDENT> def upload_to_s3(self, bucket, s3_client): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def delete_from_s3(self, bucket, s3_client): <NEW_LINE> <INDENT> s3_client.delete_object( Bucket=bucket, Key=self.s3_key, ) | An abstract class for an individual object that can be uploaded to S3 | 62598fb70fa83653e46f5008 |
class RateLimitation(asyncpokepyExceptions): <NEW_LINE> <INDENT> def __init__(self, error='Your IP address is being ratelimited by the API. Try again later. Continued overworking may result in your IP being blocked.'): <NEW_LINE> <INDENT> self.error = error <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.error | Raised when you are being ratelimited
| 62598fb771ff763f4b5e789e |
class LingerBaseAdapterFactory(LingerPlugin): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(LingerBaseAdapterFactory, self).__init__() <NEW_LINE> self.item = LingerBaseAdapter <NEW_LINE> <DEDENT> def get_instance(self, configuration): <NEW_LINE> <INDENT> return self.item(configuration) <NEW_LINE> <DEDENT> def get_fields(self): <NEW_LINE> <INDENT> fields, optional_fields = super(LingerBaseAdapterFactory, self).get_fields() <NEW_LINE> return (fields, optional_fields) | Base adapter factory for linger | 62598fb74428ac0f6e658648 |
class UserCreationForm(forms.ModelForm): <NEW_LINE> <INDENT> password1 = forms.CharField(label='Password', widget=forms.PasswordInput) <NEW_LINE> password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('email', 'birth_date') <NEW_LINE> <DEDENT> def clean_password2(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get("password1") <NEW_LINE> password2 = self.cleaned_data.get("password2") <NEW_LINE> if password1 and password2 and password1 != password2: <NEW_LINE> <INDENT> raise forms.ValidationError("Passwords don't match") <NEW_LINE> <DEDENT> return password2 <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(UserCreationForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password1"]) <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user | A form for creating new users. Includes all the required
fields, plus a repeated password. | 62598fb74a966d76dd5eefff |
class ScreenShotError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, details=None): <NEW_LINE> <INDENT> super().__init__(message) <NEW_LINE> self.details = details or {} | Error handling class. | 62598fb7236d856c2adc94d3 |
class InputStream: <NEW_LINE> <INDENT> def probe(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> vim.eval("getchar(0)") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise UserInterrupt() | Get a character from Vim's input stream.
Used to check for keyboard interrupts. | 62598fb7aad79263cf42e8fb |
class IntegrityError(CacaoAccountingException): <NEW_LINE> <INDENT> pass | Clase para generar errores de Integridad. | 62598fb75166f23b2e243503 |
class PixivAuthFailed(PixivError): <NEW_LINE> <INDENT> pass | Auth error | 62598fb7442bda511e95c582 |
class Development(Config): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> TESTING = True | Configurations for development | 62598fb7be8e80087fbbf190 |
class RedHatSssd(sssd, RedHatPlugin): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> super(RedHatSssd, self).setup() | sssd-related Diagnostic Information on Red Hat based distributions
| 62598fb7cc0a2c111447b134 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.