code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Caption(ContainedText):
    """A table caption element."""

    def __init__(self, table, parentviewer, attrs):
        ContainedText.__init__(self, table, parentviewer, attrs)
        # Captions are drawn flat, without the border of the base widget.
        self._tw.config(relief=FLAT, borderwidth=0)
        def conv_align(val):
            # Normalize the ALIGN attribute to one of the allowed keywords;
            # anything unrecognized falls back to 'top'.
            return grailutil.conv_enumeration(
                grailutil.conv_normstring(val),
                ['top', 'bottom', 'left', 'right']) or 'top'
        self.align = self.attribute('align', conv=conv_align)

    def finish(self, table, padding=0):
        ContainedText.finish(self, table, padding=0)
        # Center the whole caption text within the viewer.
        self._viewer.text.tag_add('contents', 1.0, END)
        self._viewer.text.tag_config('contents', justify=CENTER)
A table caption element.
62598fba97e22403b383b080
class WebHookMethod(Enum):
    """HTTP method used when invoking a webhook."""
    GET = "GET"
    POST = "POST"
WebHook Method
62598fba26068e7796d4cad3
class TweetAnalyzer():
    """Functionality for analyzing and categorizing content from tweets."""

    # Strips @mentions, URLs and any character that is not alphanumeric,
    # space or tab.  A raw string is used so `\w` / `\S` are not treated as
    # (invalid) string escape sequences, and the pattern is compiled once.
    _CLEAN_RE = re.compile(r"(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+://\S+)")

    def clean_tweet(self, tweet):
        """Return *tweet* with mentions, punctuation and URLs removed,
        normalized to single spaces."""
        return ' '.join(self._CLEAN_RE.sub(" ", tweet).split())

    def analyze_sentiment(self, tweet):
        """Classify the polarity of *tweet*: 1 positive, 0 neutral,
        -1 negative (via TextBlob sentiment analysis)."""
        analysis = TextBlob(self.clean_tweet(tweet))
        polarity = analysis.sentiment.polarity
        if polarity > 0:
            return 1
        if polarity == 0:
            return 0
        return -1

    def tweets_to_data_frame(self, tweets):
        """Build a DataFrame with text, source, likes and retweet columns
        from an iterable of tweet objects."""
        df = pd.DataFrame(data=[tweet.text for tweet in tweets], columns=['tweets'])
        df['source'] = np.array([tweet.source for tweet in tweets])
        df['likes'] = np.array([tweet.favorite_count for tweet in tweets])
        df['retweets'] = np.array([tweet.retweet_count for tweet in tweets])
        return df
Functionality for analyzing and categorizing content from tweets.
62598fba5fcc89381b266209
class MapInputHandler(object):
    """Handles the parsing of a file which contains a street network."""

    def __init__(self, pInputFilePath, pPointClass=None, pSegmentClass=None):
        """Remember the input path; both element classes default to tuple."""
        self._inputFilePath = pInputFilePath
        self._pointClass = pPointClass if pPointClass is not None else tuple
        self._segmentClass = pSegmentClass if pSegmentClass is not None else tuple
        self.streets = []

    def setPointClass(self, pPointClass):
        """Override the class used to represent points."""
        self._pointClass = pPointClass

    def setSegmentClass(self, pSegmentClass):
        """Override the class used to represent segments."""
        self._segmentClass = pSegmentClass

    def parseFile(self):
        """Parse the input file; concrete formats live in subclasses."""
        raise NotImplementedError("Should be implemented in subclasses.")
Handles the parsing of a file which contains a street network.
62598fba4527f215b58ea04f
class cubicInterpFunction:
    """Create an interpolating function between two points with a cubic
    polynomial, given the endpoint values and first derivatives."""

    def __init__(self, y0, dy0, y1, dy1):
        # Convert endpoint derivatives into cubic Bezier control points.
        self.Y = (y0, y0 + dy0 / 3.0, y1 - dy1 / 3.0, y1)

    def __call__(self, t):
        p0, p1, p2, p3 = self.Y
        u = 1 - t
        # Standard cubic Bernstein expansion.
        return (p0 * u ** 3
                + 3 * p1 * u * u * t
                + 3 * p2 * u * t * t
                + p3 * t ** 3)
Create an interpolating function between two points with a cubic polynomial. Like :func:`makeInterpFuncs`, but only uses the first derivatives.
62598fba377c676e912f6e2d
class TensorBoard(Callback):
    """Tensorboard basic visualizations.

    Writes TensorBoard event files: scalar metrics every epoch and,
    optionally, weight/bias/output histograms for each layer.

    # Arguments
        log_dir: directory where the log files are written.
        histogram_freq: frequency (in epochs) at which to compute
            activation histograms; 0 disables them.
    """

    def __init__(self, log_dir='./logs', histogram_freq=0):
        # NOTE(review): calls super(Callback, ...) rather than
        # super(TensorBoard, ...), which skips Callback.__init__ — TODO
        # confirm this is intentional.
        super(Callback, self).__init__()
        if K._BACKEND != 'tensorflow':
            raise Exception('TensorBoard callback only works '
                            'with the TensorFlow backend.')
        self.log_dir = log_dir
        self.histogram_freq = histogram_freq
        self.merged = None  # merged summary op, built lazily in _set_model

    def _set_model(self, model):
        import tensorflow as tf
        import keras.backend.tensorflow_backend as KTF
        self.model = model
        self.sess = KTF.get_session()
        if self.histogram_freq and not self.merged:
            # Collect layers keyed by name for both model flavours.
            mod_type = self.model.get_config()['name']
            if mod_type == 'Sequential':
                layers = {l.get_config()['name']: l for l in self.model.layers}
            elif mod_type == 'Graph':
                layers = self.model.nodes
            else:
                raise Exception('Unrecognized model:',
                                self.model.get_config()['name'])
            for l in layers:
                cur_layer = layers[l]
                # Histograms for weights, biases and outputs, when exposed.
                if hasattr(cur_layer, 'W'):
                    tf.histogram_summary('{}_W'.format(l), cur_layer.W)
                if hasattr(cur_layer, 'b'):
                    tf.histogram_summary('{}_b'.format(l), cur_layer.b)
                if hasattr(cur_layer, 'get_output'):
                    tf.histogram_summary('{}_out'.format(l),
                                         cur_layer.get_output())
        self.merged = tf.merge_all_summaries()
        self.writer = tf.train.SummaryWriter(self.log_dir, self.sess.graph_def)

    def on_epoch_end(self, epoch, logs={}):
        # NOTE(review): mutable default for `logs` kept as-is to match the
        # upstream Keras callback signature.
        import tensorflow as tf
        if self.model.validation_data and self.histogram_freq:
            if epoch % self.histogram_freq == 0:
                if self.params.get('show_accuracy'):
                    test_function = self.model._test_with_acc
                else:
                    test_function = self.model._test
                # Run the merged summary op on the validation data.
                names = [v.name for v in test_function.inputs]
                feed_dict = dict(zip(names, self.model.validation_data))
                result = self.sess.run([self.merged], feed_dict=feed_dict)
                summary_str = result[0]
                self.writer.add_summary(summary_str, epoch)
        # Emit every scalar metric in `logs` as a simple value.
        for name, value in logs.items():
            if name in ['batch', 'size']:
                continue
            summary = tf.Summary()
            summary_value = summary.value.add()
            summary_value.simple_value = value
            summary_value.tag = name
            self.writer.add_summary(summary, epoch)
        self.writer.flush()
Tensorboard basic visualizations. This callback writes a log for TensorBoard, which allows you to visualize dynamic graphs of your training and test metrics, as well as activation histograms for the different layers in your model. TensorBoard is a visualization tool provided with TensorFlow. If you have installed TensorFlow with pip, you should be able to launch TensorBoard from the command line: ``` tensorboard --logdir=/full_path_to_your_logs ``` You can find more information about TensorBoard [here](https://www.tensorflow.org/versions/master/how_tos/summaries_and_tensorboard/index.html). # Arguments log_dir: the path of the directory where to save the log files to be parsed by tensorboard histogram_freq: frequency (in epochs) at which to compute activation histograms for the layers of the model. If set to 0, histograms won't be computed.
62598fbaf548e778e596b720
class Application(object):
    """Main class; controls application life-cycle and routing."""

    def __init__(self):
        self.api = falcon.API()
        self.trigger_manager = TriggerManager()
        self.reload_config()
        self.bootstrap = None
        # Give triggers a chance to clean up on Ctrl-C.
        signal.signal(signal.SIGINT, self.on_shutdown)

    def on_shutdown(self, *arg):
        """Signal handler: forward shutdown to the trigger manager."""
        self.trigger_manager.on_shutdown()

    def reload_config(self, config_name=None):
        """(Re)load configuration and re-register all triggers."""
        self.configs = load_configs(config_name)
        self.app_context = ApplicationContext(api=self.api, configs=self.configs)
        self.trigger_manager.app_context = self.app_context
        self.register_triggers()

    def register_triggers(self):
        """Replace currently registered triggers with those from config."""
        self.trigger_manager.remove_all()
        triggers_config = self.get_config('triggers')
        if triggers_config is None:
            return
        for config in triggers_config:
            self.register_trigger(config)

    def register_trigger(self, config):
        """Register one trigger; only 'action' is mandatory in its config."""
        event = config.get('event', None)
        condition = config.get('condition', None)
        action = config['action']
        extended_properties = config.get('ext', None)
        self.trigger_manager.register_trigger_by_name(action, event, condition, extended_properties)

    def run(self):
        """Run the attached bootstrap (if any) with the current context."""
        if self.bootstrap is not None:
            self.bootstrap.app_context = self.app_context
            self.bootstrap.trigger_manager = self.trigger_manager
            self.bootstrap.run()

    def get_config(self, name):
        """Return the named config section, or None when unavailable."""
        if self.app_context is not None:
            return self.app_context.get_config(name)
        return None
Main class, control application life-cycle and routing
62598fbad7e4931a7ef3c210
class _ExceptionProxy(object): <NEW_LINE> <INDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> raise ValueError("Zip file has been closed") <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> raise ValueError("Zip file has been closed") <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return False
A placeholder for an object that may no longer be used.
62598fba4428ac0f6e65869d
class Get_friends_number(object):
    """Use to get one's friends from their qzone's entry list.

    Pages through the friends endpoint in 50-entry chunks, saving each
    raw JSON response under ./friends/.
    """

    def __init__(self):
        self.headers = util.headers
        self.base_url = util.parse_friends_url()
        util.check_path('friends')
        print('Start to get friends list and save it for ./friends folder')

    def get_friends(self):
        """Fetch and save friends pages until login fails or the list ends.

        Fixes over the previous version: the response text is checked
        directly instead of re-reading the file that was just written, and
        the unreachable ``key = False`` after ``break`` is removed.
        """
        position = 0
        while True:
            url = self.base_url + '&offset=' + str(position)
            referer = 'http://qzs.qq.com/qzone/v8/pages/setting/visit_v8.html'
            self.headers['Referer'] = referer
            print("\tDealing with position\t%d." % position)
            res = requests.get(url, headers=self.headers)
            html = res.text
            with open('friends/offset' + str(position) + '.json', 'w') as f:
                f.write(html)
            if "请先登录" in html:
                # Session expired / not logged in: stop paging.
                print("登录失败,请检查原因")
                break
            if '''"uinlist":[]''' in html:
                # Empty page signals the end of the friends list.
                print("Get friends Finish")
                break
            position += 50
            sleep(5)  # throttle to avoid being rate-limited
Used to get a user's friends from their Qzone entry list.
62598fba44b2445a339b6a32
class XmlNs0ChangeUserGroupRequest(object):
    """Request body for changing a user group.

    NOTE: This class is auto generated by the swagger code generator
    program.  Do not edit the class manually.
    """

    # attribute name -> swagger-declared type
    swagger_types = {
        'name': 'str'
    }

    # attribute name -> JSON key used on the wire
    attribute_map = {
        'name': 'name'
    }

    def __init__(self, name=None):
        self._name = None
        self.discriminator = None
        if name is not None:
            self.name = name  # goes through the validating setter

    @property
    def name(self):
        """Gets the name of this XmlNs0ChangeUserGroupRequest."""
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name; must be 1..2147483647 characters when not None."""
        if name is not None and len(name) > 2147483647:
            raise ValueError("Invalid value for `name`, length must be less than or equal to `2147483647`")
        if name is not None and len(name) < 1:
            raise ValueError("Invalid value for `name`, length must be greater than or equal to `1`")
        self._name = name

    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested
        models, lists and dicts."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, XmlNs0ChangeUserGroupRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fbad486a94d0ba2c149
class ShowPool(neutronV20.ShowCommand):
    """Show information of a given pool."""

    resource = 'pool'  # neutron resource name this command operates on
Show information of a given pool.
62598fba627d3e7fe0e0702d
class MongoError(Exception):
    """Base MongoDB error."""
    pass
Base MongoDB error.
62598fba091ae35668704d9d
class Formula(Computation):
    """A simple drop-in computation that can apply any function to rows.

    :param data_type: The data type this formula will return.
    :param func: The function applied to each row; must return a value
        valid for ``data_type``.
    :param cast: If ``True``, each result is cast to ``data_type``.
        Only pass ``False`` if the formula always returns the correct type.
    """

    def __init__(self, data_type, func, cast=True):
        self._data_type = data_type
        self._func = func
        self._cast = cast

    def get_computed_data_type(self, table):
        """Return the declared data type (independent of the table)."""
        return self._data_type

    def run(self, table):
        """Evaluate the formula on every row, returning the new column."""
        compute = self._func
        if self._cast:
            cast = self._data_type.cast
            return [cast(compute(row)) for row in table.rows]
        return [compute(row) for row in table.rows]
A simple drop-in computation that can apply any function to rows. :param data_type: The data type this formula will return. :param func: The function to be applied to each row. Must return a valid value for the specified data type. :param cast: If ``True``, each return value will be cast to the specified ``data_type`` to ensure it is valid. Only specify false if you are certain your formula always returns the correct type.
62598fba283ffb24f3cf39ff
class MainPage(webapp2.RequestHandler):
    """Renders the main page of the Wellesley Daily Dish application with
    the current menu for each dining hall displayed."""

    def get(self):
        template = JINJA_ENVIRONMENT.get_template('index.html')
        # Convert "now" to an aware America/New_York timestamp so menus
        # and opening-hours reflect campus local time.
        now = datetime.utcnow()
        now = now.replace(tzinfo=pytz.utc)
        real_localtz = datetime.astimezone(now, pytz.timezone('America/New_York'))
        menus = menu_urls(real_localtz)
        lulu = DiningHall("lulu", menus, real_localtz)
        bates = DiningHall("bates", menus, real_localtz)
        pom = DiningHall("pom", menus, real_localtz)
        stone = DiningHall("stone", menus, real_localtz)
        tower = DiningHall("tower", menus, real_localtz)
        # Drop a leading zero from the day of month ("05" -> "5").
        date_day = real_localtz.strftime("%d")
        if date_day[0] == '0':
            date_string = real_localtz.strftime("%A, %B " + date_day[1])
        else:
            date_string = real_localtz.strftime("%A, %B %d")
        emporium_is_open = emp_is_open(real_localtz)
        leaky_beaker_is_open = lb_is_open(real_localtz)
        collins_is_open = collins_cafe_is_open(real_localtz)
        template_values = {
            "date_string": date_string,
            "lulu": lulu,
            "bates": bates,
            "pom": pom,
            "stone": stone,
            "tower": tower,
            "emporium_is_open": emporium_is_open,
            "leaky_beaker_is_open": leaky_beaker_is_open,
            "collins_is_open": collins_is_open
        }
        self.response.write(template.render(template_values))
Renders the main page of the Wellesley Daily Dish application with the current menu for each dining hall displayed.
62598fba60cbc95b063644b9
class Handler(ABC):
    """The Handler interface declares a method for building the chain of
    handlers, and a method for executing a request."""

    @abstractmethod
    def set_next(self, handler: 'Handler') -> 'Handler':
        """Set the next handler in the chain."""
        pass

    @abstractmethod
    def handle(self, request) -> Optional[str]:
        """Process the request; the return is an optional string result."""
        pass
Интерфейс Обработчика объявляет метод построения цепочки обработчиков. Он также объявляет метод для выполнения запроса.
62598fbaad47b63b2c5a79cf
class SignalHandler(tornado.web.RequestHandler):
    """API handler to send signals to pipes."""

    def initialize(self, pipes):
        # `pipes` is supplied via the handler's URL-spec kwargs.
        self.pipes = pipes

    def put(self, pid, signal):
        """Forward `signal` to the pipe registered under `pid`."""
        self.pipes.repository[pid].send(signal)
API handler to send signals to pipes
62598fba4527f215b58ea050
class Metadata(ImpactFunctionMetadata):
    """Metadata for Earthquake Building Impact Function.

    .. versionadded:: 2.1

    Only get_metadata() is re-implemented; all other behaviour is
    inherited from the abstract base class.
    """

    @staticmethod
    def get_metadata():
        """Return the metadata dictionary describing this impact function."""
        dict_meta = {
            'id': 'EarthQuakeBuildingImpactFunction',
            'name': tr('Earthquake Building Impact Function'),
            'impact': tr('Be affected'),
            'author': 'N/A',
            'date_implemented': 'N/A',
            'overview': tr(
                'This impact function will calculate the impact of an '
                'earthquake on buildings, reporting how many are expected '
                'to be damaged etc.'),
            # Constraints on acceptable hazard and exposure input layers.
            'categories': {
                'hazard': {
                    'definition': hazard_definition,
                    'subcategory': hazard_earthquake,
                    'units': [unit_mmi],
                    'layer_constraints': [
                        layer_vector_polygon,
                        layer_raster_numeric
                    ]
                },
                'exposure': {
                    'definition': exposure_definition,
                    'subcategory': exposure_structure,
                    'units': [
                        unit_building_type_type,
                        unit_building_generic],
                    'layer_constraints': [
                        layer_vector_polygon,
                        layer_vector_point
                    ]
                }
            }
        }
        return dict_meta
Metadata for Earthquake Building Impact Function. .. versionadded:: 2.1 We only need to re-implement get_metadata(), all other behaviours are inherited from the abstract base class.
62598fba7047854f4633f551
class Log(Actor):
    """Write data to calvin log using specified loglevel.

    Supported loglevels: INFO, WARNING, ERROR (anything else falls back
    to INFO).

    Input:
      data : data to be logged
    """

    def exception_handler(self, action_function, args):
        # Log exception tokens instead of letting them propagate.
        exception_token = args[0]
        return action_function(self, "Exception '%s'" % (exception_token,))

    @manage(['loglevel'])
    def init(self, loglevel):
        self.loglevel = loglevel
        self.setup()

    def setup(self):
        # Bind the log callable that matches the configured level.
        if self.loglevel == "INFO":
            self._logger = _log.info
        elif self.loglevel == "WARNING":
            self._logger = _log.warning
        elif self.loglevel == "ERROR":
            self._logger = _log.error
        else:
            self._logger = _log.info

    def will_migrate(self):
        self._logger(" -- migrating")

    def did_migrate(self):
        # The bound logger is not serialized across migration; rebind it.
        self.setup()
        self._logger(" -- finished migrating")

    @condition(action_input=['data'])
    def log(self, data):
        self._logger("{}".format(data))

    action_priority = (log, )
Write data to calvin log using specified loglevel. Supported loglevels: INFO, WARNING, ERROR Input: data : data to be logged
62598fba91f36d47f2230f68
class PerlinNoise1DShader(StrokeShader):
    """Displaces the stroke using the curvilinear abscissa.

    Strokes with the same length and sampling interval are distorted
    identically, since the noise input depends only on arc position.
    """

    def __init__(self, freq=10, amp=10, oct=4, angle=radians(45), seed=-1):
        StrokeShader.__init__(self)
        self.noise = Noise(seed)
        self.freq = freq
        self.amp = amp
        self.oct = oct
        # Unit direction vector along which vertices are displaced.
        self.dir = Vector((cos(angle), sin(angle)))

    def shade(self, stroke):
        length = stroke.length_2d
        for svert in stroke:
            # Sample turbulence at the vertex's absolute arc position
            # (svert.u is the normalized curvilinear abscissa).
            nres = self.noise.turbulence1(length * svert.u, self.freq, self.amp, self.oct)
            svert.point += nres * self.dir
        # Vertex positions changed; refresh the stroke's cached length.
        stroke.update_length()
Displaces the stroke using the curvilinear abscissa. This means that lines with the same length and sampling interval will be identically distorted.
62598fbaf548e778e596b721
class YandexMarketSite(models.Model):
    """Per-site settings for exporting products to Yandex.Market.

    Fix: the class body previously began with a stray bare ``u`` (the
    leftover prefix of a lost unicode docstring), which would raise
    ``NameError`` when the class is created; it is removed here.
    """

    site = models.ForeignKey(Site, verbose_name=_('Site'), unique=True)
    name = models.CharField(max_length=200, verbose_name=_('Name'))
    company = models.CharField(max_length=200, verbose_name=_('Company'))
    url = models.URLField(verbose_name=_('URL'))
    email = models.EmailField(verbose_name=_('Email'), blank=True)
    notes = models.CharField(max_length=1024, verbose_name=_('Notes'), blank=True)

    class Meta:
        verbose_name = _('Yandex.Market site')
        verbose_name_plural = _('Yandex.Market sites')
Настройки сайта для вывода товаров в Яндекс.Маркет
62598fba7d847024c075c538
class Deprecated(BaseAdmonition):
    """A deprecated entry, displayed (if configured) in the form of an
    admonition."""

    node_class = deprecated_node
    has_content = True
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'class': directives.class_option,
    }

    def run(self):
        # Default the CSS class so themes can style deprecation notes.
        if not self.options.get('class'):
            self.options['class'] = ['deprecated']
        (deprecated,) = super(Deprecated, self).run()
        if isinstance(deprecated, nodes.system_message):
            # Parsing failed; surface the error instead of the admonition.
            return [deprecated]
        deprecated.insert(0, nodes.title(text=_('Deprecated')))
        set_source_info(self, deprecated)
        # Create a unique index target so the entry can be referenced.
        env = self.state.document.settings.env
        targetid = 'index-%s' % env.new_serialno('index')
        deprecated['targetref'] = '%s:%s' % (env.docname, targetid)
        targetnode = nodes.target('', '', ids=[targetid])
        return [targetnode, deprecated]
A deprecated entry, displayed (if configured) in the form of an admonition.
62598fba3539df3088ecc428
class ServerGroupsManager(base.ManagerWithFind):
    """Manage :class:`ServerGroup` resources."""

    resource_class = ServerGroup

    def list(self, all_projects=False):
        """List server groups; optionally across all projects.

        Renamed the query-string local so it no longer shadows the
        builtin ``all``.
        """
        query = '?all_projects' if all_projects else ''
        return self._list('/os-server-groups%s' % query, 'server_groups')

    def get(self, id):
        """Get a specific server group by id."""
        return self._get('/os-server-groups/%s' % id, 'server_group')

    def delete(self, id):
        """Delete a specific server group."""
        return self._delete('/os-server-groups/%s' % id)

    def create(self, **kwargs):
        """Create a new server group from keyword arguments."""
        body = {'server_group': kwargs}
        return self._create('/os-server-groups', body, 'server_group')
Manage :class:`ServerGroup` resources.
62598fba67a9b606de54614e
class CheckBaselineForDisallowedIssuesTest(unittest.TestCase):
    """Unit tests for check_baseline_for_disallowed_issues function."""

    # A lint baseline containing issues "foo" (twice), "bar" and "baz".
    baseline_xml = minidom.parseString(
        '<?xml version="1.0" encoding="utf-8"?>\n'
        '<issues format="5" by="lint 4.1.0" client="cli" variant="all" version="4.1.0">\n'
        ' <issue id="foo" message="foo is evil" errorLine1="foo()">\n'
        ' <location file="a/b/c.java" line="3" column="10"/>\n'
        ' </issue>\n'
        ' <issue id="bar" message="bar is known to be evil" errorLine1="bar()">\n'
        ' <location file="a/b/c.java" line="5" column="12"/>\n'
        ' </issue>\n'
        ' <issue id="baz" message="baz may be evil" errorLine1="a = baz()">\n'
        ' <location file="a/b/c.java" line="10" column="10"/>\n'
        ' </issue>\n'
        ' <issue id="foo" message="foo is evil" errorLine1="b = foo()">\n'
        ' <location file="a/d/e.java" line="100" column="4"/>\n'
        ' </issue>\n'
        '</issues>\n')

    def test_check_baseline_for_disallowed_issues(self):
        # Only ids that are both present in the baseline and disallowed
        # should be returned ("qux" is disallowed but absent).
        disallowed_issues = lint_project_xml.check_baseline_for_disallowed_issues(self.baseline_xml, ["foo", "bar", "qux"])
        self.assertEqual({"foo", "bar"}, disallowed_issues)
Unit tests for check_baseline_for_disallowed_issues function.
62598fba92d797404e388c21
class RestorableMongodbCollectionGetResult(msrest.serialization.Model):
    """An Azure Cosmos DB MongoDB collection event.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The unique resource Identifier of the ARM resource.
    :vartype id: str
    :ivar name: The name of the ARM resource.
    :vartype name: str
    :ivar type: The type of Azure resource.
    :vartype type: str
    :ivar resource: The resource of an Azure Cosmos DB MongoDB collection
     event.
    :vartype resource:
     ~azure.mgmt.cosmosdb.models.RestorableMongodbCollectionPropertiesResource
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'resource': {'key': 'properties.resource', 'type': 'RestorableMongodbCollectionPropertiesResource'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        resource: Optional["RestorableMongodbCollectionPropertiesResource"] = None,
        **kwargs
    ):
        super(RestorableMongodbCollectionGetResult, self).__init__(**kwargs)
        # Server-populated, read-only fields start out as None.
        self.id = None
        self.name = None
        self.type = None
        self.resource = resource
An Azure Cosmos DB MongoDB collection event. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The unique resource Identifier of the ARM resource. :vartype id: str :ivar name: The name of the ARM resource. :vartype name: str :ivar type: The type of Azure resource. :vartype type: str :ivar resource: The resource of an Azure Cosmos DB MongoDB collection event. :vartype resource: ~azure.mgmt.cosmosdb.models.RestorableMongodbCollectionPropertiesResource
62598fba099cdd3c636754a0
class BlueShard3(BlueShard, _Model):
    """Bluetooth shard 3."""

    __tablename__ = 'blue_shard_3'
Bluetooth shard 3.
62598fba1b99ca400228f5ef
class Corner(NonGoal):
    """Represents a corner kick (non-goal event of type "corner")."""

    def __init__(self, a, b, foot, success=True, **kwargs):
        # Arrow style used when drawing the corner event.
        arrow = "->,head_length=0.6,head_width=0.4"
        super(Corner, self).__init__(a, b, foot, success, event_type="corner", arrowstyle=arrow, **kwargs)
Representa um escanteio
62598fba7b180e01f3e4910d
class InceptionAUnit(nn.Module):
    """InceptionResNetV2 type Inception-A unit (residual Inception block)."""

    def __init__(self):
        super(InceptionAUnit, self).__init__()
        # Residual branch output is scaled before the skip-connection add.
        self.scale = 0.17
        in_channels = 320
        self.branches = Concurrent()
        self.branches.add_module("branch1", Conv1x1Branch(
            in_channels=in_channels,
            out_channels=32))
        self.branches.add_module("branch2", ConvSeqBranch(
            in_channels=in_channels,
            out_channels_list=(32, 32),
            kernel_size_list=(1, 3),
            strides_list=(1, 1),
            padding_list=(0, 1)))
        self.branches.add_module("branch3", ConvSeqBranch(
            in_channels=in_channels,
            out_channels_list=(32, 48, 64),
            kernel_size_list=(1, 3, 3),
            strides_list=(1, 1, 1),
            padding_list=(0, 1, 1)))
        # 1x1 conv maps the 32+32+64=128 concatenated branch channels back
        # to the block's input width so the residual add is shape-correct.
        self.conv = conv1x1(
            in_channels=128,
            out_channels=in_channels,
            bias=True)
        self.activ = nn.ReLU(inplace=True)

    def forward(self, x):
        identity = x
        x = self.branches(x)
        x = self.conv(x)
        # Scaled residual connection, then activation.
        x = self.scale * x + identity
        x = self.activ(x)
        return x
InceptionResNetV2 type Inception-A unit.
62598fba56ac1b37e630236a
class GenericCardTypeWidget(CardTypeWidget):
    """A card type widget that can be used as fallback when no dedicated
    widget exists."""

    component_type = "generic_card_type_widget"
A card type widget that can be used as fallback when no dedicated widget exists.
62598fba56b00c62f0fb2a37
class _Tokenizer(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tokenize(self, sent): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def detokenize(self, tokens): <NEW_LINE> <INDENT> raise NotImplementedError
The abstract class of Tokenizer Implement ```tokenize``` method to split a string of sentence into tokens. Implement ```detokenize``` method to combine tokens into a whole sentence. ```special_tokens``` stores some helper tokens to describe and restore the tokenizing.
62598fba5fdd1c0f98e5e10d
class UnSupportedOperation(LexicalParserException):
    """Raised when an operation is not supported for the given values."""

    def __init__(self):
        # Message text is in Russian: "The operation is not supported for
        # these values."
        LexicalParserException.__init__(self, "Операция не поддерживается для данных значений")
Исключение "Неподдерживаемая операция"
62598fbafff4ab517ebcd960
class FDGuest(Guest):
    """Class for guest installation via floppy disk."""

    def __init__(self, tdl, config, auto, output_disk, nicmodel, clockoffset, mousetype, diskbus, macaddress):
        Guest.__init__(self, tdl, config, auto, output_disk, nicmodel, clockoffset, mousetype, diskbus, False, True, macaddress)
        # Pristine downloaded floppy image.
        self.orig_floppy = os.path.join(self.data_dir, "floppies", self.tdl.distro + self.tdl.update + self.tdl.arch + ".img")
        # Cached copy with oz modifications applied.
        self.modified_floppy_cache = os.path.join(self.data_dir, "floppies", self.tdl.distro + self.tdl.update + self.tdl.arch + "-oz.img")
        # Per-build working floppy used for the actual install.
        self.output_floppy = os.path.join(self.output_dir, self.tdl.name + "-oz.img")
        # Scratch directory for unpacked floppy content.
        self.floppy_contents = os.path.join(self.data_dir, "floppycontent", self.tdl.name)
        self.log.debug("Original floppy path: %s", self.orig_floppy)
        self.log.debug("Modified floppy cache: %s", self.modified_floppy_cache)
        self.log.debug("Output floppy path: %s", self.output_floppy)
        self.log.debug("Floppy content path: %s", self.floppy_contents)

    def _get_original_floppy(self, floppyurl, fd, outdir, force_download):
        """Fetch the original floppy image (delegates to _get_original_media)."""
        self._get_original_media(floppyurl, fd, outdir, force_download)

    def _copy_floppy(self):
        """Copy the pristine floppy to the per-build output path."""
        self.log.info("Copying floppy contents for modification")
        shutil.copyfile(self.orig_floppy, self.output_floppy)

    def install(self, timeout=None, force=False):
        """Run the install and return libvirt XML for booting from disk.

        A cached JEOS image is reused when present (unless *force*);
        otherwise the domain is booted from the prepared floppy and the
        install is awaited (default timeout 1200 seconds).
        """
        if not force and os.access(self.jeos_filename, os.F_OK):
            self.log.info("Found cached JEOS, using it")
            oz.ozutil.copyfile_sparse(self.jeos_filename, self.diskimage)
            return self._generate_xml("hd", None)
        self.log.info("Running install for %s", self.tdl.name)
        fddev = self._InstallDev("floppy", self.output_floppy, "fda")
        if timeout is None:
            timeout = 1200
        dom = self.libvirt_conn.createXML(self._generate_xml("fd", fddev), 0)
        self._wait_for_install_finish(dom, timeout)
        if self.cache_jeos:
            self.log.info("Caching JEOS")
            oz.ozutil.mkdir_p(self.jeos_cache_dir)
            oz.ozutil.copyfile_sparse(self.diskimage, self.jeos_filename)
        return self._generate_xml("hd", None)

    def _cleanup_floppy(self):
        """Remove the unpacked floppy content directory."""
        self.log.info("Cleaning up floppy data")
        oz.ozutil.rmtree_and_sync(self.floppy_contents)

    def cleanup_install(self):
        """Best-effort removal of install-time artifacts."""
        self.log.info("Cleaning up after install")
        try:
            os.unlink(self.output_floppy)
        except:
            # best-effort cleanup; the file may not exist
            pass
        if not self.cache_original_media:
            try:
                os.unlink(self.orig_floppy)
            except:
                # best-effort cleanup; the file may not exist
                pass
Class for guest installation via floppy disk.
62598fba956e5f7376df573c
class SearchService(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Search(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.stream_stream(request_iterator, target, '/cloud.deps.api.v1alpha.tracker.SearchService/Search', depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchRequest.SerializeToString, depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def BreadthFirstSearch(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.stream_stream(request_iterator, target, '/cloud.deps.api.v1alpha.tracker.SearchService/BreadthFirstSearch', depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchRequest.SerializeToString, depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def DepthFirstSearch(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.stream_stream(request_iterator, target, '/cloud.deps.api.v1alpha.tracker.SearchService/DepthFirstSearch', depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchRequest.SerializeToString, depscloud__api_dot_v1alpha_dot_tracker_dot_tracker__pb2.SearchResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, 
timeout, metadata)
Missing associated documentation comment in .proto file.
62598fbae1aae11d1e7ce8e3
class SlotsTransferAdminPage(base.GSoCRequestHandler): <NEW_LINE> <INDENT> access_checker = access.PROGRAM_ADMINISTRATOR_ACCESS_CHECKER <NEW_LINE> def djangoURLPatterns(self): <NEW_LINE> <INDENT> return [ gsoc_url_patterns.url( r'admin/slots/transfer/%s$' % url_patterns.PROGRAM, self, name='gsoc_admin_slots_transfer'), ] <NEW_LINE> <DEDENT> def templatePath(self): <NEW_LINE> <INDENT> return 'modules/gsoc/slot_transfer_admin/base.html' <NEW_LINE> <DEDENT> def jsonContext(self, data, check, mutator): <NEW_LINE> <INDENT> list_content = SlotsTransferAdminList(data).getListData() <NEW_LINE> if list_content: <NEW_LINE> <INDENT> return list_content.content() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exception.Forbidden(message='You do not have access to this data') <NEW_LINE> <DEDENT> <DEDENT> def post(self, data, check, mutator): <NEW_LINE> <INDENT> slots_list = SlotsTransferAdminList(data) <NEW_LINE> if slots_list.post(): <NEW_LINE> <INDENT> return http.HttpResponse() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exception.Forbidden(message='You cannot change this data') <NEW_LINE> <DEDENT> <DEDENT> def context(self, data, check, mutator): <NEW_LINE> <INDENT> return { 'page_name': 'Slots transfer action page', 'slot_transfer_list': SlotsTransferAdminList(data), }
View for the the list of slot transfer requests.
62598fbabe383301e025397a
class SaldoAtualCliente(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Dialog() <NEW_LINE> self.janela = tk.Tk() <NEW_LINE> self.janela.resizable(False, False) <NEW_LINE> self.bg = "#cccccc" <NEW_LINE> self.box_nome = Box(self.janela, text="Nome", bg=self.bg) <NEW_LINE> self.box_saldo = Box(self.janela, text="Saldo", bg=self.bg) <NEW_LINE> self._columns = ("Conta", "Variação", "Saldo") <NEW_LINE> self.tela = tk.Frame(self.janela, bg=self.bg) <NEW_LINE> self.list = ttk.Treeview(self.tela) <NEW_LINE> self.scroll = ttk.Scrollbar(self.tela, orient=tk.VERTICAL, command=self.list.yview) <NEW_LINE> self._main() <NEW_LINE> <DEDENT> def _main(self): <NEW_LINE> <INDENT> self.janela.configure(bg=self.bg, bd=5) <NEW_LINE> self.tela.configure(bg=self.bg) <NEW_LINE> self.tela.pack(side=tk.BOTTOM) <NEW_LINE> self.box_nome.pack(side=tk.LEFT) <NEW_LINE> self.box_saldo.pack(side=tk.LEFT) <NEW_LINE> self.list["columns"] = self._columns <NEW_LINE> self.list["show"] = "headings" <NEW_LINE> self.scroll.config(command=self.list.yview) <NEW_LINE> self.list.column("Conta", width=100) <NEW_LINE> self.list.heading("Conta", text="Conta") <NEW_LINE> self.list.column("Variação", width=100) <NEW_LINE> self.list.heading("Variação", text="Variação") <NEW_LINE> self.list.column("Saldo", width=100) <NEW_LINE> self.list.heading("Saldo", text="Saldo") <NEW_LINE> self.scroll.pack(side=tk.RIGHT, fill=tk.Y) <NEW_LINE> self.list.pack(side=tk.LEFT, fill=tk.X) <NEW_LINE> self.main_loop() <NEW_LINE> <DEDENT> def getNome(self, src): <NEW_LINE> <INDENT> with open("../buffer.csv", 'r') as file: <NEW_LINE> <INDENT> return file.readline().split(",")[0] <NEW_LINE> <DEDENT> <DEDENT> def getSaldo(self, src): <NEW_LINE> <INDENT> with open("../buffer.csv", 'r') as file: <NEW_LINE> <INDENT> return file.readline().split(",")[-1].rstrip("\n") <NEW_LINE> <DEDENT> <DEDENT> def ler_dados(self, src): <NEW_LINE> <INDENT> with open("../buffer.csv", 'r') as file: <NEW_LINE> <INDENT> for linhas in 
file.readlines()[1:]: <NEW_LINE> <INDENT> for linha in linhas.splitlines(): <NEW_LINE> <INDENT> dados = linha.split(",") <NEW_LINE> self.list.insert('', 'end', values=(dados)) <NEW_LINE> <DEDENT> <DEDENT> self.list.insert('', 'end', values="-") <NEW_LINE> <DEDENT> self.box_nome.listbox.insert(tk.END, self.getNome("~/IFCE/S3/LP1/finop")) <NEW_LINE> self.box_saldo.listbox.insert(tk.END, "R$ " + self.getSaldo("~/IFCE/S3/LP1/finop")) <NEW_LINE> <DEDENT> def main_loop(self): <NEW_LINE> <INDENT> self.ler_dados("~/IFCE/S3/LP1/finop") <NEW_LINE> self.janela.wait_window()
Mostra os dados do cliente.
62598fba627d3e7fe0e0702f
class pol2Comp(ScannableMotionBase): <NEW_LINE> <INDENT> def __init__(self,name,_dettans, _tthp, _thp,devices,help=None): <NEW_LINE> <INDENT> self.setName(name) <NEW_LINE> if help is not None: self.__doc__+='\nHelp specific to '+self.name+':\n'+help <NEW_LINE> self.setInputNames([name]) <NEW_LINE> self.setExtraNames(['dettrans, thpcor','tthp']) <NEW_LINE> self.devices=devices <NEW_LINE> self.setOutputFormat(["%4.4f","%4.4f","%4.4f",'%4.4f']) <NEW_LINE> self.Units=['deg'] <NEW_LINE> self.setLevel(5) <NEW_LINE> <DEDENT> def sin_func(self,x,coefs): <NEW_LINE> <INDENT> return (coefs[0]/2)*sin((x+coefs[1])*pi/180.)+coefs[2] <NEW_LINE> <DEDENT> def calibrate(self,devices): <NEW_LINE> <INDENT> self.dettrans_sin_coefs = self.devices[dettrans] <NEW_LINE> self.tthp_sin_coefs = self.devices[tthp] <NEW_LINE> self.thp_sin_coefs = self.devices[thp] <NEW_LINE> self.dettransoffset = self.sin_func(stoke(),self.dettrans_sin_coefs)-dettrans() <NEW_LINE> self.tthpoffset = self.sin_func(stoke(),self.tthp_sin_coefs)-tthp() <NEW_LINE> self.thpoffset = self.sin_func(stoke(),self.thp_sin_coefs)-thp() <NEW_LINE> print('PA offsets = '+str(self.dettransoffset)+' '+str(self.tthpoffset)+' '+str(self.thpoffset)) <NEW_LINE> <DEDENT> def asynchronousMoveTo(self,value): <NEW_LINE> <INDENT> self.dettranscomp = self.sin_func(value,self.dettrans_sin_coefs)+self.dettransoffset <NEW_LINE> self.tthpcomp = self.sin_func(value,self.tthp_sin_coefs)+self.tthpoffset <NEW_LINE> self.thpcomp = self.sin_func(value,self.thp_sin_coefs)+self.thpoffset <NEW_LINE> dettrans.asynchronousMoveTo(self.dettranscomp) <NEW_LINE> tthp.asynchronousMoveTo(self.tthpcomp) <NEW_LINE> thp.asynchronousMoveTo(self.thpcomp) <NEW_LINE> <DEDENT> def getPosition(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> detranscomp = self.sin_func(stoke(),self.dettrans_sin_coefs)-self.dettransoffset <NEW_LINE> tthpcomp = self.sin_func(stoke(),self.tthp_sin_coefs)-self.tthpoffset <NEW_LINE> thpcomp = 
self.sin_func(stoke(),self.thp_sin_coefs)-self.thpoffset <NEW_LINE> return stoke(), self.detranscomp, self.tthpcomp, self.thpcomp <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Must be calibrated first.\nYou must use pol2.calibrate() at stokes 0") <NEW_LINE> <DEDENT> <DEDENT> def isBusy(self): <NEW_LINE> <INDENT> return stoke.isBusy(), dettrans.isBusy(), tthp.isBusy(), thp.isBusy()
PA compensation device
62598fba60cbc95b063644bb
class WebSession(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.sess = requests.Session() <NEW_LINE> header = "%s v%s, %s" % (etac.NAME, etac.VERSION, etac.AUTHOR) <NEW_LINE> self.sess.headers.update({"User-Agent": header}) <NEW_LINE> <DEDENT> def get(self, url, params=None): <NEW_LINE> <INDENT> r = self._get_streaming_response(url, params=params) <NEW_LINE> return r.content <NEW_LINE> <DEDENT> def write(self, path, url, params=None): <NEW_LINE> <INDENT> r = self._get_streaming_response(url, params=params) <NEW_LINE> etau.ensure_basedir(path) <NEW_LINE> num_bytes = 0 <NEW_LINE> start_time = time.time() <NEW_LINE> with open(path, "wb") as f: <NEW_LINE> <INDENT> for chunk in r.iter_content(None): <NEW_LINE> <INDENT> num_bytes += len(chunk) <NEW_LINE> f.write(chunk) <NEW_LINE> <DEDENT> <DEDENT> time_elapsed = time.time() - start_time <NEW_LINE> _log_download_stats(num_bytes, time_elapsed) <NEW_LINE> <DEDENT> def _get_streaming_response(self, url, params=None): <NEW_LINE> <INDENT> r = self.sess.get(url, params=params, stream=True) <NEW_LINE> if r.status_code != 200: <NEW_LINE> <INDENT> raise WebSessionError("Unable to get '%s'" % url) <NEW_LINE> <DEDENT> return r
Class for downloading files from the web.
62598fba76e4537e8c3ef724
class StringJscTestSuite(unittest.TestCase): <NEW_LINE> <INDENT> __example_jsc = '"variable_root":"value_root"' <NEW_LINE> def test_load_empty(self): <NEW_LINE> <INDENT> self.assertIsNone(jsonsimpleconfig.loads("")) <NEW_LINE> <DEDENT> def test_load_example_jsc(self): <NEW_LINE> <INDENT> self.assertIsNotNone(jsonsimpleconfig.loads(StringJscTestSuite.__example_jsc)) <NEW_LINE> self.assertIsInstance(jsonsimpleconfig.loads(StringJscTestSuite.__example_jsc), jsonsimpleconfig.JscData)
String base JSC test cases.
62598fbaa219f33f346c6983
class Conc(specs_lib.Composable): <NEW_LINE> <INDENT> def __init__(self, dim, *args): <NEW_LINE> <INDENT> self.dim = dim <NEW_LINE> self.funs = args <NEW_LINE> <DEDENT> def funcall(self, x): <NEW_LINE> <INDENT> outputs = [f.funcall(x) for f in self.funs] <NEW_LINE> return tf.concat(self.dim, outputs)
Implements tensor concatenation in network specifications.
62598fba55399d3f05626691
class Pad2DImageBbox(DetectionAugmentation): <NEW_LINE> <INDENT> def __init__(self, pPad): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.p = pPad <NEW_LINE> <DEDENT> def apply(self, input_record): <NEW_LINE> <INDENT> p = self.p <NEW_LINE> image = input_record["image"] <NEW_LINE> gt_bbox = input_record["gt_bbox"] <NEW_LINE> h, w = image.shape[:2] <NEW_LINE> shape = (p.long, p.short, 3) if h >= w else (p.short, p.long, 3) <NEW_LINE> padded_image = np.zeros(shape, dtype=np.float32) <NEW_LINE> padded_image[:h, :w] = image <NEW_LINE> padded_gt_bbox = np.full(shape=(p.max_num_gt, 5), fill_value=-1, dtype=np.float32) <NEW_LINE> padded_gt_bbox[:len(gt_bbox)] = gt_bbox <NEW_LINE> input_record["image"] = padded_image <NEW_LINE> input_record["gt_bbox"] = padded_gt_bbox
input: image, ndarray(h, w, rgb) gt_bbox, ndarry(n, 5) output: image, ndarray(h, w, rgb) gt_bbox, ndarray(max_num_gt, 5)
62598fba3d592f4c4edbb03b
class WSpecType(Enum): <NEW_LINE> <INDENT> NO = 0 <NEW_LINE> ARCHIVE = 1 <NEW_LINE> SCRIPT = 2 <NEW_LINE> BOTH = 3
Type of Kaldi stype write specifiers.
62598fba5fc7496912d4833a
class subFileOld: <NEW_LINE> <INDENT> def __init__(self, data, pts, fexp, txyxy): <NEW_LINE> <INDENT> y_dat_pos = 32 <NEW_LINE> self.subflgs, self.subexp, self.subindx, self.subtime, self.subnext, self.subnois, self.subnpts, self.subscan, self.subwlevel, self.subresv = read_subheader(data[:y_dat_pos]) <NEW_LINE> yfloat = False <NEW_LINE> if self.subexp == 128: <NEW_LINE> <INDENT> yfloat = True <NEW_LINE> <DEDENT> if self.subexp > 0 and self.subexp < 128: <NEW_LINE> <INDENT> exp = self.subexp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exp = fexp <NEW_LINE> <DEDENT> if txyxy: <NEW_LINE> <INDENT> x_str = 'i' * pts <NEW_LINE> x_dat_pos = y_dat_pos <NEW_LINE> x_dat_end = x_dat_pos + (4 * pts) <NEW_LINE> x_raw = np.array(struct.unpack(x_str.encode('utf8'), data[x_dat_pos:x_dat_end])) <NEW_LINE> self.x = (2**(exp - 32)) * x_raw <NEW_LINE> y_dat_pos = x_dat_end <NEW_LINE> <DEDENT> y_dat_end = y_dat_pos + (4 * pts) <NEW_LINE> if yfloat: <NEW_LINE> <INDENT> y_dat_str = '<' + 'f' * pts <NEW_LINE> y_raw = struct.unpack(y_dat_str.encode('utf8'), data[y_dat_pos:y_dat_end]) <NEW_LINE> self.y = y_raw <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> y_dat_str = '>' + 'B' * 4 * pts <NEW_LINE> y_raw = struct.unpack(y_dat_str.encode('utf8'), data[y_dat_pos:y_dat_end]) <NEW_LINE> y_int = [] <NEW_LINE> for i in range(0, len(y_raw), 4): <NEW_LINE> <INDENT> y_int.append(( y_raw[i + 1] * (256**3) + y_raw[i] * (256**2) + y_raw[i + 3] * (256) + y_raw[i + 2])) <NEW_LINE> <DEDENT> y_int = np.int32(y_int) / (2**(32 - exp)) <NEW_LINE> self.y = y_int
Processes each subfile passed to it, extracts header information and data information and places them in data members. Used for the old format where the y-values are stored in an odd way Data ---- x: x-data (optional) y: y-data
62598fba63b5f9789fe852ec
class YesNo(_Symbol): <NEW_LINE> <INDENT> _attrMap = AttrMap(BASE=_Symbol, tickcolor = AttrMapValue(isColor), crosscolor = AttrMapValue(isColor), testValue = AttrMapValue(isBoolean), ) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.x = 0 <NEW_LINE> self.y = 0 <NEW_LINE> self.size = 100 <NEW_LINE> self.tickcolor = colors.green <NEW_LINE> self.crosscolor = colors.red <NEW_LINE> self.testValue = 1 <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> if self.testValue: <NEW_LINE> <INDENT> yn=Tickbox() <NEW_LINE> yn.tickColor=self.tickcolor <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yn=Crossbox() <NEW_LINE> yn.crossColor=self.crosscolor <NEW_LINE> <DEDENT> yn.x=self.x <NEW_LINE> yn.y=self.y <NEW_LINE> yn.size=self.size <NEW_LINE> yn.draw() <NEW_LINE> return yn <NEW_LINE> <DEDENT> def demo(self): <NEW_LINE> <INDENT> D = shapes.Drawing(200, 100) <NEW_LINE> yn = YesNo() <NEW_LINE> yn.x = 15 <NEW_LINE> yn.y = 25 <NEW_LINE> yn.size = 70 <NEW_LINE> yn.testValue = 0 <NEW_LINE> yn.draw() <NEW_LINE> D.add(yn) <NEW_LINE> yn2 = YesNo() <NEW_LINE> yn2.x = 120 <NEW_LINE> yn2.y = 25 <NEW_LINE> yn2.size = 70 <NEW_LINE> yn2.testValue = 1 <NEW_LINE> yn2.draw() <NEW_LINE> D.add(yn2) <NEW_LINE> labelFontSize = 8 <NEW_LINE> D.add(shapes.String(yn.x+(yn.size/2),(yn.y-(1.2*labelFontSize)), 'testValue=0', fillColor=colors.black, textAnchor='middle', fontSize=labelFontSize)) <NEW_LINE> D.add(shapes.String(yn2.x+(yn2.size/2),(yn2.y-(1.2*labelFontSize)), 'testValue=1', fillColor=colors.black, textAnchor='middle', fontSize=labelFontSize)) <NEW_LINE> labelFontSize = 10 <NEW_LINE> D.add(shapes.String(yn.x+85,(yn.y-20), self.__class__.__name__, fillColor=colors.black, textAnchor='middle', fontSize=labelFontSize)) <NEW_LINE> return D
This widget draw a tickbox or crossbox depending on 'testValue'. If this widget is supplied with a 'True' or 1 as a value for testValue, it will use the tickbox widget. Otherwise, it will produce a crossbox. possible attributes: 'x', 'y', 'size', 'tickcolor', 'crosscolor', 'testValue'
62598fba3317a56b869be60d
class DSMREntity(SensorEntity): <NEW_LINE> <INDENT> def __init__(self, name, device_name, device_serial, obis, config, force_update): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._obis = obis <NEW_LINE> self._config = config <NEW_LINE> self.telegram = {} <NEW_LINE> self._device_name = device_name <NEW_LINE> self._device_serial = device_serial <NEW_LINE> self._force_update = force_update <NEW_LINE> self._unique_id = f"{device_serial}_{name}".replace(" ", "_") <NEW_LINE> <DEDENT> @callback <NEW_LINE> def update_data(self, telegram): <NEW_LINE> <INDENT> self.telegram = telegram <NEW_LINE> if self.hass and self._obis in self.telegram: <NEW_LINE> <INDENT> self.async_write_ha_state() <NEW_LINE> <DEDENT> <DEDENT> def get_dsmr_object_attr(self, attribute): <NEW_LINE> <INDENT> if self._obis not in self.telegram: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> dsmr_object = self.telegram[self._obis] <NEW_LINE> return getattr(dsmr_object, attribute, None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> if "Sags" in self._name or "Swells" in self.name: <NEW_LINE> <INDENT> return ICON_SWELL_SAG <NEW_LINE> <DEDENT> if "Failure" in self._name: <NEW_LINE> <INDENT> return ICON_POWER_FAILURE <NEW_LINE> <DEDENT> if "Power" in self._name: <NEW_LINE> <INDENT> return ICON_POWER <NEW_LINE> <DEDENT> if "Gas" in self._name: <NEW_LINE> <INDENT> return ICON_GAS <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> value = self.get_dsmr_object_attr("value") <NEW_LINE> if self._obis == obis_ref.ELECTRICITY_ACTIVE_TARIFF: <NEW_LINE> <INDENT> return self.translate_tariff(value, self._config[CONF_DSMR_VERSION]) <NEW_LINE> <DEDENT> with suppress(TypeError): <NEW_LINE> <INDENT> value = round(float(value), self._config[CONF_PRECISION]) <NEW_LINE> <DEDENT> if value is not None: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> 
return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self.get_dsmr_object_attr("unit") <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self) -> DeviceInfo: <NEW_LINE> <INDENT> return { "identifiers": {(DOMAIN, self._device_serial)}, "name": self._device_name, } <NEW_LINE> <DEDENT> @property <NEW_LINE> def force_update(self): <NEW_LINE> <INDENT> return self._force_update <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def translate_tariff(value, dsmr_version): <NEW_LINE> <INDENT> if dsmr_version in ("5B",): <NEW_LINE> <INDENT> if value == "0001": <NEW_LINE> <INDENT> value = "0002" <NEW_LINE> <DEDENT> elif value == "0002": <NEW_LINE> <INDENT> value = "0001" <NEW_LINE> <DEDENT> <DEDENT> if value == "0002": <NEW_LINE> <INDENT> return "normal" <NEW_LINE> <DEDENT> if value == "0001": <NEW_LINE> <INDENT> return "low" <NEW_LINE> <DEDENT> return None
Entity reading values from DSMR telegram.
62598fbaf548e778e596b724
class QueenAnt(ScubaThrower): <NEW_LINE> <INDENT> name = 'Queen' <NEW_LINE> food_cost = 7 <NEW_LINE> is_Queen = True <NEW_LINE> implemented = True <NEW_LINE> def __init__(self, armor = 1): <NEW_LINE> <INDENT> if self.is_Queen: <NEW_LINE> <INDENT> self.is_Queen = True <NEW_LINE> QueenAnt.is_Queen = False <NEW_LINE> <DEDENT> self.armor = armor <NEW_LINE> self.ant_doubled = [] <NEW_LINE> <DEDENT> def action(self, colony): <NEW_LINE> <INDENT> if self.is_Queen: <NEW_LINE> <INDENT> queen_place = self.place.exit <NEW_LINE> while (queen_place != None): <NEW_LINE> <INDENT> if queen_place.ant: <NEW_LINE> <INDENT> if not(queen_place.ant in self.ant_doubled): <NEW_LINE> <INDENT> queen_place.ant.damage *= 2 <NEW_LINE> self.ant_doubled.append(queen_place.ant) <NEW_LINE> <DEDENT> if queen_place.ant.container and queen_place.ant.ant and not(queen_place.ant.ant in self.ant_doubled): <NEW_LINE> <INDENT> queen_place.ant.ant.damage *= 2 <NEW_LINE> self.ant_doubled.append(queen_place.ant.ant) <NEW_LINE> <DEDENT> <DEDENT> queen_place = queen_place.exit <NEW_LINE> <DEDENT> ThrowerAnt.action(self, colony) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.armor = 0 <NEW_LINE> self.place.remove_insect(self) <NEW_LINE> <DEDENT> <DEDENT> def reduce_armor(self, amount): <NEW_LINE> <INDENT> self.armor -= amount <NEW_LINE> if self.armor <= 0: <NEW_LINE> <INDENT> if self.is_Queen: <NEW_LINE> <INDENT> bees_win() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.place.remove_insect(self)
The Queen of the colony. The game is over if a bee enters her place.
62598fba5fdd1c0f98e5e10e
class OperatorFamilyToSqlTestCase(InputMapToSqlTestCase): <NEW_LINE> <INDENT> def test_create_operfam(self): <NEW_LINE> <INDENT> inmap = self.std_map() <NEW_LINE> inmap['schema sd'].update({'operator family of1 using btree': {}}) <NEW_LINE> sql = self.to_sql(inmap) <NEW_LINE> assert fix_indent(sql[0]) == CREATE_STMT <NEW_LINE> <DEDENT> def test_create_operfam_in_schema(self): <NEW_LINE> <INDENT> inmap = self.std_map() <NEW_LINE> inmap.update({'schema s1': {'operator family of1 using btree': {}}}) <NEW_LINE> sql = self.to_sql(inmap, ["CREATE SCHEMA s1"]) <NEW_LINE> assert fix_indent(sql[0]) == "CREATE OPERATOR FAMILY s1.of1 USING btree" <NEW_LINE> <DEDENT> def test_drop_operfam(self): <NEW_LINE> <INDENT> sql = self.to_sql(self.std_map(), [CREATE_STMT], superuser=True) <NEW_LINE> assert sql == ["DROP OPERATOR FAMILY sd.of1 USING btree"] <NEW_LINE> <DEDENT> def test_operfam_with_comment(self): <NEW_LINE> <INDENT> inmap = self.std_map() <NEW_LINE> inmap['schema sd'].update({'operator family of1 using btree': { 'description': 'Test operator family of1'}}) <NEW_LINE> sql = self.to_sql(inmap) <NEW_LINE> assert fix_indent(sql[0]) == CREATE_STMT <NEW_LINE> assert sql[1] == COMMENT_STMT <NEW_LINE> <DEDENT> def test_comment_on_operfam(self): <NEW_LINE> <INDENT> inmap = self.std_map() <NEW_LINE> inmap['schema sd'].update({'operator family of1 using btree': { 'description': 'Test operator family of1'}}) <NEW_LINE> sql = self.to_sql(inmap, [CREATE_STMT], superuser=True) <NEW_LINE> assert sql == [COMMENT_STMT] <NEW_LINE> <DEDENT> def test_drop_operfam_comment(self): <NEW_LINE> <INDENT> stmts = [CREATE_STMT, COMMENT_STMT] <NEW_LINE> inmap = self.std_map() <NEW_LINE> inmap['schema sd'].update({'operator family of1 using btree': {}}) <NEW_LINE> sql = self.to_sql(inmap, stmts, superuser=True) <NEW_LINE> assert sql == ["COMMENT ON OPERATOR FAMILY sd.of1 USING btree IS NULL"] <NEW_LINE> <DEDENT> def test_change_operfam_comment(self): <NEW_LINE> <INDENT> stmts = [CREATE_STMT, 
COMMENT_STMT] <NEW_LINE> inmap = self.std_map() <NEW_LINE> inmap['schema sd'].update({'operator family of1 using btree': { 'description': 'Changed operator family of1'}}) <NEW_LINE> sql = self.to_sql(inmap, stmts, superuser=True) <NEW_LINE> assert sql == ["COMMENT ON OPERATOR FAMILY sd.of1 USING btree IS " "'Changed operator family of1'"]
Test SQL generation from input operators
62598fba97e22403b383b085
class Salesman(Employee): <NEW_LINE> <INDENT> def __init__(self,name,sales = 0): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._sales = sales <NEW_LINE> <DEDENT> @property <NEW_LINE> def sales(self): <NEW_LINE> <INDENT> return self._sales <NEW_LINE> <DEDENT> @sales.setter <NEW_LINE> def sales(self,sales): <NEW_LINE> <INDENT> self._sales = sales if sales > 0 else 0 <NEW_LINE> <DEDENT> def get_salary(self): <NEW_LINE> <INDENT> return 1200 + self._sales * 0.05
销售
62598fba4428ac0f6e6586a1
class CommitWithoutParent(SSZZException): <NEW_LINE> <INDENT> pass
Base Class for gitutils exceptions.
62598fba9c8ee82313040233
class UserSetting(Base): <NEW_LINE> <INDENT> __tablename__ = 'np_settings_user' <NEW_LINE> __table_args__ = ( Comment('NetProfile UI user settings'), Index('np_settings_user_u_us', 'uid', 'name', unique=True), Index('np_settings_user_i_name', 'name'), { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'info': { 'cap_menu': 'BASE_ADMIN', 'cap_read': 'ADMIN_SETTINGS', 'cap_create': 'ADMIN_DEV', 'cap_edit': 'ADMIN_DEV', 'cap_delete': 'ADMIN_DEV', 'show_in_menu': 'admin', 'menu_section': _('Settings'), 'menu_name': _('User Settings'), 'default_sort': (), 'grid_view': ('npusid', 'user', 'name', 'value'), 'grid_hidden': ('npusid',), 'detail_pane': ('netprofile_core.views', 'dpane_simple') } }) <NEW_LINE> id = Column( 'npusid', UInt32(), Sequence('np_settings_user_npusid_seq'), Comment('User setting ID'), primary_key=True, nullable=False, info={ 'header_string': _('ID') }) <NEW_LINE> user_id = Column( 'uid', UInt32(), ForeignKey('users.uid', name='np_settings_user_fk_uid', ondelete='CASCADE', onupdate='CASCADE'), Comment('User ID'), nullable=False, info={ 'header_string': _('User'), 'filter_type': 'none', 'column_flex': 1 }) <NEW_LINE> name = Column( ASCIIString(255), Comment('User setting name'), nullable=False, info={ 'header_string': _('Name'), 'column_flex': 1 }) <NEW_LINE> value = Column( ASCIIString(255), Comment('Current value of the setting'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string': _('Value'), 'column_flex': 2 }) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s.%s' % (str(self.user), str(self.type))
Per-user application settings.
62598fba5fdd1c0f98e5e10f
class RateLimitExceeded(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, calls, period): <NEW_LINE> <INDENT> super(RateLimitExceeded, self).__init__( "Exceeded rate limit of [%s] calls every [%s] seconds." % (calls, period) )
A request failed because it exceeded the client-side rate limit.
62598fba44b2445a339b6a34
class EditRouteView(UpdateView): <NEW_LINE> <INDENT> model = Route <NEW_LINE> fields = [ 'name', 'description', ] <NEW_LINE> template_name = 'tracker_device/edit_route.html' <NEW_LINE> success_url = reverse_lazy('profile') <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> auth_errors = verify_route_ownership(request.user, kwargs.get('pk')) <NEW_LINE> super_dispatch = super(EditRouteView, self).dispatch <NEW_LINE> return auth_errors or super_dispatch(request, *args, **kwargs)
View for editing a route.
62598fbaadb09d7d5dc0a6fb
@pytest.mark.draft <NEW_LINE> @pytest.mark.components <NEW_LINE> @pytest.allure.story('Broadcasts') <NEW_LINE> @pytest.allure.feature('POST') <NEW_LINE> class Test_PFE_Components(object): <NEW_LINE> <INDENT> @pytest.allure.link('https://jira.qumu.com/browse/TC-44497') <NEW_LINE> @pytest.mark.Broadcasts <NEW_LINE> @pytest.mark.POST <NEW_LINE> def test_TC_44497_POST_Broadcasts_Unable_To_Create_Broadcast_Stream_Id_With_More_Than_100_Characters(self, context): <NEW_LINE> <INDENT> with pytest.allure.step("""Verify that user is unable to create broadcast stream ID with more than 100 characters using request POST "/broadcasts"."""): <NEW_LINE> <INDENT> broadcastCreate = context.sc.BroadcastCreate( id='StreamIDMoreThan100Character', name='StreamIDMoreThan100Character', protectedContent=None, sourceGroups=None, streamGroups=None, streams=[{ 'id': 'stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1', 'streamMetadata': { 'mimeType': 'video/mp4' }, 'connectionPoints': [{ 'mode': 'PULL', 'maxConnections': 10000, 'url': 'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov' }] }], tags=None, targetAudiences=[{ 'id': 'Broadcast_Standalone_Audience' }]) <NEW_LINE> response = check( context.cl.Broadcasts.createEntity( body=broadcastCreate ) ) <NEW_LINE> <DEDENT> with pytest.allure.step("""Verify that user is unable to create broadcast stream ID with more than 100 characters using request POST "/broadcasts"."""): <NEW_LINE> <INDENT> broadcastCreate = context.sc.BroadcastCreate( id='StreamIDMoreThan100Character', name='StreamIDMoreThan100Character', protectedContent=None, sourceGroups=None, streamGroups=None, streams=[{ 'id': 'stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1stream1', 'streamMetadata': { 'mimeType': 'video/mp4' }, 'connectionPoints': [{ 'mode': 'PULL', 'maxConnections': 10000, 'url': 
'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov' }] }], tags=None, targetAudiences=[{ 'id': 'Broadcast_Standalone_Audience' }]) <NEW_LINE> request = context.cl.Broadcasts.createEntity( body=broadcastCreate ) <NEW_LINE> try: <NEW_LINE> <INDENT> client, response = check( request, quiet=True, returnResponse=True ) <NEW_LINE> <DEDENT> except (HTTPBadRequest, HTTPForbidden) as e: <NEW_LINE> <INDENT> get_error_message(e) | expect.any( should.start_with('may not be empty'), should.start_with('Invalid page parameter specified'), should.contain('Invalid Authorization Token') ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( "Expected error message, got {} status code instead.".format( response.status_code))
PFE Broadcasts test cases.
62598fbae5267d203ee6ba7e
class ActivityDataset(Dataset): <NEW_LINE> <INDENT> images_dir = "../JIGSAWS/Suturing/pictures/" <NEW_LINE> labels_dir = '../JIGSAWS/Suturing/transcriptions/' <NEW_LINE> def __init__(self, images_dir, trial_name, trial_dir, labels_dir, transform=None): <NEW_LINE> <INDENT> self.landmarks_frame = preDataProcessing.load_labels(labels_dir, trial_name) <NEW_LINE> self.images_dir = images_dir <NEW_LINE> self.transform = transform <NEW_LINE> self.trial_dir = trial_dir <NEW_LINE> self.trial_name = trial_name <NEW_LINE> self.labels_dir = labels_dir <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.landmarks_frame) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> img_name = os.path.join(self.images_dir, self.trial_dir, self.trial_name + "_capture1_" +str(idx+1).zfill(4)+".png") <NEW_LINE> image = io.imread(img_name) <NEW_LINE> landmarks = self.landmarks_frame[idx]-1 <NEW_LINE> if self.transform: <NEW_LINE> <INDENT> image = self.transform(image) <NEW_LINE> <DEDENT> return image, landmarks
Face Landmarks dataset.
62598fba167d2b6e312b70f4
class Host(Base): <NEW_LINE> <INDENT> __tablename__ = 'host' <NEW_LINE> network_ip = Column(String(16), primary_key=True, nullable=False) <NEW_LINE> mac_address = Column(String(18), nullable=True, unique=True) <NEW_LINE> name = Column(String) <NEW_LINE> inclusion_date = Column(String(20)) <NEW_LINE> scantime = Column(Integer) <NEW_LINE> ports = Column(String) <NEW_LINE> ip_network = Column(String(16), ForeignKey('network.ip_network'), nullable=True) <NEW_LINE> def __init__(self, ip_address, mac_address=None, hostname=None, inclusion_date=None, scantime=None, open_ports=None, ip_network=None): <NEW_LINE> <INDENT> self.ip_address = IPAddress(ip_address) <NEW_LINE> self.mac_address = mac_address <NEW_LINE> self.hostname = hostname <NEW_LINE> self.inclusion_date = inclusion_date <NEW_LINE> self.scantime = scantime <NEW_LINE> self.open_ports = open_ports <NEW_LINE> self.ip_network = ip_network <NEW_LINE> self.network_ip = str(self.ip_address) <NEW_LINE> if self.open_ports is not None: <NEW_LINE> <INDENT> self.ports = ','.join(map(str, self.open_ports.keys())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ports = None <NEW_LINE> <DEDENT> if self.hostname is not None: <NEW_LINE> <INDENT> if len(self.hostname.values()) > 0: <NEW_LINE> <INDENT> self.name = self.hostname.values()[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.name = None <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Host('%s, %s, %s, %s, %s, %s, %s')>" % ( self.network_ip, self.mac_address, self.name, self.inclusion_date, self.scantime, self.ports, self.ip_network )
Classe que define um ativo de rede
62598fba71ff763f4b5e78f8
class RidgeModelWrapper(BaseModelWrapper, ExplainableMixin): <NEW_LINE> <INDENT> tasks = [Tasks.REGRESSION] <NEW_LINE> algorithm = Algorithms.RIDGE <NEW_LINE> r_args = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> r_args = self._r_args if self._r_args is not None else {ModelParams.RANDOM_STATE: 777} <NEW_LINE> model = Ridge(**r_args) <NEW_LINE> super().__init__(model) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def compatible_with_dataset(cls, dataset): <NEW_LINE> <INDENT> return dataset.task in cls.tasks and dataset.data_type == DataTypes.TABULAR <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def generate_custom_ridge(cls, r_args): <NEW_LINE> <INDENT> return type("Custom" + cls.__name__, (cls, ), dict(r_args=r_args)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def true_global_importance_values(self, X): <NEW_LINE> <INDENT> mean = np.mean(X, axis=0, keepdims=False) <NEW_LINE> return np.abs(self._model.coef_ * mean).tolist()
Wrapper for ridge regression.
62598fbaf9cc0f698b1c538d
class APIReturn(object): <NEW_LINE> <INDENT> def __init__(self, obj, ok=True): <NEW_LINE> <INDENT> self.obj = obj <NEW_LINE> self.text = dumps(obj) <NEW_LINE> self.ok = ok <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> return self.obj
Spoofs returned response from Canvas SDK. Has response.ok property and JSON contents
62598fba097d151d1a2c11b2
class MSBFirstGetter(): <NEW_LINE> <INDENT> def __init__(self, decoder, bitcount): <NEW_LINE> <INDENT> self.layers = [[AdaptiveBitGetter(decoder) for _ in range(1<<layer)] for layer in range(bitcount)] <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> value = 0 <NEW_LINE> for layer in self.layers: <NEW_LINE> <INDENT> value = (value << 1) + layer[value].get_bit() <NEW_LINE> <DEDENT> return value
Reads a numbers from an BinaryArithmeticDecoder that are binarized using MSB first binary representation. The context used when reading a bit depends on all the earlier bits read for this number. So the MSB is always obtained using the same context, while the second-most significant bit is obtained using different contexts whether the MSB is one or zero. The third-most significant bit is decoded using one out of four contexts and so on.
62598fbaad47b63b2c5a79d3
class RecordMethods(dataobj.DataObj): <NEW_LINE> <INDENT> @property <NEW_LINE> def apc_records(self): <NEW_LINE> <INDENT> return self._get_list("record.jm:apc") <NEW_LINE> <DEDENT> @apc_records.setter <NEW_LINE> def apc_records(self, val): <NEW_LINE> <INDENT> self._set_with_struct("record.jm:apc", val) <NEW_LINE> <DEDENT> def has_apcs(self): <NEW_LINE> <INDENT> return len(self.apc_records) > 0 <NEW_LINE> <DEDENT> def remove_apc_by_ref(self, ref): <NEW_LINE> <INDENT> self._delete_from_list("record.jm:apc", matchsub={"ref" : ref}) <NEW_LINE> <DEDENT> def add_apc_record(self, apc_record): <NEW_LINE> <INDENT> self._add_to_list_with_struct("record.jm:apc", apc_record) <NEW_LINE> <DEDENT> @property <NEW_LINE> def doi(self): <NEW_LINE> <INDENT> return self._get_first_identifier("doi") <NEW_LINE> <DEDENT> @property <NEW_LINE> def pmid(self): <NEW_LINE> <INDENT> return self._get_first_identifier("pmid") <NEW_LINE> <DEDENT> @property <NEW_LINE> def pmcid(self): <NEW_LINE> <INDENT> return self._get_first_identifier("pmcid") <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._get_first_identifier("url") <NEW_LINE> <DEDENT> @property <NEW_LINE> def identifiers(self): <NEW_LINE> <INDENT> return self._get_list("record.dc:identifier") <NEW_LINE> <DEDENT> def _get_first_identifier(self, type): <NEW_LINE> <INDENT> for ident in self._get_list("record.dc:identifier"): <NEW_LINE> <INDENT> if ident.get("type") == type: <NEW_LINE> <INDENT> return ident.get("id") <NEW_LINE> <DEDENT> <DEDENT> return None
Super-class which defines methods useful for interrogating model objects which contain APC record data
62598fba55399d3f05626693
class SignedOffBy(CommitRule): <NEW_LINE> <INDENT> name = "body-requires-signed-off-by" <NEW_LINE> id = "UC2" <NEW_LINE> def validate(self, commit): <NEW_LINE> <INDENT> flags = re.UNICODE <NEW_LINE> flags |= re.IGNORECASE <NEW_LINE> for line in commit.message.body: <NEW_LINE> <INDENT> if line.lower().startswith("signed-off-by"): <NEW_LINE> <INDENT> if not re.search(r"(^)Signed-off-by: ([-'\w.]+) ([-'\w.]+) (.*)", line, flags=flags): <NEW_LINE> <INDENT> return [RuleViolation(self.id, "Signed-off-by: must have a full name", line_nr=1)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return [RuleViolation(self.id, "Body does not contain a 'Signed-off-by:' line", line_nr=1)]
This rule will enforce that each commit contains a "Signed-off-by" line. We keep things simple here and just check whether the commit body contains a line that starts with "Signed-off-by".
62598fba4a966d76dd5ef052
class DomainListRequired(object): <NEW_LINE> <INDENT> openapi_types = { 'errors': 'DomainListRequiredErrors' } <NEW_LINE> attribute_map = { 'errors': 'errors' } <NEW_LINE> def __init__(self, errors=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._errors = None <NEW_LINE> self.discriminator = None <NEW_LINE> if errors is not None: <NEW_LINE> <INDENT> self.errors = errors <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def errors(self): <NEW_LINE> <INDENT> return self._errors <NEW_LINE> <DEDENT> @errors.setter <NEW_LINE> def errors(self, errors): <NEW_LINE> <INDENT> self._errors = errors <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DomainListRequired): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not 
isinstance(other, DomainListRequired): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fba63b5f9789fe852ee
class GPU_Problem(Problem): <NEW_LINE> <INDENT> def Kernel_code(self): <NEW_LINE> <INDENT> return("This should be some kernel code")
The frame of any "live" problem, but in GPU
62598fba099cdd3c636754a2
class USB2000(Parameters): <NEW_LINE> <INDENT> xmin_default = 430.0 <NEW_LINE> xmax_default = 680.0 <NEW_LINE> valid_minmax_default = (339.0, 1024.0) <NEW_LINE> sub_base_default = True <NEW_LINE> bline_fit_default = True <NEW_LINE> fit_regions_default = ((345.0, 395.0), (900.0, 1000.0))
Define some shared parameters between ocean optics spectrometers.
62598fba1b99ca400228f5f1
class GlobalUpload(object): <NEW_LINE> <INDENT> def __init__(self, file_dict): <NEW_LINE> <INDENT> path = uploader.get_storage_path() <NEW_LINE> if not path: <NEW_LINE> <INDENT> self.storage_path = None <NEW_LINE> return <NEW_LINE> <DEDENT> self.storage_path = os.path.join(path, 'global') <NEW_LINE> try: <NEW_LINE> <INDENT> os.makedirs(self.storage_path) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno != 17: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> self.filename = os.path.basename(file_dict.get('filename')) if file_dict.get('filename') else None <NEW_LINE> upload_field_storage = file_dict.pop('upload', None) <NEW_LINE> if isinstance(upload_field_storage, cgi.FieldStorage): <NEW_LINE> <INDENT> self._update_filename(upload_field_storage) <NEW_LINE> self.filename = munge.munge_filename(self.filename) <NEW_LINE> file_dict['filename'] = self.filename <NEW_LINE> self.upload_file = upload_field_storage.file <NEW_LINE> <DEDENT> <DEDENT> def _update_filename(self, upload_field_storage): <NEW_LINE> <INDENT> if self.filename: <NEW_LINE> <INDENT> splitted = os.path.splitext(self.filename) <NEW_LINE> if not splitted[1]: <NEW_LINE> <INDENT> self.filename += os.path.splitext(upload_field_storage.filename)[1] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.filename = upload_field_storage.filename <NEW_LINE> <DEDENT> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> filepath = os.path.join(self.get_directory(), self.filename) <NEW_LINE> return filepath <NEW_LINE> <DEDENT> def get_directory(self): <NEW_LINE> <INDENT> return self.storage_path <NEW_LINE> <DEDENT> def upload(self): <NEW_LINE> <INDENT> max_size = config.get('ckan.max_image_size', 2) <NEW_LINE> if not self.storage_path: <NEW_LINE> <INDENT> raise GlobalUploadException("No storage_path") <NEW_LINE> <DEDENT> directory = self.get_directory() <NEW_LINE> filepath = self.get_path() <NEW_LINE> if self.filename: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(directory) 
<NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno != 17: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> tmp_filepath = filepath + '~' <NEW_LINE> output_file = open(tmp_filepath, 'wb+') <NEW_LINE> self.upload_file.seek(0) <NEW_LINE> current_size = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> current_size = current_size + 1 <NEW_LINE> data = self.upload_file.read(2 ** 20) <NEW_LINE> if not data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> output_file.write(data) <NEW_LINE> if current_size > max_size: <NEW_LINE> <INDENT> os.remove(tmp_filepath) <NEW_LINE> raise GlobalUploadException('File upload too large') <NEW_LINE> <DEDENT> <DEDENT> output_file.close() <NEW_LINE> os.rename(tmp_filepath, filepath) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> def delete(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.get_path()) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> log.error("Unable to remove global file {}".format(self.get_path()))
This is heavily based on ckan.logic.uploader.ResourceUpload
62598fba7b180e01f3e4910f
class DatabasePrincipalAssignmentListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[DatabasePrincipalAssignment]'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["DatabasePrincipalAssignment"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(DatabasePrincipalAssignmentListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value
The list Kusto database principal assignments operation response. :ivar value: The list of Kusto database principal assignments. :vartype value: list[~azure.mgmt.synapse.models.DatabasePrincipalAssignment]
62598fba4c3428357761a43b
@final <NEW_LINE> class SameAliasImportViolation(ASTViolation): <NEW_LINE> <INDENT> error_template = 'Found same alias import: {0}' <NEW_LINE> code = 113
Forbids to use the same alias as the original name in imports. Reasoning: Why would you even do this in the first place? Example:: # Correct: from os import path # Wrong: from os import path as path .. versionadded:: 0.1.0
62598fba283ffb24f3cf3a04
class SpinnMachineInvalidParameterException(SpinnMachineException): <NEW_LINE> <INDENT> def __init__(self, parameter, value, problem): <NEW_LINE> <INDENT> super(SpinnMachineInvalidParameterException, self).__init__( "It is invalid to set {} to {}: {}".format( parameter, value, problem)) <NEW_LINE> self._parameter = parameter <NEW_LINE> self._value = value <NEW_LINE> self._problem = problem <NEW_LINE> <DEDENT> @property <NEW_LINE> def parameter(self): <NEW_LINE> <INDENT> return self._parameter <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @property <NEW_LINE> def problem(self): <NEW_LINE> <INDENT> return self._problem
Indicates that there is a problem with a parameter value
62598fba66673b3332c30551
class EditFormView(LoginRequiredMixin,FormView): <NEW_LINE> <INDENT> success_url="/user/user-dashboard/" <NEW_LINE> template_name="user-edit-form.html" <NEW_LINE> form_class=AccountEditForm <NEW_LINE> def get_initial(self): <NEW_LINE> <INDENT> user_obj=self.request.user <NEW_LINE> initial={ "email":user_obj.customer.Customer_Email, "contact_number":user_obj.customer.Customer_Contact_Number, "address_line_1":user_obj.customer.Address_Line1, "address_line_2":user_obj.customer.Address_Line2, "Region":user_obj.customer.City, "State":user_obj.customer.State, "ZIP":user_obj.customer.ZIP, } <NEW_LINE> return initial <NEW_LINE> <DEDENT> def form_valid(self,form): <NEW_LINE> <INDENT> user_obj=self.request.user <NEW_LINE> Customer.objects.filter(User_customer=user_obj).update( Customer_Email=form.cleaned_data["email"], Address_Line1=form.cleaned_data["address_line_1"], Address_Line2=form.cleaned_data["address_line_2"], State="DELHI", City=form.cleaned_data["Region"], ZIP=form.cleaned_data["ZIP"], Customer_Contact_Number=form.cleaned_data["contact_number"] ) <NEW_LINE> messages.success(self.request, 'Details Updated') <NEW_LINE> return super(EditFormView, self).form_valid(form)
user edit details
62598fba498bea3a75a57ca4
class stackProfileTimeSeries(plotBase.stackPlotBase): <NEW_LINE> <INDENT> def addPlot(self, tag, **kwargs): <NEW_LINE> <INDENT> kw = dict(self.defArgs) <NEW_LINE> kw.update(kwargs) <NEW_LINE> plot = profileTimeSeries(**kw) <NEW_LINE> plotBase.stackPlotBase.addPlot(self, plot, tag) <NEW_LINE> <DEDENT> def addSample(self, tag, *args, **kwargs): <NEW_LINE> <INDENT> if tag not in self.tags: <NEW_LINE> <INDENT> self.addPlot(tag, **kwargs) <NEW_LINE> <DEDENT> self.plots[tag].addSample(*args, **kwargs) <NEW_LINE> <DEDENT> def addOverlay(self, tag, *args, **kwargs): <NEW_LINE> <INDENT> if tag not in self.tags: <NEW_LINE> <INDENT> self.addPlot(tag, **kwargs) <NEW_LINE> <DEDENT> self.plots[tag].addOverlay(*args, **kwargs)
A class for stacking multiple profiles in the same plot.
62598fbaaad79263cf42e955
class Review(): <NEW_LINE> <INDENT> def __init__(self, mongo_doc): <NEW_LINE> <INDENT> self.review_id = mongo_doc['review_id'] <NEW_LINE> self.user_id = mongo_doc['user_id'] <NEW_LINE> self.business_id = mongo_doc['business_id'] <NEW_LINE> self.stars = mongo_doc['stars']
Encapsulates the Review attributes and inherits from Entity
62598fbaadb09d7d5dc0a6fd
class ExtendedInterpolationEnvConfig(ExtendedInterpolationConfig): <NEW_LINE> <INDENT> def __init__(self, *args, remove_vars: bool = None, env: dict = None, env_sec: str = 'env', **kwargs): <NEW_LINE> <INDENT> if 'default_expect' not in kwargs: <NEW_LINE> <INDENT> kwargs['default_expect'] = True <NEW_LINE> <DEDENT> self.remove_vars = remove_vars <NEW_LINE> if env is None: <NEW_LINE> <INDENT> self.env = os.environ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.env = env <NEW_LINE> <DEDENT> self.env_sec = env_sec <NEW_LINE> super(ExtendedInterpolationEnvConfig, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def _munge_default_vars(self, vars): <NEW_LINE> <INDENT> if vars is not None and self.remove_vars is not None: <NEW_LINE> <INDENT> for n in self.remove_vars: <NEW_LINE> <INDENT> if n in vars: <NEW_LINE> <INDENT> del vars[n] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return vars <NEW_LINE> <DEDENT> def _create_config_parser(self): <NEW_LINE> <INDENT> parser = super(ExtendedInterpolationEnvConfig, self)._create_config_parser() <NEW_LINE> sec = self.env_sec <NEW_LINE> parser.add_section(sec) <NEW_LINE> for k, v in self.env.items(): <NEW_LINE> <INDENT> logger.debug(f'adding env section {sec}: {k} -> {v}') <NEW_LINE> parser.set(sec, k, v) <NEW_LINE> <DEDENT> del self.env <NEW_LINE> return parser
A ``Config`` implementation that creates a section called ``env`` with environment variables passed.
62598fbae5267d203ee6ba80
class MultiplexTest(Test): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.compile_code() <NEW_LINE> self.set_hugepages() <NEW_LINE> self.set_numa_balance() <NEW_LINE> self.assembly_vm() <NEW_LINE> os_type = self.params.get('os_type', default='linux') <NEW_LINE> if os_type == 'windows': <NEW_LINE> <INDENT> self.log.info('Preparing VM with Windows (%s)', self.params.get('win')) <NEW_LINE> <DEDENT> if os_type == 'linux': <NEW_LINE> <INDENT> self.log.info('Preparing VM with Linux (%s)', self.params.get('distro')) <NEW_LINE> <DEDENT> <DEDENT> def compile_code(self): <NEW_LINE> <INDENT> self.log.info('Compile code') <NEW_LINE> self.log.info('gcc %s %s', self.params.get('gcc_flags', default='-O2'), 'code.c') <NEW_LINE> <DEDENT> def set_hugepages(self): <NEW_LINE> <INDENT> if self.params.get('huge_pages', default='yes') == 'yes': <NEW_LINE> <INDENT> self.log.info('Setting hugepages') <NEW_LINE> <DEDENT> <DEDENT> def set_numa_balance(self): <NEW_LINE> <INDENT> numa_balancing = self.params.get('numa_balancing', default='yes') <NEW_LINE> numa_migrate = self.params.get('numa_balancing_migrate_deferred', default='no') <NEW_LINE> if numa_balancing: <NEW_LINE> <INDENT> self.log.info('Numa balancing: %s', numa_balancing) <NEW_LINE> <DEDENT> if numa_migrate: <NEW_LINE> <INDENT> self.log.info('Numa balancing migrate deferred: %s', numa_migrate) <NEW_LINE> <DEDENT> <DEDENT> def assembly_vm(self): <NEW_LINE> <INDENT> self.log.info('Assembling VM') <NEW_LINE> drive_format = self.params.get('drive_format', default='virtio_blk') <NEW_LINE> nic_model = self.params.get('nic_model', default='virtio_net') <NEW_LINE> enable_msx_vectors = self.params.get('enable_msx_vectors', default='yes') <NEW_LINE> if drive_format: <NEW_LINE> <INDENT> self.log.info('Drive format: %s', drive_format) <NEW_LINE> <DEDENT> if nic_model: <NEW_LINE> <INDENT> self.log.info('NIC model: %s', nic_model) <NEW_LINE> <DEDENT> if enable_msx_vectors == 'yes': <NEW_LINE> <INDENT> self.log.info('Enabling msx 
vectors') <NEW_LINE> <DEDENT> <DEDENT> def test(self): <NEW_LINE> <INDENT> self.log.info('Executing synctest...') <NEW_LINE> self.log.info('synctest --timeout %s --tries %s', self.params.get('sync_timeout', default=12), self.params.get('sync_tries', default=3)) <NEW_LINE> self.log.info('Executing ping test...') <NEW_LINE> cmdline = f"ping --timeout {self.params.get('ping_timeout', default=10)} --tries {self.params.get('ping_tries', default=5)}" <NEW_LINE> ping_flags = self.params.get('ping_flags') <NEW_LINE> if ping_flags: <NEW_LINE> <INDENT> cmdline += f' {ping_flags}' <NEW_LINE> <DEDENT> self.log.info(cmdline)
Execute a test that uses provided parameters (for multiplexing testing). :param *: All params are only logged, they have no special meaning
62598fba76e4537e8c3ef728
class TreeNode(object): <NEW_LINE> <INDENT> def __init__(self, feature=None, label=None, root=None, children=None): <NEW_LINE> <INDENT> self.feature = feature <NEW_LINE> self.label = label <NEW_LINE> self.root = root <NEW_LINE> self.children = children
Decision Tree Node Each node has multi branches.
62598fba55399d3f05626695
class Platform(object): <NEW_LINE> <INDENT> def __init__( self, large_compute_support=True, altair_support=True, max_memory_GB=8.0, max_processors=2, temp_directory="/tmp", ): <NEW_LINE> <INDENT> self._large_compute_support = large_compute_support <NEW_LINE> self._altair_support = altair_support <NEW_LINE> self._max_memory_GB = max_memory_GB <NEW_LINE> self._max_processors = max_processors <NEW_LINE> self._temp_directory = temp_directory <NEW_LINE> <DEDENT> @property <NEW_LINE> def altair_support(self): <NEW_LINE> <INDENT> return self._altair_support <NEW_LINE> <DEDENT> @property <NEW_LINE> def large_compute_support(self): <NEW_LINE> <INDENT> return self._large_compute_support <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_processors(self): <NEW_LINE> <INDENT> return self._max_processors <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_memory_gigabytes(self): <NEW_LINE> <INDENT> return self._max_memory_GB <NEW_LINE> <DEDENT> @property <NEW_LINE> def temp_directory(self): <NEW_LINE> <INDENT> return self._temp_directory
Represents platform capabilities
62598fba91f36d47f2230f6b
class MaximaFunction(BuiltinFunction): <NEW_LINE> <INDENT> def __init__(self, name, nargs=2, conversions={}): <NEW_LINE> <INDENT> c = dict(maxima=name) <NEW_LINE> c.update(conversions) <NEW_LINE> BuiltinFunction.__init__(self, name=name, nargs=nargs, conversions=c) <NEW_LINE> <DEDENT> def _maxima_init_evaled_(self, *args): <NEW_LINE> <INDENT> args_maxima = [] <NEW_LINE> for a in args: <NEW_LINE> <INDENT> if isinstance(a, str): <NEW_LINE> <INDENT> args_maxima.append(a) <NEW_LINE> <DEDENT> elif hasattr(a, '_maxima_init_'): <NEW_LINE> <INDENT> args_maxima.append(a._maxima_init_()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args_maxima.append(str(a)) <NEW_LINE> <DEDENT> <DEDENT> return "%s(%s)"%(self.name(), ', '.join(args_maxima)) <NEW_LINE> <DEDENT> def _evalf_(self, *args, **kwds): <NEW_LINE> <INDENT> parent = kwds['parent'] <NEW_LINE> if hasattr(parent, 'prec') and parent.prec() > 53: <NEW_LINE> <INDENT> raise NotImplementedError("%s not implemented for precision > 53"%self.name()) <NEW_LINE> <DEDENT> _init() <NEW_LINE> return parent(maxima("%s, numer"%self._maxima_init_evaled_(*args))) <NEW_LINE> <DEDENT> def _eval_(self, *args): <NEW_LINE> <INDENT> _init() <NEW_LINE> try: <NEW_LINE> <INDENT> s = maxima(self._maxima_init_evaled_(*args)) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.name() in repr(s): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return s.sage()
EXAMPLES:: sage: from sage.functions.special import MaximaFunction sage: f = MaximaFunction("jacobi_sn") sage: f(1,1) tanh(1) sage: f(1/2,1/2).n() 0.470750473655657
62598fba5fcc89381b26620d
class AlpinoCorpusReader(BracketParseCorpusReader): <NEW_LINE> <INDENT> def __init__(self, root, encoding="ISO-8859-1", tagset=None): <NEW_LINE> <INDENT> BracketParseCorpusReader.__init__( self, root, r"alpino\.xml", detect_blocks="blankline", encoding=encoding, tagset=tagset, ) <NEW_LINE> <DEDENT> def _normalize(self, t, ordered=False): <NEW_LINE> <INDENT> if t[:10] != "<alpino_ds": <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> t = re.sub(r' <node .*? cat="(\w+)".*>', r"(\1", t) <NEW_LINE> if ordered: <NEW_LINE> <INDENT> t = re.sub( r' <node. *?begin="(\d+)".*? pos="(\w+)".*? word="([^"]+)".*?/>', r"(\1 \2 \3)", t, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> t = re.sub(r' <node .*?pos="(\w+)".*? word="([^"]+)".*?/>', r"(\1 \2)", t) <NEW_LINE> <DEDENT> t = re.sub(r" </node>", r")", t) <NEW_LINE> t = re.sub(r"<sentence>.*</sentence>", r"", t) <NEW_LINE> t = re.sub(r"</?alpino_ds.*>", r"", t) <NEW_LINE> return t <NEW_LINE> <DEDENT> def _tag(self, t, tagset=None): <NEW_LINE> <INDENT> tagged_sent = [ (int(o), w, p) for (o, p, w) in SORTTAGWRD.findall(self._normalize(t, ordered=True)) ] <NEW_LINE> tagged_sent.sort() <NEW_LINE> if tagset and tagset != self._tagset: <NEW_LINE> <INDENT> tagged_sent = [ (w, map_tag(self._tagset, tagset, p)) for (o, w, p) in tagged_sent ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tagged_sent = [(w, p) for (o, w, p) in tagged_sent] <NEW_LINE> <DEDENT> return tagged_sent <NEW_LINE> <DEDENT> def _word(self, t): <NEW_LINE> <INDENT> tagged_sent = self._tag(t) <NEW_LINE> return [w for (w, p) in tagged_sent]
Reader for the Alpino Dutch Treebank. This corpus has a lexical breakdown structure embedded, as read by _parse Unfortunately this puts punctuation and some other words out of the sentence order in the xml element tree. This is no good for tag_ and word_ _tag and _word will be overridden to use a non-default new parameter 'ordered' to the overridden _normalize function. The _parse function can then remain untouched.
62598fba7047854f4633f557
class AudioFile: <NEW_LINE> <INDENT> chunk = 1024 <NEW_LINE> def __init__(self, file, wait=1): <NEW_LINE> <INDENT> self.wf = wave.open(file, 'rb') <NEW_LINE> self.p = pyaudio.PyAudio() <NEW_LINE> self.stream = self.p.open( format=self.p.get_format_from_width(self.wf.getsampwidth()), channels=self.wf.getnchannels(), rate=self.wf.getframerate(), output=True ) <NEW_LINE> self.data = [] <NEW_LINE> d = self.wf.readframes(self.chunk) <NEW_LINE> while d != '': <NEW_LINE> <INDENT> self.data.append(d) <NEW_LINE> d = self.wf.readframes(self.chunk) <NEW_LINE> <DEDENT> self.last_played = time.time() <NEW_LINE> self.wait = wait <NEW_LINE> <DEDENT> def blocking_play(self): <NEW_LINE> <INDENT> for d in self.data: <NEW_LINE> <INDENT> self.stream.write(d) <NEW_LINE> <DEDENT> <DEDENT> def play(self): <NEW_LINE> <INDENT> if time.time() - self.last_played > self.wait: <NEW_LINE> <INDENT> p = Thread(target=self.blocking_play) <NEW_LINE> p.start() <NEW_LINE> self.last_played = time.time() <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.stream.close() <NEW_LINE> self.p.terminate()
adapted to be asynchronous from elliotjreed.com/article.php?read=play_a_sound_wav_audio_file_in_python3
62598fba26068e7796d4cadb
class InsertionSort: <NEW_LINE> <INDENT> def __call__(self, given_list): <NEW_LINE> <INDENT> for key_pos in range(1, len(given_list)): <NEW_LINE> <INDENT> key_value = self.get_value(given_list, key_pos) <NEW_LINE> scan_pos = key_pos - 1 <NEW_LINE> scan_pos = self.inner_loop(given_list, key_value, scan_pos) <NEW_LINE> given_list[scan_pos + 1] = key_value <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def get_value(given_list, key_pos): <NEW_LINE> <INDENT> return given_list[key_pos] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def inner_loop(given_list, key_value, scan_pos): <NEW_LINE> <INDENT> while (scan_pos >= 0) and (given_list[scan_pos] > key_value): <NEW_LINE> <INDENT> given_list[scan_pos + 1] = given_list[scan_pos] <NEW_LINE> scan_pos = scan_pos - 1 <NEW_LINE> <DEDENT> return scan_pos
The Insertion sort
62598fba3539df3088ecc42e
class Employee(): <NEW_LINE> <INDENT> def __init__(self, f_name, l_name, salary): <NEW_LINE> <INDENT> self.f_name = f_name <NEW_LINE> self.l_name = l_name <NEW_LINE> self.salary = salary <NEW_LINE> <DEDENT> def give_raise(self, money=5000): <NEW_LINE> <INDENT> self.salary += money <NEW_LINE> return self.salary
关于年薪管理的一次模拟
62598fba57b8e32f525081de
class StructureSetRoiData(object): <NEW_LINE> <INDENT> def __init__(self, roi_item, data): <NEW_LINE> <INDENT> self._roi_item = roi_item <NEW_LINE> self._structure_set = roi_item._structure_set <NEW_LINE> self._workspace_id = roi_item._structure_set._workspace_id <NEW_LINE> self._requestor = roi_item._requestor <NEW_LINE> self.contours = data["contours"] <NEW_LINE> self.lines = data["lines"] <NEW_LINE> self.points = data["points"] <NEW_LINE> <DEDENT> def is_editable(self): <NEW_LINE> <INDENT> return self._structure_set._is_editable <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> if not self._structure_set._is_editable: <NEW_LINE> <INDENT> raise InvalidOperationError('Item is not editable') <NEW_LINE> <DEDENT> wid = self._workspace_id <NEW_LINE> sid = self._structure_set.id <NEW_LINE> rid = self._roi_item.id <NEW_LINE> body = { "version": 2, "contours": self.contours, "lines": self.lines, "points": self.points } <NEW_LINE> headers = { 'ProKnow-Lock': self._structure_set._lock["id"] } <NEW_LINE> _, result = self._requestor.put('/workspaces/' + wid + '/structuresets/' + sid + '/draft/rois/' + rid + '/data', json=body, headers=headers) <NEW_LINE> self._roi_item._tag = result["tag"] <NEW_LINE> print(rid, result["tag"])
This class represents the data for a stucture set ROI. It's returned by calls to the :meth:`proknow.Patients.StructureSetRoiItem.get_data` method. Note: For information on how to use contour data, please check out the :ref:`contouring-data` guide. Attributes: contours (list): The list of contours for the ROI. lines (list): The list of lines for the ROI. points (list): The list of points for the ROI.
62598fbad7e4931a7ef3c218
class LoadingTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.url = "https://docs.google.com/spreadsheet/pub?key=0AprNP7zjIYS1dEhXRnRVTDRfRlRVcFdnVlhTcEk1N3c&single=true&gid=0&output=csv" <NEW_LINE> <DEDENT> def test_load_victims(self): <NEW_LINE> <INDENT> data = urllib2.urlopen(self.url).read() <NEW_LINE> data = StringIO(data) <NEW_LINE> reader = csv.DictReader(data) <NEW_LINE> total = len(list(reader)) <NEW_LINE> data.seek(0) <NEW_LINE> reader = csv.DictReader(data) <NEW_LINE> load_victims(data) <NEW_LINE> self.assertEqual(Victim.objects.count(), total)
Tests related to data loading.
62598fba63d6d428bbee2932
class ModelOptions(object): <NEW_LINE> <INDENT> fields = None <NEW_LINE> exclude = None <NEW_LINE> fieldsets = None <NEW_LINE> ordering = None
Describes how to represent a Model for a Controller.
62598fba32920d7e50bc61d0
class Request(object): <NEW_LINE> <INDENT> def __init__(self, _reactor, deferred, url, agent='dAmnViper/dA/api/request', response=None): <NEW_LINE> <INDENT> self._reactor = _reactor <NEW_LINE> self.d = deferred <NEW_LINE> self.agent = agent <NEW_LINE> self.url = url <NEW_LINE> self.agent = agent <NEW_LINE> self.response_obj = response <NEW_LINE> if self.response_obj is None: <NEW_LINE> <INDENT> self.response_obj = Response <NEW_LINE> <DEDENT> self.start_request() <NEW_LINE> <DEDENT> def start_request(self): <NEW_LINE> <INDENT> agent = Agent(self._reactor) <NEW_LINE> d = agent.request('POST', self.url, Headers({'User-Agent': [self.agent]}), None) <NEW_LINE> d.addCallback(self.received_response) <NEW_LINE> <DEDENT> def received_response(self, response): <NEW_LINE> <INDENT> self.response = response <NEW_LINE> d = defer.Deferred() <NEW_LINE> d.addCallback(self.got_data) <NEW_LINE> response.deliverBody(ResponseReceiver(d)) <NEW_LINE> <DEDENT> def got_data(self, data): <NEW_LINE> <INDENT> self.d.callback(self.response_obj(self.response, data))
Send an API request. This is a helper object to send requests to API methods. A deferred method must be provided, as this object will call the deferred with the response from the api request.
62598fba099cdd3c636754a3
class ActivityList(abc_learning_objects.ActivityList, osid_objects.OsidList): <NEW_LINE> <INDENT> def get_next_activity(self): <NEW_LINE> <INDENT> return self.next() <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> return self._get_next_object(Activity) <NEW_LINE> <DEDENT> next_activity = property(fget=get_next_activity) <NEW_LINE> @utilities.arguments_not_none <NEW_LINE> def get_next_activities(self, n): <NEW_LINE> <INDENT> return self._get_next_n(n)
Like all ``OsidLists,`` ``ActivityList`` provides a means for accessing ``Activity`` elements sequentially either one at a time or many at a time. Examples: while (al.hasNext()) { Activity activity = al.getNextActivity(); } or while (al.hasNext()) { Activity[] activities = al.getNextActivities(al.available()); }
62598fba1b99ca400228f5f2
class EmittingOutputStream(QObject): <NEW_LINE> <INDENT> stream_signal = pyqtSignal(str) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> def write(self, text: object) -> None: <NEW_LINE> <INDENT> self.stream_signal.emit(str(text)) <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> sys.stdout = sys.__stdout__ <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.flush() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> sys.stdout = sys.__stdout__ <NEW_LINE> <DEDENT> def isatty(self): <NEW_LINE> <INDENT> return True
Implementation of a stream to handle logging messages to a Qt widget.
62598fba7c178a314d78d621
class Metrics: <NEW_LINE> <INDENT> def __init__(self, initval=0): <NEW_LINE> <INDENT> self.cnt = Value("i", initval) <NEW_LINE> self.prev = Value("i", initval) <NEW_LINE> self.latency = Value("i", initval) <NEW_LINE> self.lock = Lock() <NEW_LINE> <DEDENT> def inc_cnt(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.cnt.value += 1 <NEW_LINE> <DEDENT> <DEDENT> def reset_cnt(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.cnt.value = 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def counter_value(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self.cnt.value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def previous_value(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self.prev.value <NEW_LINE> <DEDENT> <DEDENT> @previous_value.setter <NEW_LINE> def previous_value(self, value): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.prev.value = value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def msg_latency(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self.latency.value <NEW_LINE> <DEDENT> <DEDENT> @msg_latency.setter <NEW_LINE> def msg_latency(self, latency): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.latency.value = latency
Thread safe variables to capture metrics
62598fba56ac1b37e6302370
class UnitFloat(Float): <NEW_LINE> <INDENT> def __convert__(self, value): <NEW_LINE> <INDENT> if isinstance(value, basestring): <NEW_LINE> <INDENT> value = value.rstrip(string.ascii_letters) <NEW_LINE> <DEDENT> return float(value)
Represents a floating point type. If a unit is present in the string representation, it will get stripped.
62598fba9f2886367281893c
class ForceReply(base.TelegramObject): <NEW_LINE> <INDENT> force_reply: base.Boolean = fields.Field(default=True) <NEW_LINE> selective: base.Boolean = fields.Field() <NEW_LINE> @classmethod <NEW_LINE> def create(cls, selective: typing.Optional[base.Boolean] = None): <NEW_LINE> <INDENT> return cls(selective=selective)
Upon receiving a message with this object, Telegram clients will display a reply interface to the user (act as if the user has selected the bot‘s message and tapped ’Reply'). This can be extremely useful if you want to create user-friendly step-by-step interfaces without having to sacrifice privacy mode. Example: A poll bot for groups runs in privacy mode (only receives commands, replies to its messages and mentions). There could be two ways to create a new poll The last option is definitely more attractive. And if you use ForceReply in your bot‘s questions, it will receive the user’s answers even if it only receives replies, commands and mentions — without any extra work for the user. https://core.telegram.org/bots/api#forcereply
62598fbaec188e330fdf8a14
class Filter: <NEW_LINE> <INDENT> def __init__(self, field: str, value: str, case: bool): <NEW_LINE> <INDENT> if value and value[0] in SEARCH_MODIFIERS: <NEW_LINE> <INDENT> modifier, value = value[0], value[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> modifier = "" <NEW_LINE> <DEDENT> self.field = field.replace(".", "__") <NEW_LINE> self.modifier = modifier <NEW_LINE> for parser in DGEQ_TYPE_PARSERS: <NEW_LINE> <INDENT> v = parser(value) <NEW_LINE> if v is not ...: <NEW_LINE> <INDENT> self.value = v <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> self.case = case <NEW_LINE> <DEDENT> def get(self) -> Q: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> lookup = DGEQ_FILTERS_TABLE[(self.modifier, type(self.value))] <NEW_LINE> if lookup is None: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> if isinstance(self.value, str): <NEW_LINE> <INDENT> lookup = lookup[self.case] <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise SearchModifierError(self.modifier, self.value) <NEW_LINE> <DEDENT> if self.modifier not in DGEQ_EXCLUDE_SEARCH_MODIFIER: <NEW_LINE> <INDENT> q = Q(**{self.field + "__" + lookup: self.value}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = ~Q(**{self.field + "__" + lookup: self.value}) <NEW_LINE> <DEDENT> return q <NEW_LINE> <DEDENT> def __eq__(self, other: 'Filter'): <NEW_LINE> <INDENT> return ( self.value == self.value and self.field == self.field and self.modifier == self.modifier ) <NEW_LINE> <DEDENT> def apply(self, queryset: QuerySet) -> QuerySet: <NEW_LINE> <INDENT> return queryset.filter(self.get())
Represent a search filter in a `GenericQuery`
62598fbad268445f26639c46
class InlineForeignKeyCacheMixin(object): <NEW_LINE> <INDENT> def formfield_for_foreignkey(self, db_field, request, **kwargs): <NEW_LINE> <INDENT> formfield = super().formfield_for_foreignkey(db_field, request, **kwargs) <NEW_LINE> cache_key = "repanier_field{}".format(db_field.name) <NEW_LINE> cache_value = cache.get(cache_key) <NEW_LINE> if cache_value is not None: <NEW_LINE> <INDENT> formfield.choices = cache_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> choices = [(None, _("---------"))] <NEW_LINE> for obj in kwargs["queryset"]: <NEW_LINE> <INDENT> choices.append((obj.id, str(obj))) <NEW_LINE> <DEDENT> formfield.choices = choices <NEW_LINE> cache.set(cache_key, choices, 300) <NEW_LINE> <DEDENT> return formfield
Cache foreignkey choices in the request object to prevent unnecessary queries.
62598fbabe383301e0253980
class TrialBalance(models.Model): <NEW_LINE> <INDENT> _name = "trial.balance" <NEW_LINE> _order = 'subject_code' <NEW_LINE> _description = u'科目余额表' <NEW_LINE> @api.one <NEW_LINE> @api.depends('cumulative_occurrence_debit', 'cumulative_occurrence_credit', 'ending_balance_debit', 'ending_balance_credit', 'subject_name_id') <NEW_LINE> def _get_year_init(self): <NEW_LINE> <INDENT> if self.subject_name_id.costs_types in ('in','out','cost'): <NEW_LINE> <INDENT> self.year_init_debit = self.year_init_credit = 0 <NEW_LINE> return True <NEW_LINE> <DEDENT> if self.subject_name_id.balance_directions == 'in': <NEW_LINE> <INDENT> self.year_init_debit = self.ending_balance_debit - self.ending_balance_credit - self.cumulative_occurrence_debit + self.cumulative_occurrence_credit <NEW_LINE> self.year_init_credit = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.year_init_credit = self.ending_balance_credit - self.ending_balance_debit - self.cumulative_occurrence_credit + self.cumulative_occurrence_debit <NEW_LINE> self.year_init_debit = 0 <NEW_LINE> <DEDENT> <DEDENT> period_id = fields.Many2one('finance.period', string=u'会计期间') <NEW_LINE> subject_code = fields.Char(u'科目编码') <NEW_LINE> subject_name_id = fields.Many2one('finance.account', string=u'科目') <NEW_LINE> year_init_debit = fields.Float(u'年初余额(借方)', default=0, compute=_get_year_init) <NEW_LINE> year_init_credit = fields.Float(u'年初余额(贷方)', default=0, compute=_get_year_init) <NEW_LINE> initial_balance_debit = fields.Float(u'期初余额(借方)', default=0) <NEW_LINE> initial_balance_credit = fields.Float(u'期初余额(贷方)', default=0) <NEW_LINE> current_occurrence_debit = fields.Float(u'本期发生额(借方)', default=0) <NEW_LINE> current_occurrence_credit = fields.Float(u'本期发生额(贷方)', default=0) <NEW_LINE> ending_balance_debit = fields.Float(u'期末余额(借方)', default=0) <NEW_LINE> ending_balance_credit = fields.Float(u'期末余额(贷方)', default=0) <NEW_LINE> cumulative_occurrence_debit = fields.Float(u'本年累计发生额(借方)', default=0) <NEW_LINE> 
cumulative_occurrence_credit = fields.Float(u'本年累计发生额(贷方)', default=0)
科目余额表
62598fba2c8b7c6e89bd3949
class PackageVersionAlreadyPresentError(ThothPythonExceptionError): <NEW_LINE> <INDENT> pass
An exception raised when adding a package in specific version that is already present.
62598fbaa219f33f346c6989
@vdm_module('n1', 'n2') <NEW_LINE> class fibonacci: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.n1 = 0 <NEW_LINE> self.n2 = 1 <NEW_LINE> <DEDENT> @vdm_method <NEW_LINE> def next(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @vdm_test <NEW_LINE> def prev(self): <NEW_LINE> <INDENT> n = self.n2 - self.n1 <NEW_LINE> self.n2 = self.n1 <NEW_LINE> self.n1 = n <NEW_LINE> return self.n2
state State of n1 : nat n2 : nat init s == s = mk_State(0, 1) end operations next : () ==> nat next() == (dcl n : nat := n1 + n2; n1 := n2; n2 := n; return n) post RESULT = n1~ + n2~ and n1 = n2~ and n2 = RESULT; prev : () ==> nat prev() == (dcl n : nat := n2 - n1; n2 := n1; n1 := n; return n2) post n1 + n2 = n2~ and n2 = n1~ and n2 = RESULT;
62598fba55399d3f05626697
class Entries: <NEW_LINE> <INDENT> def _doc_to_entry(self, run_start_doc): <NEW_LINE> <INDENT> uid = run_start_doc['uid'] <NEW_LINE> run_start_doc.pop('_id') <NEW_LINE> entry_metadata = {'start': run_start_doc, 'stop': catalog._get_run_stop(uid)} <NEW_LINE> args = dict( run_start_doc=run_start_doc, get_run_stop=partial(catalog._get_run_stop, uid), get_event_descriptors=partial(catalog._get_event_descriptors, uid), get_event_cursor=catalog._get_event_cursor, get_event_count=catalog._get_event_count, get_resource=catalog._get_resource, get_datum=catalog._get_datum, get_datum_cursor=catalog._get_datum_cursor, filler=catalog.filler) <NEW_LINE> return intake.catalog.local.LocalCatalogEntry( name=run_start_doc['uid'], description={}, driver='intake_bluesky.core.RunCatalog', direct_access='forbid', args=args, cache=None, parameters=[], metadata=entry_metadata, catalog_dir=None, getenv=True, getshell=True, catalog=catalog) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> yield from self.keys() <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> cursor = catalog._run_start_collection.find( catalog._query, sort=[('time', pymongo.DESCENDING)]) <NEW_LINE> for run_start_doc in cursor: <NEW_LINE> <INDENT> yield run_start_doc['uid'] <NEW_LINE> <DEDENT> <DEDENT> def values(self): <NEW_LINE> <INDENT> cursor = catalog._run_start_collection.find( catalog._query, sort=[('time', pymongo.DESCENDING)]) <NEW_LINE> for run_start_doc in cursor: <NEW_LINE> <INDENT> yield self._doc_to_entry(run_start_doc) <NEW_LINE> <DEDENT> <DEDENT> def items(self): <NEW_LINE> <INDENT> cursor = catalog._run_start_collection.find( catalog._query, sort=[('time', pymongo.DESCENDING)]) <NEW_LINE> for run_start_doc in cursor: <NEW_LINE> <INDENT> yield run_start_doc['uid'], self._doc_to_entry(run_start_doc) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> name = int(name) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> 
<DEDENT> if isinstance(name, int): <NEW_LINE> <INDENT> if name < 0: <NEW_LINE> <INDENT> query = catalog._query <NEW_LINE> cursor = (catalog._run_start_collection.find(query) .sort('time', pymongo.DESCENDING) .limit(name)) <NEW_LINE> *_, run_start_doc = cursor <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> query = {'$and': [catalog._query, {'scan_id': name}]} <NEW_LINE> cursor = (catalog._run_start_collection.find(query) .sort('time', pymongo.DESCENDING) .limit(1)) <NEW_LINE> run_start_doc, = cursor <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> query = {'$and': [catalog._query, {'uid': name}]} <NEW_LINE> run_start_doc = catalog._run_start_collection.find_one(query) <NEW_LINE> <DEDENT> if run_start_doc is None: <NEW_LINE> <INDENT> raise KeyError(name) <NEW_LINE> <DEDENT> return self._doc_to_entry(run_start_doc) <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
Mock the dict interface around a MongoDB query result.
62598fba796e427e5384e919
class DecodeObject: <NEW_LINE> <INDENT> __slots__ = ('name', 'getter', 'properties') <NEW_LINE> def __init__(self, name, getter=None): <NEW_LINE> <INDENT> assert isinstance(name, str), 'Invalid name %s' % name <NEW_LINE> assert getter is None or callable(getter), 'Invalid getter %s' % getter <NEW_LINE> self.name = name <NEW_LINE> self.getter = getter <NEW_LINE> self.properties = {} <NEW_LINE> <DEDENT> def __call__(self, path, target, normalizer, **data): <NEW_LINE> <INDENT> assert isinstance(path, deque), 'Invalid path %s' % path <NEW_LINE> assert isinstance(normalizer, Normalizer), 'Invalid normalizer %s' % normalizer <NEW_LINE> if not path: return False <NEW_LINE> key = path.popleft() <NEW_LINE> if not isinstance(key, str): return False <NEW_LINE> assert isinstance(key, str), 'Invalid path element %s' % key <NEW_LINE> if path and normalizer.normalize(self.name) == key: <NEW_LINE> <INDENT> key = path.popleft() <NEW_LINE> if not isinstance(key, str): return False <NEW_LINE> <DEDENT> if self.getter is not None: target = self.getter(target) <NEW_LINE> for keyProp, decodeProp in self.properties.items(): <NEW_LINE> <INDENT> assert isinstance(keyProp, str), 'Invalid property key %s' % keyProp <NEW_LINE> if normalizer.normalize(keyProp) == key: break <NEW_LINE> <DEDENT> else: return False <NEW_LINE> try: return decodeProp(path=path, target=target, normalizer=normalizer, **data) <NEW_LINE> except InputError: raise <NEW_LINE> except: handleExploitError(decodeProp)
Exploit for object decoding.
62598fba3539df3088ecc430
class StepFailedException(Exception): <NEW_LINE> <INDENT> _FIELDS = ('reason', 'step_num', 'num_steps', 'step_desc') <NEW_LINE> def __init__( self, reason=None, step_num=None, num_steps=None, step_desc=None): <NEW_LINE> <INDENT> self.reason = reason <NEW_LINE> self.step_num = step_num <NEW_LINE> self.num_steps = num_steps <NEW_LINE> self.step_desc = step_desc <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '%s failed%s' % ( (self.step_desc or 'Step%s%s' % ( '' if self.step_num is None else ' %d' % (self.step_num + 1), '' if (self.step_num is None or self.num_steps is None) else ( ' of %d' % self.num_steps))), '' if self.reason is None else ': %s' % self.reason) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%s)' % ( self.__class__.__name__, ', '.join(('%s=%r' % (k, getattr(self, k)) for k in self._FIELDS if getattr(self, k) is not None)))
Exception to throw when a step fails. This will automatically be caught and converted to an error message by :py:meth:`mrjob.job.MRJob.run`, but you may wish to catch it if you :ref:`run your job programatically <runners-programmatically>`.
62598fbacc40096d6161a29b
class FilesWriter(Writer): <NEW_LINE> <INDENT> def __init__( self, codec=None, bitrate=None, output_sample_rate=44100, stem_names=None, multiprocess=False, synchronous=True ): <NEW_LINE> <INDENT> self.codec = codec <NEW_LINE> self.bitrate = bitrate <NEW_LINE> self.output_sample_rate = output_sample_rate <NEW_LINE> self.stem_names = stem_names <NEW_LINE> self.synchronous = synchronous <NEW_LINE> if multiprocess: <NEW_LINE> <INDENT> self._pool = Pool() <NEW_LINE> atexit.register(self._pool.close) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._pool = None <NEW_LINE> <DEDENT> self._tasks = [] <NEW_LINE> <DEDENT> def join(self, timeout=200): <NEW_LINE> <INDENT> while len(self._tasks) > 0: <NEW_LINE> <INDENT> task = self._tasks.pop() <NEW_LINE> task.get() <NEW_LINE> task.wait(timeout=timeout) <NEW_LINE> <DEDENT> <DEDENT> def __call__( self, data, path, sample_rate ): <NEW_LINE> <INDENT> nb_stems = data.shape[0] <NEW_LINE> if self.output_sample_rate is None: <NEW_LINE> <INDENT> self.output_sample_rate = sample_rate <NEW_LINE> <DEDENT> if self.stem_names is None: <NEW_LINE> <INDENT> self.stem_names = ["Stem_" + str(k) for k in range(nb_stems)] <NEW_LINE> <DEDENT> for idx in range(nb_stems): <NEW_LINE> <INDENT> if type(path) is tuple: <NEW_LINE> <INDENT> stem_filepath = str(Path( path[0], self.stem_names[idx] + path[1] )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> p = Path(path) <NEW_LINE> stem_filepath = str(Path( p.parent, self.stem_names[idx] + p.suffix )) <NEW_LINE> <DEDENT> if self._pool: <NEW_LINE> <INDENT> task = self._pool.apply_async( write_audio, ( stem_filepath, data[idx], sample_rate, self.output_sample_rate, self.codec, self.bitrate ) ) <NEW_LINE> self._tasks.append(task) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> write_audio( path=stem_filepath, data=data[idx], sample_rate=sample_rate, output_sample_rate=self.output_sample_rate, codec=self.codec, bitrate=self.bitrate ) <NEW_LINE> <DEDENT> <DEDENT> if self.synchronous and self._pool: <NEW_LINE> 
<INDENT> self.join()
Save Stems as multiple files Takes stems tensor and write into multiple files. Args: codec: str Specifies ffmpeg codec being used. Defaults to `None` which automatically selects default codec for each container bitrate: int, optional Bitrate in Bits per second. Defaults to `None` output_sample_rate: float, optional Optionally, applies resampling, if different to `sample_rate`. Defaults to `None` which `sample_rate`. stem_names: List(str) List of stem names to be used for writing. Defaults to `None` which results in stem names to be enumerated: `['Stem_1', 'Stem_2', ...]` multiprocess: bool Enable multiprocessing when writing files. Can speed up writing of large files. Defaults to `False`. synchronous bool: Write multiprocessed synchronous. Defaults to `True`.
62598fba7d847024c075c540
class ComputeCapabilitiesFilter(filters.BaseHostFilter): <NEW_LINE> <INDENT> run_filter_once_per_request = True <NEW_LINE> def _satisfies_extra_specs(self, host_state, instance_type): <NEW_LINE> <INDENT> if 'extra_specs' not in instance_type: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for key, req in instance_type['extra_specs'].iteritems(): <NEW_LINE> <INDENT> scope = key.split(':') <NEW_LINE> if len(scope) > 1: <NEW_LINE> <INDENT> if scope[0] != "capabilities": <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del scope[0] <NEW_LINE> <DEDENT> <DEDENT> cap = host_state <NEW_LINE> for index in range(0, len(scope)): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not isinstance(cap, dict): <NEW_LINE> <INDENT> if getattr(cap, scope[index], None) is None: <NEW_LINE> <INDENT> cap = cap.stats.get(scope[index], None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cap = getattr(cap, scope[index], None) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> cap = cap.get(scope[index], None) <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if cap is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if not extra_specs_ops.match(str(cap), req): <NEW_LINE> <INDENT> LOG.debug(_("extra_spec requirement '%(req)s' does not match " "%(cap)s'"), {'req': req, 'cap': cap}) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def host_passes(self, host_state, filter_properties): <NEW_LINE> <INDENT> instance_type = filter_properties.get('instance_type') <NEW_LINE> if not self._satisfies_extra_specs(host_state, instance_type): <NEW_LINE> <INDENT> LOG.debug(_("%(host_state)s fails instance_type extra_specs " "requirements"), {'host_state': host_state}) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
HostFilter hard-coded to work with InstanceType records.
62598fba01c39578d7f12efe
class GenerateIAMPolicy: <NEW_LINE> <INDENT> CLINAME = "generate-iam-policy" <NEW_LINE> def __init__( self, output_file: str = "./cfn_stack_policy.json", project_root: str = "./" ): <NEW_LINE> <INDENT> project_root_path = Path(project_root).expanduser().resolve() <NEW_LINE> config = Config.create(project_root=project_root_path) <NEW_LINE> CFNPolicyGenerator(config, output_file).generate_policy()
[ALPHA] Introspects CFN Template(s) and generates an IAM policy necessary to successfully launch the template(s)
62598fba63d6d428bbee2934
class ManejadorArbolDirectorios(object): <NEW_LINE> <INDENT> _manejadoresDirectorios = None <NEW_LINE> def __init__(self, directorios): <NEW_LINE> <INDENT> self._manejadoresDirectorios = [] <NEW_LINE> for i in directorios: <NEW_LINE> <INDENT> self._manejadoresDirectorios = ManejadorArbolDirectorios(directorios[i])
Objeto contenedor de varios arboles de directorios
62598fba099cdd3c636754a4
class ProjectTagIndex(ListView): <NEW_LINE> <INDENT> model = Project <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = Project.objects.public(self.request.user) <NEW_LINE> self.tag = get_object_or_404(Tag, slug=self.kwargs.get('tag')) <NEW_LINE> queryset = queryset.filter(tags__slug__in=[self.tag.slug]) <NEW_LINE> return queryset <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['tag'] = self.tag <NEW_LINE> return context
List view of public :py:class:`Project` instances.
62598fba92d797404e388c25
class RegressionTestCase( IntegrationTest ): <NEW_LINE> <INDENT> def setUp( self ): <NEW_LINE> <INDENT> super( RegressionTestCase, self ).setUp() <NEW_LINE> gLogger.setLevel('DEBUG') <NEW_LINE> self.dirac = Dirac() <NEW_LINE> exeScriptLoc = find_all( 'exe-script.py', '..', '/DIRAC/tests/Workflow/Regression' )[0] <NEW_LINE> helloWorldLoc = find_all( 'helloWorld.py', '..', '/DIRAC/tests/Workflow/Regression' )[0] <NEW_LINE> shutil.copyfile( exeScriptLoc, './exe-script.py' ) <NEW_LINE> shutil.copyfile( helloWorldLoc, './helloWorld.py' ) <NEW_LINE> helloWorldXMLLocation = find_all( 'helloWorld.xml', '..', '/DIRAC/tests/Workflow/Regression' )[0] <NEW_LINE> self.j_u_hello = Job( helloWorldXMLLocation ) <NEW_LINE> helloWorldXMLFewMoreLocation = find_all( 'helloWorld.xml', '..', '/DIRAC/tests/Workflow/Regression' )[0] <NEW_LINE> self.j_u_helloPlus = Job( helloWorldXMLFewMoreLocation ) <NEW_LINE> <DEDENT> def tearDown( self ): <NEW_LINE> <INDENT> os.remove( 'exe-script.py' ) <NEW_LINE> os.remove( 'helloWorld.py' )
Base class for the Regression test cases
62598fba4c3428357761a43f