code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class AcademicLevel(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.mRedis = RedisHelper() <NEW_LINE> self.authors = self.mRedis.getAllAuthors() <NEW_LINE> self.authorsPN =dict() <NEW_LINE> for author in self.authors: <NEW_LINE> <INDENT> self.authorsPN[author] = sum([len(self.mRedis.getAuCoauTimes(author, coau)) for coau in self.mRedis.getAuCoauthors(author)]) <NEW_LINE> <DEDENT> self.coauNumAuLevel = dict() <NEW_LINE> self.coauNumCoauLevel = dict() <NEW_LINE> <DEDENT> def getCoauNumLevel(self): <NEW_LINE> <INDENT> index = 0 <NEW_LINE> for author in self.authors: <NEW_LINE> <INDENT> index += 1 <NEW_LINE> if index % 100000 == 0: <NEW_LINE> <INDENT> logging.info(index) <NEW_LINE> <DEDENT> coaus = self.mRedis.getAuCoauthors(author) <NEW_LINE> coauNum = len(coaus) <NEW_LINE> coauAvgLevel = sum([float(self.authorsPN.get(coau)) for coau in coaus]) / coauNum <NEW_LINE> authorLevel = float(self.authorsPN.get(author)) <NEW_LINE> AuLevels = self.coauNumAuLevel.setdefault(coauNum, []) <NEW_LINE> AuLevels.append(authorLevel) <NEW_LINE> CoauLevels = self.coauNumCoauLevel.setdefault(coauNum, []) <NEW_LINE> CoauLevels.append(coauAvgLevel) <NEW_LINE> <DEDENT> <DEDENT> def saveCoauNumLevel(self): <NEW_LINE> <INDENT> with open(OUTPUT_COAUNUM_LEVEL_PAPERNUM, 'w') as fileWriter: <NEW_LINE> <INDENT> for coauNum, levels in self.coauNumAuLevel.items(): <NEW_LINE> <INDENT> cn = str(coauNum) <NEW_LINE> auLevel = str(sum(levels) / len(levels)) <NEW_LINE> cl = self.coauNumCoauLevel.get(coauNum) <NEW_LINE> coAuLevel = str(sum(cl) / len(cl)) <NEW_LINE> fileWriter.write(cn + '\t' + auLevel + '\t' + coAuLevel + '\n') <NEW_LINE> <DEDENT> <DEDENT> fileWriter.close() <NEW_LINE> self.coauNumAuLevel = {} <NEW_LINE> self.coauNumCoauLevel = {} | docstring for AcademicLevel | 62598fa163d6d428bbee25fb |
class Logger(object): <NEW_LINE> <INDENT> __metaclass__ = MetaLogger | Whoever subclasses Logger has the benefit of a local logger that
outputs with class_name in the message context. Also if you
setLevel of the master logger (defined by LOGGER_ID) to
logging.DEBUG you even get logs on function calls. | 62598fa17d847024c075c210 |
class TestAccuracy(tf.test.TestCase): <NEW_LINE> <INDENT> def test_default(self): <NEW_LINE> <INDENT> targets = tf.constant([[2, 1, 0, 0]], dtype=tf.int32) <NEW_LINE> weights = tf.placeholder(dtype=tf.float32, shape=targets.shape) <NEW_LINE> predictions = tf.constant( [[[0.1, 0.8, 0.1], [0.1, 0.8, 0.1], [0.8, 0.1, 0.1], [0.1, 0.1, 0.8]]], dtype=tf.float32) <NEW_LINE> accuracy_t, weights_out_t = metrics.accuracy(targets, predictions, weights) <NEW_LINE> act_weights = np.asarray([[1, 1, 0, 0]], dtype=np.float32) <NEW_LINE> exp_accuracy = np.asarray([[0, 1, 0, 0]], dtype=np.float32) <NEW_LINE> with tf.Session() as sess: <NEW_LINE> <INDENT> sess.run(tf.global_variables_initializer()) <NEW_LINE> act_accuracy, act_weights_out = sess.run( fetches=[accuracy_t, weights_out_t], feed_dict={weights: act_weights}) <NEW_LINE> <DEDENT> self.assertEqual(weights, weights_out_t) <NEW_LINE> self.assertAllEqual(act_weights, act_weights_out) <NEW_LINE> self.assertAllEqual(exp_accuracy, act_accuracy) <NEW_LINE> <DEDENT> def test_no_weights(self): <NEW_LINE> <INDENT> targets = tf.constant([[2, 1, 0, 0]], dtype=tf.int32) <NEW_LINE> predictions = tf.constant( [[[0.1, 0.8, 0.1], [0.1, 0.8, 0.1], [0.8, 0.1, 0.1], [0.1, 0.1, 0.8]]], dtype=tf.float32) <NEW_LINE> accuracy_t, weights_t = metrics.accuracy(targets, predictions) <NEW_LINE> exp_accuracy = np.asarray([[0, 1, 1, 0]], dtype=np.float32) <NEW_LINE> with tf.Session() as sess: <NEW_LINE> <INDENT> sess.run(tf.global_variables_initializer()) <NEW_LINE> act_accuracy = sess.run(accuracy_t) <NEW_LINE> <DEDENT> self.assertIsNone(weights_t) <NEW_LINE> self.assertAllEqual(exp_accuracy, act_accuracy) | Test class for the liteflow.metrics.accuracy function. | 62598fa1d7e4931a7ef3bee3 |
class NSNitroNserrRwUndefactInval(NSNitroRewriteErrors): <NEW_LINE> <INDENT> pass | Nitro error code 2818
Invalid action for undefined event | 62598fa1e5267d203ee6b757 |
class Connection(rpc_common.Connection): <NEW_LINE> <INDENT> def __init__(self, conf): <NEW_LINE> <INDENT> self.topics = [] <NEW_LINE> self.reactor = ZmqReactor(conf) <NEW_LINE> <DEDENT> def create_consumer(self, topic, proxy, fanout=False): <NEW_LINE> <INDENT> _get_matchmaker().register(topic, CONF.rpc_zmq_host) <NEW_LINE> if fanout: <NEW_LINE> <INDENT> sock_type = zmq.SUB <NEW_LINE> subscribe = ('', fanout)[type(fanout) == str] <NEW_LINE> topic = 'fanout~' + topic.split('.', 1)[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sock_type = zmq.PULL <NEW_LINE> subscribe = None <NEW_LINE> topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host)) <NEW_LINE> <DEDENT> if topic in self.topics: <NEW_LINE> <INDENT> LOG.info(_("Skipping topic registration. Already registered.")) <NEW_LINE> return <NEW_LINE> <DEDENT> inaddr = "ipc://%s/zmq_topic_%s" % (CONF.rpc_zmq_ipc_dir, topic) <NEW_LINE> LOG.debug("Consumer is a zmq.%s", ['PULL', 'SUB'][sock_type == zmq.SUB]) <NEW_LINE> self.reactor.register(proxy, inaddr, sock_type, subscribe=subscribe, in_bind=False) <NEW_LINE> self.topics.append(topic) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> _get_matchmaker().stop_heartbeat() <NEW_LINE> for topic in self.topics: <NEW_LINE> <INDENT> _get_matchmaker().unregister(topic, CONF.rpc_zmq_host) <NEW_LINE> <DEDENT> self.reactor.close() <NEW_LINE> self.topics = [] <NEW_LINE> <DEDENT> def wait(self): <NEW_LINE> <INDENT> self.reactor.wait() <NEW_LINE> <DEDENT> def consume_in_thread(self): <NEW_LINE> <INDENT> _get_matchmaker().start_heartbeat() <NEW_LINE> self.reactor.consume_in_thread() | Manages connections and threads. | 62598fa19c8ee82313040093 |
class TestIETotalsByCandidate(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return IETotalsByCandidate( candidate_id = '0', cycle = 56, support_oppose_indicator = '0', total = 1.337 ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return IETotalsByCandidate( ) <NEW_LINE> <DEDENT> <DEDENT> def testIETotalsByCandidate(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | IETotalsByCandidate unit test stubs | 62598fa199cbb53fe6830d1d |
class TimingTables: <NEW_LINE> <INDENT> def __init__(self, size=1000, masked=False): <NEW_LINE> <INDENT> self.masked = masked <NEW_LINE> self.table = Table(masked=self.masked) <NEW_LINE> np.random.seed(12345) <NEW_LINE> self.table['i'] = np.arange(size) <NEW_LINE> self.table['a'] = np.random.random(size) <NEW_LINE> self.table['b'] = np.random.random(size) > 0.5 <NEW_LINE> self.table['c'] = np.random.random((size, 10)) <NEW_LINE> self.table['d'] = np.random.choice(np.array(list(string.ascii_letters)), size) <NEW_LINE> self.extra_row = {'a': 1.2, 'b': True, 'c': np.repeat(1, 10), 'd': 'Z'} <NEW_LINE> self.extra_column = np.random.randint(0, 100, size) <NEW_LINE> self.row_indices = np.where(self.table['a'] > 0.9)[0] <NEW_LINE> self.table_grouped = self.table.group_by('d') <NEW_LINE> self.other_table = Table(masked=self.masked) <NEW_LINE> self.other_table['i'] = np.arange(1, size, 3) <NEW_LINE> self.other_table['f'] = np.random.random() <NEW_LINE> self.other_table.sort('f') <NEW_LINE> self.other_table_2 = Table(masked=self.masked) <NEW_LINE> self.other_table_2['g'] = np.random.random(size) <NEW_LINE> self.other_table_2['h'] = np.random.random((size, 10)) <NEW_LINE> self.bool_mask = self.table['a'] > 0.6 | Object which contains two tables and various other attributes that
are useful for timing and other API tests. | 62598fa18c0ade5d55dc35b4 |
class Item(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length = 100) <NEW_LINE> notes = models.TextField(max_length = 500, blank=True) <NEW_LINE> created = models.DateField(auto_now_add=True) <NEW_LINE> priority = models.IntegerField(choices=( (0, 'Urgent'), (1, 'High'), (2, 'Normal'), (3, 'Low'), ), default=0) <NEW_LINE> due = models.DateField(null=True, blank=True) <NEW_LINE> done = models.BooleanField(default=False) <NEW_LINE> user = models.ForeignKey(User) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name + " :: " + unicode(self.created) + " ::" + unicode(self.priority) + "::" | A single todo item. | 62598fa145492302aabfc31b |
class Trainer(object): <NEW_LINE> <INDENT> SUPPORTED_LANGUAGES = ["de", "en"] <NEW_LINE> def __init__(self, config, component_builder=None, skip_validation=False): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.skip_validation = skip_validation <NEW_LINE> self.training_data = None <NEW_LINE> self.pipeline = [] <NEW_LINE> if component_builder is None: <NEW_LINE> <INDENT> component_builder = components.ComponentBuilder() <NEW_LINE> <DEDENT> if not self.skip_validation: <NEW_LINE> <INDENT> components.validate_requirements(config.pipeline) <NEW_LINE> <DEDENT> for component_name in config.pipeline: <NEW_LINE> <INDENT> component = component_builder.create_component(component_name, config) <NEW_LINE> self.pipeline.append(component) <NEW_LINE> <DEDENT> <DEDENT> def train(self, data): <NEW_LINE> <INDENT> if not self.skip_validation: <NEW_LINE> <INDENT> components.validate_arguments(self.pipeline, self.config) <NEW_LINE> <DEDENT> self.training_data = data <NEW_LINE> context = {} <NEW_LINE> for component in self.pipeline: <NEW_LINE> <INDENT> args = components.fill_args(component.pipeline_init_args(), context, self.config.as_dict()) <NEW_LINE> updates = component.pipeline_init(*args) <NEW_LINE> if updates: <NEW_LINE> <INDENT> context.update(updates) <NEW_LINE> <DEDENT> <DEDENT> init_context = context.copy() <NEW_LINE> context["training_data"] = data <NEW_LINE> for component in self.pipeline: <NEW_LINE> <INDENT> args = components.fill_args(component.train_args(), context, self.config.as_dict()) <NEW_LINE> logging.info("Starting to train component {}".format(component.name)) <NEW_LINE> updates = component.train(*args) <NEW_LINE> logging.info("Finished training component.") <NEW_LINE> if updates: <NEW_LINE> <INDENT> context.update(updates) <NEW_LINE> <DEDENT> <DEDENT> return Interpreter(self.pipeline, context=init_context, config=self.config.as_dict()) <NEW_LINE> <DEDENT> def persist(self, path, persistor=None, model_name=None): <NEW_LINE> <INDENT> timestamp = 
datetime.datetime.now().strftime('%Y%m%d-%H%M%S') <NEW_LINE> metadata = { "language": self.config["language"], "pipeline": [component.name for component in self.pipeline], } <NEW_LINE> if model_name is None: <NEW_LINE> <INDENT> dir_name = os.path.join(path, "model_" + timestamp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dir_name = os.path.join(path, model_name) <NEW_LINE> <DEDENT> create_dir(dir_name) <NEW_LINE> if self.training_data: <NEW_LINE> <INDENT> metadata.update(self.training_data.persist(dir_name)) <NEW_LINE> <DEDENT> for component in self.pipeline: <NEW_LINE> <INDENT> update = component.persist(dir_name) <NEW_LINE> if update: <NEW_LINE> <INDENT> metadata.update(update) <NEW_LINE> <DEDENT> <DEDENT> Metadata(metadata, dir_name).persist(dir_name) <NEW_LINE> if persistor is not None: <NEW_LINE> <INDENT> persistor.save_tar(dir_name) <NEW_LINE> <DEDENT> logging.info("Successfully saved model into '{}'".format(os.path.abspath(dir_name))) <NEW_LINE> return dir_name | Given a pipeline specification and configuration this trainer will load the data and train all components. | 62598fa157b8e32f52508041 |
class BaseSkeleton(ConfigurableWithABC, Verbose): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def train(self, input_data, target_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def predict(self, input_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fit(self, X, y, *args, **kwargs): <NEW_LINE> <INDENT> self.train(X, y, *args, **kwargs) <NEW_LINE> return self <NEW_LINE> <DEDENT> def _repr_options(self): <NEW_LINE> <INDENT> options = [] <NEW_LINE> for option_name in self.options: <NEW_LINE> <INDENT> option_value = getattr(self, option_name) <NEW_LINE> option_value = preformat_value(option_value) <NEW_LINE> option_repr = "{}={}".format(option_name, option_value) <NEW_LINE> options.append(option_repr) <NEW_LINE> <DEDENT> return ', '.join(options) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{}({})".format( self.__class__.__name__, self._repr_options() ) | Base class for all algorithms and networks.
| 62598fa1596a897236127ac6 |
class Remote(HMEvent, HelperEventRemote, HelperActionPress, HelperRssiPeer): <NEW_LINE> <INDENT> @property <NEW_LINE> def ELEMENT(self): <NEW_LINE> <INDENT> if "RC-2" in self.TYPE or "PB-2" in self.TYPE or "WRC2" in self.TYPE or "BRC2" in self.TYPE or "WRCC2" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2] <NEW_LINE> <DEDENT> if "HM-Dis-WM55" in self.TYPE or "HM-Dis-EP-WM55" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2] <NEW_LINE> <DEDENT> if "HM-RC-Dis-H-x-EU" in self.TYPE: <NEW_LINE> <INDENT> return list(range(1, 21)) <NEW_LINE> <DEDENT> if "Sec3" in self.TYPE or "Key3" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3] <NEW_LINE> <DEDENT> if "RC-4" in self.TYPE or "PB-4" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4] <NEW_LINE> <DEDENT> if "HM-PBI-4-FM" in self.TYPE or "ZEL STG RM FST UP4" in self.TYPE or "263 145" in self.TYPE or "HM-PBI-X" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4] <NEW_LINE> <DEDENT> if "Sec4" in self.TYPE or "Key4" in self.TYPE or "KRCA" in self.TYPE or "KRC4" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4] <NEW_LINE> <DEDENT> if "PB-6" in self.TYPE or "WRC6" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4, 5, 6] <NEW_LINE> <DEDENT> if "RC-8" in self.TYPE or "HM-MOD-EM-8" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4, 5, 6, 7, 8] <NEW_LINE> <DEDENT> if "RC-12" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] <NEW_LINE> <DEDENT> if "HM-OU-LED16" in self.TYPE: <NEW_LINE> <INDENT> return list(range(1, 16)) <NEW_LINE> <DEDENT> if "RC-19" in self.TYPE or "HM-PB-4Dis-WM" in self.TYPE: <NEW_LINE> <INDENT> return list(range(1, 20)) <NEW_LINE> <DEDENT> if "HMW-IO-4-FM" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4] <NEW_LINE> <DEDENT> if "HmIP-RC8" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4, 5, 6, 7, 8] <NEW_LINE> <DEDENT> if "HmIP-MOD-RC8" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3, 4, 5, 6, 7, 8] <NEW_LINE> <DEDENT> if "HmIP-WRCD" in self.TYPE or 
"HmIP-WRCR" in self.TYPE: <NEW_LINE> <INDENT> return [1, 2, 3] <NEW_LINE> <DEDENT> return [1] | Remote handle buttons. | 62598fa155399d3f0562636d |
class StaticFilesStorage(FileSystemStorage): <NEW_LINE> <INDENT> def __init__(self, location=None, base_url=None, *args, **kwargs): <NEW_LINE> <INDENT> if location is None: <NEW_LINE> <INDENT> location = settings.STATIC_ROOT <NEW_LINE> <DEDENT> if base_url is None: <NEW_LINE> <INDENT> base_url = settings.STATIC_URL <NEW_LINE> <DEDENT> if not location: <NEW_LINE> <INDENT> raise ImproperlyConfigured("You're using the staticfiles app " "without having set the STATIC_ROOT setting. Set it to " "the absolute path of the directory that holds static files.") <NEW_LINE> <DEDENT> if base_url is None: <NEW_LINE> <INDENT> raise ImproperlyConfigured("You're using the staticfiles app " "without having set the STATIC_URL setting. Set it to " "URL that handles the files served from STATIC_ROOT.") <NEW_LINE> <DEDENT> utils.check_settings() <NEW_LINE> super(StaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) | Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``. | 62598fa1a79ad16197769eb0 |
class GetTariffResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | A ResultSet with methods tailored to the values returned by the GetTariff Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 62598fa1c432627299fa2e25 |
class PseudoCmbModule(object): <NEW_LINE> <INDENT> def __init__(self, icov=WMAP7_ICOV, mu=WMAP7_MEANS, min_sz=0, max_sz=2): <NEW_LINE> <INDENT> self.icov = icov <NEW_LINE> self.mu = mu <NEW_LINE> self.a = min_sz <NEW_LINE> self.b = max_sz <NEW_LINE> <DEDENT> def computeLikelihood(self, ctx): <NEW_LINE> <INDENT> x = ctx.getParams() <NEW_LINE> diff = x[:6]-self.mu <NEW_LINE> lnprob = -np.dot(diff,np.dot(self.icov,diff))/2.0 <NEW_LINE> lnprob -= np.log(self.b-self.a) <NEW_LINE> return lnprob <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> getLogger().info("Pseudo cmb setup") | Chain for computing the likelihood of a multivariante gaussian distribution | 62598fa17047854f4633f222 |
class Schedule(db.Model): <NEW_LINE> <INDENT> __tablename__ = "schedule" <NEW_LINE> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> title = db.Column(db.String(255), nullable=False) <NEW_LINE> public_id = db.Column(db.String(100), unique=True) <NEW_LINE> created_on = db.Column(db.DateTime, nullable=False) <NEW_LINE> is_active = db.Column(db.Boolean) | User Model for storing user related details | 62598fa12c8b7c6e89bd3611 |
class DetectSilence(Filter): <NEW_LINE> <INDENT> __documentation_section__ = 'Envelope Utility UGens' <NEW_LINE> __slots__ = () <NEW_LINE> _ordered_input_names = ( 'source', 'threshold', 'time', 'done_action', ) <NEW_LINE> _valid_calculation_rates = None <NEW_LINE> def __init__( self, calculation_rate=None, done_action=0, source=0, threshold=0.0001, time=0.1, ): <NEW_LINE> <INDENT> Filter.__init__( self, calculation_rate=calculation_rate, threshold=threshold, done_action=done_action, source=source, time=time, ) <NEW_LINE> <DEDENT> def _optimize_graph(self, sort_bundles): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ar( cls, threshold=0.0001, done_action=0, source=0, time=0.1, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.AUDIO <NEW_LINE> ugen = cls._new_expanded( calculation_rate=calculation_rate, threshold=threshold, done_action=done_action, source=source, time=time, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def kr( cls, threshold=0.0001, done_action=0, source=0, time=0.1, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.CONTROL <NEW_LINE> ugen = cls._new_expanded( calculation_rate=calculation_rate, threshold=threshold, done_action=done_action, source=source, time=time, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @property <NEW_LINE> def done_action(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('done_action') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('source') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def threshold(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('threshold') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def time(self): 
<NEW_LINE> <INDENT> index = self._ordered_input_names.index('time') <NEW_LINE> return self._inputs[index] | Evaluates `done_action` when input falls below `threshold`.
::
>>> source = ugentools.WhiteNoise.ar()
>>> source *= ugentools.Line.kr(start=1, stop=0)
>>> detect_silence = ugentools.DetectSilence.kr(
... done_action=DoneAction.FREE_SYNTH,
... source=source,
... threshold=0.0001,
... time=1.0,
... )
>>> detect_silence
DetectSilence.kr() | 62598fa1462c4b4f79dbb857 |
class Test_SessionCredentials(TestCase): <NEW_LINE> <INDENT> def test_checkUsername(self): <NEW_LINE> <INDENT> username = 'tester' <NEW_LINE> session = SessionCredentials(username) <NEW_LINE> self.assertEquals(username, session.checkUsername()) | Tests for L{querryl.cred.credentials.SessionCredentials}. | 62598fa15f7d997b871f9305 |
class TestAnonymousSurvey(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> question = "What language did you first learn to speak?" <NEW_LINE> self.my_survey = AnonymousSurvey(question) <NEW_LINE> self.responses = ["English", "Scottish", "Welsh"] <NEW_LINE> <DEDENT> def test_store_single_response(self): <NEW_LINE> <INDENT> question = "What language did you first learn to speak?" <NEW_LINE> self.my_survey.store_response(self.responses[0]) <NEW_LINE> self.assertIn(self.responses[0], self.my_survey.responses) <NEW_LINE> <DEDENT> def test_store_three_responses(self): <NEW_LINE> <INDENT> for response in self.responses: <NEW_LINE> <INDENT> self.my_survey.store_response(response) <NEW_LINE> <DEDENT> for response in self.responses: <NEW_LINE> <INDENT> self.assertIn(response, self.my_survey.responses) | Tests for the AnonymousSurvey class | 62598fa1090684286d593600 |
class PersonCase(CompanyCase): <NEW_LINE> <INDENT> context = {"default_is_company": False, "default_type": "contact"} | Test ``res.partner`` when it is a person. | 62598fa1ac7a0e7691f72357 |
class URLField(StringField): <NEW_LINE> <INDENT> def gen_value(self): <NEW_LINE> <INDENT> return gen_url(subdomain=gen_alpha()) | Field that represents an URL | 62598fa130dc7b766599f699 |
class Markdown(Plugin): <NEW_LINE> <INDENT> def __init__(self, **options): <NEW_LINE> <INDENT> import markdown <NEW_LINE> self.md = markdown.Markdown(**options) <NEW_LINE> <DEDENT> def run(self, files, stack): <NEW_LINE> <INDENT> for filename, post in files.items(): <NEW_LINE> <INDENT> post.content = self.md.reset().convert(post.content) | Convert markdown content to HTML. Options set in __init__ will be passed to parser. | 62598fa1d7e4931a7ef3bee5 |
class ProdConfig(Config): <NEW_LINE> <INDENT> ENV = 'prod' <NEW_LINE> DEBUG = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = os.environ.get('ONEINOTE_SQLALCHEMY_URI') or 'postgresql://localhost/example' | Production configuration | 62598fa16aa9bd52df0d4d17 |
class TaxonUicnPlace(ModelSQL, ModelView): <NEW_LINE> <INDENT> __name__ = 'uicn.taxon_uicn_presence' <NEW_LINE> tiers = fields.Many2One('party.party', u'Tiers') <NEW_LINE> site = fields.Many2One('place.place', u'Site') <NEW_LINE> taxon = fields.Many2One('taxinomie.taxinomie', u'Taxon') <NEW_LINE> binomial = fields.Char(string=u'Nom scientifique') <NEW_LINE> vernaculaire = fields.Char(string=u'Nom vernaculaire') <NEW_LINE> famille = fields.Char(string=u'Famille') <NEW_LINE> status = fields.Selection( _STATUS, 'Statuts', help=u'Critères et catégories de l\'espèce au niveau mondial', ) <NEW_LINE> occ = fields.Integer(string=u'Occurences') <NEW_LINE> @classmethod <NEW_LINE> def __setup__(cls): <NEW_LINE> <INDENT> super(TaxonUicnPlace, cls).__setup__() <NEW_LINE> cls._order.insert(0, ('status', 'DESC')) <NEW_LINE> cls._order.insert(1, ('tiers', 'DESC')) <NEW_LINE> cls._order.insert(2, ('site', 'DESC')) <NEW_LINE> cls._order.insert(3, ('taxon', 'DESC')) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def table_query(): <NEW_LINE> <INDENT> clause = ' ' <NEW_LINE> args = [True] <NEW_LINE> if Transaction().context.get('tiers'): <NEW_LINE> <INDENT> clause += 'AND p.id = %s ' <NEW_LINE> args.append(Transaction().context['tiers']) <NEW_LINE> <DEDENT> if Transaction().context.get('status'): <NEW_LINE> <INDENT> clause += 'AND uic.status IN (%s) ' <NEW_LINE> args.append(Transaction().context['status']) <NEW_LINE> <DEDENT> return ('SELECT DISTINCT ROW_NUMBER() OVER (ORDER BY p.id) AS id, ' 'MAX(a.create_uid) AS create_uid, ' 'MAX(a.create_date) AS create_date, ' 'MAX(a.write_uid) AS write_uid, ' 'MAX(a.write_date) AS write_date, ' 'p.id AS tiers, ' 'a.id AS site, ' 'uic.status AS status, ' 't.id AS taxon, ' 't.nom_complet AS binomial, ' 't.nom_vern AS vernaculaire, ' 't.famille AS famille, ' '1 AS occ ' 'FROM place_place a, uicn_uicn uic, party_party p, uicn_taxon_rel rel, ' 'taxinomie_taxinomie t, uicn_presence uip, place_party_rel r ' 'WHERE %s ' + clause + 'AND r.party = p.id AND 
rel.uicn=uic.id AND r.place = a.id ' 'AND rel.taxon=t.id AND uic.presence=uip.id ' 'AND ST_DWithin(uic.geom, a.geom,0) ' 'GROUP BY p.id, t.famille, a.id, uic.status, t.id, t.nom_complet, t.nom_vern', args) | Présence de Taxons | 62598fa1796e427e5384e5df |
class TestConsoleGetAuditLogHandler(object): <NEW_LINE> <INDENT> def setup_method(self): <NEW_LINE> <INDENT> self.hmc, self.hmc_resources = standard_test_hmc() <NEW_LINE> self.uris = ( (r'/api/console', ConsoleHandler), (r'/api/console/operations/get-audit-log', ConsoleGetAuditLogHandler), ) <NEW_LINE> self.urihandler = UriHandler(self.uris) <NEW_LINE> <DEDENT> def test_cons_get_audlog(self): <NEW_LINE> <INDENT> resp = self.urihandler.get( self.hmc, '/api/console/operations/get-audit-log', True) <NEW_LINE> assert resp == [] <NEW_LINE> <DEDENT> def test_cons_get_audlog_err_no_console(self): <NEW_LINE> <INDENT> self.hmc.consoles.remove(None) <NEW_LINE> with pytest.raises(InvalidResourceError) as exc_info: <NEW_LINE> <INDENT> self.urihandler.get( self.hmc, '/api/console/operations/get-audit-log', True) <NEW_LINE> <DEDENT> exc = exc_info.value <NEW_LINE> assert exc.reason == 1 | All tests for class ConsoleGetAuditLogHandler. | 62598fa1b7558d589546347a |
class Graph: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.edgeList = {} <NEW_LINE> self.vertList = {} <NEW_LINE> self.numVertices = 0 <NEW_LINE> <DEDENT> def addVertex(self, key): <NEW_LINE> <INDENT> self.numVertices += 1 <NEW_LINE> newVertex = Vertex(key) <NEW_LINE> self.vertList[key] = newVertex <NEW_LINE> return newVertex <NEW_LINE> <DEDENT> def getVertex(self, n): <NEW_LINE> <INDENT> if n in self.vertList: <NEW_LINE> <INDENT> return self.vertList[n] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, n): <NEW_LINE> <INDENT> return n in self.vertList <NEW_LINE> <DEDENT> def addEdge(self, f, t, cost=""): <NEW_LINE> <INDENT> if f not in self.vertList: <NEW_LINE> <INDENT> nv = self.addVertex(f) <NEW_LINE> <DEDENT> if t not in self.vertList: <NEW_LINE> <INDENT> nv = self.addVertex(t) <NEW_LINE> <DEDENT> self.vertList[f].addNeighbor(self.vertList[t], cost) <NEW_LINE> <DEDENT> def getVertices(self): <NEW_LINE> <INDENT> return self.vertList.keys() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.vertList.values()) | A graph implemented as an adjacency list of vertices.
:slot: vertList (dict): A dictionary that maps a vertex key to a Vertex
object
:slot: numVertices (int): The total number of vertices in the graph | 62598fa199cbb53fe6830d1f |
class RegistroY681(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'Y681'), Campo(2, 'CODIGO'), CampoAlfanumerico(3, 'DESCRICAO'), CampoNumerico(4, 'VALOR', precisao=2), ] | Informações de Optantes pelo Refis (Lucro Real,
Presumido e Arbitrado) | 62598fa1442bda511e95c2a7 |
class Tpms: <NEW_LINE> <INDENT> def __init__(self, serial_number): <NEW_LINE> <INDENT> self.serial_number = serial_number <NEW_LINE> self.sensor_transmit_range = 300 <NEW_LINE> self.sensor_pressure_range = (8,300) <NEW_LINE> self.battery_life = 6 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.get_serial_number() <NEW_LINE> <DEDENT> def get_pressure(self): <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> def get_serial_number(self): <NEW_LINE> <INDENT> return self.serial_number | Tire Pressure Monitoring System.
| 62598fa1a8370b77170f0231 |
class _data_api(Keyword): <NEW_LINE> <INDENT> name = "data_api" <NEW_LINE> ptype = str <NEW_LINE> atype = "string" | REST API version (`mandatory`). Units: ``.
Returns:
str: AFLOWLIB version of the entry, API.}
| 62598fa116aa5153ce40034c |
class Fill(Payload): <NEW_LINE> <INDENT> def __init__(self, count, byte=b'A'): <NEW_LINE> <INDENT> self.count = count <NEW_LINE> self.byte = byte <NEW_LINE> <DEDENT> def compose(self): <NEW_LINE> <INDENT> return self.count * self.byte <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Payload(self.compose() + other.compose()) | Memory filling payload component. | 62598fa157b8e32f52508042 |
class TestFunc(unittest.TestCase): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> target = Solution() <NEW_LINE> self.assertEqual(8, target.maxAreaOfIsland([[1, 1, 1],[0, 1, 1],[0, 1, 0],[0, 1, 1]])) <NEW_LINE> self.assertEqual(7, target.maxAreaOfIsland([[0, 1, 1],[0, 1, 1],[0, 1, 0],[0, 1, 1]])) | Test fuction | 62598fa13539df3088ecc101 |
class Solution: <NEW_LINE> <INDENT> def replaceBlank(self, string, length): <NEW_LINE> <INDENT> return len(string.replace(' ', '%20')) | @param: string: An array of Char
@param: length: The true length of the string
@return: The true length of new string | 62598fa1d7e4931a7ef3bee6 |
class DeployRecord(models.Model): <NEW_LINE> <INDENT> project = models.ForeignKey(Project, null=True, on_delete=models.SET_NULL) <NEW_LINE> deploy_model = models.CharField(max_length=6, choices=DEPLOY_MODEL_CHOICES, default='branch') <NEW_LINE> deploy_ver = models.CharField('分支名或版本名', max_length=50, null=True, blank=True) <NEW_LINE> applicant = models.CharField('申请人', max_length=50, null=True, blank=True) <NEW_LINE> commits = models.PositiveIntegerField('构建总次数', blank=True, default=0) <NEW_LINE> status = models.PositiveSmallIntegerField('状态', blank=True, default=0) <NEW_LINE> apply_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'job_deploy_record' <NEW_LINE> verbose_name = 'Job项目发布历史表' <NEW_LINE> verbose_name_plural = 'Job项目发布历史表' | status: 0:构建中,未发布,1:预发,2:beta,3:正式 | 62598fa1cc0a2c111447ae5a |
class InvalidMoveError(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, board: str, player: str, move: int) -> None: <NEW_LINE> <INDENT> self.code = utils.UserError.INVALID_MOVE.value[0] <NEW_LINE> self.message = utils.UserError.INVALID_MOVE.value[1].format(move=move, player=constants.PLAYERS[player].value, board=utils.binary_to_string(board)) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.message | Exception representing an invalid move given the current game state | 62598fa10c0af96317c561ce |
class PerformanceScenario: <NEW_LINE> <INDENT> def __init__(self, kernel, time_step, integrator, reaction_scheduler): <NEW_LINE> <INDENT> self.sim = api.Simulation() <NEW_LINE> self.sim.set_kernel(kernel) <NEW_LINE> if integrator is not None: <NEW_LINE> <INDENT> self.integrator = integrator <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.integrator = "EulerBDIntegrator" <NEW_LINE> <DEDENT> if reaction_scheduler is not None: <NEW_LINE> <INDENT> self.reaction_scheduler = reaction_scheduler <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.reaction_scheduler = "UncontrolledApproximation" <NEW_LINE> <DEDENT> self.time_step = time_step <NEW_LINE> self.system_vars = dict() <NEW_LINE> for key in system_variables_keys: <NEW_LINE> <INDENT> self.system_vars[key] = 0. <NEW_LINE> <DEDENT> <DEDENT> def run(self, n_steps, **kwargs): <NEW_LINE> <INDENT> conf = self.sim.run_scheme_readdy() <NEW_LINE> conf.with_integrator(self.integrator) <NEW_LINE> conf.with_reaction_scheduler(self.reaction_scheduler) <NEW_LINE> conf.configure_and_run(n_steps, self.time_step) <NEW_LINE> if "skin" in kwargs: <NEW_LINE> <INDENT> conf.with_skin_size(kwargs["skin"]) <NEW_LINE> <DEDENT> <DEDENT> def performance(self): <NEW_LINE> <INDENT> return self.sim.performance_root() <NEW_LINE> <DEDENT> def set_times(self, container, idx): <NEW_LINE> <INDENT> for key in result_keys: <NEW_LINE> <INDENT> container[key][idx] = self.performance()[key].time() <NEW_LINE> <DEDENT> <DEDENT> def set_counts(self, container, idx): <NEW_LINE> <INDENT> for key in result_keys: <NEW_LINE> <INDENT> container[key][idx] = self.performance()[key].count() <NEW_LINE> <DEDENT> <DEDENT> def set_system_vars(self, container, idx): <NEW_LINE> <INDENT> for key in system_variables_keys: <NEW_LINE> <INDENT> container[key][idx] = self.system_vars[key] <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def describe(cls): <NEW_LINE> <INDENT> return "PerformanceScenario" | PerformanceScenario is a thin wrapper for Simulation. 
Derived classes take
a dictionary of factors (that scale typical variables like number of particles),
and configure a certain scenario. The scenario is then run, and performance
times are set or appended to containers (which usually are dictionaries of lists/arrays). | 62598fa1e1aae11d1e7ce74a |
class GoogleComputeDiskTest(unittest.TestCase): <NEW_LINE> <INDENT> @typing.no_type_check <NEW_LINE> @mock.patch('libcloudforensics.providers.gcp.internal.common.GoogleCloudComputeClient.BlockOperation') <NEW_LINE> @mock.patch('libcloudforensics.providers.gcp.internal.common.GoogleCloudComputeClient.GceApi') <NEW_LINE> def testSnapshot(self, mock_gce_api, mock_block_operation): <NEW_LINE> <INDENT> disks = mock_gce_api.return_value.disks <NEW_LINE> disks.return_value.createSnapshot.return_value.execute.return_value = None <NEW_LINE> mock_block_operation.return_value = None <NEW_LINE> snapshot, _ = gcp_mocks.FAKE_DISK.Snapshot() <NEW_LINE> self.assertIsInstance(snapshot, compute.GoogleComputeSnapshot) <NEW_LINE> self.assertTrue(snapshot.name.startswith('fake-disk')) <NEW_LINE> snapshot, _ = gcp_mocks.FAKE_DISK.Snapshot(snapshot_name='my-snapshot') <NEW_LINE> self.assertIsInstance(snapshot, compute.GoogleComputeSnapshot) <NEW_LINE> self.assertTrue(snapshot.name.startswith('my-snapshot')) <NEW_LINE> with self.assertRaises(errors.InvalidNameError): <NEW_LINE> <INDENT> gcp_mocks.FAKE_DISK.Snapshot('Non-compliant-name') | Test Google Cloud Compute Disk class. | 62598fa156b00c62f0fb26fd |
class SubtractOperation(Operation): <NEW_LINE> <INDENT> def operate(self): <NEW_LINE> <INDENT> return reduce(lambda x, y: x - y, self.terms) | Inherits `Operation` - subtracts self.terms. | 62598fa1e76e3b2f99fd8884 |
class GameOptionButton(Button): <NEW_LINE> <INDENT> def __init__(self, pos: Tuple[int, int], text: str) -> None: <NEW_LINE> <INDENT> Button.__init__(self, pos, text, width=int(.45 * LOCALBOARDSIZE), height=int(.75 * SQUARESIZE)) <NEW_LINE> self.selected: bool = False <NEW_LINE> self.selected_surface: pygame.Surface = pygame.Surface(self.rect.size) <NEW_LINE> self.update_surfaces() <NEW_LINE> <DEDENT> def update_surfaces(self) -> None: <NEW_LINE> <INDENT> self.update() <NEW_LINE> self.selected_surface.fill(MEDIUM_BLUE) <NEW_LINE> w = self.rect.width <NEW_LINE> h = self.rect.height <NEW_LINE> caption_surface = FONT.render(self.text, True, LIGHT_GRAY) <NEW_LINE> caption_rect = caption_surface.get_rect() <NEW_LINE> caption_rect.center = w // 2, h // 2 <NEW_LINE> self.selected_surface.blit(caption_surface, caption_rect) <NEW_LINE> pygame.draw.rect(self.selected_surface, BLACK, pygame.Rect((0, 0, w, h)), 1) <NEW_LINE> pygame.draw.line(self.selected_surface, WHITE, (1, 1), (w - 2, 1)) <NEW_LINE> pygame.draw.line(self.selected_surface, WHITE, (1, 1), (1, h - 2)) <NEW_LINE> pygame.draw.line(self.selected_surface, DARK_GRAY, (1, h - 1), (w - 1, h - 1)) <NEW_LINE> pygame.draw.line(self.selected_surface, DARK_GRAY, (w - 1, 1), (w - 1, h - 1)) <NEW_LINE> pygame.draw.line(self.selected_surface, GRAY, (2, h - 2), (w - 2, h - 2)) <NEW_LINE> pygame.draw.line(self.selected_surface, GRAY, (w - 2, 2), (w - 2, h - 2)) <NEW_LINE> <DEDENT> def draw_option(self, surface: pygame.Surface, update: bool = True) -> None: <NEW_LINE> <INDENT> if self.selected: <NEW_LINE> <INDENT> surface.blit(self.selected_surface, self.rect) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.draw(surface, update=False) <NEW_LINE> <DEDENT> if update: <NEW_LINE> <INDENT> pygame.display.update(self.rect) | Each option in the GameOptions menu. Extends Button class | 62598fa1462c4b4f79dbb859 |
class KBEntity(Entity): <NEW_LINE> <INDENT> def __init__(self, name, identifier, score, aliases): <NEW_LINE> <INDENT> Entity.__init__(self, name) <NEW_LINE> self.id = identifier <NEW_LINE> self.score = score <NEW_LINE> self.aliases = aliases <NEW_LINE> <DEDENT> def sparql_name(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> def prefixed_sparql_name(self, prefix): <NEW_LINE> <INDENT> return "%s:%s" % (prefix, self.id) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.name) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name | A KB entity. | 62598fa191af0d3eaad39c59 |
class Ct(_msys.Ct): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__(self, ptr, id): <NEW_LINE> <INDENT> super().__init__(ptr, id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def system(self): <NEW_LINE> <INDENT> return System(self._ptr) <NEW_LINE> <DEDENT> def addChain(self): <NEW_LINE> <INDENT> return Chain(self._ptr, super().addChain()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def chains(self): <NEW_LINE> <INDENT> return [Chain(self._ptr, i) for i in super().chains()] <NEW_LINE> <DEDENT> @property <NEW_LINE> def atoms(self): <NEW_LINE> <INDENT> return [Atom(self._ptr, i) for i in super().atoms()] <NEW_LINE> <DEDENT> @property <NEW_LINE> def bonds(self): <NEW_LINE> <INDENT> return [Bond(self._ptr, i) for i in super().bonds()] <NEW_LINE> <DEDENT> def append(self, system): <NEW_LINE> <INDENT> p = self._ptr <NEW_LINE> return [Atom(p, i) for i in super().append(system._ptr)] | Represents a list of Chains in a System
The Ct class exists mainly to provide a separate namespace for chains.
If you merge two systems each of which has a chain A, you probably
want the chains to remain separate. Cts accomplish this.
The Ct class also provides a key-value namespace for assigning
arbitrary properties to Systems. | 62598fa1e64d504609df92df |
@optplan.register_node_type() <NEW_LINE> class Overlap(optplan.Function): <NEW_LINE> <INDENT> type = schema_utils.polymorphic_model_type("function.overlap") <NEW_LINE> simulation = optplan.ReferenceType(optplan.Function) <NEW_LINE> overlap = optplan.ReferenceType(optplan.EmOverlap) | Defines an overlap integral.
Attributes:
type: Must be "function.overlap".
simulation: Simulation from which electric fields are obtained.
overlap: Overlap type to use. | 62598fa124f1403a926857d9 |
class QtLogStreamHandler(nxt_log.LogRecordStreamHandler): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_handler(cls, signal): <NEW_LINE> <INDENT> cls.new_log = signal <NEW_LINE> return cls <NEW_LINE> <DEDENT> def handle_log_record(self, record): <NEW_LINE> <INDENT> self.new_log.emit(record) | Handles logs by emitting the log record to a QtCore.Signal | 62598fa1656771135c4894d1 |
class HistogramPlot(Plot): <NEW_LINE> <INDENT> _DefaultAxesClass = HistogramAxes <NEW_LINE> def __init__(self, *data, **kwargs): <NEW_LINE> <INDENT> histargs = dict() <NEW_LINE> for key in ['bins', 'range', 'normed', 'weights', 'cumulative', 'bottom', 'histtype', 'align', 'orientation', 'rwidth', 'log', 'color', 'label', 'stacked', 'logbins']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> histargs[key] = kwargs.pop(key) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> super(HistogramPlot, self).__init__(**kwargs) <NEW_LINE> if data: <NEW_LINE> <INDENT> ax = self.gca() <NEW_LINE> data = list(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ax = None <NEW_LINE> <DEDENT> while data: <NEW_LINE> <INDENT> dataset = data.pop(0) <NEW_LINE> if isinstance(dataset, Series): <NEW_LINE> <INDENT> ax.hist_series(dataset, **histargs) <NEW_LINE> <DEDENT> elif isinstance(dataset, Table): <NEW_LINE> <INDENT> column = data.pop() <NEW_LINE> ax.hist_table(dataset, column, **histargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ax.hist(dataset, **histargs) <NEW_LINE> <DEDENT> <DEDENT> if ax and histargs.get('logbins', True): <NEW_LINE> <INDENT> if histargs.get('orientation', 'vertical') == 'vertical': <NEW_LINE> <INDENT> ax.set_xscale('log') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ax.set_yscale('log') | A plot showing a histogram of data
| 62598fa11b99ca400228f455 |
class VaultListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[Vault]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(VaultListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None) | List of vaults.
:param value: The list of vaults.
:type value: list[~azure.mgmt.keyvault.v2021_06_01_preview.models.Vault]
:param next_link: The URL to get the next set of vaults.
:type next_link: str | 62598fa1498bea3a75a5796f |
class Converter(object): <NEW_LINE> <INDENT> regex = '[.a-zA-Z0-9:@&+$,_%%-]+' <NEW_LINE> class NotSet(object): pass <NEW_LINE> default = NotSet <NEW_LINE> def __init__(self, default=NotSet): <NEW_LINE> <INDENT> if not default is self.NotSet: <NEW_LINE> <INDENT> self.default = default <NEW_LINE> <DEDENT> <DEDENT> def to_python(self, value, env=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def to_url(self, value): <NEW_LINE> <INDENT> raise NotImplementedError() | A base class for urlconverters | 62598fa1a219f33f346c6668 |
class IpcCommExit(IpcComm.IpcComm): <NEW_LINE> <INDENT> def __init__(self, transmit_handler): <NEW_LINE> <INDENT> super().__init__(transmit_handler) <NEW_LINE> <DEDENT> def __ReplyToRequest__(self, reqparam, execret, state): <NEW_LINE> <INDENT> retval = 0 <NEW_LINE> jsonresparam = json.dumps({ "ret" : retval, "message" : super().__list_errormsg__[retval] }, ensure_ascii=False) <NEW_LINE> if( self.__transmit_handler__.IsInitialized() == True): <NEW_LINE> <INDENT> self.__SendReply__(jsonresparam) <NEW_LINE> <DEDENT> return 100 | 終了コマンドを表します。 | 62598fa1435de62698e9bc41 |
class DefaultConfig(object): <NEW_LINE> <INDENT> APP_NAME = 'rest-api' <NEW_LINE> DEBUG = False <NEW_LINE> LOG_LEVEL = 'WARNING' <NEW_LINE> LOG_DIR = 'logs/' <NEW_LINE> SQLALCHEMY_DATABASE_URI = "sqlite:///database/api.db" <NEW_LINE> SECRET_KEY = "Ch4ng3M3!" | Default Config (Is used when RESTAPICONFIG environment variable is not set) | 62598fa1e5267d203ee6b75b |
class sensors(): <NEW_LINE> <INDENT> cha=array(m) <NEW_LINE> chl=zeros((size(cha),3)) <NEW_LINE> chu=zeros((size(cha),3)) <NEW_LINE> for i in range(len(cha)): <NEW_LINE> <INDENT> chl[i,:]=( cha[i].loops[0].Position.x, cha[i].loops[0].Position.y, cha[i].loops[0].Position.z); <NEW_LINE> chu[i,:]=( cha[i].loops[1].Position.x, cha[i].loops[1].Position.y, cha[i].loops[1].Position.z); | chu=pos.headshape.chu | 62598fa14a966d76dd5eed30 |
class PuppetClass( Entity, EntityCreateMixin, EntityDeleteMixin, EntityReadMixin, EntitySearchMixin): <NEW_LINE> <INDENT> def __init__(self, server_config=None, **kwargs): <NEW_LINE> <INDENT> self._fields = { 'name': entity_fields.StringField( required=True, str_type='alpha', length=(6, 12), ), } <NEW_LINE> self._meta = { 'api_path': 'api/v2/puppetclasses', 'server_modes': ('sat'), } <NEW_LINE> super(PuppetClass, self).__init__(server_config, **kwargs) <NEW_LINE> <DEDENT> def search_normalize(self, results): <NEW_LINE> <INDENT> flattened_results = [] <NEW_LINE> for key in results.keys(): <NEW_LINE> <INDENT> for item in results[key]: <NEW_LINE> <INDENT> flattened_results.append(item) <NEW_LINE> <DEDENT> <DEDENT> return super(PuppetClass, self).search_normalize(flattened_results) <NEW_LINE> <DEDENT> def path(self, which=None): <NEW_LINE> <INDENT> if which in ('smart_class_parameters', 'smart_variables'): <NEW_LINE> <INDENT> return '{0}/{1}'.format( super(PuppetClass, self).path(which='self'), which ) <NEW_LINE> <DEDENT> return super(PuppetClass, self).path(which) <NEW_LINE> <DEDENT> def list_scparams(self, synchronous=True, **kwargs): <NEW_LINE> <INDENT> kwargs = kwargs.copy() <NEW_LINE> kwargs.update(self._server_config.get_client_kwargs()) <NEW_LINE> response = client.get(self.path('smart_class_parameters'), **kwargs) <NEW_LINE> return _handle_response(response, self._server_config, synchronous) <NEW_LINE> <DEDENT> def list_smart_variables(self, synchronous=True, **kwargs): <NEW_LINE> <INDENT> kwargs = kwargs.copy() <NEW_LINE> kwargs.update(self._server_config.get_client_kwargs()) <NEW_LINE> response = client.get(self.path('smart_variables'), **kwargs) <NEW_LINE> return _handle_response(response, self._server_config, synchronous) | A representation of a Puppet Class entity. | 62598fa18da39b475be0302d |
class PyNetcdf4(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/Unidata/netcdf4-python" <NEW_LINE> url = "https://pypi.io/packages/source/n/netCDF4/netCDF4-1.2.7.tar.gz" <NEW_LINE> version('1.4.2', sha256='b934af350459cf9041bcdf5472e2aa56ed7321c018d918e9f325ec9a1f9d1a30') <NEW_LINE> version('1.2.7', '77b357d78f9658dd973dee901f6d86f8') <NEW_LINE> version('1.2.3.1', '24fc0101c7c441709c230e76af611d53') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-cython@0.19:', type='build') <NEW_LINE> depends_on('py-numpy@1.7:', type=('build', 'run')) <NEW_LINE> depends_on('py-cftime', type=('build', 'run')) <NEW_LINE> depends_on('netcdf') <NEW_LINE> depends_on('hdf5@1.8.0:+hl') <NEW_LINE> def setup_environment(self, spack_env, run_env): <NEW_LINE> <INDENT> spack_env.set('USE_SETUPCFG', '0') <NEW_LINE> spack_env.set('HDF5_INCDIR', self.spec['hdf5'].prefix.include) <NEW_LINE> spack_env.set('HDF5_LIBDIR', self.spec['hdf5'].prefix.lib) <NEW_LINE> spack_env.set('NETCDF4_INCDIR', self.spec['netcdf'].prefix.include) <NEW_LINE> spack_env.set('NETCDF4_LIBDIR', self.spec['netcdf'].prefix.lib) | Python interface to the netCDF Library. | 62598fa1fbf16365ca793f09 |
class Cmd: <NEW_LINE> <INDENT> raw_cmd = None <NEW_LINE> cmd = None <NEW_LINE> cmd_args = None <NEW_LINE> COMMAND_LOCATION = 0 <NEW_LINE> CMD_NAME = None <NEW_LINE> sub_cmd_obj = None <NEW_LINE> def __init__(self, raw_str): <NEW_LINE> <INDENT> self.raw_cmd = raw_str <NEW_LINE> self.parse_cmd() <NEW_LINE> <DEDENT> def execute(self, wist_game, cmd_line): <NEW_LINE> <INDENT> if self.is_cmd_exist(): <NEW_LINE> <INDENT> if "execute" in dir(self.sub_cmd_obj): <NEW_LINE> <INDENT> self.sub_cmd_obj.cmd_args = self.cmd_args <NEW_LINE> return self.sub_cmd_obj.execute(wist_game, cmd_line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Execute not implemented in sub class") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Command " + self.cmd + " does not exist") <NEW_LINE> <DEDENT> <DEDENT> def parse_cmd(self): <NEW_LINE> <INDENT> parsed = self.raw_cmd.split(' ') <NEW_LINE> final_parsed = [] <NEW_LINE> for word in parsed: <NEW_LINE> <INDENT> if word != '': <NEW_LINE> <INDENT> final_parsed.append(word) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> self.cmd = final_parsed[self.COMMAND_LOCATION] <NEW_LINE> self.cmd_args = final_parsed[self.COMMAND_LOCATION + 1:] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> def is_cmd_exist(self): <NEW_LINE> <INDENT> sub_cmd_names = [sub_cmd.CMD_NAME for sub_cmd in type(self).__subclasses__()] <NEW_LINE> if self.cmd in sub_cmd_names: <NEW_LINE> <INDENT> self.sub_cmd_obj = type(self).__subclasses__()[sub_cmd_names.index(self.cmd)]() <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Cmd structure is command name and then arguments, seprated by spaces.
sub class must set CMD_NAME attribute
must be aware of the game instance | 62598fa1d58c6744b42dc1fa |
class PrintPrimary(Primary): <NEW_LINE> <INDENT> def __call__(self, context): <NEW_LINE> <INDENT> path = context['path'] <NEW_LINE> suffix = context['args'] <NEW_LINE> context['buffer'].append(path) <NEW_LINE> if suffix: <NEW_LINE> <INDENT> context['buffer'].append(suffix) <NEW_LINE> return context <NEW_LINE> <DEDENT> if getattr(self, 'null', False): <NEW_LINE> <INDENT> context['buffer'].append('\x00') <NEW_LINE> <DEDENT> return context | Prints out the filename
similar to `find . -print` | 62598fa1cc0a2c111447ae5c |
class _Formatter(object): <NEW_LINE> <INDENT> def __init__(self, original): <NEW_LINE> <INDENT> self._original = original <NEW_LINE> <DEDENT> def formatTime(self, record, datefmt=None): <NEW_LINE> <INDENT> return self._original.formatTime(record, datefmt) <NEW_LINE> <DEDENT> def format(self, record): <NEW_LINE> <INDENT> if record.exc_info and not record.exc_text: <NEW_LINE> <INDENT> record.exc_text = self.formatException(record.exc_info) <NEW_LINE> <DEDENT> return self._original.format(record) <NEW_LINE> <DEDENT> def formatException(self, exc_info): <NEW_LINE> <INDENT> return _reformat_stack(self._original.formatException(exc_info)) | Formats exceptions nicely.
Is is very important that this class does not throw exceptions. | 62598fa167a9b606de545e19 |
class ClassList(models.Model): <NEW_LINE> <INDENT> branch = models.ForeignKey("Branch", verbose_name="分校") <NEW_LINE> course = models.ForeignKey('Course') <NEW_LINE> class_type_choices = ( (0, '面授(脱产)'), (1, '面授(周末)'), (2, '网络班'), ) <NEW_LINE> class_type = models.SmallIntegerField(choices=class_type_choices, verbose_name="班级类型") <NEW_LINE> semester = models.PositiveSmallIntegerField(verbose_name="学期") <NEW_LINE> teachers = models.ManyToManyField("UserProfile") <NEW_LINE> start_data = models.DateField(verbose_name="开班时间") <NEW_LINE> end_data = models.DateField(verbose_name="结业日期", null=True, blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('branch', 'course', 'semester') <NEW_LINE> verbose_name_plural = verbose_name = "班级" <NEW_LINE> ordering = ['id'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s %s %s" % (self.branch, self.course, self.semester) | 班级表 | 62598fa1d486a94d0ba2be26 |
class RefundQuery_pub(Wxpay_client_pub): <NEW_LINE> <INDENT> def __init__(self, timeout=WxPayConf_pub.CURL_TIMEOUT): <NEW_LINE> <INDENT> self.url = "https://api.mch.weixin.qq.com/pay/refundquery" <NEW_LINE> self.curl_timeout = timeout <NEW_LINE> super(RefundQuery_pub, self).__init__() <NEW_LINE> <DEDENT> def createXml(self): <NEW_LINE> <INDENT> if any(self.parameters[key] is None for key in ("out_refund_no", "out_trade_no", "transaction_id", "refund_id")): <NEW_LINE> <INDENT> raise ValueError("missing parameter") <NEW_LINE> <DEDENT> self.parameters["appid"] = WxPayConf_pub.APPID <NEW_LINE> self.parameters["mch_id"] = WxPayConf_pub.MCHID <NEW_LINE> self.parameters["nonce_str"] = self.createNoncestr() <NEW_LINE> self.parameters["sign"] = self.getSign(self.parameters) <NEW_LINE> return self.arrayToXml(self.parameters) <NEW_LINE> <DEDENT> def getResult(self): <NEW_LINE> <INDENT> self.postXmlSSL() <NEW_LINE> self.result = self.xmlToArray(self.response) <NEW_LINE> return self.result | 退款查询接口 | 62598fa163d6d428bbee2600 |
class DeDuplicationRequestMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request, spider): <NEW_LINE> <INDENT> if not request.url: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> channel_id = request.meta.get('channel_id', 0) <NEW_LINE> if is_dup_detail(request.url, spider.name, channel_id): <NEW_LINE> <INDENT> raise IgnoreRequest("Spider: %s, DeDuplicationRequest: %s" % (spider.name, request.url)) | 去重 - 请求
(数据结构:集合) | 62598fa1498bea3a75a57970 |
class ClusterPubSub(PubSub): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ClusterPubSub, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> async def execute_command(self, *args, **kwargs): <NEW_LINE> <INDENT> await self.connection_pool.initialize() <NEW_LINE> if self.connection is None: <NEW_LINE> <INDENT> self.connection = self.connection_pool.get_connection( 'pubsub', channel=args[1], ) <NEW_LINE> self.connection.register_connect_callback(self.on_connect) <NEW_LINE> <DEDENT> connection = self.connection <NEW_LINE> await self._execute(connection, connection.send_command, *args) | Wrapper for PubSub class. | 62598fa1e64d504609df92e0 |
class CSC(_Compressed2d): <NEW_LINE> <INDENT> def __init__(self, arg, shape=None, prune=False, fill_value=0): <NEW_LINE> <INDENT> super().__init__(arg, shape=shape, compressed_axes=(1,), fill_value=fill_value) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_scipy_sparse(cls, x): <NEW_LINE> <INDENT> x = x.asformat("csc", copy=False) <NEW_LINE> return cls((x.data, x.indices, x.indptr), shape=x.shape) <NEW_LINE> <DEDENT> def transpose(self, axes: None = None, copy: bool = False) -> CSR: <NEW_LINE> <INDENT> if axes is not None: <NEW_LINE> <INDENT> raise ValueError() <NEW_LINE> <DEDENT> if copy: <NEW_LINE> <INDENT> self = self.copy() <NEW_LINE> <DEDENT> return CSR((self.data, self.indices, self.indptr), self.shape[::-1]) | The CSC or CCS scheme stores a n-dimensional array using n+1 one-dimensional arrays.
The 3 arrays are same as GCCS. The remaining n-2 arrays are for storing the indices of
the non-zero values of the sparse matrix. CSC is simply the transpose of CSR.
Sparse supports 2-D CSC. | 62598fa1f548e778e596b3fd |
class ActionDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, data_filepath, crop=None): <NEW_LINE> <INDENT> self.crop = crop <NEW_LINE> print("[I] Loading data from %s" % data_filepath) <NEW_LINE> with open(data_filepath, 'r') as f: <NEW_LINE> <INDENT> self.action_seqs = [np.fromstring(l, dtype=np.uint8, sep=',') for l in f.readlines()] <NEW_LINE> <DEDENT> if self.crop is not None: <NEW_LINE> <INDENT> self.action_seqs = [s for s in self.action_seqs if len(s) > self.crop] <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.action_seqs) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> r = self.action_seqs[idx] <NEW_LINE> if self.crop is not None: <NEW_LINE> <INDENT> s = np.random.choice(len(r) - self.crop + 1) <NEW_LINE> r = r[s:s + self.crop] <NEW_LINE> <DEDENT> return np.unpackbits(r[None], axis=0)[:6] | Dataset class that reads the Actions dataset | 62598fa185dfad0860cbf99c |
class cd: <NEW_LINE> <INDENT> def __init__(self, newPath): <NEW_LINE> <INDENT> self.newPath = newPath <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.savedPath = os.getcwd() <NEW_LINE> os.chdir(self.newPath) <NEW_LINE> <DEDENT> def __exit__(self, etype, value, traceback): <NEW_LINE> <INDENT> os.chdir(self.savedPath) | Context manager for changing the current working directory, and
return to original location when finished. | 62598fa1498bea3a75a57971 |
class GolemResourceType(models.Model): <NEW_LINE> <INDENT> _name = 'golem.resource.type' <NEW_LINE> _description = 'GOLEM Resource Type' <NEW_LINE> _order = 'name asc' <NEW_LINE> _sql_constraints = [('golem_resource_type_name_uniq', 'UNIQUE (name)', 'Resource type must be unique.')] <NEW_LINE> name = fields.Char(string='Resource Type', required=True, index=True) | GOLEM Resource Type | 62598fa1be8e80087fbbeeaf |
class Process(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'process' <NEW_LINE> pid = db.Column(db.String(), nullable=False, primary_key=True) <NEW_LINE> user_id = db.Column( db.Integer, db.ForeignKey('user.id'), nullable=False ) <NEW_LINE> command = db.Column(db.String(), nullable=False) <NEW_LINE> desc = db.Column(db.String(), nullable=False) <NEW_LINE> arguments = db.Column(db.String(), nullable=True) <NEW_LINE> logdir = db.Column(db.String(), nullable=True) <NEW_LINE> start_time = db.Column(db.String(), nullable=True) <NEW_LINE> end_time = db.Column(db.String(), nullable=True) <NEW_LINE> exit_code = db.Column(db.Integer(), nullable=True) <NEW_LINE> acknowledge = db.Column(db.String(), nullable=True) | Define the Process table. | 62598fa132920d7e50bc5ea6 |
class EventFIBCFlowMod(EventFIBCBase): <NEW_LINE> <INDENT> pass | FIBC FlowMod event | 62598fa17d43ff248742732a |
class Path: <NEW_LINE> <INDENT> def __init__(self, testbed, host, tb, is_dir=None): <NEW_LINE> <INDENT> self.testbed = testbed <NEW_LINE> self.host = host <NEW_LINE> self.tb = tb <NEW_LINE> self.is_dir = is_dir <NEW_LINE> <DEDENT> def copydown(self, check_existing=False): <NEW_LINE> <INDENT> if check_existing and self.testbed.execute(['test', '-e', self.tb])[0] == 0: <NEW_LINE> <INDENT> adtlog.debug('copydown: tb path %s already exists' % self.tb) <NEW_LINE> return <NEW_LINE> <DEDENT> self.testbed.check_exec(['mkdir', '-p', os.path.dirname(self.tb)]) <NEW_LINE> if os.path.isdir(self.host): <NEW_LINE> <INDENT> self.testbed.command('copydown', (self.host + '/', self.tb + '/')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.testbed.command('copydown', (self.host, self.tb)) <NEW_LINE> <DEDENT> if self.testbed.user: <NEW_LINE> <INDENT> rc = self.testbed.execute(['chown', '-R', self.testbed.user, '--', self.tb], stderr=subprocess.PIPE)[0] <NEW_LINE> if rc != 0: <NEW_LINE> <INDENT> self.testbed.check_exec(['chmod', '-R', 'go+rwX', '--', self.tb]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def copyup(self, check_existing=False): <NEW_LINE> <INDENT> if check_existing and os.path.exists(self.host): <NEW_LINE> <INDENT> adtlog.debug('copyup: host path %s already exists' % self.host) <NEW_LINE> return <NEW_LINE> <DEDENT> os.makedirs(os.path.dirname(self.host), exist_ok=True, mode=0o2755) <NEW_LINE> assert self.is_dir is not None <NEW_LINE> if self.is_dir: <NEW_LINE> <INDENT> self.testbed.command('copyup', (self.tb + '/', self.host + '/')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.testbed.command('copyup', (self.tb, self.host)) <NEW_LINE> <DEDENT> <DEDENT> def copyup_rec(self, check_existing=False, rec_level=-1): <NEW_LINE> <INDENT> assert self.is_dir is not None <NEW_LINE> if self.is_dir and (rec_level == -1 or rec_level > 0): <NEW_LINE> <INDENT> os.makedirs(self.host, exist_ok=True, mode=0o2755) <NEW_LINE> r, o, e = self.testbed.execute(['ls', '--file-type', '-1', 
self.tb], stdout=subprocess.PIPE, stderr=subprocess.PIPE) <NEW_LINE> for f in o.split('\n'): <NEW_LINE> <INDENT> if f.endswith('/'): <NEW_LINE> <INDENT> is_dir = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_dir = False <NEW_LINE> <DEDENT> if f.endswith('/') or f.endswith('@'): <NEW_LINE> <INDENT> newp = Path(self.testbed, self.host + '/' + f[:-1], self.tb + '/' + f[:-1], is_dir) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newp = Path(self.testbed, self.host + '/' + f, self.tb + '/' + f, False) <NEW_LINE> <DEDENT> new_rec = rec_level if rec_level == -1 else rec_level-1 <NEW_LINE> newp.copyup_rec(check_existing, new_rec) <NEW_LINE> <DEDENT> <DEDENT> elif not self.is_dir: <NEW_LINE> <INDENT> if check_existing and (os.path.exists(self.host) or os.path.islink(self.host)): <NEW_LINE> <INDENT> adtlog.debug('copyup: host path %s already exists' % self.host) <NEW_LINE> return <NEW_LINE> <DEDENT> self.testbed.command('copyupnolink', (self.tb, self.host)) | Represent a file/dir with a host and a testbed path | 62598fa13617ad0b5ee05fa1 |
class SpatiaLiteFunctionParam(SpatiaLiteFunction): <NEW_LINE> <INDENT> sql_template = '%(function)s(%(geo_col)s, %(geometry)s, %%s)' | For SpatiaLite functions that take another parameter. | 62598fa1435de62698e9bc44 |
class Photo(models.Model): <NEW_LINE> <INDENT> LICENSES = ( ('http://creativecommons.org/licenses/by/2.0/', 'CC Attribution'), ('http://creativecommons.org/licenses/by-nd/2.0/', 'CC Attribution-NoDerivs'), ('http://creativecommons.org/licenses/by-nc-nd/2.0/', 'CC Attribution-NonCommercial-NoDerivs'), ('http://creativecommons.org/licenses/by-nc/2.0/', 'CC Attribution-NonCommercial'), ('http://creativecommons.org/licenses/by-nc-sa/2.0/', 'CC Attribution-NonCommercial-ShareAlike'), ('http://creativecommons.org/licenses/by-sa/2.0/', 'CC Attribution-ShareAlike'), ) <NEW_LINE> title = models.CharField(max_length=255) <NEW_LINE> slug = models.SlugField() <NEW_LINE> photo = models.ImageField(upload_to="photos") <NEW_LINE> photo_thumbnail = ImageSpecField(source='photo', processors=[ResizeToFit(460),Adjust(color=1.1)], format='JPEG', options={'quality': 75}) <NEW_LINE> photo_admin_thumbnail = ImageSpecField(source='photo', processors=[ResizeToFit(100),Adjust(color=1.1)], format='JPEG', options={'quality': 60}) <NEW_LINE> taken_by = models.CharField(max_length=100, blank=True) <NEW_LINE> license = models.URLField(blank=True, choices=LICENSES) <NEW_LINE> description = models.TextField(blank=True) <NEW_LINE> tags = TagField() <NEW_LINE> uploaded = models.DateTimeField(auto_now_add=True) <NEW_LINE> modified = models.DateTimeField(auto_now=True) <NEW_LINE> _exif = models.TextField(blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'media_photos' <NEW_LINE> <DEDENT> def _set_exif(self, d): <NEW_LINE> <INDENT> self._exif = simplejson.dumps(d) <NEW_LINE> <DEDENT> def _get_exif(self): <NEW_LINE> <INDENT> if self._exif: <NEW_LINE> <INDENT> return simplejson.loads(self._exif) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> exif = property(_get_exif, _set_exif, "Photo EXIF data, as a dict.") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return '%s' % self.title <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> 
<INDENT> return '%s%s' % (settings.MEDIA_URL, self.photo) <NEW_LINE> <DEDENT> @permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('photo_detail', None, { 'slug': self.slug }) | Photo model | 62598fa163b5f9789fe84fc5 |
@final <NEW_LINE> class SetPlayerAttributeAction(EventAction[SetPlayerAttributeActionParameters]): <NEW_LINE> <INDENT> name = "set_player_attribute" <NEW_LINE> param_class = SetPlayerAttributeActionParameters <NEW_LINE> def start(self) -> None: <NEW_LINE> <INDENT> attribute = self.parameters[0] <NEW_LINE> value = self.parameters[1] <NEW_LINE> CommonAction.set_character_attribute(self.session.player, attribute, value) | Set the given attribute of the player character to the given value.
Script usage:
.. code-block::
set_player_attribute <name>,<value>
Script parameters:
name: Name of the attribute.
value: Value of the attribute. | 62598fa1d6c5a102081e1f97 |
class ProductAlternativeUnits(object): <NEW_LINE> <INDENT> swagger_types = { 'name': 'str', 'multiplier': 'float', 'sales_unit': 'bool', 'purchase_unit': 'bool' } <NEW_LINE> attribute_map = { 'name': 'Name', 'multiplier': 'Multiplier', 'sales_unit': 'SalesUnit', 'purchase_unit': 'PurchaseUnit' } <NEW_LINE> def __init__(self, name=None, multiplier=None, sales_unit=None, purchase_unit=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self._multiplier = None <NEW_LINE> self._sales_unit = None <NEW_LINE> self._purchase_unit = None <NEW_LINE> self.discriminator = None <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if multiplier is not None: <NEW_LINE> <INDENT> self.multiplier = multiplier <NEW_LINE> <DEDENT> if sales_unit is not None: <NEW_LINE> <INDENT> self.sales_unit = sales_unit <NEW_LINE> <DEDENT> if purchase_unit is not None: <NEW_LINE> <INDENT> self.purchase_unit = purchase_unit <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def multiplier(self): <NEW_LINE> <INDENT> return self._multiplier <NEW_LINE> <DEDENT> @multiplier.setter <NEW_LINE> def multiplier(self, multiplier): <NEW_LINE> <INDENT> self._multiplier = multiplier <NEW_LINE> <DEDENT> @property <NEW_LINE> def sales_unit(self): <NEW_LINE> <INDENT> return self._sales_unit <NEW_LINE> <DEDENT> @sales_unit.setter <NEW_LINE> def sales_unit(self, sales_unit): <NEW_LINE> <INDENT> self._sales_unit = sales_unit <NEW_LINE> <DEDENT> @property <NEW_LINE> def purchase_unit(self): <NEW_LINE> <INDENT> return self._purchase_unit <NEW_LINE> <DEDENT> @purchase_unit.setter <NEW_LINE> def purchase_unit(self, purchase_unit): <NEW_LINE> <INDENT> self._purchase_unit = purchase_unit <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in 
six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ProductAlternativeUnits): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fa16fb2d068a7693d5d |
class TagFilter(object): <NEW_LINE> <INDENT> swagger_types = { 'type': 'str', 'tags': 'list[str]' } <NEW_LINE> attribute_map = { 'type': 'type', 'tags': 'tags' } <NEW_LINE> def __init__(self, type=None, tags=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._tags = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if tags is not None: <NEW_LINE> <INDENT> self.tags = tags <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> allowed_values = ["all", "untagged", "specific-tags"] <NEW_LINE> if type not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `type` ({0}), must be one of {1}" .format(type, allowed_values) ) <NEW_LINE> <DEDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def tags(self): <NEW_LINE> <INDENT> return self._tags <NEW_LINE> <DEDENT> @tags.setter <NEW_LINE> def tags(self, tags): <NEW_LINE> <INDENT> self._tags = tags <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> 
<INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TagFilter): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fa1cc0a2c111447ae5e |
@hashableAttrs(repr=False) <NEW_LINE> class Track: <NEW_LINE> <INDENT> song: Song = attr.ib(hash=False, eq=False, repr=False) <NEW_LINE> number: int = attr.ib(default=1, hash=False, eq=False) <NEW_LINE> fretCount: int = 24 <NEW_LINE> offset: int = 0 <NEW_LINE> isPercussionTrack: bool = False <NEW_LINE> is12StringedGuitarTrack: bool = False <NEW_LINE> isBanjoTrack: bool = False <NEW_LINE> isVisible: bool = True <NEW_LINE> isSolo: bool = False <NEW_LINE> isMute: bool = False <NEW_LINE> indicateTuning: bool = False <NEW_LINE> name: str = 'Track 1' <NEW_LINE> measures: List['Measure'] = None <NEW_LINE> strings: List['GuitarString'] = None <NEW_LINE> port: int = 1 <NEW_LINE> channel: MidiChannel = attr.Factory(MidiChannel) <NEW_LINE> color: Color = Color.red <NEW_LINE> settings: TrackSettings = attr.Factory(TrackSettings) <NEW_LINE> useRSE: bool = False <NEW_LINE> rse: TrackRSE = attr.Factory(TrackRSE) <NEW_LINE> def __attrs_post_init__(self): <NEW_LINE> <INDENT> if self.strings is None: <NEW_LINE> <INDENT> self.strings = [GuitarString(n, v) for n, v in [(1, 64), (2, 59), (3, 55), (4, 50), (5, 45), (6, 40)]] <NEW_LINE> <DEDENT> if self.measures is None: <NEW_LINE> <INDENT> self.measures = [Measure(self, header) for header in self.song.measureHeaders] | A track contains multiple measures. | 62598fa1c432627299fa2e2b |
class ServerProxy(xmlrpc.ServerProxy): <NEW_LINE> <INDENT> def __init__( self, uri, encoding=None, verbose=False, allow_none=False, use_datetime=False, use_builtin_types=False, auth=None, headers=None, timeout=5.0, session=None, ): <NEW_LINE> <INDENT> if not headers: <NEW_LINE> <INDENT> headers = { "User-Agent": "python/aioxmlrpc", "Accept": "text/xml", "Content-Type": "text/xml", } <NEW_LINE> <DEDENT> self._session = session or httpx.AsyncClient(headers=headers) <NEW_LINE> transport = AioTransport( use_https=uri.startswith("https://"), session=self._session, auth=auth, timeout=timeout, use_datetime=use_datetime, use_builtin_types=use_builtin_types, ) <NEW_LINE> super().__init__( uri, transport, encoding, verbose, allow_none, use_datetime, use_builtin_types, ) <NEW_LINE> <DEDENT> async def __request(self, methodname, params): <NEW_LINE> <INDENT> request = xmlrpc.dumps( params, methodname, encoding=self.__encoding, allow_none=self.__allow_none ).encode(self.__encoding) <NEW_LINE> response = await self.__transport.request( self.__host, self.__handler, request, verbose=self.__verbose ) <NEW_LINE> if len(response) == 1: <NEW_LINE> <INDENT> response = response[0] <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return _Method(self.__request, name) | ``xmlrpc.ServerProxy`` subclass for asyncio support | 62598fa17047854f4633f228 |
class UpdateNetworkSwitchSettingsMtuModel(object): <NEW_LINE> <INDENT> _names = { "default_mtu_size":'defaultMtuSize', "overrides":'overrides' } <NEW_LINE> def __init__(self, default_mtu_size=None, overrides=None): <NEW_LINE> <INDENT> self.default_mtu_size = default_mtu_size <NEW_LINE> self.overrides = overrides <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dictionary(cls, dictionary): <NEW_LINE> <INDENT> if dictionary is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> default_mtu_size = dictionary.get('defaultMtuSize') <NEW_LINE> overrides = None <NEW_LINE> if dictionary.get('overrides') != None: <NEW_LINE> <INDENT> overrides = list() <NEW_LINE> for structure in dictionary.get('overrides'): <NEW_LINE> <INDENT> overrides.append(meraki_sdk.models.override_model.OverrideModel.from_dictionary(structure)) <NEW_LINE> <DEDENT> <DEDENT> return cls(default_mtu_size, overrides) | Implementation of the 'updateNetworkSwitchSettingsMtu' model.
TODO: type model description here.
Attributes:
default_mtu_size (int): MTU size for the entire network. Default value
is 9578.
overrides (list of OverrideModel): Override MTU size for individual
switches or switch profiles. An empty array will clear overrides. | 62598fa1a79ad16197769eb6 |
class AppMarkupDetails(Base): <NEW_LINE> <INDENT> name = "app_markup_details" <NEW_LINE> def __call__(self, date_from: str, date_to: str, app_id: Optional[int] = None, client_loginid: Optional[str] = None, description: Optional[int] = None, limit: Optional[Union[int, float, Decimal]] = None, offset: Optional[Union[int, float, Decimal]] = None, sort: Optional[str] = None, sort_fields: Optional[List] = None, passthrough: Optional[Any] = None, req_id: Optional[int] = None): <NEW_LINE> <INDENT> data = { "app_markup_details": int(1), "date_from": date_from, "date_to": date_to } <NEW_LINE> if app_id: <NEW_LINE> <INDENT> data['app_id'] = int(app_id) <NEW_LINE> <DEDENT> if client_loginid: <NEW_LINE> <INDENT> data['client_loginid'] = str(client_loginid) <NEW_LINE> <DEDENT> if description: <NEW_LINE> <INDENT> data['description'] = int(description) <NEW_LINE> <DEDENT> if limit: <NEW_LINE> <INDENT> data['limit'] = limit <NEW_LINE> <DEDENT> if offset: <NEW_LINE> <INDENT> data['offset'] = offset <NEW_LINE> <DEDENT> if sort: <NEW_LINE> <INDENT> data['sort'] = str(sort) <NEW_LINE> <DEDENT> if sort_fields: <NEW_LINE> <INDENT> data['sort_fields'] = sort_fields <NEW_LINE> <DEDENT> return self.send_websocket_request(self.name, data, passthrough=passthrough, req_id=req_id) | Class for Binary app_markup_details websocket channel. | 62598fa199cbb53fe6830d24 |
class Fuzzdata(BaseFuzzdata): <NEW_LINE> <INDENT> def get_fuzz_data(self): <NEW_LINE> <INDENT> yield [(BaseFuzzdata.get_random_ascii_string(), BaseFuzzdata.get_random_ascii_string())] | Just generates random key/value string pairs ad nauseam | 62598fa12ae34c7f260aaf31 |
@CommandProvider <NEW_LINE> class Repackage(MachCommandBase): <NEW_LINE> <INDENT> @Command('repackage', category='misc', description='Repackage artifacts into different formats.') <NEW_LINE> def repackage(self): <NEW_LINE> <INDENT> print("Usage: ./mach repackage [dmg|installer|mar] [args...]") <NEW_LINE> <DEDENT> @SubCommand('repackage', 'dmg', description='Repackage a tar file into a .dmg for OSX') <NEW_LINE> @CommandArgument('--input', '-i', type=str, required=True, help='Input filename') <NEW_LINE> @CommandArgument('--output', '-o', type=str, required=True, help='Output filename') <NEW_LINE> def repackage_dmg(self, input, output): <NEW_LINE> <INDENT> if not os.path.exists(input): <NEW_LINE> <INDENT> print('Input file does not exist: %s' % input) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> if not os.path.exists(os.path.join(self.topobjdir, 'config.status')): <NEW_LINE> <INDENT> print('config.status not found. Please run |mach configure| ' 'prior to |mach repackage|.') <NEW_LINE> return 1 <NEW_LINE> <DEDENT> from mozbuild.repackaging.dmg import repackage_dmg <NEW_LINE> repackage_dmg(input, output) <NEW_LINE> <DEDENT> @SubCommand('repackage', 'installer', description='Repackage into a Windows installer exe') <NEW_LINE> @CommandArgument('--tag', type=str, required=True, help='The .tag file used to build the installer') <NEW_LINE> @CommandArgument('--setupexe', type=str, required=True, help='setup.exe file inside the installer') <NEW_LINE> @CommandArgument('--package', type=str, required=False, help='Optional package .zip for building a full installer') <NEW_LINE> @CommandArgument('--output', '-o', type=str, required=True, help='Output filename') <NEW_LINE> @CommandArgument('--package-name', type=str, required=False, help='Name of the package being rebuilt') <NEW_LINE> @CommandArgument('--sfx-stub', type=str, required=True, help='Path to the self-extraction stub.') <NEW_LINE> def repackage_installer(self, tag, setupexe, package, output, package_name, sfx_stub): 
<NEW_LINE> <INDENT> from mozbuild.repackaging.installer import repackage_installer <NEW_LINE> repackage_installer( topsrcdir=self.topsrcdir, tag=tag, setupexe=setupexe, package=package, output=output, package_name=package_name, sfx_stub=sfx_stub, ) <NEW_LINE> <DEDENT> @SubCommand('repackage', 'mar', description='Repackage into complete MAR file') <NEW_LINE> @CommandArgument('--input', '-i', type=str, required=True, help='Input filename') <NEW_LINE> @CommandArgument('--mar', type=str, required=True, help='Mar binary path') <NEW_LINE> @CommandArgument('--output', '-o', type=str, required=True, help='Output filename') <NEW_LINE> def repackage_mar(self, input, mar, output): <NEW_LINE> <INDENT> from mozbuild.repackaging.mar import repackage_mar <NEW_LINE> repackage_mar(self.topsrcdir, input, mar, output) | Repackages artifacts into different formats.
This is generally used after packages are signed by the signing
scriptworkers in order to bundle things up into shippable formats, such as a
.dmg on OSX or an installer exe on Windows. | 62598fa1e64d504609df92e1 |
class RegulatedTemperature(GetSpotValue, CTypeValue): <NEW_LINE> <INDENT> _nParam = SpotCamConstant.REGULATEDTEMPERATURE <NEW_LINE> _ctype = ctypes.c_short | The temperature to which the image sensor is regulated, in tenths of a
degree C. | 62598fa1dd821e528d6d8d86 |
class GANEstimator(estimator.Estimator): <NEW_LINE> <INDENT> def __init__(self, model_dir=None, generator_fn=None, discriminator_fn=None, generator_loss_fn=None, discriminator_loss_fn=None, generator_optimizer=None, discriminator_optimizer=None, get_hooks_fn=None, add_summaries=None, use_loss_summaries=True, config=None): <NEW_LINE> <INDENT> def _model_fn(features, labels, mode): <NEW_LINE> <INDENT> gopt = (generator_optimizer() if callable(generator_optimizer) else generator_optimizer) <NEW_LINE> dopt = (discriminator_optimizer() if callable(discriminator_optimizer) else discriminator_optimizer) <NEW_LINE> gan_head = head_lib.gan_head( generator_loss_fn, discriminator_loss_fn, gopt, dopt, use_loss_summaries, get_hooks_fn=get_hooks_fn) <NEW_LINE> return _gan_model_fn( features, labels, mode, generator_fn, discriminator_fn, gan_head, add_summaries) <NEW_LINE> <DEDENT> super(GANEstimator, self).__init__( model_fn=_model_fn, model_dir=model_dir, config=config) | An estimator for Generative Adversarial Networks (GANs).
This Estimator is backed by TFGAN. The network functions follow the TFGAN API
except for one exception: if either `generator_fn` or `discriminator_fn` have
an argument called `mode`, then the tf.Estimator mode is passed in for that
argument. This helps with operations like batch normalization, which have
different train and evaluation behavior.
Example:
```python
import tensorflow as tf
tfgan = tf.contrib.gan
# See TFGAN's `train.py` for a description of the generator and
# discriminator API.
def generator_fn(generator_inputs):
...
return generated_data
def discriminator_fn(data, conditioning):
...
return logits
# Create GAN estimator.
gan_estimator = tfgan.estimator.GANEstimator(
model_dir,
generator_fn=generator_fn,
discriminator_fn=discriminator_fn,
generator_loss_fn=tfgan.losses.wasserstein_generator_loss,
discriminator_loss_fn=tfgan.losses.wasserstein_discriminator_loss,
generator_optimizer=tf.train.AdamOptimizer(0.1, 0.5),
discriminator_optimizer=tf.train.AdamOptimizer(0.1, 0.5))
# Train estimator.
gan_estimator.train(train_input_fn, steps)
# Evaluate resulting estimator.
gan_estimator.evaluate(eval_input_fn)
# Generate samples from generator.
predictions = np.array([
x for x in gan_estimator.predict(predict_input_fn)])
``` | 62598fa130dc7b766599f69e |
class ObjectDescriptor(object): <NEW_LINE> <INDENT> def __init__(self, id=Ice._struct_marker, type='', proxyOptions=''): <NEW_LINE> <INDENT> if id is Ice._struct_marker: <NEW_LINE> <INDENT> self.id = _M_Ice.Identity() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> self.type = type <NEW_LINE> self.proxyOptions = proxyOptions <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> _h = 0 <NEW_LINE> _h = 5 * _h + Ice.getHash(self.id) <NEW_LINE> _h = 5 * _h + Ice.getHash(self.type) <NEW_LINE> _h = 5 * _h + Ice.getHash(self.proxyOptions) <NEW_LINE> return _h % 0x7fffffff <NEW_LINE> <DEDENT> def __compare(self, other): <NEW_LINE> <INDENT> if other is None: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> elif not isinstance(other, _M_IceGrid.ObjectDescriptor): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.id is None or other.id is None: <NEW_LINE> <INDENT> if self.id != other.id: <NEW_LINE> <INDENT> return (-1 if self.id is None else 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.id < other.id: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> elif self.id > other.id: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> if self.type is None or other.type is None: <NEW_LINE> <INDENT> if self.type != other.type: <NEW_LINE> <INDENT> return (-1 if self.type is None else 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.type < other.type: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> elif self.type > other.type: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> if self.proxyOptions is None or other.proxyOptions is None: <NEW_LINE> <INDENT> if self.proxyOptions != other.proxyOptions: <NEW_LINE> <INDENT> return (-1 if self.proxyOptions is None else 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.proxyOptions < other.proxyOptions: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> elif self.proxyOptions > other.proxyOptions: 
<NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r < 0 <NEW_LINE> <DEDENT> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r <= 0 <NEW_LINE> <DEDENT> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r > 0 <NEW_LINE> <DEDENT> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r >= 0 <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r == 0 <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> r = self.__compare(other) <NEW_LINE> if r is NotImplemented: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return r != 0 <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return IcePy.stringify(self, _M_IceGrid._t_ObjectDescriptor) <NEW_LINE> <DEDENT> __repr__ = __str__ | An Ice object descriptor. | 62598fa1f548e778e596b3ff |
class TestDatasets(Dataset): <NEW_LINE> <INDENT> def __init__(self, csv_file, hp): <NEW_LINE> <INDENT> self.landmarks_frame = pd.read_csv(csv_file, sep='\|', header=None) <NEW_LINE> self.hp = hp <NEW_LINE> if self.hp.spm_model is not None: <NEW_LINE> <INDENT> self.sp = spm.SentencePieceProcessor() <NEW_LINE> self.sp.Load(self.hp.spm_model) <NEW_LINE> <DEDENT> if self.hp.mean_file is not None and self.hp.var_file is not None: <NEW_LINE> <INDENT> self.mean_value = np.load(self.hp.mean_file).reshape(-1, self.hp.mel_dim) <NEW_LINE> self.var_value = np.load(self.hp.var_file).reshape(-1, self.hp.mel_dim) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.landmarks_frame) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> mel_output = self.landmarks_frame.loc[idx, 0] <NEW_LINE> text = self.landmarks_frame.loc[idx, 1].strip() <NEW_LINE> if self.hp.is_multi_speaker: <NEW_LINE> <INDENT> spk_emb_name = self.landmarks_frame.loc[idx, 2] <NEW_LINE> if self.hp.spk_emb_type == 'speaker_id': <NEW_LINE> <INDENT> spk_emb = int(spk_emb_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> spk_emb = np.load(spk_emb_name.strip()) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> spk_emb = None <NEW_LINE> <DEDENT> if self.hp.spm_model is not None: <NEW_LINE> <INDENT> textids = [self.sp.bos_id()] + self.sp.EncodeAsIds(text)+ [self.sp.eos_id()] <NEW_LINE> text = np.array([int(t) for t in textids], dtype=np.int32) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text = np.array([int(t) for t in text.split(' ')], dtype=np.int32) <NEW_LINE> <DEDENT> text_length = len(text) <NEW_LINE> pos_text = np.arange(1, text_length+1) <NEW_LINE> sample = {'text': text, 'text_length':text_length, 'mel_output':mel_output, 'pos_text':pos_text, 'spk_emb':spk_emb, 'is_multi_speaker':self.hp.is_multi_speaker, 'spk_emb_type': self.hp.spk_emb_type} <NEW_LINE> return sample <NEW_LINE> <DEDENT> def _check_files(self): <NEW_LINE> <INDENT> drop_indices = [] <NEW_LINE> 
for idx, mel_name in enumerate(self.landmarks_frame.loc[:,0]): <NEW_LINE> <INDENT> if os.path.exists(mel_name): <NEW_LINE> <INDENT> drop_indices.extend([idx]) <NEW_LINE> <DEDENT> <DEDENT> return self.landmarks_frame.drop(drop_indices).reset_index(drop=True) | Test dataset. | 62598fa17047854f4633f229 |
class MedidaAreasPorRegiones(object): <NEW_LINE> <INDENT> def __init__(self, segman, img_trans): <NEW_LINE> <INDENT> self.regiones = [0 for i in range(9)] <NEW_LINE> self.centros = [] <NEW_LINE> ancho, alto = img_trans.size <NEW_LINE> for segmento in segman.get_segmentos(): <NEW_LINE> <INDENT> for pixel in segmento.get_elementos_enteros(): <NEW_LINE> <INDENT> self.sumar_a_region(pixel, ancho, alto) <NEW_LINE> <DEDENT> <DEDENT> superficie = (ancho*alto) / 9 <NEW_LINE> self.porcentajes = [] <NEW_LINE> for i in self.regiones: <NEW_LINE> <INDENT> self.porcentajes.append(i/superficie) <NEW_LINE> <DEDENT> <DEDENT> def sumar_a_region(self, pixel, ancho, alto): <NEW_LINE> <INDENT> off_x = int(3 * pixel[0] / ancho) <NEW_LINE> off_y = int(3 * pixel[1] / alto) <NEW_LINE> self.regiones[3 * off_y + off_x] += 1 <NEW_LINE> <DEDENT> def get_valor(self): <NEW_LINE> <INDENT> return self.porcentajes | Calculamos la suma de las areas de los segmentos que caen en las 9 regiones definidas.
Para ver donde cae un segmento, usamos su centro de masa. (esto ya no (pixel por pixel)) | 62598fa163d6d428bbee2603 |
class Dirichlet(Continuous): <NEW_LINE> <INDENT> def __init__(self, a, transform=transforms.stick_breaking, *args, **kwargs): <NEW_LINE> <INDENT> self.k = shape = a.shape[0] <NEW_LINE> if "shape" not in kwargs.keys(): <NEW_LINE> <INDENT> kwargs.update({"shape": shape}) <NEW_LINE> <DEDENT> super(Dirichlet, self).__init__(transform=transform, *args, **kwargs) <NEW_LINE> self.a = a <NEW_LINE> self.mean = a / sum(a) <NEW_LINE> self.mode = switch(all(a > 1), (a - 1) / sum(a - 1), nan) <NEW_LINE> <DEDENT> def random(self, point=None, size=None): <NEW_LINE> <INDENT> a = draw_values([self.a], point=point) <NEW_LINE> samples = generate_samples(lambda a, size=None: st.dirichlet.rvs(a, None if size == a.shape else size), a, dist_shape=self.shape, size=size) <NEW_LINE> return samples <NEW_LINE> <DEDENT> def logp(self, value): <NEW_LINE> <INDENT> k = self.k <NEW_LINE> a = self.a <NEW_LINE> return bound( sum(logpow(value, a - 1) - gammaln(a), axis=0) + gammaln(sum(a)), k > 1, all(a > 0), all(value >= 0), all(value <= 1)) | Dirichlet
This is a multivariate continuous distribution.
.. math::
f(\mathbf{x}) = \frac{\Gamma(\sum_{i=1}^k \theta_i)}{\prod \Gamma(\theta_i)}\prod_{i=1}^{k-1} x_i^{\theta_i - 1}
\cdot\left(1-\sum_{i=1}^{k-1}x_i
\right)^{\theta_k}
:Parameters:
a : float tensor
a > 0
concentration parameters
last index is the k index
:Support:
x : vector
sum(x) == 1 and x > 0
.. note::
Only the first `k-1` elements of `x` are expected. Can be used
as a parent of Multinomial and Categorical nevertheless. | 62598fa11b99ca400228f457 |
class CounterMeta(type): <NEW_LINE> <INDENT> counter = 0 <NEW_LINE> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> instance = type.__call__(self, *args, **kwargs) <NEW_LINE> instance.counter = CounterMeta.counter <NEW_LINE> CounterMeta.counter += 1 <NEW_LINE> return instance | A simple meta class which adds a ``_counter`` attribute to the instances of
the classes it is used on. This counter is simply incremented for each new
instance. | 62598fa1498bea3a75a57973 |
class FilesModTimePollerThread(QObject): <NEW_LINE> <INDENT> timesAvailable = Signal(list) <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(FilesModTimePollerThread, self).__init__(parent) <NEW_LINE> self._thread = None <NEW_LINE> self._mutex = Lock() <NEW_LINE> self._threadPool = ThreadPool(4) <NEW_LINE> self._stopFlag = Event() <NEW_LINE> self._refreshInterval = 5 <NEW_LINE> self._files = [] <NEW_LINE> <DEDENT> def start(self, files=None): <NEW_LINE> <INDENT> if self._thread: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._stopFlag.clear() <NEW_LINE> self._files = files or [] <NEW_LINE> self._thread = Thread(target=self.run) <NEW_LINE> self._thread.start() <NEW_LINE> <DEDENT> def setFiles(self, files): <NEW_LINE> <INDENT> with self._mutex: <NEW_LINE> <INDENT> self._files = files <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if not self._thread: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._stopFlag.set() <NEW_LINE> self._thread.join() <NEW_LINE> self._thread = None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getFileLastModTime(f): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.path.getmtime(f) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self._stopFlag.wait(self._refreshInterval): <NEW_LINE> <INDENT> with self._mutex: <NEW_LINE> <INDENT> files = list(self._files) <NEW_LINE> <DEDENT> times = self._threadPool.map(FilesModTimePollerThread.getFileLastModTime, files) <NEW_LINE> with self._mutex: <NEW_LINE> <INDENT> if files == self._files: <NEW_LINE> <INDENT> self.timesAvailable.emit(times) | Thread responsible for non-blocking polling of last modification times of a list of files.
Uses a Python ThreadPool internally to split tasks on multiple threads. | 62598fa1442bda511e95c2ac |
class LibcxxwrapJulia(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://github.com/JuliaInterop/libcxxwrap-julia" <NEW_LINE> url = "https://github.com/JuliaInterop/libcxxwrap-julia/archive/refs/tags/v0.8.3.tar.gz" <NEW_LINE> git = "https://github.com/JuliaInterop/libcxxwrap-julia.git" <NEW_LINE> maintainers = ['eloop'] <NEW_LINE> version('master', branch='master') <NEW_LINE> version('0.8.3', sha256='b0421d11bdee5ce8af4922de6dfe3b0e5d69b07bb52894e3a22a477bbd27ee9e') <NEW_LINE> version('0.8.2', sha256='f8b171def3d61904ba8f9a9052a405c25afbfb9a3c5af3dd30bc36a0184ed539') <NEW_LINE> depends_on('julia') | This is the C++ library component of the CxxWrap.jl package, distributed as a
regular CMake library for use in other C++ projects. | 62598fa14527f215b58e9d35 |
class basicstore(object): <NEW_LINE> <INDENT> def __init__(self, path, vfstype): <NEW_LINE> <INDENT> vfs = vfstype(path) <NEW_LINE> setvfsmode(vfs) <NEW_LINE> self.path = vfs.base <NEW_LINE> self.createmode = vfs.createmode <NEW_LINE> self.rawvfs = vfs <NEW_LINE> self.vfs = vfsmod.filtervfs(vfs, encodedir) <NEW_LINE> self.opener = self.vfs <NEW_LINE> <DEDENT> def join(self, f): <NEW_LINE> <INDENT> return self.path + "/" + encodedir(f) <NEW_LINE> <DEDENT> def _walk(self, relpath, recurse): <NEW_LINE> <INDENT> path = self.path <NEW_LINE> if relpath: <NEW_LINE> <INDENT> path += "/" + relpath <NEW_LINE> <DEDENT> striplen = len(self.path) + 1 <NEW_LINE> l = [] <NEW_LINE> if self.rawvfs.isdir(path): <NEW_LINE> <INDENT> visit = [path] <NEW_LINE> readdir = self.rawvfs.readdir <NEW_LINE> while visit: <NEW_LINE> <INDENT> p = visit.pop() <NEW_LINE> for f, kind, st in readdir(p, stat=True): <NEW_LINE> <INDENT> fp = p + "/" + f <NEW_LINE> if kind == stat.S_IFREG and f[-2:] in (".d", ".i"): <NEW_LINE> <INDENT> n = util.pconvert(fp[striplen:]) <NEW_LINE> l.append((decodedir(n), n, st.st_size)) <NEW_LINE> <DEDENT> elif kind == stat.S_IFDIR and recurse: <NEW_LINE> <INDENT> visit.append(fp) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> l.sort() <NEW_LINE> return l <NEW_LINE> <DEDENT> def datafiles(self): <NEW_LINE> <INDENT> return self._walk("data", True) + self._walk("meta", True) <NEW_LINE> <DEDENT> def topfiles(self): <NEW_LINE> <INDENT> return reversed(self._walk("", False)) <NEW_LINE> <DEDENT> def walk(self): <NEW_LINE> <INDENT> for x in self.datafiles(): <NEW_LINE> <INDENT> yield x <NEW_LINE> <DEDENT> for x in self.topfiles(): <NEW_LINE> <INDENT> yield x <NEW_LINE> <DEDENT> <DEDENT> def copylist(self): <NEW_LINE> <INDENT> return ["requires"] + _data.split() <NEW_LINE> <DEDENT> def write(self, tr): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def invalidatecaches(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def markremoved(self, fn): <NEW_LINE> <INDENT> pass <NEW_LINE> 
<DEDENT> def __contains__(self, path): <NEW_LINE> <INDENT> path = "/".join(("data", path)) <NEW_LINE> if self.vfs.exists(path + ".i"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not path.endswith("/"): <NEW_LINE> <INDENT> path = path + "/" <NEW_LINE> <DEDENT> return self.vfs.exists(path) | base class for local repository stores | 62598fa121bff66bcd722ab6 |
class fetchRequest_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'functionName', None, None, ), ) <NEW_LINE> def __init__(self, functionName=None,): <NEW_LINE> <INDENT> self.functionName = functionName <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.functionName = iprot.readString().decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('fetchRequest_args') <NEW_LINE> if self.functionName is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('functionName', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.functionName.encode('utf-8')) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in 
self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- functionName | 62598fa13617ad0b5ee05fa4 |
class GPXTrackpoint(Element): <NEW_LINE> <INDENT> name = 'trkpt' <NEW_LINE> @property <NEW_LINE> def lat(self): <NEW_LINE> <INDENT> return float(self.elem.get('lat')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def lon(self): <NEW_LINE> <INDENT> return float(self.elem.get('lon')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def elev(self): <NEW_LINE> <INDENT> for echild in self.elem: <NEW_LINE> <INDENT> if echild.tag.endswith('ele'): <NEW_LINE> <INDENT> return float(echild.text) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def datetime(self): <NEW_LINE> <INDENT> for echild in self.elem: <NEW_LINE> <INDENT> if echild.tag.endswith('time'): <NEW_LINE> <INDENT> return datetime.datetime.strptime( echild.text[:19], '%Y-%m-%dT%H:%M:%S') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def time(self): <NEW_LINE> <INDENT> if self.datetime: <NEW_LINE> <INDENT> return time.mktime(self.datetime.timetuple()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.nan | Wrapper for GPX trkpt elements.
Attributes:
- *lat, lon, elev*: floats
- *datetime*: datetime object
| 62598fa157b8e32f52508045 |
class Polyhedron(Element): <NEW_LINE> <INDENT> def __init__(self,LP= [[0,0,0], [1,0,0], [0,1,0], [0,0,1]] ,LV= [[0,1,2], [1,2,3], [0,1,3], [0,2,3]], Pr=0): <NEW_LINE> <INDENT> Element.__init__(self,'Polyhedron',Priority=str(Pr)) <NEW_LINE> for P in LP: <NEW_LINE> <INDENT> self.append(Vertex(x=P[0],y=P[1],z=P[2])) <NEW_LINE> self.append(Face(x=P[0], y=P[1],z=P[2])) | AddPolyhedron.m | 62598fa16aa9bd52df0d4d1d |
class Tweet(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> message = models.CharField(max_length=140) <NEW_LINE> date_posted = models.DateTimeField(default=datetime.datetime.now) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "Tweet from: %s, Txt: %s" % (self.user.username, self.message,) | Model que representa los tweets. | 62598fa1f7d966606f747e35 |
class PersonalProfileQuerySet(models.QuerySet): <NEW_LINE> <INDENT> def regular_user(self): <NEW_LINE> <INDENT> return self.filter(is_trainer=False) <NEW_LINE> <DEDENT> def trainer(self): <NEW_LINE> <INDENT> return self.filter(is_trainer=True) | Details about Personal Profile | 62598fa17b25080760ed72fb |
class ServerFlag(_constantflags): <NEW_LINE> <INDENT> _prefix = 'SERVER_' <NEW_LINE> STATUS_IN_TRANS = 1 << 0 <NEW_LINE> STATUS_AUTOCOMMIT = 1 << 1 <NEW_LINE> MORE_RESULTS_EXISTS = 1 << 3 <NEW_LINE> QUERY_NO_GOOD_INDEX_USED = 1 << 4 <NEW_LINE> QUERY_NO_INDEX_USED = 1 << 5 <NEW_LINE> STATUS_CURSOR_EXISTS = 1 << 6 <NEW_LINE> STATUS_LAST_ROW_SENT = 1 << 7 <NEW_LINE> STATUS_DB_DROPPED = 1 << 8 <NEW_LINE> STATUS_NO_BACKSLASH_ESCAPES = 1 << 9 <NEW_LINE> desc = { 'SERVER_STATUS_IN_TRANS': (1 << 0, 'Transaction has started'), 'SERVER_STATUS_AUTOCOMMIT': (1 << 1, 'Server in auto_commit mode'), 'SERVER_MORE_RESULTS_EXISTS': (1 << 3, 'Multi query - next query exists'), 'SERVER_QUERY_NO_GOOD_INDEX_USED': (1 << 4, ''), 'SERVER_QUERY_NO_INDEX_USED': (1 << 5, ''), 'SERVER_STATUS_CURSOR_EXISTS': (1 << 6, ''), 'SERVER_STATUS_LAST_ROW_SENT': (1 << 7, ''), 'SERVER_STATUS_DB_DROPPED': (1 << 8, 'A database was dropped'), 'SERVER_STATUS_NO_BACKSLASH_ESCAPES': (1 << 9, ''), } | Server flags as found in the MySQL sources mysql-src/include/mysql_com.h | 62598fa1d486a94d0ba2be29 |
class TwoLayerNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=3*32*32, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.params['W1'] = weight_scale * np.random.randn(input_dim, hidden_dim) <NEW_LINE> self.params['b1'] = np.zeros(hidden_dim) <NEW_LINE> self.params['W2'] = weight_scale * np.random.randn(hidden_dim, num_classes) <NEW_LINE> self.params['b2'] = np.zeros(num_classes) <NEW_LINE> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> scores = None <NEW_LINE> out, cache_1 = affine_relu_forward(X, self.params['W1'], self.params['b1']) <NEW_LINE> scores, cache_2 = affine_forward(out, self.params['W2'], self.params['b2']) <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss, dout = softmax_loss(scores, y) <NEW_LINE> loss += 0.5 * self.reg * (np.sum(self.params['W1']**2) + np.sum(self.params['W2']**2)) <NEW_LINE> dout_hidden, grads['W2'], grads['b2'] = affine_backward(dout, cache_2) <NEW_LINE> tmp, grads['W1'], grads['b1'] = affine_relu_backward(dout_hidden, cache_1) <NEW_LINE> grads['W2'] += self.reg * self.params['W2'] <NEW_LINE> grads['W1'] += self.reg * self.params['W1'] <NEW_LINE> return loss, grads | A two-layer fully-connected neural network with ReLU nonlinearity and
softmax loss that uses a modular layer design. We assume an input dimension
of D, a hidden dimension of H, and perform classification over C classes.
The architecure should be affine - relu - affine - softmax.
Note that this class does not implement gradient descent; instead, it
will interact with a separate Solver object that is responsible for running
optimization.
The learnable parameters of the model are stored in the dictionary
self.params that maps parameter names to numpy arrays. | 62598fa15fdd1c0f98e5ddeb |
class GetAdStatsNode(template.Node): <NEW_LINE> <INDENT> def __init__(self, ad, varname, start=None, end=None): <NEW_LINE> <INDENT> self.ad = template.Variable(ad) <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.varname = varname.strip() <NEW_LINE> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> ad = self.ad.resolve(context) <NEW_LINE> filter=[] <NEW_LINE> if self.start: <NEW_LINE> <INDENT> filter.append(start) <NEW_LINE> <DEDENT> if self.end: <NEW_LINE> <INDENT> filter.append(end) <NEW_LINE> <DEDENT> imps = ad.impressions(*filter) <NEW_LINE> clks = ad.clicks(*filter) <NEW_LINE> context[self.varname] = [imps, clks] <NEW_LINE> return '' | Retrieves the stats of an ad object.
Usage::
{% get_ad_stats for ad as stats %}
{% get_ad_stats for ad as stats from 2010-08-01 to 2011-08-01 %}
{% get_ad_stats for ad as stats from 2010-08-01 %}
{% get_ad_stats for ad as stats to 2011-08-01 %} | 62598fa18a43f66fc4bf1fcf |
class BCRNN(RNN): <NEW_LINE> <INDENT> def __init__(self, num_modules, **kwargs): <NEW_LINE> <INDENT> super(BCRNN, self).__init__(**kwargs) <NEW_LINE> self.num_modules = num_modules <NEW_LINE> <DEDENT> def bind(self, *args, **kwargs): <NEW_LINE> <INDENT> super(BCRNN, self).bind(*args, **kwargs) <NEW_LINE> if self.output_size % self.num_modules != 0: <NEW_LINE> <INDENT> raise util.ConfigurationError( 'layer "{}": size {} is not a multiple of num_modules {}' .format(self.name, self.output_size, self.num_modules)) <NEW_LINE> <DEDENT> <DEDENT> def setup(self): <NEW_LINE> <INDENT> super(BCRNN, self).setup() <NEW_LINE> n = self.output_size // self.num_modules <NEW_LINE> mask = np.zeros((self.output_size, self.output_size), 'f') <NEW_LINE> rates = np.zeros((self.output_size, ), 'f') <NEW_LINE> for i, r in enumerate(1 - np.logspace(-1e-4, -6, self.num_modules)): <NEW_LINE> <INDENT> mask[i*n:, i*n:(i+1)*n] = 1 <NEW_LINE> rates[i*n:(i+1)*n] = r <NEW_LINE> <DEDENT> self._mask = theano.shared(mask, name='mask') <NEW_LINE> self._rates = theano.shared(rates, name='rates') <NEW_LINE> <DEDENT> def _step(self, _, x_t, pre_tm1, h_tm1): <NEW_LINE> <INDENT> pre_t = x_t + TT.dot(h_tm1, self.find('hh') * self._mask) <NEW_LINE> pre = self._rates * pre_tm1 + (1 - self._rates) * pre_t <NEW_LINE> return [pre, self.activate(pre)] <NEW_LINE> <DEDENT> def to_spec(self): <NEW_LINE> <INDENT> spec = super(BCRNN, self).to_spec() <NEW_LINE> spec['num_modules'] = self.num_modules <NEW_LINE> return spec | Blocked cascading recurrent network layer.
Notes
-----
In a vanilla RNN the output from the layer at the previous time step is
incorporated into the input of the layer at the current time step:
.. math::
h_t = \sigma(x_t W_{xh} + h_{t-1} W_{hh} + b)
where :math:`\sigma(\cdot)` is the :ref:`activation function <activations>`
of the layer, and the subscript represents the time step of the data being
processed.
A blocked cascading RNN (BCRNN) adopts the same update equation but *masks*
the elements of :math:`W_{hh}` in a block-triangular fashion.
*Parameters*
- ``b`` --- bias
- ``xh`` --- matrix connecting inputs to hiddens
- ``hh`` --- matrix connecting hiddens to hiddens
*Outputs*
- ``out`` --- the post-activation state of the layer
- ``pre`` --- the pre-activation state of the layer | 62598fa1fff4ab517ebcd641 |
class ListUserViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = ListUserSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> return queryset | VIewset usuairo | 62598fa107f4c71912baf297 |
@toolbar_pool.register <NEW_LINE> class StaffMemberToolbar(CMSToolbar): <NEW_LINE> <INDENT> def populate(self): <NEW_LINE> <INDENT> if hasattr(self.request.user,'staffmember') and hasattr(self.request.user.staffmember,'instructor') and self.request.user.has_perm('core.view_own_instructor_stats'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> menu.add_link_item(_('Your Stats'), url=reverse('staffMemberStats')) <NEW_LINE> <DEDENT> if hasattr(self.request.user,'staffmember') and self.request.user.has_perm('core.update_instructor_bio'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> menu.add_link_item(_('Update Your Contact Info'), url=reverse('staffBioChange')) <NEW_LINE> <DEDENT> if self.request.user.has_perm('core.view_staff_directory'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> menu.add_link_item(_('Instructor/Staff Directory'), url=reverse('staffDirectory')) <NEW_LINE> menu.add_break('post_directory_break') <NEW_LINE> <DEDENT> addBreak = False <NEW_LINE> if self.request.user.has_perm('core.send_email'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> menu.add_link_item(_('Email Students'), url=reverse('emailStudents')) <NEW_LINE> addBreak = True <NEW_LINE> <DEDENT> if self.request.user.has_perm('core.report_substitute_teaching'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> menu.add_link_item(_('Report Substitute Teaching'), url=reverse('substituteTeacherForm')) <NEW_LINE> addBreak = True <NEW_LINE> <DEDENT> if addBreak: <NEW_LINE> <INDENT> menu.add_break('post_instructor_functions_break') <NEW_LINE> <DEDENT> if self.request.user.has_perm('core.change_staffmember'): <NEW_LINE> <INDENT> menu = self.toolbar.get_or_create_menu('core-staffmember', _('Staff')) <NEW_LINE> 
menu.add_link_item(_('Manage Staff/Instructors'), url=reverse('admin:core_staffmember_changelist')) | Adds items to the toolbar to add class Series and Events. | 62598fa17d847024c075c219 |
class BuiltinModule(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.is_main_module = False <NEW_LINE> self.to_be_mangled = False <NEW_LINE> self.exported_functions = dict() <NEW_LINE> self.dependent_modules = dict() <NEW_LINE> <DEDENT> def call_function(self, _, func_name): <NEW_LINE> <INDENT> importFrom = ast.ImportFrom(module=self.name, names=[ast.alias(name=func_name, asname=None)], level=0) <NEW_LINE> self.exported_functions[func_name] = importFrom <NEW_LINE> return func_name <NEW_LINE> <DEDENT> def import_function(self, _, func_name): <NEW_LINE> <INDENT> return func_name | Represent a builtin module.
it offer the same interface as ImportedModule class, but do not try to
validate function imported from here. | 62598fa144b2445a339b6897 |
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> for j in xrange(num_train): <NEW_LINE> <INDENT> def EuclideanDistance(x,y): <NEW_LINE> <INDENT> return np.sqrt(((x - y) ** 2).sum()) <NEW_LINE> <DEDENT> dists[i,j] = EuclideanDistance(X[i], self.X_train[j]) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> dists[i,:] = np.sqrt(np.sum((X[i] - self.X_train) ** 2, axis = 1)) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> dists = np.array(np.mat(X) * np.mat(self.X_train).T * (-2)) + np.sum(self.X_train ** 2, axis = 1) <NEW_LINE> dists = dists.T + np.sum(X ** 2,axis = 1) <NEW_LINE> dists = dists.T 
<NEW_LINE> dists = np.sqrt(dists) <NEW_LINE> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> idx = np.argsort(dists[i,::], axis = -1) <NEW_LINE> for j in xrange(k): <NEW_LINE> <INDENT> closest_y.append(self.y_train[idx[j]]) <NEW_LINE> <DEDENT> y_pred[i] = np.argmax(np.bincount(closest_y)) <NEW_LINE> <DEDENT> return y_pred | a kNN classifier with L2 distance | 62598fa1498bea3a75a57975 |
class WeightlogTestCase(WorkoutManagerTestCase): <NEW_LINE> <INDENT> def test_get_workout_session(self): <NEW_LINE> <INDENT> user1 = User.objects.get(pk=1) <NEW_LINE> user2 = User.objects.get(pk=2) <NEW_LINE> workout1 = Workout.objects.get(pk=2) <NEW_LINE> workout2 = Workout.objects.get(pk=2) <NEW_LINE> WorkoutLog.objects.all().delete() <NEW_LINE> log = WorkoutLog() <NEW_LINE> log.user = user1 <NEW_LINE> log.date = datetime.date(2014, 1, 5) <NEW_LINE> log.exercise = Exercise.objects.get(pk=1) <NEW_LINE> log.workout = workout1 <NEW_LINE> log.weight = 10 <NEW_LINE> log.reps = 10 <NEW_LINE> log.save() <NEW_LINE> session1 = WorkoutSession() <NEW_LINE> session1.user = user1 <NEW_LINE> session1.workout = workout1 <NEW_LINE> session1.notes = 'Something here' <NEW_LINE> session1.impression = '3' <NEW_LINE> session1.date = datetime.date(2014, 1, 5) <NEW_LINE> session1.save() <NEW_LINE> session2 = WorkoutSession() <NEW_LINE> session2.user = user1 <NEW_LINE> session2.workout = workout1 <NEW_LINE> session2.notes = 'Something else here' <NEW_LINE> session2.impression = '1' <NEW_LINE> session2.date = datetime.date(2014, 1, 1) <NEW_LINE> session2.save() <NEW_LINE> session3 = WorkoutSession() <NEW_LINE> session3.user = user2 <NEW_LINE> session3.workout = workout2 <NEW_LINE> session3.notes = 'The notes here' <NEW_LINE> session3.impression = '2' <NEW_LINE> session3.date = datetime.date(2014, 1, 5) <NEW_LINE> session3.save() <NEW_LINE> self.assertEqual(log.get_workout_session(), session1) | Tests other model methods | 62598fa101c39578d7f12bd2 |
class BodyguardAnt(Ant): <NEW_LINE> <INDENT> name = 'Bodyguard' <NEW_LINE> implemented = True <NEW_LINE> food_cost = 4 <NEW_LINE> container = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Ant.__init__(self, 2) <NEW_LINE> self.ant = None <NEW_LINE> <DEDENT> def contain_ant(self, ant): <NEW_LINE> <INDENT> if self.can_contain(ant): <NEW_LINE> <INDENT> self.ant = ant <NEW_LINE> <DEDENT> <DEDENT> def action(self, colony): <NEW_LINE> <INDENT> if self.ant != None: <NEW_LINE> <INDENT> self.ant.action(colony) <NEW_LINE> <DEDENT> <DEDENT> def reduce_armor(self, amount): <NEW_LINE> <INDENT> self.armor -= amount <NEW_LINE> if self.armor <= 0: <NEW_LINE> <INDENT> guarded_ant = self.ant <NEW_LINE> current_place = self.place <NEW_LINE> self.place.remove_insect(self) <NEW_LINE> current_place.ant = guarded_ant | BodyguardAnt provides protection to other Ants. | 62598fa1d7e4931a7ef3beed |
class L2Regularizer(Regularizer): <NEW_LINE> <INDENT> def __init__(self, reg): <NEW_LINE> <INDENT> super().__init__(reg) <NEW_LINE> <DEDENT> def loss(self, w): <NEW_LINE> <INDENT> return self._lambda * np.square(np.linalg.norm(w[:-1], 2)) <NEW_LINE> <DEDENT> def gradient(self, w): <NEW_LINE> <INDENT> gradient = np.zeros_like(w) <NEW_LINE> gradient[:-1] = self._lambda * w[:-1] <NEW_LINE> return gradient | docstring for L2Regularizer | 62598fa1442bda511e95c2ae |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.