_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q38500
Cache.fetch
train
def fetch(
        self,
        url,
        filename=None,
        decompress=False,
        force=False,
        timeout=None,
        use_wget_if_available=True):
    """
    Return the local path to the downloaded copy of a given URL.

    A previously downloaded copy is reused unless `force` is True or
    the cached file has since disappeared from disk.
    """
    key = (url, decompress)
    if not force and key in self._local_paths:
        cached = self._local_paths[key]
        if exists(cached):
            return cached
        # the cached file vanished from disk; drop the stale entry
        del self._local_paths[key]
    fetched = download.fetch_file(
        url,
        filename=filename,
        decompress=decompress,
        subdir=self.subdir,
        force=force,
        timeout=timeout,
        use_wget_if_available=use_wget_if_available)
    self._local_paths[key] = fetched
    return fetched
python
{ "resource": "" }
q38501
Cache.local_path
train
def local_path(self, url, filename=None, decompress=False, download=False):
    """
    Return the full local path a download of `url` would occupy.

    When `download` is true, the file is actually fetched (delegating
    to `fetch`); otherwise the path is computed without network access.
    """
    if not download:
        name = self.local_filename(url, filename, decompress)
        return join(self.cache_directory_path, name)
    return self.fetch(url=url, filename=filename, decompress=decompress)
python
{ "resource": "" }
q38502
connect_redis
train
def connect_redis(redis_client, name=None, transaction=False):
    """
    Connect your redis-py instance to redpipe.

    Example:

    .. code:: python

        redpipe.connect_redis(redis.StrictRedis(), name='users')

    Do this during your application bootstrapping.

    You can also pass a redis-py-cluster instance to this method.

    .. code:: python

        redpipe.connect_redis(rediscluster.StrictRedisCluster(), name='users')

    You are allowed to pass in either the strict or regular instance.

    .. code:: python

        redpipe.connect_redis(redis.StrictRedis(), name='a')
        redpipe.connect_redis(redis.Redis(), name='b')
        redpipe.connect_redis(rediscluster.StrictRedisCluster(...), name='c')
        redpipe.connect_redis(rediscluster.RedisCluster(...), name='d')

    :param redis_client: a redis-py or redis-py-cluster client instance
    :param name: nickname you want to give to your connection, optional
    :param transaction: bool, whether pipelines should use MULTI/EXEC
    :return: None
    """
    # Thin module-level convenience wrapper; all logic lives in
    # ConnectionManager.connect_redis.
    return ConnectionManager.connect_redis(
        redis_client=redis_client,
        name=name,
        transaction=transaction)
python
{ "resource": "" }
q38503
ConnectionManager.connect
train
def connect(cls, pipeline_method, name=None):
    """
    Low level logic to bind a callable method to a name.
    Don't call this directly unless you know what you are doing.

    :param pipeline_method: callable, returns a pipeline object whose
        ``connection_pool`` identifies the underlying connection
    :param name: str optional
    :return: None
    """
    # Probe the candidate's connection pool so it can be compared with
    # any connection already registered under this name.
    new_pool = pipeline_method().connection_pool
    try:
        # Refuse to silently rebind a name to a different pool.
        if cls.get(name).connection_pool != new_pool:
            raise AlreadyConnected("can't change connection for %s" % name)
    except InvalidPipeline:
        # No existing connection under this name; nothing to conflict with.
        pass
    cls.connections[name] = pipeline_method
python
{ "resource": "" }
q38504
ConnectionManager.connect_redis
train
def connect_redis(cls, redis_client, name=None, transaction=False):
    """
    Store the redis connection in our connector instance.

    Do this during your application bootstrapping.

    We call the pipeline method of the redis client.
    The ``redis_client`` can be either a redis or rediscluster client.
    We use the interface, not the actual class.
    That means we can handle either one identically.

    It doesn't matter if you pass in `Redis` or `StrictRedis`.
    the interface for direct redis commands will behave identically.

    Keyspaces will work with either, but it presents the same interface
    that the Redis class does, not StrictRedis.

    The transaction flag is a boolean value we hold on to and pass to
    the invocation of something equivalent to:

    .. code-block:: python

        redis_client.pipeline(transaction=transaction)

    Unlike redis-py, this flag defaults to False.
    You can configure it to always use the MULTI/EXEC flags, but
    I don't see much point.
    If you need transactional support I recommend using a LUA script.
    **RedPipe** is about improving network round-trip efficiency.

    :param redis_client: redis.StrictRedis() or redis.Redis()
    :param name: identifier for the connection, optional
    :param transaction: bool, defaults to False
    :return: None
    """
    connection_pool = redis_client.connection_pool
    # redpipe handles decoding itself; a client that already decodes
    # responses would double-decode, so reject it up front.
    if connection_pool.connection_kwargs.get('decode_responses', False):
        raise InvalidPipeline('decode_responses set to True')

    def pipeline_method():
        """
        A closure wrapping the pipeline.

        :return: pipeline object
        """
        return redis_client.pipeline(transaction=transaction)

    # set up the connection.
    cls.connect(pipeline_method=pipeline_method, name=name)
python
{ "resource": "" }
q38505
remoteIndexer1to2
train
def remoteIndexer1to2(oldIndexer):
    """
    Previously external application code was responsible for adding a
    RemoteListener to a batch work source as a reliable listener.  This
    precluded the possibility of the RemoteListener resetting itself
    unilaterally.  With version 2, RemoteListener takes control of adding
    itself as a reliable listener and keeps track of the sources with
    which it is associated.  This upgrader creates that tracking state.
    """
    newIndexer = oldIndexer.upgradeVersion(
        oldIndexer.typeName, 1, 2,
        indexCount=oldIndexer.indexCount,
        installedOn=oldIndexer.installedOn,
        indexDirectory=oldIndexer.indexDirectory)

    # Find every reliable listener already pointing at this indexer and
    # record each of its processors as a tracked input source.
    listeners = newIndexer.store.query(
        batch._ReliableListener,
        batch._ReliableListener.listener == newIndexer)
    for listener in listeners:
        _IndexerInputSource(
            store=newIndexer.store,
            indexer=newIndexer,
            source=listener.processor)
    return newIndexer
python
{ "resource": "" }
q38506
remoteIndexer2to3
train
def remoteIndexer2to3(oldIndexer):
    """
    The documentType keyword was added to all indexable items.  Indexes
    need to be regenerated for this to take effect.  Also, PyLucene no
    longer stores the text of messages it indexes, so deleting and
    re-creating the indexes will make them much smaller.
    """
    newIndexer = oldIndexer.upgradeVersion(
        oldIndexer.typeName, 2, 3,
        indexCount=oldIndexer.indexCount,
        installedOn=oldIndexer.installedOn,
        indexDirectory=oldIndexer.indexDirectory)
    # the 3->4 upgrader for PyLuceneIndexer calls reset(), so don't do it
    # here.  also, it won't work because it's a DummyItem
    if oldIndexer.typeName != PyLuceneIndexer.typeName:
        newIndexer.reset()
    return newIndexer
python
{ "resource": "" }
q38507
pyLuceneIndexer4to5
train
def pyLuceneIndexer4to5(old):
    """
    Upgrade a PyLuceneIndexer from schema version 4 to 5.

    Attributes are carried over, the index is reset (information about
    deleted documents has been lost), and the item is powered up for
    IFulltextIndexer so other code can find it.
    """
    upgraded = old.upgradeVersion(
        PyLuceneIndexer.typeName, 4, 5,
        indexCount=old.indexCount,
        installedOn=old.installedOn,
        indexDirectory=old.indexDirectory)
    upgraded.reset()
    upgraded.store.powerUp(upgraded, ixmantissa.IFulltextIndexer)
    return upgraded
python
{ "resource": "" }
q38508
RemoteIndexer.reset
train
def reset(self):
    """
    Discard all indexed state and schedule every source for
    reprocessing from scratch.
    """
    self.indexCount = 0
    directory = self.store.newDirectory(self.indexDirectory)
    if directory.exists():
        directory.remove()
    # Re-register with each source so everything is delivered again.
    for source in self.getSources():
        source.removeReliableListener(self)
        source.addReliableListener(self, style=iaxiom.REMOTE)
python
{ "resource": "" }
q38509
RemoteIndexer._flush
train
def _flush(self):
    """
    Apply pending result-affecting changes (queued document removals).

    This should always be called before issuing a search.
    """
    remove = self.store.query(_RemoveDocument)
    documentIdentifiers = list(remove.getColumn("documentIdentifier"))
    if VERBOSE:
        log.msg("%s/%d removing %r" % (
            self.store, self.storeID, documentIdentifiers))
    reader = self.openReadIndex()
    try:
        # BUG FIX: this used map(reader.remove, documentIdentifiers).
        # Under Python 3, map() is lazy and the removals would silently
        # never run; an explicit loop is correct on both versions.
        for documentIdentifier in documentIdentifiers:
            reader.remove(documentIdentifier)
    finally:
        # Always release the index reader, even if a removal fails.
        reader.close()
    remove.deleteFromStore()
python
{ "resource": "" }
q38510
_SQLiteIndex.add
train
def add(self, document):
    """
    Insert a document into the full-text database.

    The document's unique identifier becomes the integer row id, and
    all of its text parts are joined into one indexed string.
    """
    identifier = int(document.uniqueIdentifier())
    body = u' '.join(document.textParts())
    self.store.executeSQL(self.addSQL, (identifier, body))
python
{ "resource": "" }
q38511
_SQLiteIndex.remove
train
def remove(self, docid):
    """
    Delete the document with the given identifier from the database.
    """
    # Coerce to int so both numeric and stringified ids are accepted.
    self.store.executeSQL(self.removeSQL, (int(docid),))
python
{ "resource": "" }
q38512
_SQLiteIndex.search
train
def search(self, term, keywords=None, sortAscending=True):
    """
    Run a full-text query and wrap each matching row identifier.

    NOTE(review): the `keywords` argument is currently unused by this
    implementation -- confirm whether callers expect it to filter.
    """
    direction = 'ASC' if sortAscending else 'DESC'
    rows = self.store.querySQL(self.searchSQL % (direction,), (term,))
    return [_SQLiteResultWrapper(row[0]) for row in rows]
python
{ "resource": "" }
q38513
SQLiteIndexer._getStore
train
def _getStore(self):
    """
    Open the Store used for FTS, creating and initialising it on
    first use.
    """
    storeDir = self.store.newDirectory(self.indexDirectory)
    # Must test existence *before* constructing the Store, which may
    # create the directory as a side effect.
    needsInit = not storeDir.exists()
    ftsStore = Store(storeDir)
    if needsInit:
        self._initStore(ftsStore)
    return ftsStore
python
{ "resource": "" }
q38514
SingleType.dump
train
def dump(self, value):
    """Serialise the value to a string.

    The value is coerced, then validated, then stringified.

    :returns: Returns the stringified version of the value.
    :raises: TypeError, ValueError
    """
    converted = self.__convert__(value)
    self.__validate__(converted)
    return self.__serialize__(converted)
python
{ "resource": "" }
q38515
Integer.simulate
train
def simulate(self):
    """Return a random integer within the configured bounds.

    A missing bound defaults to the platform's full native integer
    range (-sys.maxsize - 1 .. sys.maxsize).
    """
    lower = self._min if self._min is not None else (-sys.maxsize - 1)
    upper = self._max if self._max is not None else sys.maxsize
    return random.randint(lower, upper)
python
{ "resource": "" }
q38516
String.simulate
train
def simulate(self):
    """Return a randomly constructed alphanumeric string.

    The length is drawn between min and max.  If min is not present a
    minimum length of 1 is assumed; if max is not present a maximum
    length of 10 is used.  When min >= max the length is simply min.
    """
    lower = 1 if self._min is None else self._min
    upper = 10 if self._max is None else self._max
    length = random.randint(lower, upper) if lower < upper else lower
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
python
{ "resource": "" }
q38517
Stream.simulate
train
def simulate(self): """Simulates a stream of types.""" # Simulates zero to 10 types return [t.simulate() for t in itertools.islice(self, random.choice(range(10)))]
python
{ "resource": "" }
q38518
ExcelDAM.__findRange
train
def __findRange(self, excelLib, start, end):
    '''Return (low, high) as the excel row range covering [start, end].'''
    # Rows are offset by one relative to the date column read below.
    offset = 1
    low = 0
    high = 0
    dates = excelLib.readCol(0, 1)
    # First row whose date is on or after `start`.
    for index, date in enumerate(dates):
        if int(start) <= int(date):
            low = index + offset
            break
    # Last row whose date is on or before `end`; only meaningful when a
    # starting row was found.
    if low:
        for index, date in reversed(list(enumerate(dates))):
            if int(date) <= int(end):
                high = index + offset
                break
    return low, high
python
{ "resource": "" }
q38519
SR850.snap
train
def snap(self, *args):
    """Records multiple values at once.

    It takes two to six arguments specifying which values should be
    recorded together.

    Valid arguments are 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3',
    'aux4', 'frequency', 'trace1', 'trace2', 'trace3' and 'trace4'.

    snap is faster since it avoids communication overhead.

    'x' and 'y' are recorded together, as well as 'r' and 'theta'.
    Between these pairs, there is a delay of approximately 10 us.

    'aux1', 'aux2', 'aux3' and 'aux4' have an uncertainty of up to
    32 us.  It takes at least 40 ms or a period to calculate the
    frequency.

    E.g.::

        lockin.snap('x', 'theta', 'trace3')

    """
    length = len(args)
    if not 2 <= length <= 6:
        msg = 'snap takes 2 to 6 arguments, {0} given.'.format(length)
        raise TypeError(msg)
    # The program data type.
    param = Enum(
        'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4',
        'frequency', 'trace1', 'trace2', 'trace3', 'trace4'
    )
    # construct command: one response Float and one request parameter
    # slot per requested value.
    cmd = 'SNAP?', (Float,) * length, (param, ) * length
    return self._ask(cmd, *args)
python
{ "resource": "" }
q38520
SR850.fit
train
def fit(self, range, function=None):
    """Fits a function to the active display's data trace within a
    specified range of the time window.

    E.g.::

        # Fits a gaussian to the first 30% of the time window.
        lockin.fit(range=(0, 30), function='gauss')

    :param range: A 2-tuple ``(start, stop)`` holding the left and
        right limits of the time window in percent (0-100 each).
    :param function: The function used to fit the data, either 'line',
        'exp', 'gauss' or None, the default. The configured fit
        function is left unchanged if function is None.

    .. note::

        Fitting takes some time. Check the status byte to see when the
        operation is done. A running scan will be paused until the
        fitting is complete.

    .. warning::

        The SR850 will generate an error if the active display trace is
        not stored when the fit command is executed.

    """
    if function is not None:
        self.fit_function = function
    # BUG FIX: the original body referenced undefined names `start` and
    # `stop` (NameError on every call); they are the two elements of
    # the `range` argument, as the docstring and example imply.
    start, stop = range
    cmd = 'FITT', Integer(min=0, max=100), Integer(min=0, max=100)
    self._write(cmd, start, stop)
python
{ "resource": "" }
q38521
SR850.calculate_statistics
train
def calculate_statistics(self, start, stop):
    """Starts the statistics calculation over part of the time window.

    :param start: The left limit of the time window in percent.
    :param stop: The right limit of the time window in percent.

    .. note::

        The calculation takes some time. Check the status byte to see
        when the operation is done. A running scan will be paused until
        the operation is complete.

    .. warning::

        The SR850 will generate an error if the active display trace is
        not stored when the command is executed.

    """
    command = 'STAT', Integer, Integer
    self._write(command, start, stop)
python
{ "resource": "" }
q38522
SR850.calculate
train
def calculate(self, operation=None, trace=None, constant=None, type=None):
    """Starts the calculation.

    The calculation operates on the trace graphed in the active
    display.  The math operation is defined by the
    :attr:`~.SR850.math_operation`, the second argument by the
    :attr:`~.SR850.math_argument_type`.

    For convenience, the operation and the second argument can be
    specified via the parameters

    :param operation: Sets the math operation if not `None`. See
        :attr:`~.SR850.math_operation` for details.
    :param trace: If the trace argument is used, it sets the
        :attr:`~.math_trace_argument` to it and sets the
        :attr:`~.math_argument_type` to 'trace'
    :param constant: If constant is not `None`, the
        :attr:`~.math_constant` is set with this value and the
        :attr:`~.math_argument_type` is set to 'constant'
    :param type: If type is not `None`, the
        :attr:`~.math_argument_type` is set to this value.

    E.g. instead of::

        lockin.math_operation = '*'
        lockin.math_argument_type = 'constant'
        lockin.math_constant = 1.337
        lockin.calculate()

    one can write::

        lockin.calculate(operation='*', constant=1.337)

    .. note:: Do not use trace, constant and type together.

    .. note::

        The calculation takes some time. Check the status byte to see
        when the operation is done. A running scan will be paused until
        the operation is complete.

    .. warning::

        The SR850 will generate an error if the active display trace is
        not stored when the command is executed.

    """
    # Each assignment below writes to the instrument via a property, so
    # the order of these statements is significant.
    if operation is not None:
        self.math_operation = operation
    # `trace` takes precedence over `constant`; either implies `type`.
    if trace is not None:
        self.math_trace_argument = trace
        type = 'trace'
    elif constant is not None:
        self.math_constant = constant
        type = 'constant'
    if type is not None:
        self.math_argument_type = type
    self._write('CALC')
python
{ "resource": "" }
q38523
Mark.bin
train
def bin(self):
    """The bin index of this mark.

    :returns: An integer bin index, or None if the mark is inactive
        (the instrument reports -1 for an inactive mark).
    """
    index = self._query(('MBIN?', Integer, Integer), self.idx)
    if index == -1:
        return None
    return index
python
{ "resource": "" }
q38524
MarkList.active
train
def active(self): """The indices of the active marks.""" # TODO avoid direct usage of transport object. marks = tuple(int(x) for x in transport.ask('MACT').split(',')) return marks[1:]
python
{ "resource": "" }
q38525
Connection.put_and_track
train
def put_and_track(self, url, payload, refresh_rate_sec=1):
    """
    Put and track progress, displaying progress bars.

    May display the wrong progress if 2 things post/put on the same
    procedure name at the same time.
    """
    if not url.startswith('/v1/procedures'):
        raise Exception("The only supported route is /v1/procedures")
    parts = url.split('/')
    len_parts = len(parts)
    # 4 parts = PUT a procedure; 6 parts = PUT a procedure run.
    if len_parts not in [4, 6]:
        raise Exception(
            "You must either PUT a procedure or a procedure run")
    proc_id = parts[3]
    run_id = None
    if len_parts == 4:
        if 'params' not in payload:
            payload['params'] = {}
        payload['params']['runOnCreation'] = True
    elif len_parts == 6:
        run_id = parts[-1]
    pm = ProgressMonitor(self, refresh_rate_sec, proc_id, run_id,
                         self.notebook)
    t = threading.Thread(target=pm.monitor_progress)
    t.start()
    try:
        return self.put(url, payload)
    except Exception as e:
        # best-effort: report and fall through to cleanup
        print(e)
    finally:
        # BUG FIX: the monitor shutdown used to sit *after* the
        # try/finally (with ``finally: pass``), so a successful
        # ``return`` skipped it and the monitor thread ran forever.
        # Stopping it in ``finally`` guarantees cleanup on every path,
        # matching post_and_track.
        pm.event.set()
        t.join()
python
{ "resource": "" }
q38526
Connection.post_and_track
train
def post_and_track(self, url, payload, refresh_rate_sec=1):
    """
    Post and track progress, displaying progress bars.

    May display the wrong progress if 2 things post/put on the same
    procedure name at the same time.
    """
    if not url.startswith('/v1/procedures'):
        raise Exception("The only supported route is /v1/procedures")
    if url.endswith('/runs'):
        raise Exception(
            "Posting and tracking run is unsupported at the moment")
    if len(url.split('/')) != 3:
        raise Exception("You must POST a procedure")
    # Create the procedure without running it; the run is started
    # separately below so the monitor thread can watch it.
    if 'params' not in payload:
        payload['params'] = {}
    payload['params']['runOnCreation'] = False
    res = self.post('/v1/procedures', payload).json()
    proc_id = res['id']
    pm = ProgressMonitor(self, refresh_rate_sec, proc_id,
                         notebook=self.notebook)
    t = threading.Thread(target=pm.monitor_progress)
    t.start()
    try:
        return self.post('/v1/procedures/{}/runs'.format(proc_id), {})
    except Exception as e:
        # best-effort: report the failure and fall through to cleanup
        print(e)
    finally:
        # always stop the monitor thread, even on the return path
        pm.event.set()
        t.join()
python
{ "resource": "" }
q38527
StringEndpointPort._makeService
train
def _makeService(self):
    """
    Construct a service for the endpoint as described.
    """
    # Fall back to the module-level `service` factory unless a specific
    # one was injected on this instance.
    if self._endpointService is None:
        serviceFactory = service
    else:
        serviceFactory = self._endpointService
    return serviceFactory(
        self.description.encode('ascii'), self.factory.getFactory())
python
{ "resource": "" }
q38528
ListOptions.postOptions
train
def postOptions(self):
    """
    Display details about the ports which already exist.
    """
    # NOTE: this module is Python 2 (print statements, tuple-parameter
    # unpacking in the local `key` function below).
    store = self.parent.parent.getStore()
    port = None
    # Map factory storeID -> (factory, [ports bound to that factory]).
    factories = {}
    for portType in [TCPPort, SSLPort, StringEndpointPort]:
        for port in store.query(portType):
            key = port.factory.storeID
            if key not in factories:
                factories[key] = (port.factory, [])
            factories[key][1].append(port)
    # Include factories that have no ports at all, so they can be
    # reported as "not listening".
    for factory in store.powerupsFor(IProtocolFactoryFactory):
        key = factory.storeID
        if key not in factories:
            factories[key] = (factory, [])
    # Sort output deterministically by factory storeID.
    def key((factory, ports)):
        return factory.storeID
    for factory, ports in sorted(factories.values(), key=key):
        if ports:
            print '%d) %r listening on:' % (factory.storeID, factory)
            for port in ports:
                if getattr(port, 'interface', None):
                    interface = "interface " + port.interface
                else:
                    interface = "any interface"
                if isinstance(port, TCPPort):
                    print ' %d) TCP, %s, port %d' % (
                        port.storeID, interface, port.portNumber)
                elif isinstance(port, SSLPort):
                    if port.certificatePath is not None:
                        pathPart = 'certificate %s' % (
                            port.certificatePath.path,)
                    else:
                        pathPart = 'NO CERTIFICATE'
                    if port.portNumber is not None:
                        portPart = 'port %d' % (port.portNumber,)
                    else:
                        portPart = 'NO PORT'
                    print ' %d) SSL, %s, %s, %s' % (
                        port.storeID, interface, portPart, pathPart)
                elif isinstance(port, StringEndpointPort):
                    print ' {:d}) Endpoint {!r}'.format(
                        port.storeID, port.description)
        else:
            print '%d) %r is not listening.' % (factory.storeID, factory)
    if not factories:
        print "There are no ports configured."
    raise SystemExit(0)
python
{ "resource": "" }
q38529
DeleteOptions._delete
train
def _delete(self, store, portIDs):
    """
    Try to delete the ports with the given store IDs.

    @param store: The Axiom store from which to delete items.
    @param portIDs: A list of Axiom store IDs for TCPPort or SSLPort
        items.

    @raise L{SystemExit}: If one of the store IDs does not identify a
        port item.
    """
    for portID in portIDs:
        try:
            port = store.getItemByID(portID)
        except KeyError:
            print "%d does not identify an item." % (portID,)
            raise SystemExit(1)
        if isinstance(port, (TCPPort, SSLPort, StringEndpointPort)):
            port.deleteFromStore()
        else:
            # The ID resolved to some other item type; refuse to delete.
            print "%d does not identify a port." % (portID,)
            raise SystemExit(1)
python
{ "resource": "" }
q38530
DeleteOptions.postOptions
train
def postOptions(self):
    """
    Delete the ports specified with the port-identifier option.
    """
    if self.portIdentifiers:
        store = self.parent.parent.getStore()
        # Run the deletion in a transaction so it's all-or-nothing.
        store.transact(self._delete, store, self.portIdentifiers)
        print "Deleted."
        raise SystemExit(0)
    else:
        # No identifiers supplied; show usage instead.
        self.opt_help()
python
{ "resource": "" }
q38531
absl_flags
train
def absl_flags():
    """
    Extracts absl-py flags that the user has specified and outputs
    their key-value mapping.

    By default, extracts only those flags in the current __package__
    and mainfile. Useful to put into a trial's param_map.
    """
    # TODO: need same thing for argparse
    flags_dict = flags.FLAGS.flags_by_module_dict()

    # only include parameters from modules the user probably cares about
    def _relevant_module(module_name):
        if __package__ and __package__ in module_name:
            return True
        if module_name == sys.argv[0]:
            return True
        return False

    # BUG FIX: the comprehension previously rebound the name `flags`,
    # shadowing the absl `flags` module within this function; the loop
    # variable is now `module_flags`.
    return {
        flag.name: flag.value
        for module, module_flags in flags_dict.items()
        for flag in module_flags
        if _relevant_module(module)}
python
{ "resource": "" }
q38532
pipeline
train
def pipeline(pipe=None, name=None, autoexec=False, exit_handler=None):
    """
    The foundational factory for all of redpipe: create top-level
    pipelines, nest pipelines inside one another, and select a named
    connection.

    A simple example:

    .. code:: python

        with pipeline() as pipe:
            pipe.set('foo', 'bar')
            foo = pipe.get('foo')
            pipe.execute()
        print(foo)

    Nesting a pipeline:

    .. code:: python

        def process(key, pipe=None):
            with pipeline(pipe, autoexec=True) as pipe:
                return pipe.incr(key)

        with pipeline() as pipe:
            key1 = process('key1', pipe)
            key2 = process('key2', pipe)
            pipe.execute()
        print([key1, key2])

    :param pipe: a Pipeline() or NestedPipeline() object, or None
    :param name: str, optional. the name of the connection to use.
    :param autoexec: bool, if true, implicitly execute the pipe
    :param exit_handler: optional callable run on context exit
    :return: Pipeline or NestedPipeline
    :raises InvalidPipeline: if `pipe` is not a redpipe-aware pipeline
    """
    if pipe is None:
        return Pipeline(name=name, autoexec=autoexec,
                        exit_handler=exit_handler)
    try:
        if pipe.supports_redpipe_pipeline():
            return NestedPipeline(parent=pipe, name=name,
                                  autoexec=autoexec,
                                  exit_handler=exit_handler)
    except AttributeError:
        # `pipe` is not a redpipe pipeline at all; fall through.
        pass
    raise InvalidPipeline('check your configuration')
python
{ "resource": "" }
q38533
autoexec
train
def autoexec(pipe=None, name=None, exit_handler=None):
    """
    Shortcut for :func:`pipeline` with ``autoexec=True``: the returned
    context executes the pipeline automatically upon leaving the
    context if no exception was raised.

    :param pipe: parent pipeline or None
    :param name: connection name, optional
    :param exit_handler: optional callable run on context exit
    :return: Pipeline or NestedPipeline
    """
    return pipeline(pipe=pipe, name=name, exit_handler=exit_handler,
                    autoexec=True)
python
{ "resource": "" }
q38534
dump_etree_helper
train
def dump_etree_helper(container_name, data, rules, nsmap, attrib):
    """Convert DataCite JSON format to DataCite XML.

    JSON should be validated before it is given to to_xml.
    """
    root = etree.Element(container_name, nsmap=nsmap, attrib=attrib)
    for ruleName in rules:
        # Skip rules whose key has no data in the input document.
        if ruleName in data:
            for child in rules[ruleName](ruleName, data[ruleName]):
                root.append(child)
    return root
python
{ "resource": "" }
q38535
etree_to_string
train
def etree_to_string(root, pretty_print=True, xml_declaration=True,
                    encoding='utf-8'):
    """Dump XML etree as a string.

    :param root: The element (tree) to serialise.
    :param pretty_print: Whether to indent the output.
    :param xml_declaration: Whether to emit an XML declaration.
    :param encoding: Byte encoding used during serialisation; the
        resulting bytes are decoded back with this same codec.
    :returns: The serialised document as a text string.
    """
    # BUG FIX: the bytes were previously decoded with a hard-coded
    # 'utf-8', which broke whenever a different `encoding` argument was
    # passed; decode with the same codec used for serialisation.
    return etree.tostring(
        root,
        pretty_print=pretty_print,
        xml_declaration=xml_declaration,
        encoding=encoding,
    ).decode(encoding)
python
{ "resource": "" }
q38536
Rules.rule
train
def rule(self, key):
    """Decorate as a rule for a key in top level JSON."""
    def register(handler):
        # Remember the handler for this top-level key and return it
        # unchanged so it remains directly callable.
        self.rules[key] = handler
        return handler
    return register
python
{ "resource": "" }
q38537
DatabaseTable.from_dataframe
train
def from_dataframe(cls, name, df, indices, primary_key=None): """Infer table metadata from a DataFrame""" # ordered list (column_name, column_type) pairs column_types = [] # which columns have nullable values nullable = set() # tag cached database by dataframe's number of rows and columns for column_name in df.columns: values = df[column_name] if values.isnull().any(): nullable.add(column_name) column_db_type = db_type(values.dtype) column_types.append((column_name.replace(" ", "_"), column_db_type)) def make_rows(): return list(tuple(row) for row in df.values) return cls( name=name, column_types=column_types, make_rows=make_rows, indices=indices, nullable=nullable, primary_key=primary_key)
python
{ "resource": "" }
q38538
ParkingApi.detail_parking
train
def detail_parking(self, **kwargs): """Obtain detailed info of a given parking. Args: lang (str): Language code (*es* or *en*). day (int): Day of the month in format DD. The number is automatically padded if it only has one digit. month (int): Month number in format MM. The number is automatically padded if it only has one digit. year (int): Year number in format YYYY. hour (int): Hour of the day in format hh. The number is automatically padded if it only has one digit. minute (int): Minute of the hour in format mm. The number is automatically padded if it only has one digit. parking (int): ID of the parking to query. family (str): Family code of the parking (3 chars). Returns: Status boolean and parsed response (list[ParkingDetails]), or message string in case of error. """ # Endpoint parameters date = util.datetime_string( kwargs.get('day', 1), kwargs.get('month', 1), kwargs.get('year', 1970), kwargs.get('hour', 0), kwargs.get('minute', 0) ) params = { 'language': util.language_code(kwargs.get('lang')), 'publicData': True, 'date': date, 'id': kwargs.get('parking'), 'family': kwargs.get('family') } # Request result = self.make_request('detail_parking', {}, **params) if not util.check_result(result): return False, result.get('message', 'UNKNOWN ERROR') # Parse values = util.response_list(result, 'Data') return True, [emtype.ParkingDetails(**a) for a in values]
python
{ "resource": "" }
q38539
ParkingApi.detail_poi
train
def detail_poi(self, **kwargs):
    """Obtain detailed info of a given POI.

    Args:
        family (str): Family code of the POI (3 chars).
        lang (str): Language code (*es* or *en*).
        id (int): Optional, ID of the POI to query. Passing value -1
            will result in information from all POIs.

    Returns:
        Status boolean and parsed response (list[PoiDetails]), or
        message string in case of error.
    """
    # Endpoint parameters
    params = {
        'language': util.language_code(kwargs.get('lang')),
        'family': kwargs.get('family')
    }
    poi_id = kwargs.get('id')
    if poi_id:
        params['id'] = poi_id
    # Request
    result = self.make_request('detail_poi', {}, **params)
    if not util.check_result(result):
        return False, result.get('message', 'UNKNOWN ERROR')
    # Parse
    values = util.response_list(result, 'Data')
    return True, [emtype.PoiDetails(**item) for item in values]
python
{ "resource": "" }
q38540
ParkingApi.icon_description
train
def icon_description(self, **kwargs):
    """Obtain a list of elements that have an associated icon.

    Args:
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[IconDescription]), or
        message string in case of error.
    """
    # Endpoint parameters
    params = {'language': util.language_code(kwargs.get('lang'))}
    # Request
    result = self.make_request('icon_description', {}, **params)
    if not util.check_result(result):
        return False, result.get('message', 'UNKNOWN ERROR')
    # Parse
    values = util.response_list(result, 'Data')
    return True, [emtype.IconDescription(**item) for item in values]
python
{ "resource": "" }
q38541
ParkingApi.info_parking_poi
train
def info_parking_poi(self, **kwargs): """Obtain generic information on POIs and parkings. This returns a list of elements in a given radius from the coordinates. Args: radius (int): Radius of the search (in meters). latitude (double): Latitude in decimal degrees. longitude (double): Longitude in decimal degrees. lang (str): Language code (*es* or *en*). day (int): Day of the month in format DD. The number is automatically padded if it only has one digit. month (int): Month number in format MM. The number is automatically padded if it only has one digit. year (int): Year number in format YYYY. hour (int): Hour of the day in format hh. The number is automatically padded if it only has one digit. minute (int): Minute of the hour in format mm. The number is automatically padded if it only has one digit. poi_info (list[tuple]): List of tuples with the format ``(list[family], type, category)`` to query. Check the API documentation. min_free (list[int]): Number of free spaces to check. Must have the same length of ``poi_info``. field_codes (list[tuple]): List of tuples with the format ``(list[codes], name)``. Check the API documentation. Returns: Status boolean and parsed response (list[InfoParkingPoi]), or message string in case of error. 
""" # Endpoint parameters date = util.datetime_string( kwargs.get('day', 1), kwargs.get('month', 1), kwargs.get('year', 1970), kwargs.get('hour', 0), kwargs.get('minute', 0) ) family_categories = [] for element in kwargs.get('poi_info', []): family_categories.append({ 'poiCategory': { 'lstCategoryTypes': element[0] }, 'poiFamily': element[1], 'poiType': element[2] }) field_codes = [] for element in kwargs.get('field_codes', []): field_codes.append({ 'codes': { 'lstCodes': element[0] }, 'nameField': element[1] }) params = { 'TFamilyTTypeTCategory': { 'lstFamilyTypeCategory': family_categories }, 'coordinate': { 'latitude': str(kwargs.get('latitude', '0.0')), 'longitude': str(kwargs.get('longitude', '0.0')) }, 'dateTimeUse': date, 'language': util.language_code(kwargs.get('lang')), 'minimumPlacesAvailable': { 'lstminimumPlacesAvailable': kwargs.get('min_free', []) }, 'nameFieldCodes': { 'lstNameFieldCodes': field_codes }, 'radius': str(kwargs.get('radius', '0')) } # Request result = self.make_request('info_parking_poi', {}, **params) if not util.check_result(result): return False, result.get('message', 'UNKNOWN ERROR') # Parse values = util.response_list(result, 'Data') return True, [emtype.InfoParkingPoi(**a) for a in values]
python
{ "resource": "" }
q38542
ParkingApi.list_street_poi_parking
train
def list_street_poi_parking(self, **kwargs):
    """Obtain a list of addresses and POIs.

    This endpoint uses an address to perform the search.

    Args:
        lang (str): Language code (*es* or *en*).
        address (str): Address in which to perform the search.

    Returns:
        Status boolean and parsed response (list[ParkingPoi]), or
        message string in case of error.
    """
    # Endpoint parameters
    url_args = {
        'language': util.language_code(kwargs.get('lang')),
        'address': kwargs.get('address', '')
    }
    # Request
    result = self.make_request('list_street_poi_parking', url_args)
    if not util.check_result(result):
        return False, result.get('message', 'UNKNOWN ERROR')
    # Parse
    values = util.response_list(result, 'Data')
    return True, [emtype.ParkingPoi(**item) for item in values]
python
{ "resource": "" }
q38543
ParkingApi.list_types_poi
train
def list_types_poi(self, **kwargs):
    """Obtain a list of families, types and categories of POI.

    Args:
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[ParkingPoiType]), or
        message string in case of error.
    """
    # Endpoint parameters
    url_args = {'language': util.language_code(kwargs.get('lang'))}
    # Request
    result = self.make_request('list_poi_types', url_args)
    if not util.check_result(result):
        return False, result.get('message', 'UNKNOWN ERROR')
    # Parse
    values = util.response_list(result, 'Data')
    return True, [emtype.ParkingPoiType(**item) for item in values]
python
{ "resource": "" }
q38544
YahooDAM.readQuotes
train
def readQuotes(self, start, end):
    '''Read quotes from Yahoo Financial for the configured symbol.'''
    if self.symbol is None:
        # Nothing to look up without a symbol; return an empty result.
        LOG.debug('Symbol is None')
        return []
    return self.__yf.getQuotes(self.symbol, start, end)
python
{ "resource": "" }
q38545
deepcopy
train
def deepcopy(value):
    """
    The default copy.deepcopy seems to copy all objects and some are not
    `copy-able`. We only need to make sure the provided data is a copy per
    key, object does not need to be copied.
    """
    # Scalars (and any non-container object) are returned as-is: only the
    # container structure must be fresh, not the leaf objects.
    if not isinstance(value, (dict, list, tuple)):
        return value
    if isinstance(value, dict):
        # NOTE(review): dicts iterate items() directly while list/tuple go
        # through get_keys() -- presumably get_keys yields indices; confirm
        # against its definition.
        copy = {}
        for k, v in value.items():
            copy[k] = deepcopy(v)
    if isinstance(value, tuple):
        # Tuples are immutable, so build a mutable scratch list of the same
        # length, fill it per index, then freeze it back into a tuple.
        copy = list(range(len(value)))
        for k in get_keys(list(value)):
            copy[k] = deepcopy(value[k])
        copy = tuple(copy)
    if isinstance(value, list):
        # Same index-by-index copy for lists, without the tuple() freeze.
        copy = list(range(len(value)))
        for k in get_keys(value):
            copy[k] = deepcopy(value[k])
    return copy
python
{ "resource": "" }
q38546
LS370._factory_default
train
def _factory_default(self, confirm=False):
    """Resets the device to factory defaults.

    :param confirm: This function should not normally be used; to prevent
        accidental resets, a confirm value of `True` must be used.
    """
    # Guard clause: refuse anything that is not literally True.
    if confirm is not True:
        raise ValueError('Reset to factory defaults was not confirmed.')
    self._write(('DFLT', Integer), 99)
python
{ "resource": "" }
q38547
PersistentSessionWrapper.createSessionForKey
train
def createSessionForKey(self, key, user):
    """
    Persist a new session for *user* in the database.

    @type key: L{bytes}
    @param key: The persistent session identifier.

    @type user: L{bytes}
    @param user: The username the session will belong to.
    """
    # Constructing the item with a store is what persists it; the
    # instance itself is not needed afterwards.
    PersistentSession(
        authenticatedAs=user,
        sessionKey=key,
        store=self.store)
python
{ "resource": "" }
q38548
PersistentSessionWrapper.authenticatedUserForKey
train
def authenticatedUserForKey(self, key):
    """
    Find a persistent session for a user.

    @type key: L{bytes}
    @param key: The persistent session identifier.

    @rtype: L{bytes} or C{None}
    @return: The avatar ID the session belongs to, or C{None} if no
        such session exists.
    """
    match = self.store.findFirst(
        PersistentSession,
        PersistentSession.sessionKey == key)
    if match is None:
        return None
    # Refresh the session so active logins do not expire.
    match.renew()
    return match.authenticatedAs
python
{ "resource": "" }
q38549
PersistentSessionWrapper.removeSessionWithKey
train
def removeSessionWithKey(self, key):
    """
    Remove a persistent session, if it exists.

    @type key: L{bytes}
    @param key: The persistent session identifier.
    """
    matching = self.store.query(
        PersistentSession,
        PersistentSession.sessionKey == key)
    # Deleting an empty query result is harmless, so no existence check
    # is required.
    matching.deleteFromStore()
python
{ "resource": "" }
q38550
PersistentSessionWrapper._cleanSessions
train
def _cleanSessions(self):
    """
    Delete every persistent session older than the session lifetime, and
    remember when this sweep happened.
    """
    cutoff = extime.Time() - timedelta(seconds=PERSISTENT_SESSION_LIFETIME)
    expired = self.store.query(
        PersistentSession,
        PersistentSession.lastUsed < cutoff)
    expired.deleteFromStore()
    # Record the sweep time so the next sweep can be rate-limited.
    self._lastClean = self._clock.seconds()
python
{ "resource": "" }
q38551
PersistentSessionWrapper._maybeCleanSessions
train
def _maybeCleanSessions(self): """ Clean expired sessions if it's been long enough since the last clean. """ sinceLast = self._clock.seconds() - self._lastClean if sinceLast > self.sessionCleanFrequency: self._cleanSessions()
python
{ "resource": "" }
q38552
PersistentSessionWrapper.cookieDomainForRequest
train
def cookieDomainForRequest(self, request):
    """
    Pick a domain to use when setting cookies.

    @type request: L{nevow.inevow.IRequest}
    @param request: Request to determine cookie domain for

    @rtype: C{str} or C{None}
    @return: Domain name to use when setting cookies, or C{None} to
        indicate that only the domain in the request should be used
    """
    hostHeader = request.getHeader('host')
    if hostHeader is None:
        # Malformed request without a Host header: fall back to the
        # default (request-domain-only) cookie behaviour.
        return None
    # Discard any port number before comparing against known domains.
    requestedHost = hostHeader.split(':')[0]
    wildcard = self._enableSubdomains
    for knownDomain in self._domains:
        dottedSuffix = '.' + knownDomain
        if requestedHost == knownDomain:
            # Exact match on a recognized domain: widen the cookie to
            # all subdomains only when that feature is enabled.
            if wildcard:
                return dottedSuffix
            return None
        if wildcard and requestedHost.endswith(dottedSuffix):
            # Subdomain of a recognized domain: share the cookie with
            # the whole domain tree.
            return dottedSuffix
    if wildcard:
        # Unrecognized domain, but subdomains are enabled: prefix with
        # "." so the cookie covers this domain and its subdomains.
        return '.' + requestedHost
    # Unrecognized domain with subdomains disabled: default behaviour.
    return None
python
{ "resource": "" }
q38553
PersistentSessionWrapper.savorSessionCookie
train
def savorSessionCookie(self, request):
    """
    Make the session cookie last as long as the persistent session.

    @type request: L{nevow.inevow.IRequest}
    @param request: The HTTP request object for the guard login URL.
    """
    sessionID = request.getSession().uid
    # Re-issue the cookie with an explicit max-age so it survives for
    # the whole persistent-session lifetime.
    request.addCookie(
        self.cookieKey, sessionID,
        path='/',
        max_age=PERSISTENT_SESSION_LIFETIME,
        domain=self.cookieDomainForRequest(request))
python
{ "resource": "" }
q38554
PersistentSessionWrapper.login
train
def login(self, request, session, creds, segments):
    """
    Called to check the credentials of a user.

    Here we extend guard's implementation to preauthenticate users if they
    have a valid persistent session.

    @type request: L{nevow.inevow.IRequest}
    @param request: The HTTP request being handled.

    @type session: L{nevow.guard.GuardSession}
    @param session: The user's current session.

    @type creds: L{twisted.cred.credentials.ICredentials}
    @param creds: The credentials the user presented.

    @type segments: L{tuple}
    @param segments: The remaining segments of the URL.

    @return: A deferred firing with the user's avatar.
    """
    # Opportunistically sweep expired persistent sessions first.
    self._maybeCleanSessions()
    # No explicit credentials: if a persistent session exists for this
    # browser session's uid, upgrade to a preauthenticated login and
    # refresh the long-lived cookie.
    if isinstance(creds, credentials.Anonymous):
        preauth = self.authenticatedUserForKey(session.uid)
        if preauth is not None:
            self.savorSessionCookie(request)
            creds = userbase.Preauthenticated(preauth)

    def cbLoginSuccess(input):
        """
        User authenticated successfully.  Create the persistent session,
        and associate it with the username.  (XXX it doesn't work like
        this now)
        """
        # NOTE(review): request.args values are lists elsewhere in this
        # file (see getCredentials); only the non-None-ness of 'user' is
        # used here, the actual username comes from creds.username.
        user = request.args.get('username')
        if user is not None:
            # create a database session and associate it with this user
            cookieValue = session.uid

            # Only persist across browser restarts when the user asked
            # to be remembered.
            if request.args.get('rememberMe'):
                self.createSessionForKey(cookieValue, creds.username)
                self.savorSessionCookie(request)
        return input

    # Delegate the actual credential check to guard, then hook in the
    # persistent-session bookkeeping on success.
    return (
        guard.SessionWrapper.login(
            self, request, session, creds, segments)
        .addCallback(cbLoginSuccess))
python
{ "resource": "" }
q38555
PersistentSessionWrapper.explicitLogout
train
def explicitLogout(self, session):
    """
    Handle a user-requested logout.

    Guard's behaviour for the logout action is extended here to delete
    the persistent session: the user explicitly asked to log out, so they
    must be required to log in again on the next request.

    @type session: L{nevow.guard.GuardSession}
    @param session: The session of the user logging out.
    """
    # Let guard tear down the in-memory session first, then drop the
    # database-backed one so the user is not silently re-authenticated.
    guard.SessionWrapper.explicitLogout(self, session)
    self.removeSessionWithKey(session.uid)
python
{ "resource": "" }
q38556
PersistentSessionWrapper.getCredentials
train
def getCredentials(self, request):
    """
    Derive credentials from an HTTP request.

    Override SessionWrapper.getCredentials to add the Host: header to the
    credentials. This will make web-based virtual hosting work.

    @type request: L{nevow.inevow.IRequest}
    @param request: The request being handled.

    @rtype: L{twisted.cred.credentials.ICredentials}
    @return: Credentials derived from the HTTP request.
    """
    username = usernameFromRequest(request)
    # Form-post args are lists; fall back to the empty string when no
    # password field was submitted.
    password = request.args.get('password', [''])[0]
    return credentials.UsernamePassword(username, password)
python
{ "resource": "" }
q38557
startMenu
train
def startMenu(translator, navigation, tag):
    """
    Drop-down menu-style navigation view.

    For each primary navigation element available, a copy of the I{tab}
    pattern will be loaded from the tag.  It will have its I{href} slot
    filled with the URL for that navigation item.  It will have its I{name}
    slot filled with the user-visible name of the navigation element.  It
    will have its I{kids} slot filled with a list of secondary navigation
    for that element.

    For each secondary navigation element available beneath each primary
    navigation element, a copy of the I{subtabs} pattern will be loaded
    from the tag.  It will have its I{kids} slot filled with a self-similar
    structure.

    @type translator: L{IWebTranslator} provider
    @type navigation: L{list} of L{Tab}

    @rtype: {nevow.stan.Tag}
    """
    # Give every Tab a linkURL attribute before any rendering happens.
    setTabURLs(navigation, translator)
    getp = IQ(tag).onePattern

    def fillSlots(tabs):
        # Recursively render one navigation level, yielding one filled
        # 'tab' pattern per entry; children render via the 'subtabs'
        # pattern wrapping a self-similar structure.
        for tab in tabs:
            if tab.children:
                kids = getp('subtabs').fillSlots('kids', fillSlots(tab.children))
            else:
                kids = ''
            yield dictFillSlots(getp('tab'),
                                dict(href=tab.linkURL,
                                     name=tab.name,
                                     kids=kids))

    return tag.fillSlots('tabs', fillSlots(navigation))
python
{ "resource": "" }
q38558
applicationNavigation
train
def applicationNavigation(ctx, translator, navigation):
    """
    Horizontal, primary-only navigation view.

    For the navigation element currently being viewed, copies of the
    I{selected-app-tab} and I{selected-tab-contents} patterns will be
    loaded from the tag.  For all other navigation elements, copies of the
    I{app-tab} and I{tab-contents} patterns will be loaded.

    For either case, the former pattern will have its I{name} slot filled
    with the name of the navigation element and its I{tab-contents} slot
    filled with the latter pattern.  The latter pattern will have its
    I{href} slot filled with a link to the corresponding navigation
    element.

    The I{tabs} slot on the tag will be filled with all the
    I{selected-app-tab} or I{app-tab} pattern copies.

    @type ctx: L{nevow.context.WebContext}
    @type translator: L{IWebTranslator} provider
    @type navigation: L{list} of L{Tab}

    @rtype: {nevow.stan.Tag}
    """
    # Resolve tab URLs first, then work out which tab matches the URL
    # currently being viewed so it can be rendered as selected.
    setTabURLs(navigation, translator)
    selectedTab = getSelectedTab(navigation,
                                 url.URL.fromContext(ctx))

    getp = IQ(ctx.tag).onePattern
    tabs = []
    for tab in navigation:
        # A tab is drawn selected when it is the current tab or the
        # parent of the current tab.
        if tab == selectedTab or selectedTab in tab.children:
            p = 'selected-app-tab'
            contentp = 'selected-tab-contents'
        else:
            p = 'app-tab'
            contentp = 'tab-contents'

        childTabs = []
        for subtab in tab.children:
            try:
                subtabp = getp("subtab")
            except NodeNotFound:
                # Secondary navigation is optional in the template; skip
                # it silently when the pattern is absent.
                continue
            childTabs.append(
                dictFillSlots(subtabp, {
                        'name': subtab.name,
                        'href': subtab.linkURL,
                        'tab-contents': getp("subtab-contents")
                        }))
        tabs.append(dictFillSlots(
                getp(p),
                {'name': tab.name,
                 'tab-contents': getp(contentp).fillSlots(
                        'href', tab.linkURL),
                 'subtabs': childTabs}))

    ctx.tag.fillSlots('tabs', tabs)
    return ctx.tag
python
{ "resource": "" }
q38559
_generate
train
def _generate():
    """
    Generate a new SSH key pair.

    Returns the 4096-bit RSA private key serialized in OpenSSH format.
    """
    rsaKey = rsa.generate_private_key(
        backend=default_backend(),
        key_size=4096,
        public_exponent=65537)
    return Key(rsaKey).toString('openssh')
python
{ "resource": "" }
q38560
ShellServer._draw
train
def _draw(self): """ Call the drawing API for the main menu widget with the current known terminal size and the terminal. """ self._window.draw(self._width, self._height, self.terminal)
python
{ "resource": "" }
q38561
ShellServer.reactivate
train
def reactivate(self):
    """
    Called when a sub-protocol is finished.  This disconnects the
    sub-protocol and redraws the main menu UI.
    """
    finished = self._protocol
    finished.connectionLost(None)
    self._protocol = None
    # Clear whatever the sub-protocol left on screen, then force a full
    # redraw of the menu window.
    self.terminal.reset()
    window = self._window
    window.filthy()
    window.repaint()
python
{ "resource": "" }
q38562
ShellServer.keystrokeReceived
train
def keystrokeReceived(self, keyID, modifier):
    """
    Forward input events to the application-supplied protocol if one is
    currently active, otherwise forward them to the main menu UI.
    """
    target = self._window if self._protocol is None else self._protocol
    target.keystrokeReceived(keyID, modifier)
python
{ "resource": "" }
q38563
FloatingIP.fetch
train
def fetch(self):
    """
    Fetch & return a new `FloatingIP` object representing the floating
    IP's current state

    :rtype: FloatingIP
    :raises DOAPIError: if the API endpoint replies with an error (e.g.,
        if the floating IP no longer exists)
    """
    manager = self.doapi_manager
    payload = manager.request(self.url)["floating_ip"]
    return manager._floating_ip(payload)
python
{ "resource": "" }
q38564
FloatingIP.assign
train
def assign(self, droplet_id):
    """
    Assign the floating IP to a droplet

    :param droplet_id: the droplet to assign the floating IP to as either
        an ID or a `Droplet` object
    :type droplet_id: integer or `Droplet`
    :return: an `Action` representing the in-progress operation on the
        floating IP
    :rtype: Action
    :raises DOAPIError: if the API endpoint replies with an error
    """
    # Accept a full Droplet object as a convenience; reduce it to its
    # numeric ID before issuing the action.
    target = droplet_id.id if isinstance(droplet_id, Droplet) else droplet_id
    return self.act(type='assign', droplet_id=target)
python
{ "resource": "" }
q38565
SSHKey._id
train
def _id(self): r""" The `SSHKey`'s ``id`` field, or if that is not defined, its ``fingerprint`` field. If neither field is defined, accessing this attribute raises a `TypeError`. """ if self.get("id") is not None: return self.id elif self.get("fingerprint") is not None: return self.fingerprint else: raise TypeError('SSHKey has neither .id nor .fingerprint')
python
{ "resource": "" }
q38566
SSHKey.fetch
train
def fetch(self):
    """
    Fetch & return a new `SSHKey` object representing the SSH key's
    current state

    :rtype: SSHKey
    :raises DOAPIError: if the API endpoint replies with an error (e.g.,
        if the SSH key no longer exists)
    """
    manager = self.doapi_manager
    payload = manager.request(self.url)["ssh_key"]
    return manager._ssh_key(payload)
python
{ "resource": "" }
q38567
MongrelRequest.parse
train
def parse(msg):
    """
    Helper method for parsing a Mongrel2 request string and returning a
    new `MongrelRequest` instance.

    The wire format is ``sender conn_id path rest`` where ``rest`` holds
    the tnetstring-encoded headers followed by the tnetstring-encoded
    body.
    """
    sender, conn_id, path, rest = msg.split(' ', 3)
    headers, rest = tnetstring.pop(rest)
    body, _ = tnetstring.pop(rest)
    # Headers may arrive either as an already-parsed mapping or as a
    # JSON string; decode the latter.  isinstance (rather than an exact
    # type() comparison) also accepts str subclasses.
    if isinstance(headers, str):
        headers = json.loads(headers)
    return MongrelRequest(sender, conn_id, path, headers, body)
python
{ "resource": "" }
q38568
MongrelRequest.should_close
train
def should_close(self):
    """
    Check whether the HTTP connection of this request should be closed
    after the request is finished.

    We check for the `Connection` HTTP header and for the HTTP Version
    (only `HTTP/1.1` supports keep-alive).
    """
    headers = self.headers
    connection = headers.get('connection')
    if connection == 'close':
        return True
    # Requests with a declared length, or bodyless methods, can keep the
    # connection open -- but only when the client asked for keep-alive.
    has_length = 'content-length' in headers
    bodyless = headers.get('METHOD') in ('HEAD', 'GET')
    if has_length or bodyless:
        return connection != 'keep-alive'
    # Otherwise only HTTP/1.0 forces a close.
    return headers.get('VERSION') == 'HTTP/1.0'
python
{ "resource": "" }
q38569
PPMS.system_status
train
def system_status(self):
    """The system status codes."""
    flag, timestamp, status = self._query(('GETDAT? 1',
                                           (Integer, Float, Integer)))
    # The status word packs four 4-bit fields; peel them off nibble by
    # nibble, least significant first.
    temperature = STATUS_TEMPERATURE[status & 0xf]
    magnet = STATUS_MAGNET[(status >> 4) & 0xf]
    chamber = STATUS_CHAMBER[(status >> 8) & 0xf]
    sample_position = STATUS_SAMPLE_POSITION[(status >> 12) & 0xf]
    return {
        # Convert the unix timestamp into a datetime object.
        'timestamp': datetime.datetime.fromtimestamp(timestamp),
        'temperature': temperature,
        'magnet': magnet,
        'chamber': chamber,
        'sample_position': sample_position,
    }
python
{ "resource": "" }
q38570
PPMS.beep
train
def beep(self, duration, frequency):
    """Generates a beep.

    :param duration: The duration in seconds, in the range 0.1 to 5.
    :param frequency: The frequency in Hz, in the range 500 to 5000.
    """
    argTypes = [Float(min=0.1, max=5.0), Integer(min=500, max=5000)]
    self._write(('BEEP', argTypes), duration, frequency)
python
{ "resource": "" }
q38571
PPMS.move
train
def move(self, position, slowdown=0):
    """Move to the specified sample position.

    :param position: The target position.
    :param slowdown: The slowdown code, an integer in the range 0 to 14,
        used to scale the stepper motor speed. 0, the default, is the
        fastest rate and 14 the slowest.
    """
    # Mode 0 is a normal move (modes 1 and 2 are used by move_to_limit
    # and redefine_position).
    command = 'MOVE', [Float, Integer, Integer(min=0, max=14)]
    self._write(command, position, 0, slowdown)
python
{ "resource": "" }
q38572
PPMS.move_to_limit
train
def move_to_limit(self, position):
    """Move to limit switch and define it as position.

    :param position: The new position of the limit switch.
    """
    # Mode 1 selects the move-to-limit behaviour.
    command = 'MOVE', [Float, Integer]
    self._write(command, position, 1)
python
{ "resource": "" }
q38573
PPMS.redefine_position
train
def redefine_position(self, position):
    """Redefines the current position to the new position.

    :param position: The new position.
    """
    # Mode 2 redefines the current position without moving.
    command = 'MOVE', [Float, Integer]
    self._write(command, position, 2)
python
{ "resource": "" }
q38574
PPMS.set_field
train
def set_field(self, field, rate, approach='linear', mode='persistent', wait_for_stability=True, delay=1):
    """Sets the magnetic field.

    :param field: The target field in Oersted.

        .. note:: The conversion is 1 Oe = 0.1 mT.

    :param rate: The field rate in Oersted per minute.
    :param approach: The approach mode, either 'linear', 'no overshoot' or
        'oscillate'.
    :param mode: The state of the magnet at the end of the charging
        process, either 'persistent' or 'driven'.
    :param wait_for_stability: If `True`, the function call blocks until
        the target field is reached and stable.
    :param delay: Specifies the frequency in seconds how often the magnet
        status is checked. (This has no effect if wait_for_stability is
        `False`).
    """
    # NOTE(review): assigning target_field presumably triggers the field
    # change via a property setter -- confirm against its definition.
    self.target_field = field, rate, approach, mode
    if wait_for_stability and self.system_status['magnet'].startswith('persist'):
        # Wait until the persistent switch heats up.
        # NOTE(review): magnet_config[5] appears to hold that heat-up
        # time in seconds -- confirm the index meaning.
        time.sleep(self.magnet_config[5])
    # Poll until the magnet reports a stable state (either persistent or
    # driven); never entered when wait_for_stability is False.
    while wait_for_stability:
        status = self.system_status['magnet']
        if status in ('persistent, stable', 'driven, stable'):
            break
        time.sleep(delay)
python
{ "resource": "" }
q38575
dumpgrants
train
def dumpgrants(destination, as_json=None, setspec=None):
    """Harvest grants from OpenAIRE and store them locally."""
    # Refuse to clobber an existing database without confirmation; the
    # confirm prompt aborts the command on a negative answer.
    if os.path.isfile(destination):
        click.confirm("Database '{0}' already exists."
                      "Do you want to write to it?".format(destination),
                      abort=True)  # no cover
    harvester = OAIREDumper(destination, setspec=setspec)
    harvester.dump(as_json=as_json)
python
{ "resource": "" }
q38576
SetFrontPage.postOptions
train
def postOptions(self):
    """
    Find an installed offering and set the site front page to its
    application's front page.
    """
    installed = self.store.findFirst(
        offering.InstalledOffering,
        (offering.InstalledOffering.offeringName == self["name"]))
    if installed is None:
        raise usage.UsageError("No offering of that name"
                               " is installed.")
    frontPage = self.store.findUnique(publicweb.FrontPage)
    frontPage.defaultApplication = installed.application
python
{ "resource": "" }
q38577
_legacySpecialCases
train
def _legacySpecialCases(form, patterns, parameter):
    """
    Create a view object for the given parameter.

    This function implements the remaining view construction logic which
    has not yet been converted to the C{viewFactory}-style expressed in
    L{_LiveFormMixin.form}.

    @type form: L{_LiveFormMixin}
    @param form: The form fragment which contains the given parameter.

    @type patterns: L{PatternDictionary}

    @type parameter: L{Parameter}, L{ChoiceParameter}, or L{ListParameter}.
    """
    # Select the container pattern keyed by the parameter's input type.
    p = patterns[parameter.type + '-input-container']

    if parameter.type == TEXTAREA_INPUT:
        # Textareas carry their default as the element's value slot.
        p = dictFillSlots(p, dict(label=parameter.label,
                                  name=parameter.name,
                                  value=parameter.default or ''))
    elif parameter.type == MULTI_TEXT_INPUT:
        # Multi-text inputs render one sub-input per expected value,
        # each named '<name>_<index>'.
        subInputs = list()

        for i in xrange(parameter.count):
            subInputs.append(dictFillSlots(patterns['input'],
                                dict(name=parameter.name + '_' + str(i),
                                     type='text',
                                     value=parameter.defaults[i])))

        p = dictFillSlots(p, dict(label=parameter.label or parameter.name,
                                  inputs=subInputs))
    else:
        # All remaining input types share one simple input element.
        if parameter.default is not None:
            value = parameter.default
        else:
            value = ''

        # A checkbox with a truthy default uses a pre-checked variant of
        # the input pattern.
        if parameter.type == CHECKBOX_INPUT and parameter.default:
            inputPattern = 'checked-checkbox-input'
        else:
            inputPattern = 'input'

        p = dictFillSlots(
            p, dict(label=parameter.label or parameter.name,
                    input=dictFillSlots(patterns[inputPattern],
                                        dict(name=parameter.name,
                                             type=parameter.type,
                                             value=value))))

    # CSS hook so stylesheets can target this specific parameter.
    p(**{'class' : 'liveform_'+parameter.name})

    if parameter.description:
        description = patterns['description'].fillSlots(
            'description', parameter.description)
    else:
        description = ''

    return dictFillSlots(
        patterns['parameter-input'],
        dict(input=p, description=description))
python
{ "resource": "" }
q38578
Parameter.clone
train
def clone(self, default):
    """
    Make a copy of this parameter, supplying a different default.

    @type default: C{unicode} or C{NoneType}
    @param default: A value which will be initially presented in the view
        as the value for this parameter, or C{None} if no such value is
        to be presented.

    @rtype: L{Parameter}
    """
    # Rebuild through the concrete class so subclasses clone as
    # instances of themselves.
    cls = self.__class__
    return cls(
        self.name, self.type, self.coercer, self.label,
        self.description, default, self.viewFactory)
python
{ "resource": "" }
q38579
ListChangeParameter._prepareSubForm
train
def _prepareSubForm(self, liveForm): """ Utility for turning liveforms into subforms, and compacting them as necessary. @param liveForm: a liveform. @type liveForm: L{LiveForm} @return: a sub form. @rtype: L{LiveForm} """ liveForm = liveForm.asSubForm(self.name) # XXX Why did this work??? # if we are compact, tell the liveform so it can tell its parameters # also if self._parameterIsCompact: liveForm.compact() return liveForm
python
{ "resource": "" }
q38580
ListChangeParameter._newIdentifier
train
def _newIdentifier(self): """ Make a new identifier for an as-yet uncreated model object. @rtype: C{int} """ id = self._allocateID() self._idsToObjects[id] = self._NO_OBJECT_MARKER self._lastValues[id] = None return id
python
{ "resource": "" }
q38581
ListChangeParameter._coerceSingleRepetition
train
def _coerceSingleRepetition(self, dataSet):
    """
    Make a new liveform with our parameters, and get it to coerce our
    data for us.
    """
    # A throwaway LiveForm is used purely for its coercion logic; its
    # callable never fires, hence the no-op lambda.
    throwaway = LiveForm(lambda **k: None, self.parameters, self.name)
    return throwaway.fromInputs(dataSet)
python
{ "resource": "" }
q38582
ListChangeParameter.coercer
train
def coercer(self, dataSets):
    """
    Coerce all of the repetitions and sort them into creations, edits and
    deletions.

    @rtype: L{ListChanges}
    @return: An object describing all of the creations, modifications, and
        deletions represented by C{dataSets}.
    """
    # Xxx - This does a slightly complex (hey, it's like 20 lines, how
    # complex could it really be?) thing to figure out which elements are
    # newly created, which elements were edited, and which elements no
    # longer exist. It might be simpler if the client kept track of this
    # and passed a three-tuple of lists (or whatever - some separate data
    # structures) to the server, so everything would be all figured out
    # already. This would require the client
    # (Mantissa.LiveForm.RepeatableForm) to be more aware of what events
    # the user is triggering in the browser so that it could keep state for
    # adds/deletes/edits separately from DOM and widget objects. This
    # would remove the need for RepeatedLiveFormWrapper.
    def makeSetter(identifier, values):
        # Returns a callback that binds a freshly-created model object
        # (and the values it was created from) to its identifier.
        def setter(defaultObject):
            self._idsToObjects[identifier] = defaultObject
            self._lastValues[identifier] = values
        return setter

    # Coerce creations and edits independently; both produce deferreds.
    created = self._coerceAll(self._extractCreations(dataSets))
    edited = self._coerceAll(self._extractEdits(dataSets))

    coerceDeferred = gatherResults([created, edited])
    def cbCoerced((created, edited)):
        # Track every identifier seen in this submission; anything we
        # know about that is absent here was deleted client-side.
        receivedIdentifiers = set()
        createObjects = []
        for (identifier, dataSet) in created:
            receivedIdentifiers.add(identifier)
            createObjects.append(
                CreateObject(dataSet, makeSetter(identifier, dataSet)))
        editObjects = []
        for (identifier, dataSet) in edited:
            receivedIdentifiers.add(identifier)
            # Only emit an EditObject when the values actually changed
            # since the last submission.
            lastValues = self._lastValues[identifier]
            if dataSet != lastValues:
                modelObject = self._objectFromID(identifier)
                editObjects.append(EditObject(modelObject, dataSet))
            self._lastValues[identifier] = dataSet
        deleted = []
        for identifier in set(self._idsToObjects) - receivedIdentifiers:
            existing = self._objectFromID(identifier)
            # Placeholder slots never got a real object, so there is
            # nothing to report as deleted -- just drop the bookkeeping.
            if existing is not self._NO_OBJECT_MARKER:
                deleted.append(existing)
            self._idsToObjects.pop(identifier)
        return ListChanges(createObjects, editObjects, deleted)
    coerceDeferred.addCallback(cbCoerced)
    return coerceDeferred
python
{ "resource": "" }
q38583
ChoiceParameter.clone
train
def clone(self, choices):
    """
    Make a copy of this parameter, supply different choices.

    @param choices: A sequence of L{Option} instances.
    @type choices: C{list}

    @rtype: L{ChoiceParameter}
    """
    # Rebuild through the concrete class so subclasses clone as
    # instances of themselves.
    cls = self.__class__
    return cls(
        self.name,
        choices,
        self.label,
        self.description,
        self.multiple,
        self.viewFactory)
python
{ "resource": "" }
q38584
_LiveFormMixin.compact
train
def compact(self):
    """
    Switch to the compact variant of the live form template.

    By default, this will simply create a loader for the
    C{self.compactFragmentName} template and compact all of this form's
    parameters.
    """
    self.docFactory = webtheme.getLoader(self.compactFragmentName)
    # Each parameter renders its own widget, so every one must switch
    # to its compact variant as well.
    for parameter in self.parameters:
        parameter.compact()
python
{ "resource": "" }
q38585
_LiveFormMixin.submitbutton
train
def submitbutton(self, request, tag):
    """
    Render an INPUT element of type SUBMIT which will post this form to
    the server.
    """
    label = self._getDescription()
    return tags.input(type='submit', name='__submit__', value=label)
python
{ "resource": "" }
q38586
_LiveFormMixin.form
train
def form(self, request, tag):
    """
    Render the inputs for a form.

    @param tag: A tag with:
      - I{form} and I{description} slots
      - I{liveform} and I{subform} patterns, to fill the I{form} slot
      - An I{inputs} slot, to fill with parameter views
      - L{IParameterView.patternName}I{-input-container} patterns for each
        parameter type in C{self.parameters}
    """
    patterns = PatternDictionary(self.docFactory)
    inputs = []

    for parameter in self.parameters:
        view = parameter.viewFactory(parameter, None)
        if view is not None:
            # New-style parameter: the view renders itself from the
            # pattern that matches its declared patternName.
            view.setDefaultTemplate(
                tag.onePattern(view.patternName + '-input-container'))
            # Not every view is a fragment; attach to the page only when
            # the view supports it.
            setFragmentParent = getattr(view, 'setFragmentParent', None)
            if setFragmentParent is not None:
                setFragmentParent(self)
            inputs.append(view)
        else:
            # Old-style parameter without a view factory result: fall
            # back to the legacy per-type construction logic.
            inputs.append(_legacySpecialCases(self, patterns, parameter))

    # Top-level forms and subforms use different wrapper patterns.
    if self.subFormName is None:
        pattern = tag.onePattern('liveform')
    else:
        pattern = tag.onePattern('subform')

    return dictFillSlots(
        tag,
        dict(form=pattern.fillSlots('inputs', inputs),
             description=self._getDescription()))
python
{ "resource": "" }
q38587
_LiveFormMixin.invoke
train
def invoke(self, formPostEmulator):
    """
    Invoke my callable with input from the browser.

    @param formPostEmulator: a dict of lists of strings in a format like
        a cgi-module form post.
    """
    coerced = self.fromInputs(formPostEmulator)
    # Once coercion finishes, splat the structured values into the
    # form's callable as keyword arguments.
    coerced.addCallback(lambda params: self.callable(**params))
    return coerced
python
{ "resource": "" }
q38588
_LiveFormMixin.fromInputs
train
def fromInputs(self, received):
    """
    Convert some random strings received from a browser into structured
    data, using a list of parameters.

    @param received: a dict of lists of strings, i.e. the canonical
        Python form of web form post.

    @rtype: L{Deferred}
    @return: A Deferred which will be called back with a dict mapping
        parameter names to coerced parameter values.
    """
    coercions = []
    for parameter in self.parameters:
        parameterName = parameter.name.encode('ascii')
        coercion = maybeDeferred(parameter.fromInputs, received)
        # Pair each coerced value with its parameter name; the default
        # argument pins the current name in the lambda's scope.
        coercion.addCallback(
            lambda value, name=parameterName: (name, value))
        coercions.append(coercion)
    return gatherResults(coercions).addCallback(dict)
python
{ "resource": "" }
q38589
ListChangeParameterView.repeater
train
def repeater(self, req, tag):
    """
    Render some UI for repeating our form.
    """
    pattern = inevow.IQ(self.docFactory).onePattern('repeater')
    return pattern.fillSlots(
        'object-description', self.parameter.modelObjectDescription)
python
{ "resource": "" }
q38590
FormParameterView.input
train
def input(self, request, tag):
    """
    Add the wrapped form, as a subform, as a child of the given tag.
    """
    subForm = self.parameter.form.asSubForm(self.parameter.name)
    subForm.setFragmentParent(self)
    return tag[subForm]
python
{ "resource": "" }
q38591
GeoApi.get_arrive_stop
train
def get_arrive_stop(self, **kwargs):
    """Obtain bus arrival info in target stop.

    Args:
        stop_number (int): Stop number to query.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Arrival]), or message
        string in case of error.
    """
    # Endpoint arguments.
    query = {
        'idStop': kwargs.get('stop_number'),
        'cultureInfo': util.language_code(kwargs.get('lang')),
    }

    response = self.make_request('geo', 'get_arrive_stop', **query)

    # This endpoint returns no status code; the presence of the
    # 'arrives' key is the only success indicator.
    if not util.check_result(response, 'arrives'):
        return False, 'UNKNOWN ERROR'

    arrivals = util.response_list(response, 'arrives')
    return True, [emtype.Arrival(**item) for item in arrivals]
python
{ "resource": "" }
q38592
GeoApi.get_groups
train
def get_groups(self, **kwargs):
    """Obtain line types and details.

    Args:
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[GeoGroupItem]), or
        message string in case of error.
    """
    # Endpoint arguments.
    query = {'cultureInfo': util.language_code(kwargs.get('lang'))}

    response = self.make_request('geo', 'get_groups', **query)
    if not util.check_result(response):
        return False, response.get('resultDescription', 'UNKNOWN ERROR')

    groups = util.response_list(response, 'resultValues')
    return True, [emtype.GeoGroupItem(**item) for item in groups]
python
{ "resource": "" }
q38593
GeoApi.get_info_line
train
def get_info_line(self, **kwargs):
    """Obtain basic information on a bus line on a given date.

    Args:
        day (int): Day of the month in format DD. The number is
            automatically padded if it only has one digit.
        month (int): Month number in format MM. The number is
            automatically padded if it only has one digit.
        year (int): Year number in format YYYY.
        lines (list[int] | int): Lines to query, may be empty to get
            all the lines.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Line]), or message
        string in case of error.
    """
    # Endpoint parameters.  The fallbacks must be integers: the '%02d'
    # conversions raise TypeError on the previous string defaults
    # ('01', '1970') whenever a date component was omitted.
    select_date = '%02d/%02d/%d' % (
        kwargs.get('day', 1),
        kwargs.get('month', 1),
        kwargs.get('year', 1970)
    )

    params = {
        'fecha': select_date,
        'line': util.ints_to_string(kwargs.get('lines', [])),
        'cultureInfo': util.language_code(kwargs.get('lang'))
    }

    # Request
    result = self.make_request('geo', 'get_info_line', **params)

    # Funny endpoint, no status code
    if not util.check_result(result, 'Line'):
        return False, 'UNKNOWN ERROR'

    # Parse
    values = util.response_list(result, 'Line')
    return True, [emtype.Line(**a) for a in values]
python
{ "resource": "" }
q38594
GeoApi.get_poi
train
def get_poi(self, **kwargs):
    """Obtain a list of POI in the given radius.

    Args:
        latitude (double): Latitude in decimal degrees.
        longitude (double): Longitude in decimal degrees.
        types (list[int] | int): POI IDs (or empty list to get all).
        radius (int): Radius (in meters) of the search.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Poi]), or message
        string in case of error.
    """
    # Note the endpoint's axis naming: X is longitude, Y is latitude.
    query = {
        'coordinateX': kwargs.get('longitude'),
        'coordinateY': kwargs.get('latitude'),
        'tipos': util.ints_to_string(kwargs.get('types')),
        'Radius': kwargs.get('radius'),
        'cultureInfo': util.language_code(kwargs.get('lang')),
    }

    response = self.make_request('geo', 'get_poi', **query)

    # This endpoint returns no status code; the presence of the
    # 'poiList' key is the only success indicator.
    if not util.check_result(response, 'poiList'):
        return False, 'UNKNOWN ERROR'

    pois = util.response_list(response, 'poiList')
    return True, [emtype.Poi(**item) for item in pois]
python
{ "resource": "" }
q38595
GeoApi.get_poi_types
train
def get_poi_types(self, **kwargs):
    """Obtain POI types.

    Args:
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[PoiType]), or message
        string in case of error.
    """
    # Single endpoint parameter: the language of the response
    query = {'cultureInfo': util.language_code(kwargs.get('lang'))}

    # Perform the request against the 'geo' service
    response = self.make_request('geo', 'get_poi_types', **query)

    # No status code on this endpoint; fall back to an empty list when
    # the 'types' key is absent from the response.
    parsed = [emtype.PoiType(**raw) for raw in response.get('types', [])]
    return True, parsed
python
{ "resource": "" }
q38596
GeoApi.get_route_lines_route
train
def get_route_lines_route(self, **kwargs):
    """Obtain itinerary for one or more lines in the given date.

    Args:
        day (int): Day of the month in format DD. The number is
            automatically padded if it only has one digit.
        month (int): Month number in format MM. The number is
            automatically padded if it only has one digit.
        year (int): Year number in format YYYY.
        lines (list[int] | int): Lines to query, may be empty to get
            all the lines.

    Returns:
        Status boolean and parsed response (list[RouteLinesItem]), or
        message string in case of error.
    """
    # Endpoint parameters.
    # BUG FIX: the fallback values must be integers — the previous
    # string defaults ('01', '1970') made '%02d' raise TypeError
    # whenever day/month/year were omitted by the caller.
    select_date = '%02d/%02d/%d' % (
        kwargs.get('day', 1),
        kwargs.get('month', 1),
        kwargs.get('year', 1970)
    )

    params = {
        'SelectDate': select_date,
        'Lines': util.ints_to_string(kwargs.get('lines', []))
    }

    # Request
    result = self.make_request('geo', 'get_route_lines_route', **params)

    # This endpoint does report a status; propagate its description on
    # failure, with a generic fallback when none is provided.
    if not util.check_result(result):
        return False, result.get('resultDescription', 'UNKNOWN ERROR')

    # Parse each raw record into a RouteLinesItem instance
    values = util.response_list(result, 'resultValues')
    return True, [emtype.RouteLinesItem(**a) for a in values]
python
{ "resource": "" }
q38597
GeoApi.get_stops_line
train
def get_stops_line(self, **kwargs):
    """Obtain information on the stops of the given lines.

    Args:
        lines (list[int] | int): Lines to query, may be empty to get
            all the lines.
        direction (str): Optional, either *forward* or *backward*.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Stop]), or message
        string in case of error.
    """
    # Build the query parameters for the endpoint
    query = {
        'line': util.ints_to_string(kwargs.get('lines', [])),
        'direction': util.direction_code(kwargs.get('direction', '')),
        'cultureInfo': util.language_code(kwargs.get('lang'))
    }

    # Perform the request against the 'geo' service
    response = self.make_request('geo', 'get_stops_line', **query)

    # This endpoint does not report a status code; the presence of the
    # 'stop' key is the only success indicator available.
    if not util.check_result(response, 'stop'):
        return False, 'UNKNOWN ERROR'

    # Build one Stop instance per raw entry in the response
    raw_stops = util.response_list(response, 'stop')
    return True, [emtype.Stop(**raw) for raw in raw_stops]
python
{ "resource": "" }
q38598
GeoApi.get_street
train
def get_street(self, **kwargs):
    """Obtain a list of nodes related to a location within a given radius.

    Not sure of its use, but...

    Args:
        street_name (str): Name of the street to search.
        street_number (int): Street number to search.
        radius (int): Radius (in meters) of the search.
        stops (int): Number of the stop to search.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Site]), or message
        string in case of error.
    """
    # Assemble the endpoint parameters one by one
    query = {}
    query['description'] = kwargs.get('street_name')
    query['streetNumber'] = kwargs.get('street_number')
    query['Radius'] = kwargs.get('radius')
    query['Stops'] = kwargs.get('stops')
    query['cultureInfo'] = util.language_code(kwargs.get('lang'))

    # Perform the request against the 'geo' service
    response = self.make_request('geo', 'get_street', **query)

    # Funny endpoint: there is no status code, so success is detected
    # through the presence of the 'site' key.
    if not util.check_result(response, 'site'):
        return False, 'UNKNOWN ERROR'

    # Parse each raw record into a Site instance
    records = util.response_list(response, 'site')
    return True, [emtype.Site(**record) for record in records]
python
{ "resource": "" }
q38599
GeoApi.get_street_from_xy
train
def get_street_from_xy(self, **kwargs):
    """Obtain a list of streets around the specified point.

    Args:
        latitude (double): Latitude in decimal degrees.
        longitude (double): Longitude in decimal degrees.
        radius (int): Radius (in meters) of the search.
        lang (str): Language code (*es* or *en*).

    Returns:
        Status boolean and parsed response (list[Street]), or message
        string in case of error.
    """
    # Map the caller's arguments onto the endpoint's parameter names
    # (note the API swaps X/Y: X is longitude, Y is latitude).
    query = {
        'coordinateX': kwargs.get('longitude'),
        'coordinateY': kwargs.get('latitude'),
        'Radius': kwargs.get('radius'),
        'cultureInfo': util.language_code(kwargs.get('lang'))
    }

    # Perform the request against the 'geo' service
    response = self.make_request('geo', 'get_street_from_xy', **query)

    # Funny endpoint: there is no status code, so success is detected
    # through the presence of the 'site' key.
    if not util.check_result(response, 'site'):
        return False, 'UNKNOWN ERROR'

    # Parse each raw record into a Street instance
    records = util.response_list(response, 'site')
    return True, [emtype.Street(**record) for record in records]
python
{ "resource": "" }