_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q39800
Elastic._get_connection_from_url
train
def _get_connection_from_url(self, url, timeout, **kwargs):
    """Build a connection object from a string URL.

    HTTP/HTTPS URLs get an :class:`HttpConnection`; anything else is
    treated as a Thrift endpoint (Python 2 only).
    """
    url = self._decode_url(url, "")
    if url.scheme in ('http', 'https'):
        return HttpConnection(url.geturl(), timeout=timeout, **kwargs)
    # Non-HTTP scheme: fall back to the Thrift transport.
    if sys.version_info[0] > 2:
        raise ValueError("Thrift transport is not available for Python 3")
    try:
        from thrift_connection import ThriftConnection
    except ImportError:
        raise ImportError("The 'thrift' python package does not seem to be installed.")
    return ThriftConnection(url.hostname, url.port, timeout=timeout, **kwargs)
python
{ "resource": "" }
q39801
userToJson
train
def userToJson(user):
    """Build a serializable dict of the user's public fields.

    :param user: User to get info for
    :type user: User
    :returns: dict
    """
    return {
        'id': user.id,
        'username': user.username,
        'name': user.get_full_name(),
        'email': user.email,
    }
python
{ "resource": "" }
q39802
commentToJson
train
def commentToJson(comment):
    """Build a serializable dict for a comment, embedding its author.

    :param comment: Comment to get info for
    :type comment: Comment
    :returns: dict
    """
    return {
        'id': comment.id,
        'comment': comment.comment,
        'user': userToJson(comment.user),
        'date': comment.submit_date.isoformat(),
    }
python
{ "resource": "" }
q39803
getPutData
train
def getPutData(request):
    """Parse the raw request body and attach it to the request as both
    ``PUT`` and ``DELETE`` dicts, so those verbs behave like POST.

    :param request: Request object to add PUT/DELETE to
    :type request: Request
    """
    # Duplicate keys: last value wins, same as the original loop-assign.
    parsed = dict(urlparse.parse_qsl(request.body))
    setattr(request, 'PUT', parsed)
    setattr(request, 'DELETE', parsed)
python
{ "resource": "" }
q39804
getHashForFile
train
def getHashForFile(f):
    """Return the SHA-1 hex digest of an open file's contents.

    The file is read in 1 KiB chunks and rewound to the start afterwards.

    :param f: open file-like object to hash
    :returns: str
    """
    digest = hashlib.sha1()
    chunk = f.read(1024)
    while chunk:
        digest.update(chunk)
        chunk = f.read(1024)
    f.seek(0)
    return digest.hexdigest()
python
{ "resource": "" }
q39805
uniqueID
train
def uniqueID(size=6, chars=string.ascii_uppercase + string.digits):
    """A quick and dirty way to get a unique string.

    NOTE(review): uses ``random``, not ``secrets`` -- fine for the stated
    "quick and dirty" purpose, but do not use for security tokens.

    :param size: number of characters to generate
    :param chars: alphabet to draw from
    :returns: str
    """
    # range (not xrange) keeps this working on both Python 2 and 3.
    return ''.join(random.choice(chars) for _ in range(size))
python
{ "resource": "" }
q39806
getObjectsFromGuids
train
def getObjectsFromGuids(guids):
    """Gets the model objects based on a guid list, preserving guid order.

    Guids with no matching Image or Video are silently skipped.

    :param guids: Guids to get objects for
    :type guids: list
    :returns: list
    """
    img = list(Image.objects.filter(guid__in=guids))
    vid = list(Video.objects.filter(guid__in=guids))
    # Map guid -> first matching object, then emit in the caller's guid
    # order.  The original nested while/for scan was O(n^2) and hung
    # forever whenever a guid matched no object (nothing was ever popped).
    byguid = {}
    for obj in img + vid:
        byguid.setdefault(obj.guid, obj)
    return [byguid[g] for g in guids if g in byguid]
python
{ "resource": "" }
q39807
getClientIP
train
def getClientIP(request):
    """Return the best client IP address found on the request.

    Prefers the first hop of ``X-Forwarded-For`` (the originating
    client), falling back to ``REMOTE_ADDR``.
    """
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[0]
    return request.META.get('REMOTE_ADDR')
python
{ "resource": "" }
q39808
__discoverPlugins
train
def __discoverPlugins():
    """Scan every non-django installed app for a ``frog_plugin.py`` module,
    import each one found, and return the registered plugin classes.
    """
    for app in settings.INSTALLED_APPS:
        if app.startswith('django'):
            continue
        module = __import__(app)
        appdir = path.Path(module.__file__).parent
        pluginfile = appdir / 'frog_plugin.py'
        if pluginfile.exists():
            # Importing the module registers its plugins as a side effect.
            file_, fpath, desc = imp.find_module('frog_plugin', [appdir])
            if file_:
                imp.load_module('frog_plugin', file_, fpath, desc)
    return FrogPluginRegistry.plugins
python
{ "resource": "" }
q39809
Result.append
train
def append(self, val):
    """Append *val* to the values list, keeping ``value`` synced to the
    first entry of the list.

    :param val: Object to append
    :type val: primitive
    """
    self.values.append(val)
    self.value = self.values[0]
python
{ "resource": "" }
q39810
Result.asDict
train
def asDict(self):
    """Return the result as a plain serializable dict."""
    return dict(
        isError=self.isError,
        message=self.message,
        values=self.values,
        value=self.value,
    )
python
{ "resource": "" }
q39811
ServerMixin.auth
train
def auth(self, password):
    """Authenticate against a password-protected Redis server.

    Sends ``AUTH``; a server reply of ``invalid password`` is translated
    into :exc:`~tredis.exceptions.AuthError`, any other error is passed
    through unchanged.

    :param password: The password to authenticate with
    :type password: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.AuthError`,
        :exc:`~tredis.exceptions.RedisError`
    """
    result = concurrent.TracebackFuture()

    def _relay(response):
        # Copy the AUTH outcome onto our future, upgrading the generic
        # bad-password error into a typed AuthError.
        error = response.exception()
        if not error:
            result.set_result(response.result())
        elif error.args[0] == b'invalid password':
            result.set_exception(exceptions.AuthError(error))
        else:
            result.set_exception(error)

    self.io_loop.add_future(self._execute([b'AUTH', password], b'OK'), _relay)
    return result
python
{ "resource": "" }
q39812
ServerMixin.info
train
def info(self, section=None):
    """Run ``INFO`` and return the parsed server statistics.

    The optional ``section`` selects a single block (``server``,
    ``clients``, ``memory``, ``persistence``, ``stats``, ``replication``,
    ``cpu``, ``commandstats``, ``cluster``, ``keyspace``) or one of the
    meta values ``all`` / ``default``.  With no argument the server's
    default set of sections is returned.

    :param str section: Optional
    :return: dict
    """
    command = [b'INFO']
    if section:
        command.append(section)
    return self._execute(command, format_callback=common.format_info_response)
python
{ "resource": "" }
q39813
ServerMixin.select
train
def select(self, index=0):
    """Switch the connection to the given zero-based database index.

    New connections always start on DB ``0``.  Not available when
    clustering is enabled.

    :param int index: The database to select
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    :raises: :exc:`~tredis.exceptions.InvalidClusterCommand`
    """
    if self._clustering:
        raise exceptions.InvalidClusterCommand
    future = self._execute(
        [b'SELECT', ascii(index).encode('ascii')], b'OK')

    def _record_db(_):
        # Remember the active database on the connection once SELECT
        # completes (note: runs regardless of the command's outcome).
        self._connection.database = index

    self.io_loop.add_future(future, _record_db)
    return future
python
{ "resource": "" }
q39814
ServerMixin.time
train
def time(self):
    """Fetch the server's current time via ``TIME``.

    :rtype: float
    :raises: :exc:`~tredis.exceptions.RedisError`
    """

    def _to_epoch(value):
        # TIME replies with [seconds-since-epoch, microseconds].
        seconds, micros = value
        return float(seconds) + float(micros) / 1000000.0

    return self._execute([b'TIME'], format_callback=_to_epoch)
python
{ "resource": "" }
q39815
VodTVP.get_show_name
train
def get_show_name(self):
    """Extract the video show name from the page.

    The name lives in the JSON blob of the first ``div`` carrying a
    ``data-hover`` attribute, under the ``title`` key.

    Returns:
        str: Video show name.
    """
    hover_div = self.soup.find('div', attrs={'data-hover': True})
    hover_data = json.loads(hover_div['data-hover'])
    return hover_data.get('title')
python
{ "resource": "" }
q39816
BaseClient.ping
train
def ping(self, callback=None, **kwargs):
    """Check elasticsearch host status with a HEAD request to the root."""
    request = self.mk_req('', method='HEAD', **kwargs)
    self.client.fetch(request, callback=callback)
python
{ "resource": "" }
q39817
BaseClient.info
train
def info(self, callback=None, **kwargs):
    """Fetch basic cluster info with a GET request to the root."""
    request = self.mk_req('', method='GET', **kwargs)
    self.client.fetch(request, callback=callback)
python
{ "resource": "" }
q39818
ExceptionReporter.get_traceback_data
train
def get_traceback_data(self):
    """Return a dictionary containing traceback information."""
    # NOTE(review): this branch is always taken and template_loaders is
    # never used afterwards -- looks like leftover scaffolding.
    default_template_engine = None
    if default_template_engine is None:
        template_loaders = []
    frames = self.get_traceback_frames()
    for i, frame in enumerate(frames):
        if 'vars' in frame:
            cleaned_vars = []
            for k, v in frame['vars']:
                v = pformat(v)
                # The escape filter assumes unicode; don't choke on
                # non-utf-8 input.
                if isinstance(v, six.binary_type):
                    v = v.decode('utf-8', 'replace')
                # Trim large blobs of data.
                if v and len(v) > 4096:
                    v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v))
                cleaned_vars.append((k, v))
            frame['vars'] = cleaned_vars
            frames[i] = frame
    # NOTE(review): unicode_hint is initialized but never filled in or
    # added to the context; unicode_str is likewise unused (though the
    # args[1] access could raise).  Preserved as-is.
    unicode_hint = ''
    if self.exc_type and issubclass(self.exc_type, UnicodeError):
        start = getattr(self.exc_value, 'start', None)
        end = getattr(self.exc_value, 'end', None)
        if start is not None and end is not None:
            unicode_str = self.exc_value.args[1]
    c = {
        'is_email': False,
        'frames': frames,
        'sys_executable': sys.executable,
        'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
        'sys_path': sys.path,
    }
    # Attach exception info only when it is available.
    if self.exc_type:
        c['exception_type'] = self.exc_type.__name__
    if self.exc_value:
        c['exception_value'] = self.exc_value
    if frames:
        c['lastframe'] = frames[-1]
    return c
python
{ "resource": "" }
q39819
ExceptionReporter.get_traceback_html
train
def get_traceback_html(self, **kwargs):
    """Return the HTML version of the debug 500 HTTP error page."""
    template = Template(TECHNICAL_500_TEMPLATE)
    context = self.get_traceback_data()
    context['kwargs'] = kwargs
    return template.render(Context(context))
python
{ "resource": "" }
q39820
ExceptionReporter.get_traceback_frames
train
def get_traceback_frames(self):
    """Walk the stored traceback and return a list of frame dicts."""
    frames = []
    tb = self.tb
    while tb is not None:
        # Support for __traceback_hide__, used by a few libraries to
        # hide internal frames from reports.
        if tb.tb_frame.f_locals.get('__traceback_hide__'):
            tb = tb.tb_next
            continue
        code = tb.tb_frame.f_code
        filename = code.co_filename
        function = code.co_name
        lineno = tb.tb_lineno - 1
        loader = tb.tb_frame.f_globals.get('__loader__')
        module_name = tb.tb_frame.f_globals.get('__name__') or ''
        pre_context_lineno, pre_context, context_line, post_context = \
            self._get_lines_from_file(filename, lineno, 7, loader, module_name)
        # Frames whose source could not be located are skipped entirely.
        if pre_context_lineno is not None:
            frames.append({
                'tb': tb,
                'type': 'django' if module_name.startswith('django.') else 'user',
                'filename': filename,
                'function': function,
                'lineno': lineno + 1,
                'vars': list(six.iteritems(tb.tb_frame.f_locals)),
                'id': id(tb),
                'pre_context': pre_context,
                'context_line': context_line,
                'post_context': post_context,
                'pre_context_lineno': pre_context_lineno + 1,
            })
        tb = tb.tb_next
    return frames
python
{ "resource": "" }
q39821
ExceptionReporter.format_exception
train
def format_exception(self):
    """Return the same data as from traceback.format_exception.

    :returns: list of traceback lines, matching the stdlib format
    """
    import traceback
    frames = self.get_traceback_frames()
    tb = [(f['filename'], f['lineno'], f['function'], f['context_line'])
          for f in frames]
    # Renamed from ``list`` -- the original shadowed the builtin.
    lines = ['Traceback (most recent call last):\n']
    lines += traceback.format_list(tb)
    lines += traceback.format_exception_only(self.exc_type, self.exc_value)
    return lines
python
{ "resource": "" }
q39822
KeysMixin.expire
train
def expire(self, key, timeout):
    """Set a TTL in seconds on ``key``; the key is deleted once it fires.

    The timeout is cleared by :meth:`~tredis.RedisClient.delete`, or by
    overwriting the key with :meth:`~tredis.RedisClient.set` /
    :meth:`~tredis.RedisClient.getset`.  Operations that merely alter the
    stored value in place (``incr``, ``lpush``, ``hset``, ...) leave the
    timeout untouched.  :meth:`~tredis.RedisClient.persist` makes the key
    persistent again; ``rename`` transfers the TTL to the new name.

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to set an expiration for
    :type key: :class:`str`, :class:`bytes`
    :param int timeout: The number of seconds to set the timeout to
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'EXPIRE', key, ascii(timeout).encode('ascii')]
    return self._execute(command, 1)
python
{ "resource": "" }
q39823
KeysMixin.migrate
train
def migrate(self, host, port, key, destination_db, timeout, copy=False,
            replace=False):
    """Atomically move ``key`` to another Redis instance via ``MIGRATE``.

    On success the key is deleted locally and guaranteed to exist on the
    target.  Both instances are blocked for the duration of the transfer.

    .. note:: **Time complexity**: DUMP+DEL on the source plus RESTORE on
       the target, plus an ``O(N)`` data transfer between the instances.

    :param host: The host to migrate the key to
    :type host: bytes, str
    :param int port: The port to connect on
    :param key: The key to migrate
    :type key: bytes, str
    :param int destination_db: The database number to select
    :param int timeout: The maximum idle time in milliseconds
    :param bool copy: Do not remove the key from the local instance
    :param bool replace: Replace existing key on the remote instance
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [
        b'MIGRATE',
        host,
        ascii(port).encode('ascii'),
        key,
        ascii(destination_db).encode('ascii'),
        ascii(timeout).encode('ascii'),
    ]
    if copy is True:
        command.append(b'COPY')
    if replace is True:
        command.append(b'REPLACE')
    return self._execute(command, b'OK')
python
{ "resource": "" }
q39824
KeysMixin.sort
train
def sort(self, key, by=None, external=None, offset=0, limit=None,
         order=None, alpha=False, store_as=None):
    """Return or store the sorted elements of the list/set/sorted-set at
    ``key`` via the Redis ``SORT`` command.

    Sorting is numeric by default; pass ``alpha=True`` to sort
    lexicographically.  ``external`` maps to Redis ``GET`` patterns for
    `retrieving external keys
    <http://redis.io/commands/sort#retrieving-external-keys>`_ and may be
    a single pattern or a list of patterns.

    .. note:: **Time complexity**: ``O(N+M*log(M))`` where ``N`` is the
       number of elements to sort and ``M`` the number returned.

    :param key: The key to sort
    :type key: :class:`str`, :class:`bytes`
    :param by: The optional pattern for external sorting keys
    :type by: :class:`str`, :class:`bytes`
    :param external: Pattern or list of patterns to return external keys
    :type external: :class:`str`, :class:`bytes`, list
    :param int offset: The starting offset when using limit
    :param int limit: The number of elements to return
    :param order: The sort order - one of ``ASC`` or ``DESC``
    :type order: :class:`str`, :class:`bytes`
    :param bool alpha: Sort the results lexicographically
    :param store_as: When specified, the key to store the results as
    :type store_as: :class:`str`, :class:`bytes`, None
    :rtype: list|int
    :raises: :exc:`~tredis.exceptions.RedisError`
    :raises: :exc:`ValueError`
    """
    if order and order not in [b'ASC', b'DESC', 'ASC', 'DESC']:
        raise ValueError('invalid sort order "{}"'.format(order))
    command = [b'SORT', key]
    if by:
        command.extend([b'BY', by])
    if external and isinstance(external, list):
        for pattern in external:
            command.extend([b'GET', pattern])
    elif external:
        command.extend([b'GET', external])
    if limit:
        # LIMIT always carries both offset and count.
        command.extend([b'LIMIT',
                        ascii(offset).encode('utf-8'),
                        ascii(limit).encode('utf-8')])
    if order:
        command.append(order)
    if alpha is True:
        command.append(b'ALPHA')
    if store_as:
        command.extend([b'STORE', store_as])
    return self._execute(command)
python
{ "resource": "" }
q39825
KeysMixin.wait
train
def wait(self, num_slaves, timeout=0):
    """Block until previous writes are acknowledged by ``num_slaves``
    slaves, or until ``timeout`` milliseconds elapse.

    Always returns the number of slaves that acknowledged the writes sent
    before this command, whether the target count or the timeout was
    reached first.

    .. note:: **Time complexity**: ``O(1)``

    :param int num_slaves: Number of slaves to acknowledge previous writes
    :param int timeout: Timeout in milliseconds
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [
        b'WAIT',
        ascii(num_slaves).encode('ascii'),
        ascii(timeout).encode('ascii'),
    ]
    return self._execute(command)
python
{ "resource": "" }
q39826
enable_thread_profiling
train
def enable_thread_profiling(profile_dir, exception_callback=None):
    """Monkey-patch threading.Thread / multiprocessing.Process with the
    profiled variants.  Subsequent imports of threading.Thread will
    reference ProfiledThread instead.

    :param profile_dir: existing directory to write profiles into
    :param exception_callback: optional callback invoked on thread errors
    :raises OSError: if profile_dir does not exist
    """
    global profiled_thread_enabled, Thread, Process
    if not os.path.isdir(profile_dir):
        raise OSError('%s does not exist' % profile_dir)
    _Profiler.profile_dir = profile_dir
    _Profiler.exception_callback = exception_callback
    Thread = threading.Thread = ProfiledThread
    Process = multiprocessing.Process = ProfiledProcess
    profiled_thread_enabled = True
python
{ "resource": "" }
q39827
enable_thread_logging
train
def enable_thread_logging(exception_callback=None):
    """Monkey-patch threading.Thread with LoggedThread so subsequent
    imports of threading.Thread pick up the logged variant.

    :param exception_callback: optional callback invoked on thread errors
    """
    global logged_thread_enabled, Thread
    LoggedThread.exception_callback = exception_callback
    Thread = threading.Thread = LoggedThread
    logged_thread_enabled = True
python
{ "resource": "" }
q39828
AseParser.parse_from_calc
train
def parse_from_calc(self):
    """Parse the retrieved data folder and return the output nodes.

    For this simple code it reads the JSON results file, splits array-like
    values into an ArrayData node, collects scheduler errors as warnings,
    and optionally registers an output structure.

    :returns: (successful, new_nodes_list) tuple
    """
    from aiida.common.exceptions import InvalidOperation
    from aiida.common import aiidalogger
    from aiida.backends.djsite.utils import get_dblogger_extra
    import ase, ase.io

    parserlogger = aiidalogger.getChild('aseparser')
    logger_extra = get_dblogger_extra(self._calc)

    # Assume success until proven otherwise.
    successful = True

    # The calculation must be in the PARSING state.
    state = self._calc.get_state()
    if state != calc_states.PARSING:
        raise InvalidOperation("Calculation not in {} state"
                               .format(calc_states.PARSING))

    # Folder with the retrieved files.
    out_folder = self._calc.get_retrieved_node()
    list_of_files = out_folder.get_folder_list()

    # At least the stdout file must exist.
    if self._calc._OUTPUT_FILE_NAME not in list_of_files:
        successful = False
        parserlogger.error("Standard output not found", extra=logger_extra)
        return successful, ()

    # Optional output structure.
    has_out_atoms = self._calc._output_aseatoms in list_of_files
    if has_out_atoms:
        out_atoms = ase.io.read(
            out_folder.get_abs_path(self._calc._output_aseatoms))
        # NOTE(review): set_ase() is called for its side effect; the
        # returned value is never used afterwards.
        out_structure = StructureData().set_ase(out_atoms)

    # Load the results dictionary.
    json_outfile = out_folder.get_abs_path(self._calc._OUTPUT_FILE_NAME)
    with open(json_outfile, 'r') as f:
        json_params = json.load(f)

    # Move list/tuple values out of json_params into their own dict.
    dictionary_array = {}
    for k, v in list(json_params.iteritems()):
        if isinstance(v, (list, tuple)):
            dictionary_array[k] = json_params.pop(k)

    # Scheduler stderr becomes the warnings list.
    warnings = []
    with open(out_folder.get_abs_path(self._calc._SCHED_ERROR_FILE)) as f:
        errors = f.read()
    if errors:
        warnings = [errors]
    json_params['warnings'] = warnings

    # Assemble the output nodes.
    new_nodes_list = []
    if dictionary_array:
        array_data = ArrayData()
        for k, v in dictionary_array.iteritems():
            array_data.set_array(k, numpy.array(v))
        new_nodes_list.append((self._outarray_name, array_data))
    if json_params:
        parameter_data = ParameterData(dict=json_params)
        new_nodes_list.append((self._outdict_name, parameter_data))
    if has_out_atoms:
        # NOTE(review): an *empty* StructureData is registered here, not
        # the one populated from out_atoms above -- looks suspicious but
        # preserved as-is.
        structure_data = StructureData()
        new_nodes_list.append((self._outstruc_name, structure_data))

    return successful, new_nodes_list
python
{ "resource": "" }
q39829
HashesMixin.hset
train
def hset(self, key, field, value):
    """Set ``field`` of the hash at ``key`` to ``value``, creating the
    hash if needed and overwriting any existing field.

    .. note:: **Time complexity**: always ``O(1)``

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :param field: The field in the hash to set
    :type field: :class:`str`, :class:`bytes`
    :param value: The value to set the field to
    :returns: ``1`` if the field was newly created, ``0`` if an existing
        field was updated
    :rtype: int
    """
    return self._execute([b'HSET', key, field, value])
python
{ "resource": "" }
q39830
HashesMixin.hgetall
train
def hgetall(self, key):
    """Return all fields and values of the hash stored at ``key``.

    The underlying `HGETALL`_ command replies with a flat array of
    field/value pairs; this method converts it to a Python :class:`dict`
    (empty when the key does not exist).

    .. note:: **Time complexity**: ``O(N)`` where ``N`` is the size of
       the hash.

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :returns: a :class:`dict` mapping each field to its value

    .. _HGETALL: http://redis.io/commands/hgetall
    """

    def _pairs_to_dict(flat):
        # Even indexes are fields, odd indexes their values.
        return dict(zip(flat[::2], flat[1::2]))

    return self._execute([b'HGETALL', key], format_callback=_pairs_to_dict)
python
{ "resource": "" }
q39831
HashesMixin.hmset
train
def hmset(self, key, value_dict):
    """Set all fields from ``value_dict`` on the hash stored at ``key``.

    Existing fields are overwritten; the hash is created if ``key`` does
    not exist.  An empty mapping short-circuits to a ``False`` future
    without touching the server.

    .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of
       fields being set.

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :param value_dict: field to value mapping
    :type value_dict: :class:`dict`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    if not value_dict:
        future = concurrent.TracebackFuture()
        future.set_result(False)
        return future
    command = [b'HMSET', key]
    for field, value in value_dict.items():
        command += [field, value]
    return self._execute(command)
python
{ "resource": "" }
q39832
HashesMixin.hmget
train
def hmget(self, key, *fields):
    """Return the values of the given ``fields`` of the hash at ``key``.

    Missing fields map to :data:`None`; a non-existent key is treated as
    an empty hash, so every requested field maps to :data:`None`.

    .. note:: *Time complexity*: ``O(N)`` where ``N`` is the number of
       fields requested.

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :param fields: iterable of field names to retrieve
    :returns: a :class:`dict` of field name to value for each requested
        field
    :rtype: dict
    """

    def _zip_fields(values):
        return dict(zip(fields, values))

    return self._execute([b'HMGET', key] + list(fields),
                         format_callback=_zip_fields)
python
{ "resource": "" }
q39833
HashesMixin.hdel
train
def hdel(self, key, *fields):
    """Remove ``fields`` from the hash stored at ``key``.

    Fields absent from the hash are ignored; a non-existent key counts as
    an empty hash and yields zero.  Calling with no fields short-circuits
    to a ``0`` future without touching the server.

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :param fields: iterable of field names to remove
    :returns: the number of fields actually removed
    :rtype: int
    """
    if not fields:
        future = concurrent.TracebackFuture()
        future.set_result(0)
        return future
    return self._execute([b'HDEL', key] + list(fields))
python
{ "resource": "" }
q39834
HashesMixin.hsetnx
train
def hsetnx(self, key, field, value):
    """Set ``field`` of the hash at ``key`` only if the field is absent.

    Creates the hash if ``key`` does not exist; if the field already
    exists the call is a no-op.

    .. note:: *Time complexity*: ``O(1)``

    :param key: The key of the hash
    :type key: :class:`str`, :class:`bytes`
    :param field: The field in the hash to set
    :type field: :class:`str`, :class:`bytes`
    :param value: The value to set the field to
    :returns: ``1`` if the field was created and set, ``0`` if it already
        existed and nothing was done
    :rtype: int
    """
    return self._execute([b'HSETNX', key, field, value])
python
{ "resource": "" }
q39835
AuthProgs.raise_and_log_error
train
def raise_and_log_error(self, error, message):
    """Log the pending traceback, then raise ``error(message)``.

    error: the exception class to raise
    message: the user-facing error message
    """
    self.log('raising %s, traceback %s\n' % (error, traceback.format_exc()))
    raise error(message)
python
{ "resource": "" }
q39836
AuthProgs.get_client_ip
train
def get_client_ip(self):
    """Return the client IP from the SSH environment, caching the result.

    Reads ``SSH_CONNECTION`` (falling back to ``SSH_CLIENT``) and takes
    the first whitespace-separated token, which is the client address.

    :raises SSHEnvironmentError: when neither variable yields an address
    """
    if self.client_ip:
        return self.client_ip
    try:
        client = os.environ.get('SSH_CONNECTION',
                                os.environ.get('SSH_CLIENT'))
        self.client_ip = client.split()[0]
        self.logdebug('client_ip: %s\n' % self.client_ip)
        return self.client_ip
    # Narrowed from a bare except: AttributeError when both env vars are
    # unset (client is None), IndexError when the value is empty.
    except (AttributeError, IndexError):
        raise SSHEnvironmentError('cannot identify the ssh client '
                                  'IP address')
python
{ "resource": "" }
q39837
AuthProgs.check_keyname
train
def check_keyname(self, rule):
    """Verify the current keyname is permitted by the rule.

    Rules without a ``keynames`` entry match anything; a scalar value is
    treated as a one-element list.
    """
    keynames = rule.get('keynames')
    if not keynames:
        self.logdebug('no keynames requirement.\n')
        return True
    if not isinstance(keynames, list):
        keynames = [keynames]
    if self.keyname in keynames:
        self.logdebug('keyname "%s" matches rule.\n' % self.keyname)
        return True
    self.logdebug('keyname "%s" does not match rule.\n' % self.keyname)
    return False
python
{ "resource": "" }
q39838
AuthProgs.check_client_ip
train
def check_client_ip(self, rule):
    """Verify the client IP is permitted by the rule's ``from`` entry.

    Rules without a ``from`` entry match anything; a scalar value is
    treated as a one-element list.
    """
    allow_from = rule.get('from')
    if not allow_from:
        self.logdebug('no "from" requirement.\n')
        return True
    if not isinstance(allow_from, list):
        allow_from = [allow_from]
    client_ip = self.get_client_ip()
    if client_ip in allow_from:
        self.logdebug('client_ip %s in %s\n' % (client_ip, allow_from))
        return True
    self.logdebug('client_ip %s not in %s' % (client_ip, allow_from))
    return False
python
{ "resource": "" }
q39839
AuthProgs.get_merged_config
train
def get_merged_config(self):
    """Merge the config file and all config-dir files into one stream.

    Returns an open StringIO containing the merged config as a series of
    YAML documents, or None when the docs were already loaded.
    """
    if self.yamldocs:
        return
    sources = []
    if self.configfile:
        sources.append(self.configfile)
    if self.configdir:
        # Every plain, non-dotfile file in configdir is a config source.
        candidates = [os.path.join(self.configdir, name)
                      for name in os.listdir(self.configdir)]
        sources.extend(f for f in candidates
                       if os.path.isfile(f)
                       and not os.path.basename(f).startswith('.'))
    merged_configfile = io.StringIO()
    merged_configfile.write('-\n')
    for source in sources:
        self.logdebug('reading in config file %s\n' % source)
        merged_configfile.write(open(source).read())
        merged_configfile.write('\n-\n')
    merged_configfile.seek(0)
    self.logdebug('merged log file: """\n%s\n"""\n'
                  % merged_configfile.read())
    merged_configfile.seek(0)
    return merged_configfile
python
{ "resource": "" }
q39840
AuthProgs.load
train
def load(self):
    """Parse the merged config into ``self.yamldocs``; log and raise
    ConfigError on a YAML parse failure.
    """
    try:
        merged_configfile = self.get_merged_config()
        self.yamldocs = yaml.load(merged_configfile, Loader=Loader)
        # Concatenating files leaves empty top-level docs (None); drop
        # them.  Not strictly required, but it makes dumps cleaner.
        self.yamldocs = [doc for doc in self.yamldocs if doc]
        self.logdebug('parsed_rules:\n%s\n' % pretty(self.yamldocs))
    except (yaml.scanner.ScannerError, yaml.parser.ParserError):
        self.raise_and_log_error(ConfigError, 'error parsing config.')
python
{ "resource": "" }
q39841
AuthProgs.dump_config
train
def dump_config(self):
    """Print the merged YAML and the parsed Python config to stdout."""
    yaml_content = self.get_merged_config()
    print('YAML Configuration\n%s\n' % yaml_content.read())
    try:
        self.load()
        print('Python Configuration\n%s\n' % pretty(self.yamldocs))
    except ConfigError:
        sys.stderr.write(
            'config parse error. try running with --logfile=/dev/tty\n')
        raise
python
{ "resource": "" }
q39842
AuthProgs.install_key_data
train
def install_key_data(self, keydata, target):
    """Write an authprogs forced-command entry for ``keydata`` into the
    open ``target`` file, refusing to install the same key twice.
    """
    target.seek(0)
    contents = target.read()
    ssh_opts = 'no-port-forwarding'
    if keydata in contents:
        raise InstallError('key data already in file - refusing '
                           'to double-install.\n')
    # Assemble the forced command with any optional flags.
    pieces = ['%s --run' % self.authprogs_binary]
    if self.logfile:
        pieces.append('--logfile=%s' % self.logfile)
    if self.keyname:
        pieces.append('--keyname=%s' % self.keyname)
    command = ' '.join(pieces)
    target.write('command="%s",%s %s\n' % (command, ssh_opts, keydata))
python
{ "resource": "" }
q39843
AuthProgs.install_key
train
def install_key(self, keyfile, authorized_keys):
    """Install the key in ``keyfile`` into the authorized_keys file."""
    # Best-effort creation of the containing directory (typically
    # ~/.ssh); any failure surfaces shortly below when we open the
    # authorized_keys file itself.
    try:
        os.makedirs(os.path.dirname(authorized_keys), 0o700)
    except OSError:
        pass
    keydata = open(keyfile).read()
    fd = os.open(authorized_keys, os.O_RDWR | os.O_CREAT, 0o600)
    self.install_key_data(keydata, os.fdopen(fd, 'w+'))
python
{ "resource": "" }
q39844
AuthProgs.find_match_scp
train
def find_match_scp(self, rule):  # pylint: disable-msg=R0911,R0912
    """Check an scp request against ``rule``.

    Returns the match dict for an allowed transfer, or None to reject.
    """
    argv = list(self.original_command_list)
    binary = argv.pop(0)
    if binary not in ('scp', '/usr/bin/scp'):
        self.logdebug('skipping scp processing - binary "%s" '
                      'not in approved list.\n' % binary)
        return
    # scp's server-side invocation ends with the file path; everything
    # between the binary and the path is flags.
    filepath = argv.pop()
    arguments = argv
    if '-f' in arguments and not rule.get('allow_download'):
        self.logdebug('scp denied - downloading forbidden.\n')
        return
    if '-t' in arguments and not rule.get('allow_upload'):
        self.log('scp denied - uploading forbidden.\n')
        return
    if '-r' in arguments and not rule.get('allow_recursion'):
        self.log('scp denied - recursive transfers forbidden.\n')
        return
    if '-p' in arguments and not rule.get('allow_permissions', 'true'):
        self.log('scp denied - set/getting permissions '
                 'forbidden.\n')
        return
    if rule.get('files'):
        files = rule.get('files')
        if not isinstance(files, list):
            files = [files]
        if filepath not in files:
            self.log('scp denied - file "%s" - not in approved '
                     'list %s\n' % (filepath, files))
            return
    # Allow it!
    return {'command': self.original_command_list}
python
{ "resource": "" }
q39845
AuthProgs.find_match
train
def find_match(self):
    """Load the config and return the first matching rule's result.

    Walks every YAML document whose client-ip and keyname filters pass,
    dispatching each 'allow' entry on its rule_type. Raises
    CommandRejected when nothing matches.
    """
    self.load()
    for yamldoc in self.yamldocs:
        self.logdebug('\nchecking rule """%s"""\n' % yamldoc)
        if not yamldoc:
            continue
        if not self.check_client_ip(yamldoc):
            # Rejected - Client IP does not match
            continue
        if not self.check_keyname(yamldoc):
            # Rejected - keyname does not match
            continue
        rules = yamldoc.get('allow')
        if not isinstance(rules, list):
            rules = [rules]
        for rule in rules:
            rule_type = rule.get('rule_type', 'command')
            if rule_type == 'command':
                handler = self.find_match_command
            elif rule_type == 'scp':
                handler = self.find_match_scp
            else:
                self.log('fatal: no such rule_type "%s"\n' % rule_type)
                self.raise_and_log_error(ConfigError,
                                         'error parsing config.')
            match = handler(rule)
            if match:
                return match
    # No matches, time to give up.
    raise CommandRejected('command "%s" denied.'
                          % self.original_command_string)
python
{ "resource": "" }
q39846
AuthProgs.exec_command
train
def exec_command(self):
    """Find the matching rule, run its command, and exit with its status.

    This method never returns; it always terminates via sys.exit.
    """
    if not self.original_command_string:
        raise SSHEnvironmentError('no SSH command found; '
                                  'interactive shell disallowed.')
    # Note: 'ssh_original_comand' (sic) is an established log key;
    # preserved for log-parsing compatibility.
    command_info = {'from': self.get_client_ip(),
                    'keyname': self.keyname,
                    'ssh_original_comand': self.original_command_string,
                    'time': time.time()}
    os.environ['AUTHPROGS_KEYNAME'] = self.keyname
    # 126: conventional "command found but cannot execute" status,
    # used when matching/execution fails before a real exit code exists.
    retcode = 126
    try:
        match = self.find_match()
        command_info['command'] = match.get('command')
        self.logdebug('find_match returned "%s"\n' % match)
        retcode = subprocess.call(match['command'])
        command_info['code'] = retcode
    except (CommandRejected, OSError) as err:
        command_info['exception'] = '%s' % err
    self.log('result: %s\n' % command_info)
    sys.exit(retcode)
python
{ "resource": "" }
q39847
_py2_crc16
train
def _py2_crc16(value):
    """Calculate the CRC-16 for the value in Python 2.

    Iterating a py2 ``str`` yields characters, so each one is run
    through ``ord()`` before indexing the lookup table.

    :param str value: The value to checksum
    :rtype: int
    """
    crc = 0
    for char in value:
        index = ((crc >> 8) ^ ord(char)) & 0xff
        crc = ((crc << 8) & 0xffff) ^ _CRC16_LOOKUP[index]
    return crc
python
{ "resource": "" }
q39848
_py3_crc16
train
def _py3_crc16(value):
    """Calculate the CRC-16 for the value in Python 3.

    Iterating ``bytes`` yields ints directly, so no ``ord()`` is needed.

    :param bytes value: The value to checksum
    :rtype: int
    """
    crc = 0
    for byte in value:
        index = ((crc >> 8) ^ byte) & 0xff
        crc = ((crc << 8) & 0xffff) ^ _CRC16_LOOKUP[index]
    return crc
python
{ "resource": "" }
q39849
Extractor.validate_url
train
def validate_url(cls, url: str) -> Optional[Match[str]]:
    """Return the match of *url* against ``cls._VALID_URL``, else None."""
    return re.match(cls._VALID_URL, url)
python
{ "resource": "" }
q39850
Extractor.get_info
train
def get_info(self) -> dict:
    """Return youtube-dl metadata for ``self.url`` without downloading.

    stdout is suppressed so youtube-dl's progress noise stays hidden.
    """
    with suppress_stdout():
        with youtube_dl.YoutubeDL() as ydl:
            return ydl.extract_info(self.url, download=False)
python
{ "resource": "" }
q39851
Extractor.update_entries
train
def update_entries(entries: Entries, data: dict) -> None:
    """Merge ``data`` into every entry, mutating each one in place."""
    # NOTE(review): entries are mutated rather than copied — confirm
    # callers rely on in-place updates before changing this.
    for item in entries:
        item.update(data)
python
{ "resource": "" }
q39852
get_extra_context
train
def get_extra_context(site, ctx):
    'Returns extra data useful to the templates.'
    # XXX: clean this up from obsolete stuff
    ctx['site'] = site
    ctx['feeds'] = feeds = site.active_feeds.order_by('name')

    def get_mod_chk(k):
        # Compute the newest modification/check timestamps across all
        # active feeds, cache both into ctx, and return the requested one.
        mod, chk = (
            (max(vals) if vals else None) for vals in (
                filter(None, it.imap(op.attrgetter(k), feeds))
                for k in ['last_modified', 'last_checked'] ) )
        chk = chk or datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc)
        ctx['last_modified'], ctx['last_checked'] = mod or chk, chk
        return ctx[k]

    # Bind k as a default argument: the previous `lambda: get_mod_chk(k)`
    # late-bound k, so *both* entries resolved to 'last_checked' and
    # templates rendered last_checked where last_modified was expected.
    for k in 'last_modified', 'last_checked':
        ctx[k] = lambda k=k: get_mod_chk(k)

    # media_url is set here for historical reasons,
    # use static_url or STATIC_URL (from django context) in any new templates.
    ctx['media_url'] = ctx['static_url'] =\
        '{}feedjack/{}'.format(settings.STATIC_URL, site.template)
python
{ "resource": "" }
q39853
get_posts_tags
train
def get_posts_tags(subscribers, object_list, feed, tag_name):
    '''Adds a qtags property in every post object in a page.

    Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.'''
    user_obj, tag_obj = None, None
    # One raw-extra query pulls every tag attached to the page's posts.
    tags = models.Tag.objects.extra(
        select=dict(post_id='{0}.{1}'.format(
            *it.imap( connection.ops.quote_name,
                ('feedjack_post_tags', 'post_id') ) )),
        tables=['feedjack_post_tags'],
        where=[
            '{0}.{1}={2}.{3}'.format(*it.imap( connection.ops.quote_name,
                ('feedjack_tag', 'id', 'feedjack_post_tags', 'tag_id') )),
            '{0}.{1} IN ({2})'.format(
                connection.ops.quote_name('feedjack_post_tags'),
                connection.ops.quote_name('post_id'),
                ', '.join([str(post.id) for post in object_list]) ) ] )
    tags_by_post = dict()
    for tag in tags:
        tags_by_post.setdefault(tag.post_id, list()).append(tag)
        if tag_name and tag.name == tag_name:
            tag_obj = tag
    subs_by_feed = dict((sub.feed.id, sub) for sub in subscribers)
    for post in object_list:
        post.qtags = tags_by_post.get(post.id, list())
        post.subscriber = subs_by_feed[post.feed.id]
        if feed == post.feed:
            user_obj = post.subscriber
    return user_obj, tag_obj
python
{ "resource": "" }
q39854
get_page
train
# Resolve a "since" criteria (relative offset like "1d" or one of many
# datetime formats, lazily expanding format variants once into the
# module-global _since_formats), make it tz-aware, then build the
# filtered/sorted Post queryset and return the requested paginator page.
# Raises Http404 on unparsable "since" values or invalid page numbers.
# NOTE(review): in the AmbiguousTimeError fallback,
# `since.replace(tzinfo=timezone)` passes the django.utils.timezone
# *module* as tzinfo rather than a tzinfo instance — likely a bug; the
# intent was presumably timezone.utc or get_current_timezone(). Confirm.
def get_page(site, page=1, **criterias): 'Returns a paginator object and a requested page from it.' global _since_formats_vary if 'since' in criterias: since = criterias['since'] if since in _since_offsets: since = datetime.today() - timedelta(_since_offsets[since]) else: if _since_formats_vary: for fmt, substs in it.product( list(_since_formats), it.chain.from_iterable( it.combinations(_since_formats_vary, n) for n in xrange(1, len(_since_formats_vary)) ) ): for src, dst in substs: fmt = fmt.replace(src, dst) _since_formats.add(fmt) _since_formats_vary = None # to avoid doing it again for fmt in _since_formats: try: since = datetime.strptime(since, fmt) except ValueError: pass else: break else: raise Http404 # invalid format try: criterias['since'] = timezone.make_aware( since, timezone.get_current_timezone() ) except ( timezone.pytz.exceptions.AmbiguousTimeError if timezone.pytz else RuntimeError ): # Since there's no "right" way here anyway... criterias['since'] = since.replace(tzinfo=timezone) order_force = criterias.pop('asc', None) posts = models.Post.objects.filtered(site, **criterias)\ .sorted(site.order_posts_by, force=order_force)\ .select_related('feed') paginator = Paginator(posts, site.posts_per_page) try: return paginator.page(page) except InvalidPage: raise Http404
python
{ "resource": "" }
q39855
StringsMixin.bitpos
train
def bitpos(self, key, bit, start=None, end=None):
    """Return the position of the first bit set to ``1`` or ``0`` in the
    string stored at ``key``.

    The string is treated as an array of bits from left to right: the
    first byte's most significant bit is position 0, the second byte's
    is position 8, and so forth. ``start`` and ``end`` select a *byte*
    range to examine (negative values index from the end of the string,
    as in :meth:`~tredis.RedisClient.getrange`). ``start`` may be given
    without ``end`` — the search then runs to the last byte — but
    ``end`` requires ``start``. Non-existent keys are treated as empty
    strings. Positions are always absolute, even with a range.

    .. versionadded:: 0.2.0

    .. note:: **Time complexity**: ``O(N)``

    :param key: The key to get
    :type key: :class:`str`, :class:`bytes`
    :param int bit: The bit value to search for (``1`` or ``0``)
    :param int start: The first byte position to evaluate
    :param int end: The last byte position to evaluate
    :returns: The position of the first bit set to ``1`` or ``0``
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError`
    """
    # The previous guard `0 < bit > 1` only rejected values > 1 and let
    # e.g. -1 through; test membership instead.
    if bit not in (0, 1):
        raise ValueError('bit must be 1 or 0, not {}'.format(bit))
    # end-without-start is the only invalid combination: Redis BITPOS
    # accepts a lone start (the docstring always said so, but the old
    # code rejected it with a misleading message).
    if start is None and end is not None:
        raise ValueError('Can not specify end without a start')
    command = [b'BITPOS', key, ascii(bit)]
    if start is not None:
        command.append(ascii(start))
    if end is not None:
        command.append(ascii(end))
    return self._execute(command)
python
{ "resource": "" }
q39856
StringsMixin.decrby
train
def decrby(self, key, decrement):
    """Decrement the integer stored at ``key`` by ``decrement``.

    A missing key is treated as ``0`` first; an error is returned for
    non-integer values. Limited to 64-bit signed integers. See
    :meth:`~tredis.RedisClient.incr` for details on these operations.

    .. versionadded:: 0.2.0

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to decrement
    :type key: :class:`str`, :class:`bytes`
    :param int decrement: The amount to decrement by
    :returns: The value of key after the decrement
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'DECRBY', key, ascii(decrement)]
    return self._execute(command)
python
{ "resource": "" }
q39857
StringsMixin.incrby
train
def incrby(self, key, increment):
    """Increment the integer stored at ``key`` by ``increment``.

    A missing key is treated as ``0`` first; an error is returned for
    non-integer values. Limited to 64-bit signed integers. See
    :meth:`~tredis.RedisClient.incr` for details on these operations.

    .. versionadded:: 0.2.0

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to increment
    :type key: :class:`str`, :class:`bytes`
    :param int increment: The amount to increment by
    :returns: The value of key after the increment
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'INCRBY', key, ascii(increment)]
    return self._execute(command)
python
{ "resource": "" }
q39858
StringsMixin.setex
train
def setex(self, key, seconds, value):
    """Set ``key`` to ``value`` with a TTL of ``seconds``, atomically.

    Equivalent to SET followed by EXPIRE inside MULTI/EXEC, but faster;
    provided because this pattern is common when Redis is a cache. An
    error is returned when ``seconds`` is invalid.

    .. versionadded:: 0.2.0

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to set
    :type key: :class:`str`, :class:`bytes`
    :param int seconds: Number of seconds for TTL
    :param value: The value to set
    :type value: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'SETEX', key, ascii(seconds), value]
    return self._execute(command, b'OK')
python
{ "resource": "" }
q39859
StringsMixin.setrange
train
def setrange(self, key, offset, value):
    """Overwrite the string at ``key`` starting at byte ``offset``.

    If ``offset`` is past the current length, the string is zero-padded
    up to it; missing keys are treated as empty strings. The maximum
    offset is 2\\ :sup:`29`-1 (536870911), since Redis strings cap at
    512 MB. Beware that setting a far offset on a short/absent value
    forces Redis to allocate all intermediate memory at once, which can
    block the server briefly; subsequent calls reuse the allocation.

    .. versionadded:: 0.2.0

    .. note:: **Time complexity**: ``O(1)`` (amortized; ``O(M)`` in the
        length of ``value`` when copying dominates).

    :param key: The key to modify
    :type key: :class:`str`, :class:`bytes`
    :param value: The value to write at ``offset``
    :type value: :class:`str`, :class:`bytes`, :class:`int`
    :returns: The length of the string after the modification
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'SETRANGE', key, ascii(offset), value]
    return self._execute(command)
python
{ "resource": "" }
q39860
Extension.negotiate_safe
train
def negotiate_safe(self, name, params):
    """Validate the extension ``name``/``params`` sent by the client.

    Every parameter must be one the extension declares in
    ``self.defaults``. Returns a dict of accepted parameters, or None
    when the request is not accepted.
    """
    if any(param not in self.defaults for param in params.iterkeys()):
        return
    try:
        return dict(self.negotiate(name, params))
    except (KeyError, ValueError, AssertionError):
        # Treat any negotiation failure as "not accepted".
        pass
python
{ "resource": "" }
q39861
get
train
def get(request):
    """Return the logged-in user's preferences plus gallery subscriptions.

    :returns: json
    """
    res = Result()
    prefs, created = UserPref.objects.get_or_create(
        user=request.user,
        defaults={'data': json.dumps(DefaultPrefs.copy())})
    data = prefs.json()
    data['subscriptions'] = [
        sub.json()
        for sub in GallerySubscription.objects.filter(user=request.user)]
    res.append(data)
    return JsonResponse(res.asDict())
python
{ "resource": "" }
q39862
post
train
def post(request):
    """Set one key/value on the logged-in user's preferences.

    :param key: Key to set
    :type key: str
    :param val: Value to set (JSON-decoded when possible)
    :type val: primitive
    :returns: json
    """
    data = request.POST or json.loads(request.body)['body']
    key = data.get('key', None)
    val = data.get('val', None)
    res = Result()
    if key is not None and val is not None:
        prefs, created = UserPref.objects.get_or_create(user=request.user)
        if created:
            prefs.data = json.dumps(DefaultPrefs.copy())
            prefs.save()
        try:
            val = json.loads(val)
        except (TypeError, ValueError):
            # Not JSON; store the raw value as-is.
            pass
        prefs.setKey(key, val)
        prefs.save()
        res.append(prefs.json())
    return JsonResponse(res.asDict())
python
{ "resource": "" }
q39863
get_modified_date
train
def get_modified_date(parsed, raw):
    'Return best possible guess to post modification timestamp.'
    if parsed:
        return feedparser_ts(parsed)
    if not raw:
        return None
    # Feedparser chokes on some timestamps (e.g. "July 30, 2013").
    # Fall back to coreutils' "date", which parses virtually anything,
    # though spawning a process is more expensive.
    from subprocess import Popen, PIPE
    with open(os.devnull, 'w') as devnull:
        proc = Popen(
            ['date', '+%s', '-d', raw.replace('_', ' ')],
            stdout=PIPE, stderr=devnull)
        stdout = proc.stdout.read()
    if not proc.wait():
        return datetime.fromtimestamp(int(stdout.strip()), tz=timezone.utc)
    # Report the caller's original input; the previous code interpolated
    # the (empty) subprocess output here, producing a useless message.
    raise ValueError('Unrecognized raw value format: {0!r}'.format(raw))
python
{ "resource": "" }
q39864
query_realtime_routine
train
def query_realtime_routine(bus_name, cur_station=None):
    '''Get real time routine.

    TODO support fuzzy matching.

    :param bus_name: the routine name of the bus.
    :param cur_station: current station, defaults to the starting
        station of each routine.
    '''
    routines = query_routines(bus_name)
    if not routines:
        return
    results = []
    for routine in routines:
        station = cur_station or routine['starting_station']
        page = _get_realtime_page(bus_name, routine['bid'], station)
        results.append(extract_bus_routine(page))
    return results
python
{ "resource": "" }
q39865
getRoot
train
def getRoot():
    """Return MEDIA_ROOT as a Path with forward slashes and a trailing slash."""
    media_root = settings.MEDIA_ROOT.replace('\\', '/')
    if not media_root.endswith('/'):
        media_root = media_root + '/'
    return path.Path(media_root)
python
{ "resource": "" }
q39866
emailUser
train
def emailUser(video, error=None):
    """Email the video's author that processing finished (or failed)."""
    html = render_to_string('frog/video_email.html', {
        'user': video.author,
        'error': error,
        'video': video,
        'SITE_URL': FROG_SITE_URL,
    })
    subject = 'Video Processing Finished{}'.format(error or '')
    from_email = 'noreply@frogmediaserver.com'
    recipient = video.author.email
    send_mail(subject, 'This is an important message.', from_email,
              [recipient], html_message=html)
python
{ "resource": "" }
q39867
SortedSetsMixin.zrange
train
def zrange(self, key, start=0, stop=-1, with_scores=False):
    """Return members of the sorted set at ``key`` between indexes
    ``start`` and ``stop``, ordered lowest-to-highest score (ties in
    lexicographical order).

    Indexes are zero-based and inclusive; negatives count from the end
    (``-1`` is the last element). Out-of-range indexes are clamped, and
    an empty list is returned when ``start`` exceeds the set or
    ``start > stop``. With ``with_scores`` the reply interleaves
    ``value1, score1, ..., valueN, scoreN``. See
    :meth:`tredis.Client.zrevrange` for descending order.

    .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` elements
        in the set and ``M`` elements returned.

    :param key: The key of the sorted set
    :type key: :class:`str`, :class:`bytes`
    :param int start: The starting index of the sorted set
    :param int stop: The ending index of the sorted set
    :param bool with_scores: Return the scores with the elements
    :rtype: list
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'ZRANGE', key, start, stop]
    if with_scores:
        command.append('WITHSCORES')
    return self._execute(command)
python
{ "resource": "" }
q39868
SortedSetsMixin.zrem
train
def zrem(self, key, *members):
    """Remove ``members`` from the sorted set stored at ``key``.

    Non-existing members are ignored; an error is returned when ``key``
    exists but does not hold a sorted set.

    .. note:: **Time complexity**: ``O(M*log(N))`` with ``N`` elements
        in the set and ``M`` members removed.

    :param key: The key of the sorted set
    :type key: :class:`str`, :class:`bytes`
    :param members: One or more member values to remove
    :type members: :class:`str`, :class:`bytes`
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'ZREM', key]
    command.extend(members)
    return self._execute(command)
python
{ "resource": "" }
q39869
SortedSetsMixin.zremrangebyscore
train
def zremrangebyscore(self, key, min_score, max_score):
    """Remove every member of the sorted set at ``key`` whose score lies
    between ``min_score`` and ``max_score``.

    Interval syntax matches :meth:`~tredis.RedisClient.zrangebyscore`.
    Returns the number of elements removed.

    .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` elements
        in the set and ``M`` elements removed.

    :param key: The key of the sorted set
    :type key: :class:`str`, :class:`bytes`
    :param min_score: Lowest score definition
    :type min_score: :class:`str`, :class:`bytes`
    :param max_score: Highest score definition
    :type max_score: :class:`str`, :class:`bytes`
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'ZREMRANGEBYSCORE', key, min_score, max_score]
    return self._execute(command)
python
{ "resource": "" }
q39870
SortedSetsMixin.zrevrange
train
def zrevrange(self, key, start=0, stop=-1, with_scores=False):
    """Return members of the sorted set at ``key`` between indexes
    ``start`` and ``stop``, ordered highest-to-lowest score (ties in
    descending lexicographical order).

    Apart from the reversed ordering this behaves exactly like
    :py:meth:`~tredis.Client.zrange`.

    .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` elements
        in the set and ``M`` elements returned.

    :param key: The key of the sorted set
    :type key: :class:`str`, :class:`bytes`
    :param int start: The starting index of the sorted set
    :param int stop: The ending index of the sorted set
    :param bool with_scores: Return the scores with the elements
    :rtype: list
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'ZREVRANGE', key, start, stop]
    if with_scores:
        command.append('WITHSCORES')
    return self._execute(command)
python
{ "resource": "" }
q39871
getkey
train
def getkey(stype, site_id=None, key=None):
    'Returns the cache key depending on its type.'
    base = '{0}.feedjack'.format(settings.CACHE_MIDDLEWARE_KEY_PREFIX)
    if stype == T_HOST:
        return '{0}.hostcache'.format(base)
    if stype == T_ITEM:
        return '{0}.{1}.item.{2}'.format(base, site_id, str2md5(key))
    if stype == T_META:
        return '{0}.{1}.meta'.format(base, site_id)
    if stype == T_INTERVAL:
        return '{0}.interval.{1}'.format(base, str2md5(key))
python
{ "resource": "" }
q39872
feed_interval_get
train
def feed_interval_get(feed_id, parameters):
    'Get adaptive interval between checks for a feed.'
    cached = cache.get(getkey(
        T_INTERVAL, key=feed_interval_key(feed_id, parameters)))
    # Older cache entries may hold a bare interval instead of an
    # (interval, timestamp) tuple; normalize to the tuple form.
    if isinstance(cached, tuple):
        return cached
    return cached, None
python
{ "resource": "" }
q39873
feed_interval_set
train
def feed_interval_set(feed_id, parameters, interval, interval_ts):
    'Set adaptive interval between checks for a feed.'
    cache_key = getkey(
        T_INTERVAL, key=feed_interval_key(feed_id, parameters))
    cache.set(cache_key, (interval, interval_ts))
python
{ "resource": "" }
q39874
feed_interval_delete
train
def feed_interval_delete(feed_id, parameters):
    'Invalidate cached adaptive interval value.'
    cache_key = getkey(
        T_INTERVAL, key=feed_interval_key(feed_id, parameters))
    cache.delete(cache_key)
python
{ "resource": "" }
q39875
cache_set
train
def cache_set(site, key, data):
    '''Sets cache data for a site.

    All keys related to a site are tracked in a per-site meta key so
    they can be invalidated together later.'''
    item_key = getkey(T_ITEM, site.id, key)
    meta_key = getkey(T_META, site.id)
    # The meta key itself effectively never expires.
    longdur = 365 * 24 * 60 * 60
    known = cache.get(meta_key)
    if not known:
        cache.set(meta_key, [item_key], longdur)
    elif item_key not in known:
        known.append(item_key)
        cache.set(meta_key, known, longdur)
    cache.set(item_key, data, site.cache_duration)
python
{ "resource": "" }
q39876
cache_delsite
train
def cache_delsite(site_id):
    'Removes all cache data from a site.'
    meta_key = getkey(T_META, site_id)
    tracked = cache.get(meta_key)
    if not tracked:
        return
    for item_key in tracked:
        cache.delete(item_key)
    cache.delete(meta_key)
python
{ "resource": "" }
q39877
_canvas_route
train
# Decorator factory that registers a Flask route acting as a Facebook
# canvas endpoint: it validates the signed_request POSTed by Facebook,
# builds a User from the decoded payload, checks the granted permission
# set against CANVAS_SCOPE (redirecting to the authorize flow when
# missing), and injects the User into the wrapped view when the view
# declares the canvas-user argument (_ARG_KEY).
# NOTE(review): `args = args[:idx - 1] + (user,) + args[idx:]` looks off
# by one (it drops the element at idx-1 rather than inserting at idx) —
# confirm against a view whose user arg is not in the first position.
# NOTE(review): `abort`, `redirect`, `flask_request`, `app`, `User`,
# `_authorize`, `_decode_signed_user` and `_ARG_KEY` come from the
# enclosing module, not visible in this chunk.
def _canvas_route(self, *args, **kwargs): """ Decorator for canvas route """ def outer(view_fn): @self.route(*args, **kwargs) def inner(*args, **kwargs): fn_args = getargspec(view_fn) try: idx = fn_args.args.index(_ARG_KEY) except ValueError: idx = -1 if idx > -1: if 'error' in flask_request.args: return redirect('%s?error=%s' % ( self.config.get('CANVAS_ERROR_URI', '/'), flask_request.args.get('error'))) if 'signed_request' not in flask_request.form: self.logger.error('signed_request not in request.form') abort(403) try: _, decoded_data = _decode_signed_user( *flask_request.form['signed_request'].split('.')) except ValueError as e: self.logger.error(e.message) abort(403) if 'oauth_token' not in decoded_data: app.logger.info('unauthorized user, redirecting') return _authorize() user = User(**decoded_data) if not app.config.get('CANVAS_SKIP_AUTH_CHECK', False) \ and not user.has_permissions(): self.logger.info( 'user does not have the required permission set.') return _authorize() self.logger.info('all required permissions have been granted') args = args[:idx - 1] + (user,) + args[idx:] return view_fn(*args, **kwargs) return inner return outer
python
{ "resource": "" }
q39878
_decode_signed_user
train
def _decode_signed_user(encoded_sig, encoded_data):
    """Decode and verify Facebook's ``signed_request`` payload.

    Returns ``(signature, data_dict)``; raises ``ValueError`` when the
    HMAC-SHA256 signature does not match the payload.
    """
    signature = _decode(encoded_sig)
    payload = loads(_decode(encoded_data))
    expected = hmac.new(app.config['CANVAS_CLIENT_SECRET'],
                        encoded_data, sha256).digest()
    if signature != expected:
        raise ValueError("sig doesn't match hash")
    return signature, payload
python
{ "resource": "" }
q39879
User.request
train
def request(self, path, data=None, method='GET'):
    """Call the Facebook Graph API at ``path`` with this user's token.

    Returns the JSON-decoded response body.
    """
    graph = 'https://graph.facebook.com'
    url = '{0}{1}?access_token={2}'.format(graph, path, self['oauth_token'])
    req = Request(url, data=data)
    # urllib2 only supports GET/POST natively; override for other verbs.
    req.get_method = lambda: method
    return loads(urlopen(req).read())
python
{ "resource": "" }
q39880
User.has_permissions
train
def has_permissions(self):
    """Return True when the user granted every permission in CANVAS_SCOPE."""
    granted = self.request('/me/permissions')['data'][0].keys()
    required = app.config['CANVAS_SCOPE'].split(',')
    return all(perm in granted for perm in required)
python
{ "resource": "" }
q39881
get_calculator_impstr
train
def get_calculator_impstr(calculator_name):
    """Return the import statement string for the requested calculator.

    ``None`` or ``"gpaw"`` map to GPAW, ``"espresso"`` to the espresso
    package; known ASE calculator names map to their
    ``ase.calculators.<module>`` class; anything else is treated as a
    ``"module.ClassName"`` spec under ``ase.calculators``.
    """
    # Check None before calling .lower(): the previous order raised
    # AttributeError for calculator_name=None instead of defaulting to
    # GPAW as documented.
    if calculator_name is None or calculator_name.lower() == "gpaw":
        return "from gpaw import GPAW as custom_calculator"
    if calculator_name.lower() == "espresso":
        return "from espresso import espresso as custom_calculator"
    possibilities = {"abinit":"abinit.Abinit",
                     "aims":"aims.Aims",
                     "ase_qmmm_manyqm":"AseQmmmManyqm",
                     "castep":"Castep",
                     "dacapo":"Dacapo",
                     "dftb":"Dftb",
                     "eam":"EAM",
                     "elk":"ELK",
                     "emt":"EMT",
                     "exciting":"Exciting",
                     "fleur":"FLEUR",
                     "gaussian":"Gaussian",
                     "gromacs":"Gromacs",
                     "mopac":"Mopac",
                     "morse":"MorsePotential",
                     "nwchem":"NWChem",
                     'siesta':"Siesta",
                     "tip3p":"TIP3P",
                     "turbomole":"Turbomole",
                     "vasp":"Vasp",
                     }
    current_val = possibilities.get(calculator_name.lower())
    if current_val:
        # Some table entries are module-qualified ("abinit.Abinit");
        # importing them verbatim produced invalid statements like
        # "from ase.calculators.abinit import abinit.Abinit", so keep
        # only the class name.
        package, class_name = calculator_name, current_val.rsplit('.', 1)[-1]
    else:
        package, class_name = calculator_name.rsplit('.', 1)
    return ("from ase.calculators.{} import {} as custom_calculator"
            .format(package, class_name))
python
{ "resource": "" }
q39882
get_optimizer_impstr
train
def get_optimizer_impstr(optimizer_name):
    """Return the import statement string for the requested optimizer.

    Known names map to classes exported by ``ase.optimize``; anything
    else is treated as a ``"module.ClassName"`` spec under
    ``ase.optimize``.
    """
    known = {"bfgs":"BFGS",
             "bfgslinesearch":"BFGSLineSearch",
             "fire":"FIRE",
             "goodoldquasinewton":"GoodOldQuasiNewton",
             "hesslbfgs":"HessLBFGS",
             "lbfgs":"LBFGS",
             "lbfgslinesearch":"LBFGSLineSearch",
             "linelbfgs":"LineLBFGS",
             "mdmin":"MDMin",
             "ndpoly":"NDPoly",
             "quasinewton":"QuasiNewton",
             "scipyfmin":"SciPyFmin",
             "scipyfminbfgs":"SciPyFminBFGS",
             "scipyfmincg":"SciPyFminCG",
             "scipyfminpowell":"SciPyFminPowell",
             "scipygradientlessoptimizer":"SciPyGradientlessOptimizer",
             }
    class_name = known.get(optimizer_name.lower())
    if class_name:
        return "from ase.optimize import {} as custom_optimizer".format(
            class_name)
    package, class_name = optimizer_name.rsplit('.', 1)
    return "from ase.optimize.{} import {} as custom_optimizer".format(
        package, class_name)
python
{ "resource": "" }
q39883
convert_the_getters
train
def convert_the_getters(getters):
    """Normalize getter specs into (method_name, argument_string) pairs.

    Each item is either a bare method name, or a (name, args) pair whose
    args are rendered by convert_the_args().
    """
    pairs = []
    for getter in getters:
        if isinstance(getter, basestring):
            pairs.append((getter, ""))
        else:
            method_name, raw_args = getter
            pairs.append((method_name, convert_the_args(raw_args)))
    return pairs
python
{ "resource": "" }
q39884
convert_the_args
train
def convert_the_args(raw_args):
    """Render method arguments (dict, or list/tuple of str/dict) as a
    call-argument string.
    """
    if not raw_args:
        return ""
    if isinstance(raw_args, dict):
        return ", ".join(
            "{}={}".format(k, v) for k, v in raw_args.iteritems())
    if isinstance(raw_args, (list, tuple)):
        rendered = []
        for item in raw_args:
            if isinstance(item, basestring):
                rendered.append(item)
            elif isinstance(item, dict):
                rendered.append(", ".join(
                    "{}={}".format(k, v) for k, v in item.iteritems()))
            else:
                raise ValueError("Error preparing the getters")
        return ", ".join(rendered)
    raise ValueError("Couldn't recognize list of getters")
python
{ "resource": "" }
q39885
Converter.dd_docs
train
def dd_docs(self):
    """Copy and convert various documentation files.

    Builds the man page from markdown via ronn, renders the doc/*.md files
    in MARKDOWN2HTML to HTML, and copies the top-level markdown files in
    MARKDOWN2TEXT without their ``.md`` suffix.  Every file produced is
    recorded in ``self.created`` so ``rm_docs`` can remove it later.

    :raises Exception: when ronn or markdown conversion fails
    """
    top = os.path.join(os.path.dirname(__file__))
    doc = os.path.join(top, 'doc')

    # Markdown to ronn to man page
    man_md = os.path.join(doc, 'authprogs.md')
    man_ronn = os.path.join(doc, 'authprogs.1.ronn')
    man_1 = os.path.join(doc, 'authprogs.1')

    # Create manpage
    try:
        if not os.path.exists(man_1):
            shutil.copy(man_md, man_ronn)
            self.created.append(man_ronn)
            retval = subprocess.call(['ronn', '-r', man_ronn])
            if retval != 0:
                raise Exception('ronn man page conversion failed, '
                                'returned %s' % retval)
            self.created.append(man_1)
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are not swallowed.
        raise Exception('ronn required for manpage conversion - do you '
                        'have it installed?')

    # Markdown files in docs dir get converted to .html
    for name in MARKDOWN2HTML:
        htmlfile = os.path.join(doc, '%s.html' % name)
        if os.path.exists(htmlfile):
            continue
        self.created.append(htmlfile)
        stdout = runcmd(['python', '-m', 'markdown',
                         os.path.join(doc, '%s.md' % name)])[1]
        if not stdout:
            raise Exception('markdown conversion failed, no output.')
        # Use a context manager so the handle is closed even if the write
        # fails (the original left the file open on the error path).
        with open(htmlfile, 'w') as target:
            target.write(stdout)

    # Markdown files in top level just get renamed sans .md
    for name in MARKDOWN2TEXT:
        target = os.path.join(top, name)
        if os.path.exists(target):
            continue
        # BUG FIX: the source file is "<name>.md"; the original joined
        # ``top`` with the already-joined ``target`` path, which with a
        # relative ``top`` pointed at a nonexistent "top/top/<name>.md".
        source = os.path.join(top, '%s.md' % name)
        shutil.copy(source, target)
        self.created.append(target)
python
{ "resource": "" }
q39886
Converter.rm_docs
train
def rm_docs(self):
    """Delete every generated documentation file recorded in ``self.created``.

    Paths that no longer exist are skipped silently.
    """
    for path in self.created:
        if os.path.exists(path):
            os.unlink(path)
python
{ "resource": "" }
q39887
post
train
def post(request):
    """Create a comment on the object identified by ``guid`` in the payload
    and return it serialized.

    Accepts form-encoded POST data or a JSON body carrying a ``body`` key.
    On success the comment is saved against site 1, the target object's
    ``comment_count`` is incremented, and the object's author is notified
    by email.

    :param request: the incoming HTTP request
    :returns: JsonResponse wrapping a Result dict
    """
    payload = request.POST or json.loads(request.body)['body']
    guid = payload.get('guid', None)
    res = Result()
    if guid:
        target = getObjectsFromGuids([guid])[0]

        comment = Comment()
        comment.comment = payload.get('comment', 'No comment')
        comment.user = request.user
        comment.user_name = request.user.get_full_name()
        comment.user_email = request.user.email
        comment.content_object = target
        # For our purposes, we never have more than one site
        comment.site_id = 1
        comment.save()

        target.comment_count += 1
        target.save()

        emailComment(comment, target, request)

        res.append(commentToJson(comment))

    return JsonResponse(res.asDict())
python
{ "resource": "" }
q39888
emailComment
train
def emailComment(comment, obj, request):
    """Notify the author of ``obj`` that ``comment`` was added to it.

    Sending is skipped when the author has disabled comment emails in
    their frog preferences, or when the commenter is the author.

    :param comment: the newly created Comment
    :param obj: the commented-on object (Image or other gallery item)
    :param request: the request whose user made the comment
    """
    if not obj.author.frog_prefs.get().json()['emailComments']:
        return
    if obj.author == request.user:
        return

    html = render_to_string('frog/comment_email.html', {
        'user': comment.user,
        'comment': comment.comment,
        'object': obj,
        'action_type': 'commented on',
        'image': isinstance(obj, Image),
        'SITE_URL': FROG_SITE_URL,
    })
    subject = '{}: Comment from {}'.format(getSiteConfig()['name'],
                                           comment.user_name)
    sender = comment.user_email
    recipient = obj.author.email
    text_content = 'This is an important message.'

    send_mail(subject, text_content, sender, [recipient], html_message=html)
python
{ "resource": "" }
q39889
ClusterMixin.cluster_nodes
train
def cluster_nodes(self):
    """Return this node's view of the current cluster configuration.

    Issues ``CLUSTER NODES`` and parses the serialized node table into
    :class:`~tredis.cluster.ClusterNode` instances, one per line of the
    reply, including each node's id, address, flags, master/replica field,
    timing columns, link state and assigned slot ranges.  Clients that
    only need the map from hash slots to node addresses should use
    ``CLUSTER SLOTS`` instead; this command is intended for administrative
    tasks, debugging and configuration inspection (it is what
    ``redis-trib`` uses to manage a cluster).

    .. versionadded:: 0.7.0

    :rtype: list(:class:`~tredis.cluster.ClusterNode`)
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    def format_response(result):
        nodes = []
        for row in result.decode('utf-8').split('\n'):
            if not row:
                continue
            fields = row.split(' ')
            # Columns 8+ hold the slot assignments, either "N" or "N-M";
            # single slots are normalized to an (N, N) range.
            slots = []
            for token in fields[8:]:
                bounds = token.split('-') if '-' in token else [token, token]
                slots.append((int(bounds[0]), int(bounds[1])))
            addr = common.split_connection_host_port(fields[1])
            nodes.append(ClusterNode(
                fields[0], addr[0], addr[1], fields[2], fields[3],
                int(fields[4]), int(fields[5]), int(fields[6]),
                fields[7], slots))
        return nodes

    return self._execute(
        ['CLUSTER', 'NODES'], format_callback=format_response)
python
{ "resource": "" }
q39890
SetsMixin.sadd
train
def sadd(self, key, *members):
    """Add the given members to the set stored at ``key``.

    Members already present are ignored; the set is created if ``key``
    does not exist.  An error is returned when the value at ``key`` is
    not a set.  Returns :data:`True` when every requested member was
    added; otherwise the number of members actually added.

    .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of
       members to be added.

    :param key: The key of the set
    :type key: :class:`str`, :class:`bytes`
    :param members: One or more values to add to the set
    :returns: Number of items added to the set
    :rtype: bool, int
    """
    command = [b'SADD', key]
    command.extend(members)
    return self._execute(command, len(members))
python
{ "resource": "" }
q39891
SetsMixin.smove
train
def smove(self, source, destination, member):
    """Atomically move ``member`` from the set at ``source`` to the set at
    ``destination``.

    If ``source`` does not exist or does not contain ``member``, nothing
    happens and :data:`False` is returned; otherwise the element is
    removed from ``source`` and added to ``destination`` (if it already
    exists there, it is only removed from ``source``).  An error is
    returned if either key does not hold a set.

    .. note:: **Time complexity**: ``O(1)``

    :param source: The source set key
    :type source: :class:`str`, :class:`bytes`
    :param destination: The destination set key
    :type destination: :class:`str`, :class:`bytes`
    :param member: The member value to move
    :type member: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'SMOVE', source, destination, member]
    return self._execute(command, 1)
python
{ "resource": "" }
q39892
SetsMixin.spop
train
def spop(self, key, count=None):
    """Remove and return one or more random members of the set at ``key``.

    Unlike :meth:`~tredis.RedisClient.srandmember`, the returned
    member(s) are removed from the set.  The optional ``count`` argument
    is only supported from Redis 3.2 onward and is not available in 2.6,
    2.8 or 3.0.

    .. note:: **Time complexity**: ``O(1)`` without the count argument,
       otherwise ``O(N)`` where ``N`` is the absolute value of the passed
       count.

    :param key: The key to get one or more random members from
    :type key: :class:`str`, :class:`bytes`
    :param int count: The number of members to return
    :rtype: bytes, list
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    if count:  # pragma: nocover
        command = [b'SPOP', key, ascii(count).encode('ascii')]
    else:
        command = [b'SPOP', key]
    return self._execute(command)
python
{ "resource": "" }
q39893
SetsMixin.srandmember
train
def srandmember(self, key, count=None):
    """Return one or more random elements from the set stored at ``key``
    without removing them.

    With only ``key``, a single random element is returned.  Since Redis
    2.6 a ``count`` may be given: a positive count yields that many
    distinct elements, while a negative count may repeat elements and
    returns ``abs(count)`` of them.  This differs from
    :meth:`~tredis.RedisClient.spop`, which also removes the selected
    element from the set.

    .. note:: **Time complexity**: ``O(1)`` without the count argument,
       otherwise ``O(N)`` where ``N`` is the absolute value of the passed
       count.

    :param key: The key to get one or more random members from
    :type key: :class:`str`, :class:`bytes`
    :param int count: The number of members to return
    :rtype: bytes, list
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    extra = [ascii(count).encode('ascii')] if count else []
    return self._execute([b'SRANDMEMBER', key] + extra)
python
{ "resource": "" }
q39894
SetsMixin.srem
train
def srem(self, key, *members):
    """Remove the given members from the set stored at ``key``.

    Members that are not in the set are ignored; a missing key is treated
    as an empty set and yields ``0``.  An error is returned when the
    value at ``key`` is not a set.  Returns :data:`True` when every
    requested member was removed; otherwise the number of members
    actually removed.

    .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of
       members to be removed.

    :param key: The key to remove the member from
    :type key: :class:`str`, :class:`bytes`
    :param mixed members: One or more member values to remove
    :rtype: bool, int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'SREM', key]
    command.extend(members)
    return self._execute(command, len(members))
python
{ "resource": "" }
q39895
TokFm._extract_id
train
def _extract_id(self) -> str: """ Get video_id needed to obtain the real_url of the video. Raises: VideoIdNotMatchedError: If video_id is not matched with regular expression. """ match = re.match(self._VALID_URL, self.url) if match: return match.group('video_id') else: raise VideoIdNotMatchedError
python
{ "resource": "" }
q39896
convert_time_units
train
def convert_time_units(t):
    """Convert a time in seconds into a human-readable string.

    Chooses ns/us/ms/s units based on the magnitude of ``t``.  ``t`` must
    be non-negative; zero is special-cased because ``log10(0)`` is
    undefined.

    :param t: time in seconds
    :returns: formatted string such as ``"1.500 ms"``
    """
    if t == 0:
        return '0 s'

    order = log10(t)
    # BUG FIX: the original branch chain left ``factor`` unbound for
    # order <= -9 (times of 1 ns and below), raising UnboundLocalError.
    # Everything below a microsecond is now reported in nanoseconds.
    if order < -6:
        time_units, factor = 'ns', 1000000000
    elif order < -3:
        time_units, factor = 'us', 1000000
    elif order < -1:
        time_units, factor = 'ms', 1000.
    else:
        time_units, factor = 's', 1

    return "{:.3f} {}".format(factor * t, time_units)
python
{ "resource": "" }
q39897
globalize_indentation
train
def globalize_indentation(src):
    """Strip the first line's indentation from every line of ``src`` so
    the code can run at global scope.

    Only the first line's leading spaces are measured; each line is then
    sliced by that amount, and a trailing newline is appended per line.

    :param src: source code string (must have at least one line)
    :returns: the dedented source, ending with a newline
    """
    lines = src.splitlines()
    # BUG FIX: use lstrip rather than strip so trailing spaces on the
    # first line do not inflate the measured indent.  The unused
    # enumerate index is also gone.
    indent = len(lines[0]) - len(lines[0].lstrip(' '))
    return ''.join(line[indent:] + '\n' for line in lines)
python
{ "resource": "" }
q39898
remove_decorators
train
def remove_decorators(src):
    """Remove ``Benchmark`` decorators (including multi-line ones) from
    ``src`` and return the remaining source.

    A decorator line whose parentheses are unbalanced opens continuation
    mode; subsequent lines are dropped until the parentheses close.

    :param src: source code string
    :returns: the source without Benchmark decorator lines
    """
    kept = []
    open_parens = 0
    for raw in src.strip().splitlines():
        line = raw.strip()
        if open_parens > 0:
            # Inside a multi-line decorator: keep dropping lines while
            # tracking the parenthesis balance (handles nested parens,
            # which the old "ends with ')'" test did not).
            open_parens += line.count('(') - line.count(')')
            continue
        if line.startswith('@') and 'Benchmark' in line:
            # BUG FIX: a bare ``@Benchmark`` (no parentheses) previously
            # switched on multi-line mode because it does not end with
            # ')', swallowing the following ``def`` line.  Tracking the
            # paren balance makes a paren-free decorator single-line.
            open_parens = line.count('(') - line.count(')')
            continue
        kept.append(raw)
    return '\n'.join(kept)
python
{ "resource": "" }
q39899
walk_tree
train
def walk_tree(start, attr):
    """Yield ``start`` and all of its descendants in pre-order
    (depth-first), fully exhausting each lineage before moving to the
    next sibling of that generation.

    :param start: root node of the traversal
    :param attr: name of the attribute holding a node's children
    """
    # BUG FIX / PERF: an explicit stack replaces the original
    # insert-into-the-iterated-list approach, which called list.index()
    # (O(n) per node) and picked the wrong position when the same node
    # object appeared more than once in the tree.
    stack = [start]
    while stack:
        node = stack.pop()
        yield node
        # Push children reversed so the first child is popped (and thus
        # visited) next, preserving the original pre-order.
        stack.extend(reversed(getattr(node, attr)))
python
{ "resource": "" }