Fields:
  _id               string, length 2-7
  title             string, length 1-88
  partition         string, 3 classes
  text              string, length 75-19.8k
  language          string, 1 class
  meta_information  dict
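Each record below carries these six fields. As a minimal sketch of how the summary statistics above could be recomputed, assuming the records are available as an iterable of plain dicts with exactly these keys (an assumption about how the dump is consumed, not something the dump itself guarantees):

from collections import Counter

def summarize(records):
    """Tally records per partition and track the range of text lengths."""
    partitions = Counter()           # e.g. how many 'train' records
    min_len, max_len = None, 0
    for rec in records:
        partitions[rec['partition']] += 1
        n = len(rec['text'])
        min_len = n if min_len is None else min(min_len, n)
        max_len = max(max_len, n)
    return partitions, (min_len, max_len)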
q11100
InstanceAssertionsMixin.assert_instance_created
train
def assert_instance_created(self, model_class, **kwargs):
    """
    Checks if a model instance was created in the database.

    For example::

        >>> with self.assert_instance_created(Article, slug='lorem-ipsum'):
        ...     Article.objects.create(slug='lorem-ipsum')

    """
    return _InstanceContext(
        self.assert_instance_does_not_exist,
        self.assert_instance_exists,
        model_class, **kwargs
    )
python
{ "resource": "" }
q11101
InstanceAssertionsMixin.assert_instance_deleted
train
def assert_instance_deleted(self, model_class, **kwargs):
    """
    Checks if the model instance was deleted from the database.

    For example::

        >>> with self.assert_instance_deleted(Article, slug='lorem-ipsum'):
        ...     Article.objects.get(slug='lorem-ipsum').delete()

    """
    return _InstanceContext(
        self.assert_instance_exists,
        self.assert_instance_does_not_exist,
        model_class, **kwargs
    )
python
{ "resource": "" }
q11102
_split_into_mimetype_and_priority
train
def _split_into_mimetype_and_priority(x):
    """Split an accept header item into mimetype and priority.

    >>> _split_into_mimetype_and_priority('text/*')
    ('text/*', 1.0)
    >>> _split_into_mimetype_and_priority('application/json;q=0.5')
    ('application/json', 0.5)
    """
    if ';' in x:
        content_type, priority = x.split(';')
        casted_priority = float(priority.split('=')[1])
    else:
        content_type, casted_priority = x, 1.0

    content_type = content_type.strip()  # e.g. ' text/html' -> 'text/html'
    return content_type, casted_priority
python
{ "resource": "" }
q11103
_parse_and_sort_accept_header
train
def _parse_and_sort_accept_header(accept_header):
    """Parse and sort the accept header items.

    >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*')
    [('text/*', 1.0), ('application/json', 0.5)]
    """
    return sorted([_split_into_mimetype_and_priority(x) for x in accept_header.split(',')],
                  key=lambda x: x[1], reverse=True)
python
{ "resource": "" }
q11104
accept_best_match
train
def accept_best_match(accept_header, mimetypes):
    """Return a mimetype best matched the accept headers.

    >>> accept_best_match('application/json, text/html', ['application/json', 'text/plain'])
    'application/json'
    >>> accept_best_match('application/json;q=0.5, text/*', ['application/json', 'text/plain'])
    'text/plain'
    """
    for mimetype_pattern, _ in _parse_and_sort_accept_header(accept_header):
        matched_types = fnmatch.filter(mimetypes, mimetype_pattern)
        if matched_types:
            return matched_types[0]
    return mimetypes[0]
python
{ "resource": "" }
q11105
match_url_vars_type
train
def match_url_vars_type(url_vars, type_hints):
    """ Match types of url vars.

    >>> match_url_vars_type({'user_id': '1'}, {'user_id': int})
    (True, {'user_id': 1})
    >>> match_url_vars_type({'user_id': 'foo'}, {'user_id': int})
    (False, {})
    """
    typed_url_vars = {}
    try:
        for k, v in url_vars.items():
            arg_type = type_hints.get(k)
            if arg_type and arg_type != str:
                typed_url_vars[k] = arg_type(v)
            else:
                typed_url_vars[k] = v
    except ValueError:
        return False, {}
    return True, typed_url_vars
python
{ "resource": "" }
q11106
match_path
train
def match_path(rule, path):
    """ Match path.

    >>> match_path('/foo', '/foo')
    (True, {})
    >>> match_path('/foo', '/bar')
    (False, {})
    >>> match_path('/users/{user_id}', '/users/1')
    (True, {'user_id': '1'})
    >>> match_path('/users/{user_id}', '/users/not-integer')
    (True, {'user_id': 'not-integer'})
    """
    split_rule = split_by_slash(rule)
    split_path = split_by_slash(path)
    url_vars = {}

    if len(split_rule) != len(split_path):
        return False, {}

    for r, p in zip(split_rule, split_path):
        if r.startswith('{') and r.endswith('}'):
            url_vars[r[1:-1]] = p
            continue
        if r != p:
            return False, {}
    return True, url_vars
python
{ "resource": "" }
q11107
Router.match
train
def match(self, path, method):
    """ Get callback and url_vars.

    >>> from kobin import Response
    >>> r = Router()
    >>> def view(user_id: int) -> Response:
    ...     return Response(f'You are {user_id}')
    ...
    >>> r.add('/users/{user_id}', 'GET', 'user-detail', view)
    >>> callback, url_vars = r.match('/users/1', 'GET')
    >>> url_vars
    {'user_id': 1}
    >>> response = callback(**url_vars)
    >>> response.body
    [b'You are 1']
    >>> callback, url_vars = r.match('/notfound', 'GET')
    Traceback (most recent call last):
    ...
    kobin.responses.HTTPError
    """
    if path != '/':
        path = path.rstrip('/')
    method = method.upper()

    status = 404
    for p, n, m in self.endpoints:
        matched, url_vars = match_path(p, path)
        if not matched:
            # path: not matched
            continue
        if method not in m:
            # path: matched, method: not matched
            status = 405
            raise HTTPError(status=status, body=f'Method not found: {path} {method}')  # is raising here a security issue?
        callback, type_hints = m[method]
        type_matched, typed_url_vars = match_url_vars_type(url_vars, type_hints)
        if not type_matched:
            continue  # path: not matched (types are different)
        return callback, typed_url_vars
    raise HTTPError(status=status, body=f'Not found: {path}')
python
{ "resource": "" }
q11108
Router.reverse
train
def reverse(self, name, **kwargs):
    """ Reverse routing.

    >>> from kobin import Response
    >>> r = Router()
    >>> def view(user_id: int) -> Response:
    ...     return Response(f'You are {user_id}')
    ...
    >>> r.add('/users/{user_id}', 'GET', 'user-detail', view)
    >>> r.reverse('user-detail', user_id=1)
    '/users/1'
    """
    for p, n, _ in self.endpoints:
        if name == n:
            return p.format(**kwargs)
python
{ "resource": "" }
q11109
synchronized
train
def synchronized(fn):
    '''
    A decorator which acquires a lock before attempting to execute its
    wrapped function. Releases the lock in a finally clause.

    :param fn: The function to wrap.
    '''
    lock = threading.Lock()

    @functools.wraps(fn)
    def decorated(*args, **kwargs):
        lock.acquire()
        try:
            return fn(*args, **kwargs)
        finally:
            lock.release()

    return decorated
python
{ "resource": "" }
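For illustration only (this is not part of the record above): a hypothetical shared counter showing how the `synchronized` decorator serializes access across threads, assuming `synchronized` as defined above is in scope.

import threading

_count = 0

@synchronized
def increment():
    global _count
    _count += 1  # the decorator's lock ensures only one thread runs this body at a time

threads = [threading.Thread(target=increment) for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# _count == 10, with no lost updates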
q11110
ARef.notify_watches
train
def notify_watches(self, oldval, newval):
    '''
    Passes `oldval` and `newval` to each `fn` in the watches dictionary,
    passing along its respective key and the reference to this object.

    :param oldval: The old value which will be passed to the watch.
    :param newval: The new value which will be passed to the watch.
    '''
    watches = self._watches.copy()
    for k in watches:
        fn = watches[k]
        if callable(fn):  # collections.Callable is gone in Python 3.10+; callable() is equivalent here
            fn(k, self, oldval, newval)
python
{ "resource": "" }
q11111
Atom.swap
train
def swap(self, fn, *args, **kwargs):
    '''
    Given a mutator `fn`, calls `fn` with the atom's current state, `args`,
    and `kwargs`. The return value of this invocation becomes the new value
    of the atom. Returns the new value.

    :param fn: A function which will be passed the current state. Should
        return a new state. This absolutely *MUST NOT* mutate the reference
        to the current state! If it does, this function may loop
        indefinitely.

    :param \*args: Arguments to be passed to `fn`.

    :param \*\*kwargs: Keyword arguments to be passed to `fn`.
    '''
    while True:
        oldval = self.deref()
        newval = fn(oldval, *args, **kwargs)
        if self._state.compare_and_set(oldval, newval):
            self.notify_watches(oldval, newval)
            return newval
python
{ "resource": "" }
q11112
Atom.reset
train
def reset(self, newval):
    '''
    Resets the atom's value to `newval`, returning `newval`.

    :param newval: The new value to set.
    '''
    oldval = self._state.get()
    self._state.set(newval)
    self.notify_watches(oldval, newval)
    return newval
python
{ "resource": "" }
q11113
Atom.compare_and_set
train
def compare_and_set(self, oldval, newval):
    '''
    Given `oldval` and `newval`, sets the atom's value to `newval` if and
    only if `oldval` is the atom's current value. Returns `True` upon
    success, otherwise `False`.

    :param oldval: The old expected value.

    :param newval: The new value which will be set if and only if `oldval`
        equals the current value.
    '''
    ret = self._state.compare_and_set(oldval, newval)
    if ret:
        self.notify_watches(oldval, newval)
    return ret
python
{ "resource": "" }
q11114
find_conda
train
def find_conda():
    """ Try to find conda on the system """
    USER_HOME = os.path.expanduser('~')
    CONDA_HOME = os.environ.get('CONDA_HOME', '')
    PROGRAMDATA = os.environ.get('PROGRAMDATA', '')

    # Search common install paths and the system PATH
    search_paths = [
        # Windows
        join(PROGRAMDATA, 'miniconda2', 'scripts'),
        join(PROGRAMDATA, 'miniconda3', 'scripts'),
        join(USER_HOME, 'miniconda2', 'scripts'),
        join(USER_HOME, 'miniconda3', 'scripts'),
        join(CONDA_HOME, 'scripts'),

        # Linux
        join(USER_HOME, 'miniconda2', 'bin'),
        join(USER_HOME, 'miniconda3', 'bin'),
        join(CONDA_HOME, 'bin'),

        # TODO: OSX
    ] + os.environ.get("PATH", "").split(os.pathsep)  # PATH separator is ';' on Windows, ':' elsewhere

    cmd = 'conda.exe' if IS_WIN else 'conda'
    for conda_path in search_paths:
        conda = join(conda_path, cmd)
        if exists(conda):
            return sh.Command(conda)

    # Try to let the system find it
    return sh.conda
python
{ "resource": "" }
q11115
cp
train
def cp(src, dst):
    """ Like cp -R src dst """
    print("[DEBUG]: -> copying {} to {}".format(src, dst))
    if os.path.isfile(src):
        if not exists(dirname(dst)):
            os.makedirs(dirname(dst))
        shutil.copy(src, dst)
    else:
        copy_tree(src, dst)
python
{ "resource": "" }
q11116
find_commands
train
def find_commands(cls):
    """ Finds commands by finding the subclasses of Command """
    cmds = []
    for subclass in cls.__subclasses__():
        cmds.append(subclass)
        cmds.extend(find_commands(subclass))
    return cmds
python
{ "resource": "" }
q11117
Link.link
train
def link(self, path, pkg): """ Link the package in the current directory. """ # Check if a custom linker exists to handle linking this package #for ep in pkg_resources.iter_entry_points(group="enaml_native_linker"): # if ep.name.replace("-", '_') == pkg.replace("-", '_'): # linker = ep.load() # print("Custom linker {} found for '{}'. Linking...".format( # linker, pkg)) # if linker(self.ctx, path): # return #: Use the default builtin linker script if exists(join(path, pkg, 'build.gradle')): print(Colors.BLUE+"[INFO] Linking {}/build.gradle".format( pkg)+Colors.RESET) self.link_android(path, pkg) if exists(join(path, pkg, 'Podfile')): print(Colors.BLUE+"[INFO] Linking {}/Podfile".format( pkg)+Colors.RESET) self.link_ios(path, pkg)
python
{ "resource": "" }
q11118
Link.is_app_linked
train
def is_app_linked(source, pkg, java_package):
    """ Returns true if the compile project line exists in the file """
    for line in source.split("\n"):
        if java_package in line:
            return True
    return False
python
{ "resource": "" }
q11119
Unlink.run
train
def run(self, args=None):
    """ The name IS required here. """
    print(Colors.BLUE + "[INFO] Unlinking {}...".format(args.names) + Colors.RESET)
    for name in args.names:
        self.unlink(Link.package_dir, name)
python
{ "resource": "" }
q11120
Unlink.unlink
train
def unlink(self, path, pkg): """ Unlink the package in the current directory. """ #: Check if a custom unlinker exists to handle unlinking this package for ep in pkg_resources.iter_entry_points( group="enaml_native_unlinker"): if ep.name.replace("-", '_') == pkg.replace("-", '_'): unlinker = ep.load() print("Custom unlinker {} found for '{}'. " "Unlinking...".format(unlinker, pkg)) if unlinker(self.ctx, path): return if exists(join(path, 'android', pkg, 'build.gradle')): print("[Android] unlinking {}".format(pkg)) self.unlink_android(path, pkg) for target in ['iphoneos', 'iphonesimulator']: if exists(join(path, target, pkg, 'Podfile')): print("[iOS] unlinking {}".format(pkg)) self.unlink_ios(path, pkg)
python
{ "resource": "" }
q11121
Server.run_tornado
train
def run_tornado(self, args): """ Tornado dev server implementation """ server = self import tornado.ioloop import tornado.web import tornado.websocket ioloop = tornado.ioloop.IOLoop.current() class DevWebSocketHandler(tornado.websocket.WebSocketHandler): def open(self): super(DevWebSocketHandler, self).open() server.on_open(self) def on_message(self, message): server.on_message(self, message) def on_close(self): super(DevWebSocketHandler, self).on_close() server.on_close(self) class MainHandler(tornado.web.RequestHandler): def get(self): self.write(server.index_page) #: Set the call later method server.call_later = ioloop.call_later server.add_callback = ioloop.add_callback app = tornado.web.Application([ (r"/", MainHandler), (r"/dev", DevWebSocketHandler), ]) app.listen(self.port) print("Tornado Dev server started on {}".format(self.port)) ioloop.start()
python
{ "resource": "" }
q11122
Server.run_twisted
train
def run_twisted(self, args): """ Twisted dev server implementation """ server = self from twisted.internet import reactor from twisted.web import resource from twisted.web.static import File from twisted.web.server import Site from autobahn.twisted.websocket import (WebSocketServerFactory, WebSocketServerProtocol) from autobahn.twisted.resource import WebSocketResource class DevWebSocketHandler(WebSocketServerProtocol): def onConnect(self, request): super(DevWebSocketHandler, self).onConnect(request) server.on_open(self) def onMessage(self, payload, isBinary): server.on_message(self, payload) def onClose(self, wasClean, code, reason): super(DevWebSocketHandler,self).onClose(wasClean, code, reason) server.on_close(self) def write_message(self, message, binary=False): self.sendMessage(message, binary) #: Set the call later method server.call_later = reactor.callLater server.add_callback = reactor.callFromThread factory = WebSocketServerFactory(u"ws://0.0.0.0:{}".format(self.port)) factory.protocol = DevWebSocketHandler class MainHandler(resource.Resource): def render_GET(self, req): return str(server.index_page) root = resource.Resource() root.putChild("", MainHandler()) root.putChild("dev", WebSocketResource(factory)) reactor.listenTCP(self.port, Site(root)) print("Twisted Dev server started on {}".format(self.port)) reactor.run()
python
{ "resource": "" }
q11123
Server.on_message
train
def on_message(self, handler, msg):
    """ In remote debugging mode this simply acts as a forwarding proxy
    for the two clients.
    """
    if self.remote_debugging:
        #: Forward to other clients
        for h in self.handlers:
            if h != handler:
                h.write_message(msg, True)
    else:
        print(msg)
python
{ "resource": "" }
q11124
Server.send_message
train
def send_message(self, msg):
    """ Send a message to the client. This should not be used in
    remote debugging mode.
    """
    if not self.handlers:
        return  #: Client not connected
    for h in self.handlers:
        h.write_message(msg)
python
{ "resource": "" }
q11125
EnamlNativeCli._default_commands
train
def _default_commands(self): """ Build the list of CLI commands by finding subclasses of the Command class Also allows commands to be installed using the "enaml_native_command" entry point. This entry point should return a Command subclass """ commands = [c() for c in find_commands(Command)] #: Get commands installed via entry points for ep in pkg_resources.iter_entry_points( group="enaml_native_command"): c = ep.load() if not issubclass(c, Command): print("Warning: entry point {} did not return a valid enaml " "cli command! This command will be ignored!".format( ep.name)) commands.append(c()) return commands
python
{ "resource": "" }
q11126
EnamlNativeCli._default_ctx
train
def _default_ctx(self): """ Return the package config or context and normalize some of the values """ if not self.in_app_directory: print("Warning: {} does not exist. Using the default.".format( self.package)) ctx = {} else: with open(self.package) as f: ctx = dict(yaml.load(f, Loader=yaml.RoundTripLoader)) if self.in_app_directory: # Update the env for each platform excluded = list(ctx.get('excluded', [])) for env in [ctx['ios'], ctx['android']]: if 'python_build_dir' not in env: env['python_build_dir'] = expanduser(abspath('build/python')) if 'conda_prefix' not in env: env['conda_prefix'] = os.environ.get( 'CONDA_PREFIX', expanduser(abspath('venv'))) # Join the shared and local exclusions env['excluded'] = list(env.get('excluded', [])) + excluded return ctx
python
{ "resource": "" }
q11127
EnamlNativeCli._default_parser
train
def _default_parser(self): """ Generate a parser using the command list """ parser = ArgumentParser(prog='enaml-native') #: Build commands by name cmds = {c.title: c for c in self.commands} #: Build parser, prepare commands subparsers = parser.add_subparsers() for c in self.commands: p = subparsers.add_parser(c.title, help=c.help) c.parser = p for (flags, kwargs) in c.args: p.add_argument(*flags.split(), **kwargs) p.set_defaults(cmd=c) c.ctx = self.ctx c.cmds = cmds c.cli = self return parser
python
{ "resource": "" }
q11128
EnamlNativeCli.start
train
def start(self): """ Run the commands""" self.check_dependencies() self.args = self.parser.parse_args() # Python 3 doesn't set the cmd if no args are given if not hasattr(self.args, 'cmd'): self.parser.print_help() return cmd = self.args.cmd try: if cmd.app_dir_required and not self.in_app_directory: raise EnvironmentError( "'enaml-native {}' must be run within an app root " "directory not: {}".format(cmd.title, os.getcwd())) cmd.run(self.args) except sh.ErrorReturnCode as e: raise
python
{ "resource": "" }
q11129
find_data
train
def find_data(folder):
    """ Include everything in the folder """
    for (path, directories, filenames) in os.walk(folder):
        for filename in filenames:
            yield os.path.join('..', path, filename)
python
{ "resource": "" }
q11130
generate
train
def generate(number=4, choice=SystemRandom().choice, words=words, joiner=" "):
    """
    Generate a random passphrase from the GSL.
    """
    return joiner.join(choice(words) for each in range(number))
python
{ "resource": "" }
q11131
Trk.write_meta_info
train
def write_meta_info(self, byte1, byte2, data):
    "Worker method for writing meta info"
    write_varlen(self.data, 0)  # tick
    write_byte(self.data, byte1)
    write_byte(self.data, byte2)
    write_varlen(self.data, len(data))
    write_chars(self.data, data)
python
{ "resource": "" }
q11132
how_long
train
def how_long(length=4, choices=len(words),
             speed=1000 * 1000 * 1000 * 1000, optimism=2):
    """
    How long might it take to guess a password?

    @param length: the number of words that we're going to choose.
    @type length: L{int}

    @param choices: the number of words we might choose between.
    @type choices: L{int}

    @param speed: the speed of our hypothetical password guesser, in guesses
        per second.
    @type speed: L{int}

    @param optimism: When we start guessing all the options, we probably
        won't have to guess I{all} of them to get a hit.  This assumes that
        the guesser will have to guess only C{1/optimism} of the total number
        of possible options before it finds a hit.
    """
    return ((choices ** length) / (speed * optimism))
python
{ "resource": "" }
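A worked example may help here: with an assumed 2,048-word list (the actual size of `words` is not given in this record), four words, a guesser at 10**12 guesses per second, and optimism=2, the estimate is 2048**4 / (2 * 10**12), roughly 8.8 seconds; six words from the same list push that to about 3.7e7 seconds, a bit over a year.

# Illustrative numbers only; choices=2048 is an assumption, not the size of `words`.
print(how_long(length=4, choices=2048, speed=10**12, optimism=2))  # ~8.8 seconds
print(how_long(length=6, choices=2048, speed=10**12, optimism=2))  # ~3.7e7 seconds (~427 days)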
q11133
SeqBase.zip
train
def zip(self, other):
    """
    zips two sequences unifying the corresponding points.
    """
    return self.__class__(p1 % p2 for p1, p2 in zip(self, other))
python
{ "resource": "" }
q11134
SeqBase.display
train
def display(self, format="png"): """ Return an object that can be used to display this sequence. This is used for IPython Notebook. :param format: "png" or "svg" """ from sebastian.core.transforms import lilypond seq = HSeq(self) | lilypond() lily_output = write_lilypond.lily_format(seq) if not lily_output.strip(): #In the case of empty lily outputs, return self to get a textual display return self if format == "png": suffix = ".preview.png" args = ["lilypond", "--png", "-dno-print-pages", "-dpreview"] elif format == "svg": suffix = ".preview.svg" args = ["lilypond", "-dbackend=svg", "-dno-print-pages", "-dpreview"] f = tempfile.NamedTemporaryFile(suffix=suffix) basename = f.name[:-len(suffix)] args.extend(["-o" + basename, "-"]) #Pass shell=True so that if your $PATH contains ~ it will #get expanded. This also changes the way the arguments get #passed in. To work correctly, pass them as a string p = sp.Popen(" ".join(args), stdin=sp.PIPE, shell=True) stdout, stderr = p.communicate("{ %s }" % lily_output) if p.returncode != 0: # there was an error #raise IOError("Lilypond execution failed: %s%s" % (stdout, stderr)) return None if not ipython: return f.read() if format == "png": return Image(data=f.read(), filename=f.name, format="png") else: return SVG(data=f.read(), filename=f.name)
python
{ "resource": "" }
q11135
HSeq.append
train
def append(self, point):
    """
    appends a copy of the given point to this sequence
    """
    point = Point(point)
    self._elements.append(point)
python
{ "resource": "" }
q11136
HSeq.repeat
train
def repeat(self, count):
    """
    repeat sequence given number of times to produce a new sequence
    """
    x = HSeq()
    for i in range(count):
        x = x.concatenate(self)
    return x
python
{ "resource": "" }
q11137
arpeggio
train
def arpeggio(pattern, point):
    """
    turns each subsequence into an arpeggio matching the given ``pattern``.
    """
    point['sequence'] = HSeq(point['sequence'][i] for i in pattern)
    return point
python
{ "resource": "" }
q11138
fill
train
def fill(duration, point):
    """
    fills the subsequence of the point with repetitions of its subsequence
    and sets the ``duration`` of each point.
    """
    point['sequence'] = point['sequence'] * (point[DURATION_64] / (8 * duration)) | add({DURATION_64: duration})
    return point
python
{ "resource": "" }
q11139
expand
train
def expand(sequence):
    """
    expands a tree of sequences into a single, flat sequence, recursively.
    """
    expanse = []
    for point in sequence:
        if 'sequence' in point:
            expanse.extend(expand(point['sequence']))
        else:
            expanse.append(point)
    return sequence.__class__(expanse)
python
{ "resource": "" }
q11140
debug
train
def debug(sequence):
    """
    adds information to the sequence for better debugging, currently only
    an index property on each point in the sequence.
    """
    points = []
    for i, p in enumerate(sequence):
        copy = Point(p)
        copy['index'] = i
        points.append(copy)
    return sequence.__class__(points)
python
{ "resource": "" }
q11141
transform_sequence
train
def transform_sequence(f):
    """
    A decorator to take a function operating on a point and turn it into a
    function returning a callable operating on a sequence.

    The functions passed to this decorator must define a kwarg called
    "point", or have point be the last positional argument
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        # The arguments here are the arguments passed to the transform,
        # ie, there will be no "point" argument
        # Send a function to seq.map_points with all of its arguments applied
        # except point
        return lambda seq: seq.map_points(partial(f, *args, **kwargs))
    return wrapper
python
{ "resource": "" }
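A minimal sketch of writing a new transform with this decorator, in the same style as the arpeggio and fill records above; the 'octave' key and the shift amount are illustrative assumptions rather than documented API:

@transform_sequence
def octave_shift(amount, point):
    # point is the last positional argument, as the decorator's contract requires;
    # assumes the points being transformed carry an 'octave' value
    if 'octave' in point:
        point['octave'] = point['octave'] + amount
    return point

# hypothetical usage: shifted = melody | octave_shift(1)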
q11142
subseq
train
def subseq(start_offset=0, end_offset=None):
    """
    Return a portion of the input sequence
    """
    def _(sequence):
        return sequence.subseq(start_offset, end_offset)
    return _
python
{ "resource": "" }
q11143
lilypond
train
def lilypond(point): """ Generate lilypond representation for a point """ #If lilypond already computed, leave as is if "lilypond" in point: return point #Defaults: pitch_string = "" octave_string = "" duration_string = "" preamble = "" dynamic_string = "" if "pitch" in point: octave = point["octave"] pitch = point["pitch"] if octave > 4: octave_string = "'" * (octave - 4) elif octave < 4: octave_string = "," * (4 - octave) else: octave_string = "" m = modifiers(pitch) if m > 0: modifier_string = "is" * m elif m < 0: modifier_string = "es" * -m else: modifier_string = "" pitch_string = letter(pitch).lower() + modifier_string if DURATION_64 in point: duration = point[DURATION_64] if duration > 0: if duration % 3 == 0: # dotted note duration_string = str(192 // (2 * duration)) + "." else: duration_string = str(64 // duration) #TODO: for now, if we have a duration but no pitch, show a 'c' with an x note if duration_string: if not pitch_string: pitch_string = "c" octave_string = "'" preamble = r'\xNote ' if "dynamic" in point: dynamic = point["dynamic"] if dynamic == "crescendo": dynamic_string = "\<" elif dynamic == "diminuendo": dynamic_string = "\>" else: dynamic_string = "\%s" % (dynamic,) point["lilypond"] = "%s%s%s%s%s" % (preamble, pitch_string, octave_string, duration_string, dynamic_string) return point
python
{ "resource": "" }
q11144
dynamics
train
def dynamics(start, end=None):
    """
    Apply dynamics to a sequence. If end is specified, it will crescendo or
    diminuendo linearly from start to end dynamics.

    You can pass any of these strings as dynamic markers:
    ['pppppp', 'ppppp', 'pppp', 'ppp', 'pp', 'p', 'mp', 'mf', 'f', 'ff', 'fff', 'ffff']

    Args:
        start: beginning dynamic marker, if no end is specified all notes
            will get this marker
        end: ending dynamic marker, if unspecified the entire sequence will
            get the start dynamic marker

    Example usage:
        s1 | dynamics('p')        # play a sequence in piano
        s2 | dynamics('p', 'ff')  # crescendo from p to ff
        s3 | dynamics('ff', 'p')  # diminuendo from ff to p
    """
    def _(sequence):
        if start in _dynamic_markers_to_velocity:
            start_velocity = _dynamic_markers_to_velocity[start]
            start_marker = start
        else:
            raise ValueError("Unknown start dynamic: %s, must be in %s" % (start, _dynamic_markers_to_velocity.keys()))

        if end is None:
            end_velocity = start_velocity
            end_marker = start_marker
        elif end in _dynamic_markers_to_velocity:
            end_velocity = _dynamic_markers_to_velocity[end]
            end_marker = end
        else:
            raise ValueError("Unknown end dynamic: %s, must be in %s" % (end, _dynamic_markers_to_velocity.keys()))

        retval = sequence.__class__([Point(point) for point in sequence._elements])
        velocity_interval = (float(end_velocity) - float(start_velocity)) / (len(retval) - 1) if len(retval) > 1 else 0
        velocities = [int(start_velocity + velocity_interval * pos) for pos in range(len(retval))]

        # insert dynamics markers for lilypond
        if start_velocity > end_velocity:
            retval[0]["dynamic"] = "diminuendo"
            retval[-1]["dynamic"] = end_marker
        elif start_velocity < end_velocity:
            retval[0]["dynamic"] = "crescendo"
            retval[-1]["dynamic"] = end_marker
        else:
            retval[0]["dynamic"] = start_marker

        for point, velocity in zip(retval, velocities):
            point["velocity"] = velocity
        return retval
    return _
python
{ "resource": "" }
q11145
query_revisions_by_revids
train
def query_revisions_by_revids(session, revids, **params): """ Gets a set of revisions by their IDs by repeatedly querying in batches. If an ID cannot be found, it is ignored. """ doc = session.get(action='query', prop='revisions', revids=revids, **params) for page_doc in doc['query'].get('pages', {}).values(): revisions = page_doc.get('revisions', []) if 'revisions' in page_doc: del page_doc['revisions'] for revision_doc in revisions: revision_doc['page'] = page_doc yield revision_doc
python
{ "resource": "" }
q11146
load_metafile
train
def load_metafile(filepath): """ Load a metadata file from the filesystem """ try: with open(filepath, 'r', encoding='utf-8') as file: return email.message_from_file(file) except FileNotFoundError: logger.warning("Category file %s not found", filepath) orm.delete(c for c in model.Category if c.file_path == filepath) orm.commit() return None
python
{ "resource": "" }
q11147
Category.name
train
def name(self): """ Get the display name of the category """ if self._meta and self._meta.get('name'): # get it from the meta file return self._meta.get('name') # infer it from the basename return self.basename.replace('_', ' ').title()
python
{ "resource": "" }
q11148
Category.description
train
def description(self): """ Get the textual description of the category """ if self._meta and self._meta.get_payload(): return utils.TrueCallableProxy(self._description) return utils.CallableProxy(None)
python
{ "resource": "" }
q11149
Category.breadcrumb
train
def breadcrumb(self): """ Get the category hierarchy leading up to this category, including root and self. For example, path/to/long/category will return a list containing Category('path'), Category('path/to'), and Category('path/to/long'). """ ret = [] here = self while here: ret.append(here) here = here.parent return list(reversed(ret))
python
{ "resource": "" }
q11150
Category.sort_name
train
def sort_name(self): """ Get the sorting name of this category """ if self._record and self._record.sort_name: return self._record.sort_name return self.name
python
{ "resource": "" }
q11151
Category.parent
train
def parent(self): """ Get the parent category """ if self.path: return Category(os.path.dirname(self.path)) return None
python
{ "resource": "" }
q11152
Category._get_subcats
train
def _get_subcats(self, recurse=False): """ Get the subcategories of this category recurse -- whether to include their subcategories as well """ if recurse: # No need to filter return sorted([Category(e) for e in self._subcats_recursive], key=lambda c: c.sort_breadcrumb) # get all the subcategories, with only the first subdir added # number of path components to ingest parts = len(self.path.split('/')) + 1 if self.path else 1 # convert the subcategories into separated pathlists with only 'parts' # parts subcats = [c.split('/')[:parts] for c in self._subcats_recursive] # join them back into a path, and make unique subcats = {'/'.join(c) for c in subcats} # convert to a bunch of Category objects return sorted([Category(c) for c in subcats], key=lambda c: c.sort_name or c.name)
python
{ "resource": "" }
q11153
Category._first
train
def _first(self, **spec): """ Get the earliest entry in this category, optionally including subcategories """ for record in self._entries(spec).order_by(model.Entry.local_date, model.Entry.id)[:1]: return entry.Entry(record) return None
python
{ "resource": "" }
q11154
Category._last
train
def _last(self, **spec): """ Get the latest entry in this category, optionally including subcategories """ for record in self._entries(spec).order_by(orm.desc(model.Entry.local_date), orm.desc(model.Entry.id))[:1]: return entry.Entry(record) return None
python
{ "resource": "" }
q11155
static_url
train
def static_url(path, absolute=False): """ Shorthand for returning a URL for the requested static file. Arguments: path -- the path to the file (relative to the static files directory) absolute -- whether the link should be absolute or relative """ if os.sep != '/': path = '/'.join(path.split(os.sep)) return flask.url_for('static', filename=path, _external=absolute)
python
{ "resource": "" }
q11156
make_tag
train
def make_tag(name, attrs, start_end=False):
    """ Build an HTML tag from the given name and attributes.

    Arguments:

    name -- the name of the tag (p, div, etc.)
    attrs -- a dict of attributes to apply to the tag
    start_end -- whether this tag should be self-closing
    """
    text = '<' + name

    if isinstance(attrs, dict):
        attr_list = attrs.items()
    elif isinstance(attrs, list):
        attr_list = attrs
    elif attrs is not None:
        raise TypeError("Unhandled attrs type " + str(type(attrs)))
    else:
        attr_list = []  # attrs is None; nothing to add

    for key, val in attr_list:
        if val is not None:
            escaped = html.escape(str(val), False).replace('"', '&#34;')
            text += ' {}="{}"'.format(key, escaped)

    if start_end:
        text += ' /'
    text += '>'

    return flask.Markup(text)
python
{ "resource": "" }
q11157
file_fingerprint
train
def file_fingerprint(fullpath):
    """ Get a metadata fingerprint for a file """
    stat = os.stat(fullpath)
    return ','.join([str(value) for value in [stat.st_ino, stat.st_mtime, stat.st_size] if value])
python
{ "resource": "" }
q11158
remap_args
train
def remap_args(input_args, remap): """ Generate a new argument list by remapping keys. The 'remap' dict maps from destination key -> priority list of source keys """ out_args = input_args for dest_key, src_keys in remap.items(): remap_value = None if isinstance(src_keys, str): src_keys = [src_keys] for key in src_keys: if key in input_args: remap_value = input_args[key] break if remap_value is not None: if out_args is input_args: out_args = {**input_args} out_args[dest_key] = remap_value return out_args
python
{ "resource": "" }
q11159
remap_link_target
train
def remap_link_target(path, absolute=False): """ remap a link target to a static URL if it's prefixed with @ """ if path.startswith('@'): # static resource return static_url(path[1:], absolute=absolute) if absolute: # absolute-ify whatever the URL is return urllib.parse.urljoin(flask.request.url, path) return path
python
{ "resource": "" }
q11160
get_category
train
def get_category(filename): """ Get a default category name from a filename in a cross-platform manner """ return '/'.join(os.path.dirname(filename).split(os.sep))
python
{ "resource": "" }
q11161
CallableProxy._default
train
def _default(self): """ Get the default function return """ if self._default_args: return self._func( *self._default_args, **self._default_kwargs) return self._func(**self._default_kwargs)
python
{ "resource": "" }
q11162
setup
train
def setup(): """ Set up the database """ try: db.bind(**config.database_config) except OSError: # Attempted to connect to a file-based database where the file didn't # exist db.bind(**config.database_config, create_db=True) rebuild = True try: db.generate_mapping(create_tables=True) with orm.db_session: version = GlobalConfig.get(key='schema_version') if version and version.int_value != SCHEMA_VERSION: logger.info("Existing database has schema version %d", version.int_value) else: rebuild = False except: # pylint:disable=bare-except logger.exception("Error mapping schema") if rebuild: logger.info("Rebuilding schema") try: db.drop_all_tables(with_all_data=True) db.create_tables() except: raise RuntimeError("Unable to upgrade schema automatically; please " + "delete the existing database and try again.") with orm.db_session: if not GlobalConfig.get(key='schema_version'): logger.info("setting schema version to %d", SCHEMA_VERSION) GlobalConfig(key='schema_version', int_value=SCHEMA_VERSION) orm.commit()
python
{ "resource": "" }
q11163
Entry.visible
train
def visible(self): """ Returns true if the entry should be viewable """ return self.status not in (PublishStatus.DRAFT.value, PublishStatus.GONE.value)
python
{ "resource": "" }
q11164
extract_card
train
def extract_card(text, config, image_search_path): """ Extract card data based on the provided texts. """ card = CardData() parser = CardParser(card, config, image_search_path) misaka.Markdown(parser, extensions=markdown.ENABLED_EXTENSIONS)(text) return card
python
{ "resource": "" }
q11165
CardParser.paragraph
train
def paragraph(self, content): """ Turn the first paragraph of text into the summary text """ if not self._out.description: self._out.description = content return ' '
python
{ "resource": "" }
q11166
CardParser.image
train
def image(self, raw_url, title='', alt=''): ''' extract the images ''' max_images = self._config.get('count') if max_images is not None and len(self._out.images) >= max_images: # We already have enough images, so bail out return ' ' image_specs = raw_url if title: image_specs += ' "{}"'.format(title) alt, container_args = image.parse_alt_text(alt) spec_list, _ = image.get_spec_list(image_specs, container_args) for spec in spec_list: if not spec: continue self._out.images.append(self._render_image(spec, alt)) if max_images is not None and len(self._out.images) >= max_images: break return ' '
python
{ "resource": "" }
q11167
CardParser._render_image
train
def _render_image(self, spec, alt=''): """ Given an image spec, try to turn it into a card image per the configuration """ # pylint: disable=unused-argument try: path, image_args, _ = image.parse_image_spec(spec) except Exception as err: # pylint: disable=broad-except # we tried™ logger.exception("Got error on spec %s: %s", spec, err) return None img = image.get_image(path, self._image_search_path) if img: image_config = {**image_args, **self._config, 'absolute': True} return img.get_rendition(1, **image_config)[0] return None
python
{ "resource": "" }
q11168
publ
train
def publ(name, cfg): """ Create a Flask app and configure it for use with Publ """ config.setup(cfg) app = _PublApp(name, template_folder=config.template_folder, static_folder=config.static_folder, static_url_path=config.static_url_path) for route in [ '/', '/<path:category>/', '/<template>', '/<path:category>/<template>', ]: app.add_url_rule(route, 'category', rendering.render_category) for route in [ '/<int:entry_id>', '/<int:entry_id>-', '/<int:entry_id>-<slug_text>', '/<path:category>/<int:entry_id>', '/<path:category>/<int:entry_id>-', '/<path:category>/<int:entry_id>-<slug_text>', ]: app.add_url_rule(route, 'entry', rendering.render_entry) app.add_url_rule('/<path:path>.PUBL_PATHALIAS', 'path_alias', rendering.render_path_alias) app.add_url_rule('/_async/<path:filename>', 'async', image.get_async) app.add_url_rule('/_', 'chit', rendering.render_transparent_chit) app.add_url_rule('/_file/<path:filename>', 'asset', rendering.retrieve_asset) app.config['TRAP_HTTP_EXCEPTIONS'] = True app.register_error_handler( werkzeug.exceptions.HTTPException, rendering.render_exception) app.jinja_env.globals.update( # pylint: disable=no-member get_view=view.get_view, arrow=arrow, static=utils.static_url, get_template=rendering.get_template ) caching.init_app(app) maint = maintenance.Maintenance() if config.index_rescan_interval: maint.register(functools.partial(index.scan_index, config.content_folder), config.index_rescan_interval) if config.image_cache_interval and config.image_cache_age: maint.register(functools.partial(image.clean_cache, config.image_cache_age), config.image_cache_interval) app.before_request(maint.run) if 'CACHE_THRESHOLD' in config.cache: app.after_request(set_cache_expiry) if app.debug: # We're in debug mode so we don't want to scan until everything's up # and running app.before_first_request(startup) else: # In production, register the exception handler and scan the index # immediately app.register_error_handler(Exception, rendering.render_exception) startup() return app
python
{ "resource": "" }
q11169
startup
train
def startup(): """ Startup routine for initiating the content indexer """ model.setup() index.scan_index(config.content_folder) index.background_scan(config.content_folder)
python
{ "resource": "" }
q11170
set_cache_expiry
train
def set_cache_expiry(response): """ Set the cache control headers """ if response.cache_control.max_age is None and 'CACHE_DEFAULT_TIMEOUT' in config.cache: response.cache_control.max_age = config.cache['CACHE_DEFAULT_TIMEOUT'] return response
python
{ "resource": "" }
q11171
_PublApp.path_alias_regex
train
def path_alias_regex(self, regex): """ A decorator that adds a path-alias regular expression; calls add_path_regex """ def decorator(func): """ Adds the function to the regular expression alias list """ self.add_path_regex(regex, func) return decorator
python
{ "resource": "" }
q11172
_PublApp.get_path_regex
train
def get_path_regex(self, path): """ Evaluate the registered path-alias regular expressions """ for regex, func in self._regex_map: match = re.match(regex, path) if match: return func(match) return None, None
python
{ "resource": "" }
q11173
guess_title
train
def guess_title(basename):
    """ Attempt to guess the title from the filename """
    base, _ = os.path.splitext(basename)
    return re.sub(r'[ _-]+', r' ', base).title()
python
{ "resource": "" }
q11174
get_entry_id
train
def get_entry_id(entry, fullpath, assign_id): """ Get or generate an entry ID for an entry """ warn_duplicate = False if 'Entry-ID' in entry: entry_id = int(entry['Entry-ID']) else: entry_id = None # See if we've inadvertently duplicated an entry ID if entry_id: try: other_entry = model.Entry.get(id=entry_id) if (other_entry and os.path.isfile(other_entry.file_path) and not os.path.samefile(other_entry.file_path, fullpath)): warn_duplicate = entry_id entry_id = None except FileNotFoundError: # the other file doesn't exist, so just let it go pass # Do we need to assign a new ID? if not entry_id and not assign_id: # We're not assigning IDs yet return None if not entry_id: # See if we already have an entry with this file path by_filepath = model.Entry.get(file_path=fullpath) if by_filepath: entry_id = by_filepath.id if not entry_id: # We still don't have an ID; generate one pseudo-randomly, based on the # entry file path. This approach averages around 0.25 collisions per ID # generated while keeping the entry ID reasonably short. In general, # count*N averages 1/(N-1) collisions per ID. limit = max(10, orm.get(orm.count(e) for e in model.Entry) * 5) attempt = 0 while not entry_id or model.Entry.get(id=entry_id): # Stably generate a quasi-random entry ID from the file path md5 = hashlib.md5() md5.update("{} {}".format(fullpath, attempt).encode('utf-8')) entry_id = int.from_bytes(md5.digest(), byteorder='big') % limit attempt = attempt + 1 if warn_duplicate is not False: logger.warning("Entry '%s' had ID %d, which belongs to '%s'. Reassigned to %d", fullpath, warn_duplicate, other_entry.file_path, entry_id) return entry_id
python
{ "resource": "" }
q11175
save_file
train
def save_file(fullpath, entry): """ Save a message file out, without mangling the headers """ with tempfile.NamedTemporaryFile('w', delete=False) as file: tmpfile = file.name # we can't just use file.write(str(entry)) because otherwise the # headers "helpfully" do MIME encoding normalization. # str(val) is necessary to get around email.header's encoding # shenanigans for key, val in entry.items(): print('{}: {}'.format(key, str(val)), file=file) print('', file=file) file.write(entry.get_payload()) shutil.move(tmpfile, fullpath)
python
{ "resource": "" }
q11176
expire_file
train
def expire_file(filepath): """ Expire a record for a missing file """ load_message.cache_clear() orm.delete(pa for pa in model.PathAlias if pa.entry.file_path == filepath) orm.delete(item for item in model.Entry if item.file_path == filepath) orm.commit()
python
{ "resource": "" }
q11177
expire_record
train
def expire_record(record): """ Expire a record for a missing entry """ load_message.cache_clear() # This entry no longer exists so delete it, and anything that references it # SQLite doesn't support cascading deletes so let's just clean up # manually orm.delete(pa for pa in model.PathAlias if pa.entry == record) record.delete() orm.commit()
python
{ "resource": "" }
q11178
Entry._link
train
def _link(self, *args, **kwargs): """ Returns a link, potentially pre-redirected """ if self._record.redirect_url: return links.resolve(self._record.redirect_url, self.search_path, kwargs.get('absolute')) return self._permalink(*args, **kwargs)
python
{ "resource": "" }
q11179
Entry._permalink
train
def _permalink(self, absolute=False, expand=True, **kwargs): """ Returns a canonical URL for the item """ return flask.url_for('entry', entry_id=self._record.id, category=self._record.category if expand else None, slug_text=self._record.slug_text if expand else None, _external=absolute, **kwargs)
python
{ "resource": "" }
q11180
Entry.search_path
train
def search_path(self): """ The relative image search path for this entry """ return [os.path.dirname(self._record.file_path)] + self.category.search_path
python
{ "resource": "" }
q11181
Entry._message
train
def _message(self): """ get the message payload """ filepath = self._record.file_path try: return load_message(filepath) except FileNotFoundError: expire_file(filepath) empty = email.message.Message() empty.set_payload('') return empty
python
{ "resource": "" }
q11182
Entry.body
train
def body(self): """ Get the above-the-fold entry body text """ body, _, is_markdown = self._entry_content return TrueCallableProxy( self._get_markup, body, is_markdown) if body else CallableProxy(None)
python
{ "resource": "" }
q11183
Entry.more
train
def more(self): """ Get the below-the-fold entry body text """ _, more, is_markdown = self._entry_content return TrueCallableProxy( self._get_markup, more, is_markdown) if more else CallableProxy(None)
python
{ "resource": "" }
q11184
Entry.card
train
def card(self): """ Get the entry's OpenGraph card """ body, more, is_markdown = self._entry_content return TrueCallableProxy( self._get_card, body or more) if is_markdown else CallableProxy(None)
python
{ "resource": "" }
q11185
Entry.summary
train
def summary(self): """ Get the entry's summary text """ if self.get('Summary'): return self.get('Summary') body, more, is_markdown = self._entry_content return TrueCallableProxy( self._get_summary, body or more) if is_markdown else CallableProxy(None)
python
{ "resource": "" }
q11186
Entry.last_modified
train
def last_modified(self): """ Get the date of last file modification """ if self.get('Last-Modified'): return arrow.get(self.get('Last-Modified')) return self.date
python
{ "resource": "" }
q11187
Entry._get_markup
train
def _get_markup(self, text, is_markdown, **kwargs): """ get the rendered markup for an entry is_markdown -- whether the entry is formatted as Markdown kwargs -- parameters to pass to the Markdown processor """ if is_markdown: return markdown.to_html( text, config=kwargs, search_path=self.search_path) return html_entry.process( text, config=kwargs, search_path=self.search_path)
python
{ "resource": "" }
q11188
Entry._get_summary
train
def _get_summary(self, text, **kwargs): """ Render out just the summary """ card = cards.extract_card(text, kwargs, self.search_path) return flask.Markup((card.description or '').strip())
python
{ "resource": "" }
q11189
Entry._previous
train
def _previous(self, **kwargs): """ Get the previous item in any particular category """ spec = self._pagination_default_spec(kwargs) spec.update(kwargs) query = queries.build_query(spec) query = queries.where_before_entry(query, self._record) for record in query.order_by(orm.desc(model.Entry.local_date), orm.desc(model.Entry.id))[:1]: return Entry(record) return None
python
{ "resource": "" }
q11190
Entry._next
train
def _next(self, **kwargs): """ Get the next item in any particular category """ spec = self._pagination_default_spec(kwargs) spec.update(kwargs) query = queries.build_query(spec) query = queries.where_after_entry(query, self._record) for record in query.order_by(model.Entry.local_date, model.Entry.id)[:1]: return Entry(record) return None
python
{ "resource": "" }
q11191
setup
train
def setup(cfg): """ set up the global configuration from an object """ # copy the necessary configuration values over this_module = sys.modules[__name__] for name, value in cfg.items(): if hasattr(this_module, name): setattr(this_module, name, value)
python
{ "resource": "" }
q11192
mime_type
train
def mime_type(template):
    """ infer the content-type from the extension """
    _, ext = os.path.splitext(template.filename)
    return EXTENSION_MAP.get(ext, 'text/html; charset=utf-8')
python
{ "resource": "" }
q11193
map_template
train
def map_template(category, template_list): """ Given a file path and an acceptable list of templates, return the best-matching template's path relative to the configured template directory. Arguments: category -- The path to map template_list -- A template to look up (as a string), or a list of templates. """ if isinstance(template_list, str): template_list = [template_list] for template in template_list: path = os.path.normpath(category) while path is not None: for extension in ['', '.html', '.htm', '.xml', '.json']: candidate = os.path.join(path, template + extension) file_path = os.path.join(config.template_folder, candidate) if os.path.isfile(file_path): return Template(template, candidate, file_path) parent = os.path.dirname(path) if parent != path: path = parent else: path = None
python
{ "resource": "" }
q11194
get_template
train
def get_template(template, relation): """ Given an entry or a category, return the path to a related template """ if isinstance(relation, Entry): path = relation.category.path elif isinstance(relation, Category): path = relation.path else: path = relation tmpl = map_template(path, template) return tmpl.filename if tmpl else None
python
{ "resource": "" }
q11195
image_function
train
def image_function(template=None, entry=None, category=None): """ Get a function that gets an image """ path = [] if entry is not None: path += entry.search_path if category is not None: # Since the category might be different than the entry's category we add # this too path += category.search_path if template is not None: path.append(os.path.join( config.content_folder, os.path.dirname(template.filename))) return lambda filename: image.get_image(filename, path)
python
{ "resource": "" }
q11196
render_publ_template
train
def render_publ_template(template, **kwargs): """ Render out a template, providing the image function based on the args. Returns tuple of (rendered text, etag) """ text = render_template( template.filename, template=template, image=image_function( template=template, category=kwargs.get('category'), entry=kwargs.get('entry')), **kwargs ) return text, caching.get_etag(text)
python
{ "resource": "" }
q11197
render_error
train
def render_error(category, error_message, error_codes, exception=None): """ Render an error page. Arguments: category -- The category of the request error_message -- The message to provide to the error template error_codes -- The applicable HTTP error code(s). Will usually be an integer or a list of integers; the HTTP error response will always be the first error code in the list, and the others are alternates for looking up the error template to use. exception -- Any exception that led to this error page """ if isinstance(error_codes, int): error_codes = [error_codes] error_code = error_codes[0] template_list = [str(code) for code in error_codes] template_list.append(str(int(error_code / 100) * 100)) template_list.append('error') template = map_template(category, template_list) if template: return render_publ_template( template, _url_root=request.url_root, category=Category(category), error={'code': error_code, 'message': error_message}, exception=exception)[0], error_code # no template found, so fall back to default Flask handler return flask.abort(error_code)
python
{ "resource": "" }
q11198
render_exception
train
def render_exception(error): """ Catch-all renderer for the top-level exception handler """ _, _, category = str.partition(request.path, '/') qsize = index.queue_length() if isinstance(error, http_error.NotFound) and qsize: response = flask.make_response(render_error( category, "Site reindex in progress (qs={})".format(qsize), 503)) response.headers['Retry-After'] = qsize response.headers['Refresh'] = max(5, qsize / 5) return response, 503 if isinstance(error, http_error.HTTPException): return render_error(category, error.name, error.code, exception={ 'type': type(error).__name__, 'str': error.description, 'args': error.args }) return render_error(category, "Exception occurred", 500, exception={ 'type': type(error).__name__, 'str': str(error), 'args': error.args })
python
{ "resource": "" }
q11199
render_category
train
def render_category(category='', template=None): """ Render a category page. Arguments: category -- The category to render template -- The template to render it with """ # pylint:disable=too-many-return-statements # See if this is an aliased path redir = get_redirect() if redir: return redir # Forbidden template types if template and template.startswith('_'): raise http_error.Forbidden("Template is private") if template in ['entry', 'error']: raise http_error.BadRequest("Invalid view requested") if category: # See if there's any entries for the view... if not orm.select(e for e in model.Entry if e.category == category or e.category.startswith(category + '/')): raise http_error.NotFound("No such category") if not template: template = Category(category).get('Index-Template') or 'index' tmpl = map_template(category, template) if not tmpl: # this might actually be a malformed category URL test_path = '/'.join((category, template)) if category else template logger.debug("Checking for malformed category %s", test_path) record = orm.select( e for e in model.Entry if e.category == test_path).exists() if record: return redirect(url_for('category', category=test_path, **request.args)) # nope, we just don't know what this is raise http_error.NotFound("No such view") view_spec = view.parse_view_spec(request.args) view_spec['category'] = category view_obj = view.View(view_spec) rendered, etag = render_publ_template( tmpl, _url_root=request.url_root, category=Category(category), view=view_obj) if request.if_none_match.contains(etag): return 'Not modified', 304 return rendered, {'Content-Type': mime_type(tmpl), 'ETag': etag}
python
{ "resource": "" }