code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
if not hasattr(state, "parent"): raise ValueError( "You can only use has_equal_value() on the state resulting from check_column, check_row or check_result." ) if incorrect_msg is None: incorrect_msg = "Column `{{col}}` seems to be incorrect.{{' Make sure you arranged the rows correctly.' if ordered else ''}}" # First of all, check if number of rows correspond has_nrows(state) if not ordered: stu_res, sol_res = sort_rows(state) else: stu_res = state.student_result sol_res = state.solution_result for sol_col_name, sol_col_vals in sol_res.items(): stu_col_vals = stu_res[sol_col_name] if ndigits is not None: try: sol_col_vals = round_seq(sol_col_vals, ndigits) stu_col_vals = round_seq(stu_col_vals, ndigits) except: pass if sol_col_vals != stu_col_vals: _msg = state.build_message( incorrect_msg, fmt_kwargs={"col": sol_col_name, "ordered": ordered} ) state.do_test(_msg) return state
def has_equal_value(state, ordered=False, ndigits=None, incorrect_msg=None)
Verify if a student and solution query result match up. This function must always be used after 'zooming' in on certain columns or records (check_column, check_row or check_result). ``has_equal_value`` then goes over all columns that are still left in the solution query result, and compares each column with the corresponding column in the student query result. Args: ordered: if set to False, the default, all rows are sorted (according to the first column and the following columns as tie breakers). If set to True, the order of rows in the student and solution query results has to match. ndigits: if specified, number of decimals to use when comparing column values. incorrect_msg: if specified, this overrides the automatically generated feedback message in case a column in the student query result does not match a column in the solution query result. :Example: Suppose we are testing the following SELECT statements * solution: ``SELECT artist_id as id, name FROM artists ORDER BY name`` * student : ``SELECT artist_id, name FROM artists`` We can write the following SCTs: :: # passes, as order is not important by default Ex().check_column('name').has_equal_value() # fails, as order is deemed important Ex().check_column('name').has_equal_value(ordered=True) # check_column fails, as id is not in the student query result Ex().check_column('id').has_equal_value() # check_all_columns fails, as id not in the student query result Ex().check_all_columns().has_equal_value()
3.90446
3.629215
1.075841
return state.to_child( student_result={k.lower(): v for k, v in state.student_result.items()}, solution_result={k.lower(): v for k, v in state.solution_result.items()}, )
def lowercase(state)
Convert all column names to their lower case versions to improve robustness :Example: Suppose we are testing the following SELECT statements * solution: ``SELECT artist_id as id FROM artists`` * student : ``SELECT artist_id as ID FROM artists`` We can write the following SCTs: :: # fails, as id and ID have different case Ex().check_column('id').has_equal_value() # passes, as lowercase() is being used Ex().lowercase().check_column('id').has_equal_value()
3.767578
4.397121
0.856828
state1 = lowercase(state) state2 = check_all_columns(state1) has_equal_value(state2) return state2
def check_result(state)
High level function which wraps other SCTs for checking results. ``check_result()`` * uses ``lowercase()``, then * runs ``check_all_columns()`` on the state produced by ``lowercase()``, then * runs ``has_equal_value`` on the state produced by ``check_all_columns()``.
11.774121
3.117747
3.776484
if error_msg is None: error_msg = "Running `{{query}}` after your submission generated an error." if expand_msg is None: expand_msg = "The autograder verified the result of running `{{query}}` against the database. " msg_kwargs = {"query": query} # before redoing the query, # make sure that it didn't generate any errors has_no_error(state) _msg = state.build_message(error_msg, fmt_kwargs=msg_kwargs) # sqlbackend makes sure all queries are run in transactions. # Rerun the solution code first, after which we run the provided query with dbconn(state.solution_conn) as conn: _ = runQuery(conn, state.solution_code) sol_res = runQuery(conn, query) if sol_res is None: raise ValueError("Solution failed: " + _msg) # sqlbackend makes sure all queries are run in transactions. # Rerun the student code first, after which we run the provided query with dbconn(state.student_conn) as conn: _ = runQuery(conn, state.student_code) stu_res = runQuery(conn, query) if stu_res is None: state.do_test(_msg) return state.to_child( append_message={"msg": expand_msg, "kwargs": msg_kwargs}, student_result=stu_res, solution_result=sol_res, )
def check_query(state, query, error_msg=None, expand_msg=None)
Run arbitrary queries against the DB connection to verify the database state. For queries that do not return any output (INSERTs, UPDATEs, ...), you cannot use functions like ``check_col()`` and ``is_equal()`` to verify the query result. ``check_query()`` will rerun the solution query in the transaction prepared by sqlbackend, and immediately afterwards run the query specified in ``query``. Next, it will also run this query after rerunning the student query in a transaction. Finally, it produces a child state with these results that you can then chain off of with functions like ``check_column()`` and ``has_equal_value()``. Args: query: A SQL query as a string that is executed after the student query is re-executed. error_msg: if specified, this overrides the automatically generated feedback message in case the query generated an error. expand_msg: if specified, this overrides the automatically generated feedback message that is prepended to feedback messages that are thrown further in the SCT chain. :Example: Suppose we are checking whether an INSERT happened correctly: :: INSERT INTO company VALUES (2, 'filip', 28, 'sql-lane', 42) We can write the following SCT: :: Ex().check_query('SELECT COUNT(*) AS c FROM company').has_equal_value()
5.026285
4.402292
1.141743
# if it has already been wrapped, we return original if hasattr(f, "lower_cased"): return f @wraps(f) def wrapper(*args, **kwargs): f.lower_cased = True return f(*args, **kwargs).lower() return wrapper
def lower_case(f)
Decorator specifically for turning mssql AST into lowercase
3.675702
3.489472
1.053369
startdir = argd['DIR'] or '/' if not os.path.isdir(startdir): raise InvalidArg('not a valid start directory: {}'.format(startdir)) if argd['--progress']: return walk_dir_progress(startdir) return walk_dir_animated(startdir)
def main(argd)
Main entry point, expects docopt arg dict as argd.
5.339293
5.381363
0.992182
if kwargs.get('file', None) is None: kwargs['file'] = sys.stderr print(*args, **kwargs)
def print_err(*args, **kwargs)
A wrapper for print() that uses stderr by default.
2.640329
1.980264
1.333322
p = AnimatedProgress( 'Walking {}...'.format(path), frames=Frames.dots_orbit.as_rainbow(), show_time=True, ) rootcnt = 0 print('\nStarting animated progress.') with p: for root, dirs, files in os.walk(path): rootcnt += 1 if rootcnt % 50 == 0: p.text = 'Walking {}...'.format(C(root, 'cyan')) if rootcnt > maxdircnt: # Stop is called because we are printing before the # AnimatedProgress is finished running. p.stop() print('\nFinished walking {} directories.'.format( C(maxdircnt, 'blue', style='bright') )) break else: # AnimatedProgress still running, `stop` it before printing. p.stop() print_err('\nNever made it to {} directories ({}).'.format( C(maxdircnt, 'blue', style='bright'), C(rootcnt, 'red', style='bright'), )) print('\nFinished with animated progress.') return 0
def walk_dir_animated(path, maxdircnt=1000)
Walk a directory, printing status updates along the way.
5.102711
4.919632
1.037214
p = ProgressBar( 'Walking {}'.format(C(path, 'cyan')), bars=Bars.numbers_blue.with_wrapper(('(', ')')), show_time=True, file=file, ) rootcnt = 0 print('\nStarting progress bar...') p.start() for root, dirs, files in os.walk(path): rootcnt += 1 if rootcnt % 100 == 0: p.update( percent=min((rootcnt / maxdircnt) * 100, 100), text='Walking {}...'.format( C(os.path.split(root)[-1], 'cyan'), ) ) if rootcnt > maxdircnt: # Stop is called because we are printing before the # AnimatedProgress is finished running. p.stop() print( '\nFinished walking {} directories.'.format( C(maxdircnt, 'blue', style='bright') ), file=file, ) break else: # AnimatedProgress still running, `stop` it before printing. p.stop() print_err( '\nNever made it to {} directories ({}).'.format( C(maxdircnt, 'blue', style='bright'), C(rootcnt, 'red', style='bright'), ) ) print('\nFinished with progress bar.') return 0
def walk_dir_progress(path, maxdircnt=5000, file=sys.stdout)
Walk a directory, printing status updates along the way.
4.345921
4.311003
1.0081
newlines = [] bigindent = (' ' * 16) in_opts = False for line in s.split('\n'): linestripped = line.strip('\n').strip().strip(':') if linestripped == 'Usage': # label line = line.replace('Usage', str(C('Usage', **ARGS_LABEL))) elif linestripped == 'Options': line = line.replace('Options', str(C('Options', **ARGS_LABEL))) in_opts = True elif (':' in line) and (not line.startswith(bigindent)): # opt,desc line. colorize it. lineparts = line.split(':') opt = lineparts[0] vals = [lineparts[1]] if len(lineparts) == 2 else lineparts[1:] # colorize opt if ',' in opt: opts = opt.split(',') else: opts = [opt] optstr = ','.join(str(C(o, **ARGS_OPTIONS)) for o in opts) # colorize desc valstr = ':'.join(str(C(val, **ARGS_DESC)) for val in vals) line = ':'.join((optstr, valstr)) elif in_opts and line.startswith(bigindent): # continued desc string.. # Make any 'Default:Value' parts look the same as the opt,desc. line = ':'.join(str(C(s, **ARGS_DESC)) for s in line.split(':')) elif (not line.startswith(' ')): # header line. line = str(C(line, **ARGS_HEADER)) else: # Everything else, usage mainly. if SCRIPT: line = line.replace(SCRIPT, str(C(SCRIPT, **ARGS_SCRIPT))) newlines.append( '{}{}'.format(line, C('', style='reset_all')) ) return '\n'.join(newlines)
def _coloredhelp(s)
Colorize the usage string for docopt (ColorDocoptExit, docoptextras)
3.895509
3.9427
0.988031
# docopt documentation is appended programmatically after this func def. global SCRIPT global ARGS_DESC, ARGS_HEADER, ARGS_LABEL, ARGS_OPTIONS global ARGS_SCRIPT, ARGS_VERSION SCRIPT = script if colors: # Setup colors, if any were given. ARGS_DESC.update( colors.get('desc', colors.get('description', {})) ) ARGS_HEADER.update(colors.get('header', {})) ARGS_LABEL.update(colors.get('label', {})) ARGS_OPTIONS.update(colors.get('options', {})) ARGS_SCRIPT.update(colors.get('script', {})) ARGS_VERSION.update(colors.get('version', {})) return _old_docopt( doc, argv=argv, help=help, version=version, options_first=options_first, )
def docopt( doc, argv=None, help=True, version=None, options_first=False, script=None, colors=None)
This is a wrapper for docopt.docopt that also sets SCRIPT to `script`. When SCRIPT is set, it can be colorized for the usage string. A dict of Colr options can be passed with `colors` to alter the styles. Available color options keys: desc : Colr args for the description of options. label : Colr args for the 'Usage:' and 'Options:' labels. header : Colr args for the top line (program name), and any other line that is not indented at all. options : Colr args for the options themselves ('-h,--help'). script : Colr args for the script name, if found in the usage text. version : Colr args for the version when --version is used. Example: # `colors` only updates the default settings. You must override # them to change ALL the settings. argd = docopt( ..., script=SCRIPT, colors={'script': {'fore': 'red'}} ) Original docopt documentation follows:
3.484301
2.787694
1.249886
accepted_methods = ('0', '1', '2', '3', '4') methodstr = str(method) if methodstr not in accepted_methods: raise ValueError('Invalid method, expected {}. Got: {!r}'.format( ', '.join(accepted_methods), method, )) if methodstr == '4': methods = (2, 3) else: methods = (method, ) return EscapeCode( ''.join(str(EscapeCode('{}J'.format(m))) for m in methods) )
def display(method=EraseMethod.ALL_MOVE)
Clear the screen or part of the screen, and possibly move the cursor to the "home" position (1, 1). See `method` argument below. Esc[<method>J Arguments: method: One of these possible values: EraseMethod.END or 0: Clear from cursor to the end of the screen. EraseMethod.START or 1: Clear from cursor to the start of the screen. EraseMethod.ALL_MOVE or 2: Clear all, and move home. EraseMethod.ALL_ERASE or 3: Clear all, and erase scrollback buffer. EraseMethod.ALL_MOVE_ERASE or 4: Like doing 2 and 3 in succession. This is a feature of Colr. It is not standard. Default: EraseMethod.ALL_MOVE (2)
4.581668
3.898471
1.175247
methods = ('0', '1', '2') if str(method) not in methods: raise ValueError('Invalid method, expected {}. Got: {!r}'.format( ', '.join(methods), method, )) return EscapeCode('{}K'.format(method))
def line(method=EraseMethod.ALL)
Erase a line, or part of a line. See `method` argument below. Cursor position does not change. Esc[<method>K Arguments: method : One of these possible values: EraseMethod.END or 0: Clear from cursor to the end of the line. EraseMethod.START or 1: Clear from cursor to the start of the line. EraseMethod.ALL or 2: Clear the entire line. Default: EraseMethod.ALL (2)
6.344834
5.683559
1.116349
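The two docstrings above describe the raw escape sequences ``Esc[<method>J`` and ``Esc[<method>K``. As a rough standalone sketch (assuming a VT100-compatible terminal, and not using Colr's ``EscapeCode`` class), the same effect can be had by writing the sequences directly::

    # Minimal sketch of the erase sequences described above; assumes a
    # VT100-compatible terminal and does not use Colr's EscapeCode class.
    import sys

    CSI = '\x1b['  # Control Sequence Introducer ("Esc[")

    def erase_display(method=2):
        # Esc[<method>J -- 2 clears the whole visible screen. Some terminals
        # need a separate Esc[H to move the cursor back home.
        sys.stdout.write('{}{}J'.format(CSI, method))

    def erase_line(method=2):
        # Esc[<method>K -- 2 clears the entire current line.
        sys.stdout.write('{}{}K'.format(CSI, method))

    erase_line(2)
    sys.stdout.flush()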
try: fileno = file.fileno() except (AttributeError, UnsupportedOperation): # Unable to use fileno to re-open unbuffered. Oh well. # The output may be line buffered, which isn't that great for # repeatedly drawing and erasing text, or hiding/showing the cursor. return file filedesc = _alreadyopen.get(fileno, None) if filedesc is not None: return filedesc filedesc = fdopen(fileno, 'wb', 0) _alreadyopen[fileno] = filedesc # TODO: sys.stdout/stderr don't need to be closed. # But would it be worth it to try and close these opened files? return filedesc
def try_unbuffered_file(file, _alreadyopen={})
Try re-opening a file in an unbuffered mode and return it. If that fails, just return the original file. This function remembers the file descriptors it opens, so it never opens the same one twice. This is meant for files like sys.stdout or sys.stderr.
6.323499
6.119498
1.033336
self.stop_flag.value = False self.time_started.value = time() self.time_elapsed.value = 0 while True: if self.stop_flag.value: break self.update_text() with self.time_started.get_lock(): start = self.time_started.value with self.time_elapsed.get_lock(): self.time_elapsed.value = time() - start if ( self.timeout.value and (self.time_elapsed.value > self.timeout.value)): self.stop() raise ProgressTimedOut( self.name, self.time_elapsed.value, )
def _loop(self)
This is the loop that runs in the subprocess. It is called from `run` and is responsible for all printing, text updates, and time management.
3.00961
2.816408
1.068599
try: self._loop() except Exception: # Send the exception through the exc_queue, so the parent # process can check it. typ, val, tb = sys.exc_info() tb_lines = traceback.format_exception(typ, val, tb) self.exc_queue.put((val, tb_lines))
def run(self)
Runs the printer loop in a subprocess. This is called by multiprocessing.
4.209608
4.137311
1.017474
self.stop_flag.value = True with self.lock: ( Control().text(C(' ', style='reset_all')) .pos_restore().move_column(1).erase_line() .write(self.file) )
def stop(self)
Stop this WriterProcessBase, and reset the cursor.
21.371321
16.091999
1.328071
self.write() try: newtext = self.text_queue.get_nowait() self._text = newtext except Empty: pass
def update_text(self)
Write the current text, and check for any new text changes. This also updates the elapsed time.
6.240669
4.968879
1.255951
if self._text is not None: with self.lock: self.file.write(str(self._text).encode()) self.file.flush() sleep(self.nice_delay)
def write(self)
Write the current text to self.file, and flush it. This can be overridden to handle custom writes.
5.411006
4.617328
1.171891
if self._exception is not None: return self._exception try: exc, tblines = self.exc_queue.get_nowait() except Empty: self._exception, self.tb_lines = None, None else: # Raise any exception that the subprocess encountered and sent. self._exception, self.tb_lines = exc, tblines return self._exception
def exception(self)
Try retrieving the last subprocess exception. If set, the exception is returned. Otherwise None is returned.
5.29432
4.875816
1.085833
if isinstance(value, str): value = value.split(self.join_str) if not (value and isinstance(value, (list, tuple))): raise TypeError( ' '.join(( 'Expecting str or list/tuple of formats {!r}.', 'Got: ({}) {!r}' )).format( self.default_format, type(value).__name__, value, )) self._fmt = value
def fmt(self, value)
Sets self.fmt, with some extra help for plain format strings.
4.840201
4.490167
1.077956
try: Control().cursor_hide().write(file=self.file) super().run() except KeyboardInterrupt: self.stop() finally: Control().cursor_show().write(file=self.file)
def run(self)
Overrides WriterProcess.run, to handle KeyboardInterrupts better. This should not be called by any user. `multiprocessing` calls this in a subprocess. Use `self.start` to start this instance.
6.512777
5.216103
1.248591
super().stop() while not self.stopped: # stop() should block, so printing afterwards isn't interrupted. sleep(0.001) # Retrieve the latest exception, if any. exc = self.exception if exc is not None: raise exc
def stop(self)
Stop this animated progress, and block until it is finished.
9.751722
8.68409
1.122941
if self.text is None: # Text has not been sent through the pipe yet. # Do not write anything until it is set to non-None value. return None if self._last_text == self.text: char_delay = 0 else: char_delay = self.char_delay self._last_text = self.text with self.lock: ctl = Control().move_column(1).pos_save().erase_line() if char_delay == 0: ctl.text(str(self)).write(file=self.file) else: self.write_char_delay(ctl, char_delay) ctl.delay(self.delay) return None
def write(self)
Writes a single frame of the progress spinner to the terminal. This function updates the current frame before returning.
5.684389
5.521275
1.029543
self.current_frame += 1 if self.current_frame == self.frame_len: self.current_frame = 0
def _advance_frame(self)
Sets `self.current_frame` to the next frame, looping to the beginning if needed.
2.839183
2.331674
1.217659
# User frameslists might not be a FrameSet. delay = userdelay or getattr(frameslist, 'delay', None) delay = (delay or self.default_delay) - self.nice_delay if delay < 0: delay = 0 return delay
def _get_delay(self, userdelay, frameslist)
Get the appropriate delay value to use, trying in this order: userdelay frameslist.delay default_delay The user can override the frameslist's delay by specifying a value, and if neither is given the default is used.
7.025011
7.09482
0.99016
for i, fmt in enumerate(self.fmt): if '{text' in fmt: # The text will use a write delay. ctl.text(fmt.format(text=self.text)) if i != (self.fmt_len - 1): ctl.text(self.join_str) ctl.write( file=self.file, delay=delay ) else: # Anything else is written with no delay. ctl.text(fmt.format( frame=self.frames[self.current_frame], elapsed=self.elapsed )) if i != (self.fmt_len - 1): # Add the join_str to pieces, except the last piece. ctl.text(self.join_str) ctl.write(file=self.file) return ctl
def write_char_delay(self, ctl, delay)
Write the formatted format pieces in order, applying a delay between characters for the text only.
4.356654
3.964998
1.098778
if percent is not None: self.percent = percent if text is not None: self.message = text super().update()
def update(self, percent=None, text=None)
Update the progress bar percentage and message.
2.676791
2.302816
1.162399
try: val = getattr(cls, name) except AttributeError: for attr in (a for a in dir(cls) if not a.startswith('_')): try: val = getattr(cls, attr) except AttributeError: # Is known to happen. continue valname = getattr(val, 'name', None) if valname == name: return val else: raise ValueError('No {} with that name: {}'.format( cls.__name__, name, )) else: return val
def cls_get_by_name(cls, name)
Return a class attribute by name, falling back to searching each attribute's `name` attribute.
3.122953
3.0735
1.01609
return [ fset.name for fset in cls_sets(cls, wanted_cls, registered=registered) ]
def cls_names(cls, wanted_cls, registered=True)
Return a list of names for all `wanted_cls` attributes in this class, where `wanted_cls` is the desired attribute type.
5.06405
8.172447
0.619649
name = name or getattr(frameset, 'name', None) if name is None: raise ValueError( '`name` is needed when the `frameset` has no name attribute.' ) kwargs = {'name': name} for initarg in init_args: kwargs[initarg] = getattr(frameset, initarg, None) newframeset = new_class(frameset, **kwargs) # Mark this FrameSet/BarSet as a registered item (not basic/original). newframeset._registered = True setattr(cls, name, newframeset) return newframeset
def cls_register(cls, frameset, new_class, init_args, name=None)
Register a new FrameSet or FrameSet subclass as a member/attribute of a class. Returns the new FrameSet or FrameSet subclass. Arguments: frameset : An existing FrameSet, or an iterable of strings. init_args : A list of properties from the `frameset` to try to use for initializing the new FrameSet. new_class : The class type to initialize. name : New name for the FrameSet, also used as the class's attribute name. If the `frameset` object has no `name` attribute, this argument is required. It must not be empty when given.
4.521895
3.994556
1.132014
sets = [] for attr in dir(cls): if attr.startswith('_'): continue val = getattr(cls, attr, None) if not isinstance(val, wanted_cls): continue if (not registered) and getattr(val, '_registered', False): continue sets.append(val) return sets
def cls_sets(cls, wanted_cls, registered=True)
Return a list of all `wanted_cls` attributes in this class, where `wanted_cls` is the desired attribute type.
2.375473
2.550588
0.931343
# Get the basic frame types first. frametypes = cls.sets(registered=False) _colornames = [ # 'black', disabled for now, it won't show on my terminal. 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white', ] _colornames.extend('light{}'.format(s) for s in _colornames[:]) for colorname in _colornames: for framesobj in frametypes: framename = '{}_{}'.format(framesobj.name, colorname) cls.register( framesobj.as_colr(fore=colorname), name=framename, )
def _build_color_variants(cls)
Build colorized variants of all frames and return a list of all frame object names.
5.790921
5.630857
1.028426
if wrapper: data = tuple(barset.wrap_str(s, wrapper=wrapper) for s in barset) elif use_wrapper: data = tuple(barset.wrap_str(s) for s in barset) else: data = barset.data return cls( data, name=name, delay=delay )
def from_barset( cls, barset, name=None, delay=None, use_wrapper=True, wrapper=None)
Copy a BarSet's frames to create a new FrameSet. Arguments: barset : An existing BarSet object to copy frames from. name : A name for the new FrameSet. delay : Delay for the animation. use_wrapper : Whether to use the old barset's wrapper in the frames. wrapper : A new wrapper pair to use for each frame. This overrides the `use_wrapper` option.
2.768843
3.107911
0.890902
return self._as_gradient( ('wrapper', ), name=name, style=style, rgb_mode=rgb_mode, )
def as_gradient(self, name=None, style=None, rgb_mode=False)
Wrap each frame in a Colr object, using `Colr.gradient`. Arguments: name : Starting color name. One of `Colr.gradient_names`.
5.241721
7.498338
0.699051
if not self: return self.wrap_str() length = len(self) # Using mod 100, to provide some kind of "auto reset". 0 is 0 though. percentmod = (int(percent) % 100) or min(percent, 100) index = int((length / 100) * percentmod) try: barstr = str(self[index]) except IndexError: barstr = self[-1] return self.wrap_str(barstr)
def as_percent(self, percent)
Return a string representing a percentage of this progress bar. BarSet('1234567890', wrapper=('[', ']')).as_percent(50) >>> '[12345 ]'
6.677156
6.291315
1.061329
return self._as_rainbow( ('wrapper', ), offset=offset, style=style, rgb_mode=rgb_mode, )
def as_rainbow(self, offset=35, style=None, rgb_mode=False)
Wrap each frame in a Colr object, using `Colr.rainbow`.
5.227703
5.613727
0.931236
return cls( cls._generate_move( char, width=width or cls.default_width, fill_char=str(fill_char or cls.default_fill_char), bounce=bounce, reverse=reverse, back_char=back_char, ), name=name, wrapper=wrapper or cls.default_wrapper, )
def from_char( cls, char, name=None, width=None, fill_char=None, bounce=False, reverse=False, back_char=None, wrapper=None)
Create progress bar frames from a "moving" character. The frames simulate movement of the character, from left to right through empty space (`fill_char`). Arguments: char : Character to move across the bar. name : Name for the new BarSet. width : Width of the progress bar. Default: 25 fill_char : Character to fill empty space. Default: ' ' (space) bounce : Whether the frames should simulate a bounce from one side to another. Default: False reverse : Whether the character should start on the right. Default: False back_char : Character to use when "bouncing" backward. Default: `char`
2.506328
2.531013
0.990247
fill_char = fill_char or cls.default_fill_char maxlen = len(s) frames = [] for pos in range(1, maxlen): framestr = s[:pos] # Not using ljust, because fill_char may be a str, not a char. frames.append( ''.join(( framestr, fill_char * (maxlen - len(framestr)) )) ) frames.append(s) return cls( frames, name=name, wrapper=wrapper, )
def from_str(cls, s, name=None, fill_char=None, wrapper=None)
Create progress bar frames from a single string. The frames simulate growth, from an empty string to the final string (`s`). Arguments: s : Final string for a complete progress bar. name : Name for the new BarSet. fill_char : Character to fill empty space. Default: ' ' (space) wrapper : Wrapping characters for the new bar set. Default: cls.default_wrapper
3.710456
3.832614
0.968127
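``from_str`` above builds "growth" frames from an empty bar up to the final string. A hypothetical standalone sketch of that frame generation (assuming a single-character ``fill_char``; the real code avoids ``ljust`` because the fill string may be longer than one character)::

    # Hypothetical helper sketching the growth frames described above.
    def growth_frames(s, fill_char=' '):
        maxlen = len(s)
        frames = [s[:pos] + fill_char * (maxlen - pos) for pos in range(1, maxlen)]
        frames.append(s)
        return frames

    print(growth_frames('=====', fill_char='-'))
    # ['=----', '==---', '===--', '====-', '=====']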
width = width or cls.default_width char = str(char) filler = str(fill_char or cls.default_fill_char) * (width - len(char)) rangeargs = RangeMoveArgs( (0, width, 1), (width, 0, -1), ) if reverse: # Reverse the arguments for range to start from the right. # Not using swap, because the stopping point is different. rangeargs = RangeMoveArgs( (width, -1, -1), (0, width - 1, 1), ) yield from ( ''.join((filler[:i], char, filler[i:])) for i in range(*rangeargs.forward) ) if bounce: bouncechar = char if back_char is None else back_char yield from ( ''.join((filler[:i], str(bouncechar), filler[i:])) for i in range(*rangeargs.backward) )
def _generate_move( cls, char, width=None, fill_char=None, bounce=False, reverse=True, back_char=None)
Yields strings that simulate movement of a character from left to right. For use with `BarSet.from_char`. Arguments: char : Character to move across the progress bar. width : Width for the progress bar. Default: cls.default_width fill_char : String for empty space. Default: cls.default_fill_char bounce : Whether to move the character in both directions. reverse : Whether to start on the right side. back_char : Character to use for the bounce's backward movement. Default: `char`
3.271867
3.274803
0.999103
name = name or '{}_custom_wrapper'.format(self.name) return self.__class__(self.data, name=name, wrapper=wrapper)
def with_wrapper(self, wrapper=None, name=None)
Copy this BarSet, and return a new BarSet with the specified name and wrapper. If no name is given, `{self.name}_custom_wrapper` is used. If no wrapper is given, the new BarSet will have no wrapper.
4.401759
3.335721
1.319583
wrapper = wrapper or (self.wrapper or ('', '')) return str('' if s is None else s).join(wrapper)
def wrap_str(self, s=None, wrapper=None)
Wrap a string in self.wrapper, with some extra handling for empty/None strings. If `wrapper` is set, use it instead.
6.681264
6.985337
0.95647
return cls_register(cls, barset, BarSet, ('wrapper', ), name=name)
def register(cls, barset, name=None)
Register a new BarSet as a member/attribute of this class. Returns the new BarSet. Arguments: barset : An existing BarSet, or an iterable of strings. name : New name for the BarSet, also used as the class's attribute name. If the `barset` object has no `name` attribute, this argument is required. It must not be empty when given.
20.735277
22.669001
0.914697
return cls_register(cls, frameset, FrameSet, ('delay', ), name=name)
def register(cls, frameset, name=None)
Register a new FrameSet as a member/attribute of this class. Returns the new FrameSet. Arguments: frameset : An existing FrameSet, or an iterable of strings. name : New name for the FrameSet, also used as the class's attribute name. If the `frameset` object has no `name` attribute, this argument is required. It must not be empty when given.
19.365353
25.199245
0.768489
message = '{}: {}'.format(pyuv.errno.errorcode.get(errno, errno), pyuv.errno.strerror(errno)) return cls(message, errno)
def from_errno(cls, errno)
Create a new instance from a :mod:`pyuv.errno` error code.
4.251856
3.656586
1.162794
if self._protocol is not None: raise TransportError('already started') self._protocol = protocol self._protocol.connection_made(self) if self._readable: self.resume_reading() if self._writable: self._writing = True self._can_write.set()
def start(self, protocol)
Bind to *protocol* and start calling callbacks on it.
3.356592
3.331861
1.007423
if high is None: high = self.write_buffer_size if low is None: low = high // 2 if low > high: low = high self._write_buffer_high = high self._write_buffer_low = low
def set_write_buffer_limits(self, high=None, low=None)
Set the low and high watermark for the write buffer.
2.137376
2.044427
1.045465
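The watermarks set above are the usual drivers for write-side flow control: the protocol is paused when the buffered amount rises above the high mark and resumed once it drains below the low mark. A conceptual sketch of that policy (an assumption about the general technique, not Gruvi's actual implementation)::

    # Conceptual sketch of watermark-based flow control; names and defaults
    # here are assumptions, not Gruvi's code.
    class FlowControl:
        def __init__(self, high=65536, low=None):
            self.high = high
            self.low = high // 2 if low is None else min(low, high)
            self.buffer_size = 0
            self.paused = False

        def data_buffered(self, nbytes):
            self.buffer_size += nbytes
            if not self.paused and self.buffer_size > self.high:
                self.paused = True   # protocol.pause_writing() would go here

        def data_written(self, nbytes):
            self.buffer_size -= nbytes
            if self.paused and self.buffer_size <= self.low:
                self.paused = False  # protocol.resume_writing() would go here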
if self._closing or self._handle.closed: return elif self._protocol is None: raise TransportError('transport not started') # If the write buffer is empty, close now. Otherwise defer to # _on_write_complete that will close when the buffer is empty. if self._write_buffer_size == 0: self._handle.close(self._on_close_complete) assert self._handle.closed else: self._closing = True
def close(self)
Close the transport after all outstanding data has been written.
4.946537
4.370385
1.131831
if self._handle.closed: return elif self._protocol is None: raise TransportError('transport not started') self._handle.close(self._on_close_complete) assert self._handle.closed
def abort(self)
Close the transport immediately.
6.812726
5.528955
1.232191
if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError("data: expecting a bytes-like instance, got {!r}" .format(type(data).__name__)) if handle is not None and not isinstance(self._handle, pyuv.Pipe): raise ValueError('handle: can only be sent over pyuv.Pipe') self._check_status() if not self._writable: raise TransportError('transport is not writable') if self._closing: raise TransportError('transport is closing') try: if handle: self._handle.write(data, self._on_write_complete, handle) else: self._handle.write(data, self._on_write_complete) except pyuv.error.UVError as e: self._error = TransportError.from_errno(e.args[0]) self.abort() raise compat.saved_exc(self._error) # We only keep track of the number of outstanding write requests # ourselves. See note in get_write_buffer_size(). self._write_buffer_size += 1 self._maybe_pause_protocol()
def write(self, data, handle=None)
Write *data* to the transport.
3.790735
3.654834
1.037184
self._check_status() if not self._writable: raise TransportError('transport is not writable') if self._closing: raise TransportError('transport is closing') try: self._handle.shutdown(self._on_write_complete) except pyuv.error.UVError as e: self._error = TransportError.from_errno(e.args[0]) self.abort() raise compat.saved_exc(self._error) self._write_buffer_size += 1
def write_eof(self)
Shut down the write direction of the transport.
4.952156
4.385331
1.129255
if name == 'sockname': if not hasattr(self._handle, 'getsockname'): return default try: return self._handle.getsockname() except pyuv.error.UVError: return default elif name == 'peername': if not hasattr(self._handle, 'getpeername'): return default try: return self._handle.getpeername() except pyuv.error.UVError: return default elif name == 'winsize': if not hasattr(self._handle, 'get_winsize'): return default try: return self._handle.get_winsize() except pyuv.error.UVError: return default elif name == 'unix_creds': # In case you're wondering, DBUS needs this. if not isinstance(self._handle, pyuv.Pipe) or not hasattr(socket, 'SO_PEERCRED'): return default try: fd = self._handle.fileno() sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_DGRAM) # will dup() with contextlib.closing(sock): creds = sock.getsockopt(socket.SOL_SOCKET, socket.SO_PEERCRED, struct.calcsize('3i')) except socket.error: return default return struct.unpack('3i', creds) elif name == 'server_hostname': return self._server_hostname else: return super(Transport, self).get_extra_info(name, default)
def get_extra_info(self, name, default=None)
Get transport specific data. In addition to the fields from :meth:`BaseTransport.get_extra_info`, the following information is also available: ===================== =================================================== Name Description ===================== =================================================== ``'sockname'`` The socket name i.e. the result of the ``getsockname()`` system call. ``'peername'`` The peer name i.e. the result of the ``getpeername()`` system call. ``'winsize'`` The terminal window size as a ``(cols, rows)`` tuple. Only available for :class:`pyuv.TTY` handles. ``'unix_creds'`` The Unix credentials of the peer as a ``(pid, uid, gid)`` tuple. Only available for :class:`pyuv.Pipe` handles on Unix. ``'server_hostname'`` The host name of the remote peer prior to address resolution, if applicable. ===================== ===================================================
2.280204
1.90468
1.197158
assert handle is self._handle if error: self._log.warning('pyuv error {} in recv callback', error) self._protocol.error_received(TransportError.from_errno(error)) elif flags: assert flags & pyuv.UV_UDP_PARTIAL self._log.warning('ignoring partial datagram') elif data: self._protocol.datagram_received(data, addr)
def _on_recv_complete(self, handle, addr, flags, data, error)
Callback used with handle.start_recv().
5.231097
5.321592
0.982995
assert handle is self._handle self._write_buffer_size -= 1 assert self._write_buffer_size >= 0 if self._error: self._log.debug('ignore sendto status {} after error', error) # See note in _on_write_complete() about UV_ECANCELED elif error and error != pyuv.errno.UV_ECANCELED: self._log.warning('pyuv error {} in sendto callback', error) self._protocol.error_received(TransportError.from_errno(error)) self._maybe_resume_protocol() self._maybe_close()
def _on_send_complete(self, handle, error)
Callback used with handle.send().
6.285658
6.247882
1.006046
if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError("data: expecting a bytes-like instance, got {!r}" .format(type(data).__name__)) self._check_status() if not self._writable: raise TransportError('transport is not writable') try: self._handle.send(addr, data, self._on_send_complete) except pyuv.error.UVError as e: error = TransportError.from_errno(e.args[0]) # Try to discern between permanent and transient errors. Permanent # errors close the transport. This list is very likely not complete. if error.errno != pyuv.errno.UV_EBADF: raise error self._error = error self.abort() self._write_buffer_size += 1 self._maybe_pause_protocol()
def sendto(self, data, addr=None)
Send a datagram containing *data* to *addr*. The *addr* argument may be omitted only if the handle was bound to a default remote address.
4.929618
5.054922
0.975212
if address == 'session': address = os.environ.get('DBUS_SESSION_BUS_ADDRESS') if not address: raise ValueError('$DBUS_SESSION_BUS_ADDRESS not set') elif address == 'system': address = os.environ.get('DBUS_SYSTEM_BUS_ADDRESS', 'unix:path=/var/run/dbus/system_bus_socket') addresses = [] for addr in address.split(';'): p1 = addr.find(':') if p1 == -1: raise ValueError('illegal address string: {}'.format(addr)) kind = addr[:p1] args = dict((kv.split('=') for kv in addr[p1+1:].split(','))) if kind == 'unix': if 'path' in args: addr = args['path'] elif 'abstract' in args: addr = '\0' + args['abstract'] else: raise ValueError('require "path" or "abstract" for unix') elif kind == 'tcp': if 'host' not in args or 'port' not in args: raise ValueError('require "host" and "port" for tcp') addr = (args['host'], int(args['port'])) else: raise ValueError('unknown transport: {}'.format(kind)) addresses.append(addr) return addresses
def parse_dbus_address(address)
Parse a D-BUS address string into a list of addresses.
2.109178
2.065866
1.020965
if six.indexbytes(header, 0) == ord('l'): endian = '<' elif six.indexbytes(header, 0) == ord('B'): endian = '>' else: raise ValueError('illegal endianness') if not 1 <= six.indexbytes(header, 1) <= 4: raise ValueError('illegal message type') if struct.unpack(endian + 'I', header[8:12])[0] == 0: raise ValueError('illegal serial number') harrlen = struct.unpack(endian + 'I', header[12:16])[0] padlen = (8 - harrlen) % 8 bodylen = struct.unpack(endian + 'I', header[4:8])[0] return 16 + harrlen + padlen + bodylen
def parse_dbus_header(header)
Parse a D-BUS header. Return the message size.
2.996689
2.829386
1.05913
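``parse_dbus_header`` above computes the total message size from the 16-byte fixed header: fixed part + header field array + padding to an 8-byte boundary + body. A small self-contained check of that arithmetic on a hand-built header (the field values here are made up for illustration)::

    # Build a fake 16-byte fixed header and recompute the size the same way
    # parse_dbus_header does. Field values are illustrative only.
    import struct

    # endianness 'l', message type 1, flags 0, protocol version 1,
    # body length 20, serial 42, header-array length 8.
    header = struct.pack('<BBBBIII', ord('l'), 1, 0, 1, 20, 42, 8)

    endian = '<' if header[0] == ord('l') else '>'
    bodylen = struct.unpack(endian + 'I', header[4:8])[0]
    harrlen = struct.unpack(endian + 'I', header[12:16])[0]
    padlen = (8 - harrlen) % 8
    print(16 + harrlen + padlen + bodylen)  # 16 + 8 + 0 + 20 = 44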
if self._server_side: mech = self._authenticator.current_mech return mech.getMechanismName() if mech else None else: return getattr(self._authenticator, 'authMech', None)
def getMechanismName(self)
Return the authentication mechanism name.
4.972363
4.222811
1.177501
if not self._server_side: return mech = self._authenticator.current_mech return mech.getUserName() if mech else None
def getUserName(self)
Return the authenticated user name (server side).
9.013172
6.699678
1.345314
self._name_acquired.wait() if self._error: raise compat.saved_exc(self._error) elif self._transport is None: raise DbusError('not connected') return self._unique_name
def get_unique_name(self)
Return the unique name of the D-BUS connection.
10.26089
7.090471
1.447138
if not isinstance(message, txdbus.DbusMessage): raise TypeError('message: expecting DbusMessage instance (got {!r})', type(message).__name__) self._name_acquired.wait() if self._error: raise compat.saved_exc(self._error) elif self._transport is None: raise DbusError('not connected') self._writer.write(message.rawMessage)
def send_message(self, message)
Send a D-BUS message. The *message* argument must be a ``gruvi.txdbus.DbusMessage`` instance.
7.563177
6.142924
1.231202
message = txdbus.MethodCallMessage(path, method, interface=interface, destination=service, signature=signature, body=args, expectReply=not no_reply, autoStart=auto_start) serial = message.serial if timeout == -1: timeout = self._timeout try: with switch_back(timeout) as switcher: self._method_calls[serial] = switcher self.send_message(message) args, _ = self._hub.switch() finally: self._method_calls.pop(serial, None) response = args[0] assert response.reply_serial == serial if isinstance(response, txdbus.ErrorMessage): raise DbusMethodCallError(method, response) args = tuple(response.body) if response.body else () return args
def call_method(self, service, path, interface, method, signature=None, args=None, no_reply=False, auto_start=False, timeout=-1)
Call a D-BUS method and wait for its reply. This method calls the D-BUS method with name *method* that resides on the object at bus address *service*, at path *path*, on interface *interface*. The *signature* and *args* are optional arguments that can be used to add parameters to the method call. The signature is a D-BUS signature string, while *args* must be a sequence of python types that can be converted into the types specified by the signature. See the `D-BUS specification <http://dbus.freedesktop.org/doc/dbus-specification.html>`_ for a reference on signature strings. The flags *no_reply* and *auto_start* control the NO_REPLY_EXPECTED and NO_AUTO_START flags on the D-BUS message. The return value is the result of the D-BUS method call. This will be a possibly empty sequence of values.
4.197921
5.015608
0.836972
if isinstance(address, six.string_types): addresses = parse_dbus_address(address) else: addresses = [address] for addr in addresses: try: super(DbusClient, self).connect(addr) except pyuv.error.UVError: continue break else: raise DbusError('could not connect to any address') # Wait for authentication to complete self.get_unique_name()
def connect(self, address='session')
Connect to *address* and wait until the connection is established. The *address* argument must be a D-BUS server address, in the format described in the D-BUS specification. It may also be one of the special addresses ``'session'`` or ``'system'``, to connect to the D-BUS session and system bus, respectively.
3.816976
3.515142
1.085867
if isinstance(address, six.string_types): addresses = parse_dbus_address(address) else: addresses = [address] for addr in addresses: try: super(DbusServer, self).listen(addr) except pyuv.error.UVError: self._log.error('skipping address {}', saddr(addr))
def listen(self, address='session')
Start listening on *address* for new connections. The *address* argument must be a D-BUS server address, in the format described in the D-BUS specification. It may also be one of the special addresses ``'session'`` or ``'system'``, to listen on the D-BUS session and system bus, respectively.
4.904908
4.70348
1.042825
def setdoc(func): func.__doc__ = (getattr(base, '__doc__') or '') + (func.__doc__ or '') return func return setdoc
def docfrom(base)
Decorator to set a function's docstring from another function.
3.745609
2.998451
1.249181
ref = _objrefs.get(obj) if ref is None: clsname = obj.__class__.__name__.split('.')[-1] seqno = _lastids.setdefault(clsname, 1) ref = '{}-{}'.format(clsname, seqno) _objrefs[obj] = ref _lastids[clsname] += 1 return ref
def objref(obj)
Return a string that uniquely and compactly identifies an object.
2.965668
2.775548
1.068498
frame = sys._getframe(1) classdict = frame.f_locals @functools.wraps(method) def delegate(self, *args, **kwargs): other_self = other.__get__(self) return method(other_self, *args, **kwargs) if getattr(method, '__switchpoint__', False): delegate.__switchpoint__ = True if name is None: name = method.__name__ propname = None for key in classdict: if classdict[key] is other: propname = key break # If we know the property name, replace the docstring with a small # reference instead of copying the function docstring. if propname: qname = getattr(method, '__qualname__', method.__name__) if '.' in qname: delegate.__doc__ = 'A shorthand for ``self.{propname}.{name}()``.' \ .format(name=name, propname=propname) else: delegate.__doc__ = 'A shorthand for ``{name}({propname}, ...)``.' \ .format(name=name, propname=propname) classdict[name] = delegate
def delegate_method(other, method, name=None)
Add a method to the current class that delegates to another method. The *other* argument must be a property that returns the instance to delegate to. Due to an implementation detail, the property must be defined in the current class. The *method* argument specifies a method to delegate to. It can be any callable as long as it takes the instance as its first argument. It is a common paradigm in Gruvi to expose protocol methods onto clients. This keeps most of the logic in the protocol, while saving the user from having to type ``'client.protocol.*methodname*'`` all the time. For example:: class MyClient(Client): protocol = Client.protocol delegate_method(protocol, MyProtocol.method)
3.180135
3.666658
0.867312
match = re_ws.match(buf, pos) if not match: return None, pos return buf[match.start(0):match.end(0)], match.end(0)
def accept_ws(buf, pos)
Skip whitespace at the current buffer position.
2.462393
2.437401
1.010253
if pos >= len(buf) or buf[pos] != char: return None, pos return char, pos+1
def accept_lit(char, buf, pos)
Accept a literal character at the current buffer position.
3.005634
3.125483
0.961654
if pos >= len(buf) or buf[pos] != char: return None, len(buf) return char, pos+1
def expect_lit(char, buf, pos)
Expect a literal character at the current buffer position.
3.369905
3.410097
0.988214
match = regexp.match(buf, pos) if not match: return None, pos return buf[match.start(1):match.end(1)], match.end(0)
def accept_re(regexp, buf, pos)
Accept a regular expression at the current buffer position.
2.633519
2.820656
0.933655
match = regexp.match(buf, pos) if not match: return None, len(buf) return buf[match.start(1):match.end(1)], match.end(0)
def expect_re(regexp, buf, pos)
Require a regular expression at the current buffer position.
2.798397
2.890145
0.968255
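The ``accept_*``/``expect_*`` helpers above are tiny parser combinators that each return a ``(value, new_pos)`` pair. A standalone sketch of how such combinators chain to pull a ``name=value`` pair out of a buffer (``re_name`` below is a simplified stand-in for the module's ``re_token``)::

    # Standalone sketch of chaining (value, pos) combinators like the ones above.
    import re

    re_name = re.compile(r'([A-Za-z0-9_-]+)')

    def accept_lit(char, buf, pos):
        if pos >= len(buf) or buf[pos] != char:
            return None, pos
        return char, pos + 1

    def expect_re(regexp, buf, pos):
        match = regexp.match(buf, pos)
        if not match:
            return None, len(buf)
        return match.group(1), match.end(0)

    def parse_pair(buf):
        name, pos = expect_re(re_name, buf, 0)
        _, pos = accept_lit('=', buf, pos)
        value, pos = expect_re(re_name, buf, pos)
        return name, value

    print(parse_pair('charset=utf-8'))  # ('charset', 'utf-8')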
typ = subtyp = None; options = {} typ, pos = expect_re(re_token, header, 0) _, pos = expect_lit('/', header, pos) subtyp, pos = expect_re(re_token, header, pos) ctype = header[:pos] if subtyp else '' while pos < len(header): _, pos = accept_ws(header, pos) _, pos = expect_lit(';', header, pos) _, pos = accept_ws(header, pos) name, pos = expect_re(re_token, header, pos) _, pos = expect_lit('=', header, pos) char = lookahead(header, pos) if char == '"': value, pos = expect_re(re_qstring, header, pos) value = re_qpair.sub('\\1', value) elif char: value, pos = expect_re(re_token, header, pos) if name and value is not None: options[name] = value return ctype, options
def parse_content_type(header)
Parse the "Content-Type" header.
3.157933
3.106361
1.016602
pos = 0 names = [] while pos < len(header): name, pos = expect_re(re_token, header, pos) _, pos = accept_ws(header, pos) _, pos = accept_lit(';', header, pos) _, pos = accept_ws(header, pos) qvalue, pos = accept_re(re_qvalue, header, pos) if name: names.append((name, qvalue)) _, pos = accept_ws(header, pos) _, pos = expect_lit(',', header, pos) _, pos = accept_ws(header, pos) return names
def parse_te(header)
Parse the "TE" header.
2.945363
2.829653
1.040892
pos = 0 names = [] while pos < len(header): name, pos = expect_re(re_token, header, pos) if name: names.append(name) _, pos = accept_ws(header, pos) _, pos = expect_lit(',', header, pos) _, pos = accept_ws(header, pos) return names
def parse_trailer(header)
Parse the "Trailer" header.
4.096409
3.989027
1.026919
if timestamp is None: timestamp = time.time() timestamp = int(timestamp) global _cached_timestamp, _cached_datestring if timestamp != _cached_timestamp: # The time stamp must be GMT, and cannot be localized. tm = time.gmtime(timestamp) s = rfc1123_fmt.replace('%a', weekdays[tm.tm_wday]) \ .replace('%b', months[tm.tm_mon-1]) _cached_datestring = time.strftime(s, tm) _cached_timestamp = timestamp return _cached_datestring
def rfc1123_date(timestamp=None)
Create an RFC1123 style Date header for *timestamp*.
3.221012
3.23152
0.996748
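As a sanity check of the format produced above, the standard library can emit the same RFC 1123 date string (this is just a cross-check of the idea, not a drop-in replacement for the cached helper)::

    # Cross-check the RFC 1123 format with the standard library.
    from email.utils import formatdate

    ts = 784111777  # Sun, 06 Nov 1994 08:49:37 GMT (example date from RFC 7231)
    print(formatdate(timeval=ts, usegmt=True))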
# If this is not in origin-form, authority-form or asterisk-form and no # scheme is present, assume it's in absolute-form with a missing scheme. # See RFC7230 section 5.3. if url[:1] not in '*/' and not is_connect and '://' not in url: url = '{}://{}'.format(default_scheme, url) burl = s2b(url) parser = ffi.new('struct http_parser_url *') lib.http_parser_url_init(parser) res = lib.http_parser_parse_url(ffi.from_buffer(burl), len(burl), is_connect, parser) if res != 0: raise ValueError('invalid URL') parsed = ParsedUrl.from_parser(parser, url) return parsed
def parse_url(url, default_scheme='http', is_connect=False)
Parse a URL and return its components. The *default_scheme* argument specifies the scheme in case the URL is an otherwise valid absolute URL with a missing scheme. The *is_connect* argument must be set to ``True`` if the URL was requested with the HTTP CONNECT method. These URLs have a different form and need to be parsed differently. The result is a :class:`ParsedUrl` containing the URL components.
4.617111
4.628054
0.997636
name = name.lower() for header in headers: if header[0].lower() == name: return header[1] return default
def get_header(headers, name, default=None)
Return the value of header *name*. The *headers* argument must be a list of ``(name, value)`` tuples. If the header is found its associated value is returned, otherwise *default* is returned. Header names are matched case insensitively.
2.3542
3.132214
0.751609
i = 0 name = name.lower() for j in range(len(headers)): if headers[j][0].lower() != name: if i != j: headers[i] = headers[j] i += 1 del headers[i:] return headers
def remove_headers(headers, name)
Remove all headers with name *name*. The list is modified in-place and the updated list is returned.
2.420341
2.707393
0.893975
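``get_header`` does a case-insensitive scan, and ``remove_headers`` compacts the list in place with a read index and a write index. A small standalone demonstration using local copies of the two helpers (not imported from gruvi)::

    # Standalone copies of the two helpers above, shown on a small header list.
    def get_header(headers, name, default=None):
        name = name.lower()
        for hname, value in headers:
            if hname.lower() == name:
                return value
        return default

    def remove_headers(headers, name):
        name = name.lower()
        i = 0
        for j in range(len(headers)):
            if headers[j][0].lower() != name:
                if i != j:
                    headers[i] = headers[j]
                i += 1
        del headers[i:]
        return headers

    hdrs = [('Host', 'example.com'), ('X-Trace', 'a'), ('x-trace', 'b')]
    print(get_header(hdrs, 'HOST'))         # 'example.com'
    print(remove_headers(hdrs, 'X-Trace'))  # [('Host', 'example.com')]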
chunk = [] chunk.append(s2b('{:X}\r\n'.format(len(buf)))) chunk.append(buf) chunk.append(b'\r\n') return b''.join(chunk)
def create_chunk(buf)
Create a chunk for the HTTP "chunked" transfer encoding.
2.816707
2.487188
1.132487
chunk = [] chunk.append('0\r\n') if trailers: for name, value in trailers: chunk.append(name) chunk.append(': ') chunk.append(value) chunk.append('\r\n') chunk.append('\r\n') return s2b(''.join(chunk))
def create_chunked_body_end(trailers=None)
Create the ending that terminates a chunked body.
2.225255
2.162971
1.028796
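Together, ``create_chunk`` and ``create_chunked_body_end`` produce the HTTP/1.1 chunked framing: a hex length line, the data, a CRLF, and a final ``0`` chunk optionally followed by trailers. A standalone sketch of that framing (assumes the payload is already bytes and the trailers are ASCII)::

    # Standalone sketch of the chunked framing produced by the helpers above.
    def chunk(buf):
        return '{:X}\r\n'.format(len(buf)).encode('ascii') + buf + b'\r\n'

    def chunked_end(trailers=None):
        end = [b'0\r\n']
        for name, value in (trailers or []):
            end.append(name.encode('ascii') + b': ' + value.encode('ascii') + b'\r\n')
        end.append(b'\r\n')
        return b''.join(end)

    body = chunk(b'hello, ') + chunk(b'world') + chunked_end([('X-Checksum', 'abc')])
    print(body)
    # b'7\r\nhello, \r\n5\r\nworld\r\n0\r\nX-Checksum: abc\r\n\r\n'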
# According to my measurements using b''.join is faster than constructing a # bytearray. message = [] message.append('{} {} HTTP/{}\r\n'.format(method, url, version)) for name, value in headers: message.append(name) message.append(': ') message.append(value) message.append('\r\n') message.append('\r\n') return s2b(''.join(message))
def create_request(version, method, url, headers)
Create a HTTP request header.
4.322793
4.211825
1.026347
message = [] message.append('HTTP/{} {}\r\n'.format(version, status)) for name, value in headers: message.append(name) message.append(': ') message.append(value) message.append('\r\n') message.append('\r\n') return s2b(''.join(message))
def create_response(version, status, headers)
Create a HTTP response header.
2.306739
2.330462
0.98982
port = self.port if port: port = int(port) else: port = default_ports.get(self.scheme or 'http') return (self.host, port)
def addr(self)
Address tuple that can be used with :func:`~gruvi.create_connection`.
4.323232
4.136446
1.045156
target = self.path or '/' if self.query: target = '{}?{}'.format(target, self.query) return target
def target(self)
The "target" i.e. local part of the URL, consisting of the path and query.
5.297818
3.242431
1.633903
self._headers = headers or [] agent = host = clen = trailer = None # Check the headers provided, and capture some information about the # request from them. for name, value in self._headers: lname = name.lower() # Only HTTP applications are allowed to set "hop-by-hop" headers. if lname in hop_by_hop: raise ValueError('header {} is hop-by-hop'.format(name)) elif lname == 'user-agent': agent = value elif lname == 'host': host = value elif lname == 'content-length': clen = int(value) elif lname == 'trailer': trailer = parse_trailer(value) elif lname == 'content-type' and value.startswith('text/'): ctype, params = parse_content_type(value) self._charset = params.get('charset') version = self._protocol._version # The Host header is mandatory in 1.1. Add it if it's missing. if host is None and version == '1.1' and self._protocol._server_name: self._headers.append(('Host', self._protocol._server_name)) # Identify ourselves. if agent is None: self._headers.append(('User-Agent', self._protocol.identifier)) # Check if we need to use chunked encoding due to unknown body size. if clen is None and bodylen is None: if version == '1.0': raise HttpError('body size unknown for HTTP/1.0') self._chunked = True self._content_length = clen # Check if trailers are requested and if so need to switch to chunked. if trailer: if version == '1.0': raise HttpError('cannot support trailers for HTTP/1.0') if clen is not None: remove_headers(self._headers, 'Content-Length') self._chunked = True self._trailer = trailer # Add Content-Length if we know the body size and are not using chunked. if not self._chunked and clen is None and bodylen >= 0: self._headers.append(('Content-Length', str(bodylen))) self._content_length = bodylen # Complete the "Hop by hop" headers. if version == '1.0': self._headers.append(('Connection', 'keep-alive')) elif version == '1.1': self._headers.append(('Connection', 'te')) self._headers.append(('TE', 'trailers')) if self._chunked: self._headers.append(('Transfer-Encoding', 'chunked')) # Start the request self._protocol._requests.append(method) header = create_request(version, method, url, self._headers) self._protocol.writer.write(header)
def start_request(self, method, url, headers=None, bodylen=None)
Start a new HTTP request. The optional *headers* argument contains the headers to send. It must be a sequence of ``(name, value)`` tuples. The optional *bodylen* parameter is a hint that specifies the length of the body that will follow. A length of -1 indicates no body, 0 means an empty body, and a positive number indicates the body size in bytes. This parameter helps determine whether to use the chunked transfer encoding. Normally when the body size is known chunked encoding is not used.
3.06569
3.013492
1.017321
if not isinstance(buf, six.binary_type): raise TypeError('buf: must be a bytes instance') # Be careful not to write zero-length chunks as they indicate the end of a body. if len(buf) == 0: return if self._content_length and self._bytes_written > self._content_length: raise RuntimeError('wrote too many bytes ({} > {})' .format(self._bytes_written, self._content_length)) self._bytes_written += len(buf) if self._chunked: buf = create_chunk(buf) self._protocol.writer.write(buf)
def write(self, buf)
Write *buf* to the request body.
3.871878
3.744401
1.034045
if not self._chunked: return trailers = [(n, get_header(self._headers, n)) for n in self._trailer] \ if self._trailer else None ending = create_chunked_body_end(trailers) self._protocol.writer.write(ending)
def end_request(self)
End the request body.
6.818199
6.289249
1.084104
if self._error: raise compat.saved_exc(self._error) elif self._transport is None: raise HttpError('not connected') request = HttpRequest(self) bodylen = -1 if body is None else \ len(body) if isinstance(body, bytes) else None request.start_request(method, url, headers, bodylen) if isinstance(body, bytes): request.write(body) elif hasattr(body, 'read'): while True: chunk = body.read(4096) if not chunk: break request.write(chunk) elif hasattr(body, '__iter__'): for chunk in body: request.write(chunk) request.end_request()
def request(self, method, url, headers=None, body=None)
Make a new HTTP request. The *method* argument is the HTTP method as a string, for example ``'GET'`` or ``'POST'``. The *url* argument specifies the URL. The optional *headers* argument specifies extra HTTP headers to use in the request. It must be a sequence of ``(name, value)`` tuples. The optional *body* argument may be used to include a body in the request. It must be a ``bytes`` instance, a file-like object opened in binary mode, or an iterable producing ``bytes`` instances. To send potentially large bodies, use the file or iterator interfaces. This has the benefit that only a single chunk is kept in memory at a time. The response to the request can be obtained by calling the :meth:`getresponse` method. You may make multiple requests before reading a response. For every request that you make however, you must call :meth:`getresponse` exactly once. The remote HTTP implementation will send back the responses in the same order as the requests. This method will use the "chunked" transfer encoding if there is a body and the body size is unknown ahead of time. This happens when the file or iterator interface is used in the absence of a "Content-Length" header.
2.977873
2.972633
1.001763
if self._error: raise compat.saved_exc(self._error) elif self._transport is None: raise HttpError('not connected') message = self._queue.get(timeout=self._timeout) if isinstance(message, Exception): raise compat.saved_exc(message) return message
def getresponse(self)
Wait for and return a HTTP response. The return value will be a :class:`HttpMessage`. When this method returns only the response header has been read. The response body can be read using :meth:`~gruvi.Stream.read` and similar methods on the message :attr:`~HttpMessage.body`. Note that if you use persistent connections (the default), it is required that you read the entire body of each response. If you don't then deadlocks may occur.
4.742342
4.868966
0.973994
s = sha1() with open(filepath, "rb") as f: buf = f.read(blocksize) s.update(buf) return s.hexdigest()
def unique_hash(filepath: str, blocksize: int=80)->str
Small function to generate a hash to uniquely identify a file. Default blocksize is `80`
2.880836
2.435941
1.182638
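A small illustrative call for unique_hash(); the file path is hypothetical. Note that, as written, only the first blocksize bytes (80 by default) feed the SHA-1 digest::

    fingerprint = unique_hash('/music/track.mp3')             # hash of the first 80 bytes
    short_fp = unique_hash('/music/track.mp3', blocksize=16)  # hash of the first 16 bytes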
audiofile = AudioSegment.from_file(filename)
if limit:
    audiofile = audiofile[:limit * 1000]
data = np.fromstring(audiofile._data, np.int16)
channels = []
for chn in range(audiofile.channels):
    channels.append(data[chn::audiofile.channels])
fs = audiofile.frame_rate
return channels, fs
def read(filename: str, limit: Optional[int]=None) -> Tuple[list, int]
Reads any file supported by pydub (ffmpeg) and returns the data contained within. Returns: (channels, samplerate)
2.426962
2.304486
1.053147
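A hedged example of read() as documented above; the file name is hypothetical and decoding relies on pydub/ffmpeg being available::

    # Decode the first 10 seconds into one int16 array per channel.
    channels, fs = read('/music/track.mp3', limit=10)
    left = channels[0]     # samples of the first channel
    print(fs)              # sample rate reported by pydub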
return os.path.splitext(os.path.basename(path))[0]
def path_to_songname(path: str)->str
Extracts song name from a filepath. Used to identify which songs have already been fingerprinted on disk.
3.600737
3.859793
0.932883
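For illustration, a call with a hypothetical path::

    path_to_songname('/music/track.mp3')   # -> 'track'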
server = Server(protocol_factory)
server.listen(address, ssl=ssl, family=family, flags=flags, backlog=backlog)
return server
def create_server(protocol_factory, address=None, ssl=False, family=0, flags=0, ipc=False, backlog=128)
Create a new network server. This creates one or more :class:`pyuv.Handle` instances bound to *address*, puts them in listen mode and starts accepting new connections. For each accepted connection, a new transport is created which is connected to a new protocol instance obtained by calling *protocol_factory*. The *address* argument may either be a string, a ``(host, port)`` tuple, or a ``pyuv.Stream`` handle: * If the address is a string, this method creates a new :class:`pyuv.Pipe` instance and binds it to *address*. * If the address is a tuple, this method creates one or more :class:`pyuv.TCP` handles. The first element of the tuple specifies the IP address or DNS name, and the second element specifies the port number or service name. A transport is created for each resolved address. * If the address is a ``pyuv.Stream`` handle, it must already be bound to an address. The *ssl* parameter indicates whether SSL should be used for accepted connections. See :func:`create_connection` for a description. The *family* and *flags* keyword arguments are used to customize address resolution for TCP handles as described in :func:`socket.getaddrinfo`. The *ipc* parameter indicates whether this server will accept new connections via file descriptor passing. This works for `pyuv.Pipe` handles only, and the user is required to call :meth:`Server.accept_connection` whenever a new connection is pending. The *backlog* parameter specifies the listen backlog, i.e. the maximum number of not-yet-accepted active opens to queue. To disable listening for new connections (useful when *ipc* was set), set the backlog to ``None``. The return value is a :class:`Server` instance.
2.760819
3.953817
0.698267
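A sketch of the two most common address forms accepted by create_server(); MyProtocol, the port and the pipe path are hypothetical::

    # (host, port) tuple: resolve the address and listen on one or more pyuv.TCP handles
    tcp_server = create_server(MyProtocol, ('127.0.0.1', 8080))

    # string address: create a pyuv.Pipe (named pipe / UNIX domain socket) and bind to it
    ipc_server = create_server(MyProtocol, '/tmp/demo.sock')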
if self._transport:
    raise RuntimeError('already connected')
kwargs.setdefault('timeout', self._timeout)
conn = create_connection(self._protocol_factory, address, **kwargs)
self._transport = conn[0]
self._transport._log = self._log
self._protocol = conn[1]
self._protocol._log = self._log
def connect(self, address, **kwargs)
Connect to *address* and wait for the connection to be established. See :func:`~gruvi.create_connection` for a description of *address* and the supported keyword arguments.
3.115382
2.93069
1.06302
if self._transport is None:
    return
self._transport.close()
self._transport._closed.wait()
self._transport = None
self._protocol = None
def close(self)
Close the connection.
4.363168
3.664776
1.190569
if ssl:
    context = ssl if hasattr(ssl, 'set_ciphers') \
        else ssl(client) if callable(ssl) \
        else create_default_context(True)
    transport = SslTransport(client, context, True)
else:
    transport = Transport(client)
transport._log = self._log
transport._server = self
if DEBUG:
    self._log.debug('new connection on {}', saddr(client.getsockname()))
    if hasattr(client, 'getpeername'):
        self._log.debug('remote peer is {}', saddr(client.getpeername()))
protocol = self._protocol_factory()
protocol._log = self._log
protocol._timeout = self._timeout
self._connections[transport] = protocol
self.connection_made(transport, protocol)
transport.start(protocol)
def handle_connection(self, client, ssl)
Handle a new connection with handle *client*. This method exists so that it can be overridden in a subclass. It is not intended to be called directly.
3.752281
3.775993
0.99372
handles = []
handle_args = ()
if isinstance(address, six.string_types):
    handle_type = pyuv.Pipe
    handle_args = (ipc,)
    addresses = [address]
elif isinstance(address, tuple):
    handle_type = pyuv.TCP
    result = getaddrinfo(address[0], address[1], family, socket.SOCK_STREAM,
                         socket.IPPROTO_TCP, flags)
    addresses = [res[4] for res in result]
elif isinstance(address, pyuv.Stream):
    handles.append(address)
    addresses = []
else:
    raise TypeError('expecting a string, tuple or pyuv.Stream')
for addr in addresses:
    handle = handle_type(self._hub.loop, *handle_args)
    try:
        if compat.pyuv_pipe_helper(handle, handle_args, 'bind', addr):
            handles.append(handle)
            break
        handle.bind(addr)
    except pyuv.error.UVError as e:
        self._log.warning('bind error {!r}, skipping {}', e[0], saddr(addr))
        continue
    handles.append(handle)
addresses = []
for handle in handles:
    if backlog is not None:
        callback = functools.partial(self._on_new_connection, ssl=ssl)
        handle.listen(callback, backlog)
    addr = handle.getsockname()
    self._log.debug('listen on {}', saddr(addr))
    addresses.append(addr)
self._handles += handles
self._addresses += addresses
def listen(self, address, ssl=False, family=0, flags=0, ipc=False, backlog=128)
Create a new transport, bind it to *address*, and start listening for new connections. See :func:`create_server` for a description of *address* and the supported keyword arguments.
3.40767
3.487212
0.97719
for handle in self._handles:
    if not handle.closed:
        handle.close()
del self._handles[:]
for transport, _ in self.connections:
    transport.close()
self._all_closed.wait()
def close(self)
Close the listening sockets and all accepted connections.
4.988868
4.990569
0.999659
global DEBUG, debug
# The argd parameter for main() is for testing purposes only.
argd = argd or docopt(
    USAGESTR,
    version=VERSIONSTR,
    script=SCRIPT,
    # Example usage of colr_docopt colors.
    colors={
        'header': {'fore': 'yellow'},
        'script': {'fore': 'lightblue', 'style': 'bright'},
        'version': {'fore': 'lightblue'},
    }
)
DEBUG = argd['--debug']
# Load real debug function if available.
if DEBUG:
    load_debug_deps()
else:
    debug = noop
if argd['--auto-disable']:
    auto_disable()

if argd['--names']:
    return list_names()
elif argd['--translate']:
    # Just translate a simple code and exit.
    try:
        print('\n'.join(
            translate(
                argd['CODE'] or read_stdin().split(),
                rgb_mode=argd['--truecolor'],
            )
        ))
    except ValueError as ex:
        print_err('Translation error: {}'.format(ex))
        return 1
    return 0
elif argd['--listcodes']:
    # List all escape codes found in some text and exit.
    return list_known_codes(
        argd['TEXT'] or read_stdin(),
        unique=argd['--unique'],
        rgb_mode=argd['--truecolor'],
    )

txt = argd['TEXT'] or read_stdin()
fd = sys.stderr if argd['--err'] else sys.stdout
end = '' if argd['--newline'] else '\n'
if argd['--stripcodes']:
    txt = justify(strip_codes(txt), argd)
    print(txt, file=fd, end=end)
    return 0

clr = get_colr(txt, argd)
# Center, ljust, rjust, or not.
clr = justify(clr, argd)
if clr:
    print(str(clr), file=fd, end=end)
    return 0
# Error while building Colr.
return 1
def main(argd=None)
Main entry point; expects a docopt arg dict as argd.
5.195676
5.091924
1.020376