idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
54,700
def get(self, word, default=nil):
    """Retrieve the value stored for *word*.

    Returns *default* when given and the word is absent; otherwise a
    missing word raises KeyError.
    """
    node = self.__get_node(word)
    result = node.output if node else nil
    if result is not nil:
        return result
    if default is nil:
        raise KeyError("no key '%s'" % word)
    return default
Retrieves output value associated with word .
54,701
def items(self):
    """Return an iterator over all (word, value) pairs stored in the trie."""
    collected = []

    def walk(node, prefix):
        prefix = prefix + node.char
        if node.output is not nil:
            collected.append((prefix, node.output))
        for child in node.children.values():
            # Guard against self-links introduced by make_automaton.
            if child is not node:
                walk(child, prefix)

    walk(self.root, '')
    return iter(collected)
Generator returning all keys and values stored in a trie .
54,702
def add_word(self, word, value):
    """Insert *word* into the trie and associate *value* with it."""
    if not word:
        return
    current = self.root
    for char in word:
        nxt = current.children.get(char)
        if nxt is None:
            nxt = TrieNode(char)
            current.children[char] = nxt
        current = nxt
    current.output = value
Adds word and associated value .
54,703
def exists(self, word):
    """Return True when *word* is present in the trie as a whole word."""
    node = self.__get_node(word)
    if not node:
        return False
    return bool(node.output != nil)
Checks if whole word is present in the trie .
54,704
def make_automaton(self):
    """Convert the trie to an Aho-Corasick automaton by filling in fail links."""
    queue = deque()
    # Depth-1 nodes fail straight back to the root; characters with no
    # depth-1 node get a self-link on the root so lookups never miss.
    # NOTE(review): iterates chr(0)..chr(255) only -- assumes a byte
    # alphabet; confirm for wider character sets.
    for i in range(256):
        c = chr(i)
        if c in self.root.children:
            node = self.root.children[c]
            node.fail = self.root
            queue.append(node)
        else:
            self.root.children[c] = self.root
    # Breadth-first walk: each node's fail link is found by following
    # the parent's fail chain until a state with a matching child exists.
    while queue:
        r = queue.popleft()
        for node in r.children.values():
            queue.append(node)
            state = r.fail
            while node.char not in state.children:
                state = state.fail
            node.fail = state.children.get(node.char, self.root)
Converts trie to Aho - Corasick automaton .
54,705
def iter_long(self, string):
    """Generator: modified Aho-Corasick search yielding only the longest
    match at each position, as (output, end_index) tuples."""
    state = self.root
    last = None  # longest (output, end index) found on the current run
    index = 0
    while index < len(string):
        c = string[index]
        if c in state.children:
            state = state.children[c]
            if state.output is not nil:
                # Remember the match but keep extending it.
                last = (state.output, index)
            index += 1
        else:
            if last:
                # Dead end with a pending match: emit it and restart
                # scanning just after its end.
                yield last
                index = last[1] + 1
                state = self.root
                last = None
            else:
                # No pending match: follow fail links until c fits.
                while c not in state.children:
                    state = state.fail
    # Flush a match that ran to the very end of the string.
    if last:
        yield last
Generator that performs a modified Aho-Corasick string-search algorithm which matches only the longest word .
54,706
def find_all(self, string, callback):
    """Invoke ``callback(index, output)`` for every match found in *string*."""
    for position, value in self.iter(string):
        callback(position, value)
Wrapper around the iter method ; the callback receives each iterator result
54,707
def get_long_description():
    """Return README.rst with the ``.. contents::`` directive stripped out."""
    import codecs
    with codecs.open('README.rst', encoding='UTF-8') as handle:
        kept = [line for line in handle
                if not line.startswith('.. contents::')]
    return ''.join(kept)
Strip the content index from the long description .
54,708
def _add_play_button(self, image_url, image_path):
    """Try to paste a play-button overlay onto a YouTube screenshot.

    Downloads the screenshot and the button image to temp files,
    composites them with PIL and writes the result to *image_path*.
    Failures are printed, never raised.
    """
    try:
        from PIL import Image
        from tempfile import NamedTemporaryFile
        import urllib
        # Python 3 keeps urlretrieve in urllib.request; fall back for
        # Python 2 where it lives on urllib itself.
        try:
            urlretrieve = urllib.request.urlretrieve
        except ImportError:
            urlretrieve = urllib.urlretrieve
        with NamedTemporaryFile(suffix=".jpg") as screenshot_img:
            with NamedTemporaryFile(suffix=".jpg") as button_img:
                urlretrieve(image_url, screenshot_img.name)
                # NOTE(review): play_button_url is not defined here --
                # presumably a module-level constant; confirm.
                urlretrieve(play_button_url, button_img.name)
                with Image.open(screenshot_img.name) as background:
                    with Image.open(button_img.name) as foreground:
                        # Third argument uses the button as its own mask.
                        background.paste(foreground, (90, 65), foreground)
                        background.save(image_path)
    except ImportError as e:
        print(e)
    except Exception as e:
        print('Unable to add play button to YouTube '
              'screenshot (%s). Using the screenshot '
              'on its own instead.' % e)
Try to add a play button to the screenshot .
54,709
def process(self):
    """Handle the actual processing of Modules and Transforms.

    Runs every module (by ascending priority) against self.data and
    applies the returned line-level transforms.
    """
    self.modules.sort(key=lambda x: x.priority)
    for module in self.modules:
        transforms = module.transform(self.data)
        # Apply from the bottom up so earlier line numbers remain
        # valid while later lines are inserted or removed.
        transforms.sort(key=lambda x: x.linenum, reverse=True)
        for transform in transforms:
            linenum = transform.linenum
            # Normalise a bare string into a one-element list.
            # NOTE(review): basestring is Python 2 only -- confirm the
            # project still targets Python 2 or aliases the name.
            if isinstance(transform.data, basestring):
                transform.data = [transform.data]
            if transform.oper == "prepend":
                self.data[linenum:linenum] = transform.data
            elif transform.oper == "append":
                self.data[linenum + 1:linenum + 1] = transform.data
            elif transform.oper == "swap":
                self.data[linenum:linenum + 1] = transform.data
            elif transform.oper == "drop":
                self.data[linenum:linenum + 1] = []
            elif transform.oper == "noop":
                pass
This method handles the actual processing of Modules and Transforms
54,710
def _irregular(singular, plural):
    """Add pluralize/singularize rules for an irregular singular/plural pair.

    New rules are inserted at index 0 so they take precedence over the
    generic rules already in PLURALS and SINGULARS.
    """
    def caseinsensitive(string):
        # Per-character case-insensitive pattern, e.g. "ox" -> "[oO][xX]".
        return ''.join('[' + char + char.upper() + ']' for char in string)

    if singular[0].upper() == plural[0].upper():
        # Same first letter: capture it and preserve the input's case
        # through the \1 backreference.
        PLURALS.insert(0, (
            r"(?i)({}){}$".format(singular[0], singular[1:]),
            r'\1' + plural[1:]))
        PLURALS.insert(0, (
            r"(?i)({}){}$".format(plural[0], plural[1:]),
            r'\1' + plural[1:]))
        SINGULARS.insert(0, (
            r"(?i)({}){}$".format(plural[0], plural[1:]),
            r'\1' + singular[1:]))
    else:
        # Different first letters: emit explicit upper- and lower-case
        # variants for each direction.
        PLURALS.insert(0, (
            r"{}{}$".format(singular[0].upper(),
                            caseinsensitive(singular[1:])),
            plural[0].upper() + plural[1:]))
        PLURALS.insert(0, (
            r"{}{}$".format(singular[0].lower(),
                            caseinsensitive(singular[1:])),
            plural[0].lower() + plural[1:]))
        PLURALS.insert(0, (
            r"{}{}$".format(plural[0].upper(), caseinsensitive(plural[1:])),
            plural[0].upper() + plural[1:]))
        PLURALS.insert(0, (
            r"{}{}$".format(plural[0].lower(), caseinsensitive(plural[1:])),
            plural[0].lower() + plural[1:]))
        SINGULARS.insert(0, (
            r"{}{}$".format(plural[0].upper(), caseinsensitive(plural[1:])),
            singular[0].upper() + singular[1:]))
        SINGULARS.insert(0, (
            r"{}{}$".format(plural[0].lower(), caseinsensitive(plural[1:])),
            singular[0].lower() + singular[1:]))
A convenience function to add appropriate rules to plurals and singular for irregular words .
54,711
def camelize(string, uppercase_first_letter=True):
    """Convert an underscored string to CamelCase.

    With uppercase_first_letter=False the first letter is lowered,
    giving mixedCase.
    """
    if not uppercase_first_letter:
        return string[0].lower() + camelize(string)[1:]
    return re.sub(r"(?:^|_)(.)", lambda m: m.group(1).upper(), string)
Convert strings to CamelCase .
54,712
def parameterize(string, separator='-'):
    """Replace special characters so the string can be used in a pretty URL."""
    string = transliterate(string)
    # Collapse anything that is not alphanumeric, '-' or '_'.
    string = re.sub(r"(?i)[^a-z0-9\-_]+", separator, string)
    if separator:
        sep_pattern = re.escape(separator)
        # Squash separator runs, then trim separators from both ends.
        string = re.sub(r'%s{2,}' % sep_pattern, separator, string)
        string = re.sub(r"(?i)^{sep}|{sep}$".format(sep=sep_pattern),
                        '', string)
    return string.lower()
Replace special characters in a string so that it may be used as part of a pretty URL .
54,713
def pluralize(word):
    """Return the plural form of *word*; uncountables pass through unchanged."""
    if not word or word.lower() in UNCOUNTABLES:
        return word
    matched = next(
        ((rule, repl) for rule, repl in PLURALS if re.search(rule, word)),
        None)
    if matched is None:
        return word
    rule, repl = matched
    return re.sub(rule, repl, word)
Return the plural form of a word .
54,714
def underscore(word):
    """Make an underscored, lowercase form from a CamelCase expression."""
    for pattern in (r"([A-Z]+)([A-Z][a-z])", r"([a-z\d])([A-Z])"):
        word = re.sub(pattern, r'\1_\2', word)
    return word.replace("-", "_").lower()
Make an underscored lowercase form from the expression in the string .
54,715
def print_all(msg):
    """Log *msg*, then print all live libvips objects (leak debugging).

    Runs a GC pass first so only genuinely live objects are reported.
    """
    gc.collect()
    logger.debug(msg)
    vips_lib.vips_object_print_all()
    # Bug fix: logging.Logger.debug() requires a message argument --
    # the original bare logger.debug() call raised TypeError.
    logger.debug('print_all done')
Print all objects .
54,716
def get_typeof(self, name):
    """Get the GType of a GObject property; 0 if the property is unknown."""
    pspec = self._get_pspec(name)
    if pspec is None:
        # NOTE(review): Error is constructed but not raised -- it looks
        # like the constructor is used for a side effect (e.g. clearing
        # the libvips error buffer); confirm against the Error class.
        Error('')
        return 0
    return pspec.value_type
Get the GType of a GObject property .
54,717
def get_blurb(self, name):
    """Return the blurb (short documentation) for a GObject property."""
    pspec = self._get_pspec(name)
    blurb_ptr = gobject_lib.g_param_spec_get_blurb(pspec)
    return _to_string(blurb_ptr)
Get the blurb for a GObject property .
54,718
def get(self, name):
    """Get a GObject property value.

    Raises Error when the property does not exist.
    """
    logger.debug('VipsObject.get: name = %s', name)
    pspec = self._get_pspec(name)
    if pspec is None:
        raise Error('Property not found.')
    gtype = pspec.value_type
    # Read the value through a temporary GValue of the property's type.
    gv = pyvips.GValue()
    gv.set_type(gtype)
    go = ffi.cast('GObject *', self.pointer)
    gobject_lib.g_object_get_property(go, _to_bytes(name), gv.pointer)
    return gv.get()
Get a GObject property .
54,719
def set(self, name, value):
    """Set a GObject property from a Python value."""
    logger.debug('VipsObject.set: name = %s, value = %s', name, value)
    gtype = self.get_typeof(name)
    # Box the Python value into a GValue of the property's type.
    boxed = pyvips.GValue()
    boxed.set_type(gtype)
    boxed.set(value)
    gobject = ffi.cast('GObject *', self.pointer)
    gobject_lib.g_object_set_property(gobject, _to_bytes(name),
                                      boxed.pointer)
Set a GObject property .
54,720
def set_string(self, string_options):
    """Set a series of properties from an options string.

    Returns True on success.
    """
    vo = ffi.cast('VipsObject *', self.pointer)
    status = vips_lib.vips_object_set_from_string(
        vo, _to_bytes(string_options))
    return status == 0
Set a series of properties using a string .
54,721
def get_description(self):
    """Return the libvips description string of this object."""
    vo = ffi.cast('VipsObject *', self.pointer)
    description_ptr = vips_lib.vips_object_get_description(vo)
    return _to_string(description_ptr)
Get the description of a GObject .
54,722
def generate_sphinx_all():
    """Generate sphinx documentation for every libvips operator.

    Prints an autosummary table, then the generated docstring of each
    operator, indented for inclusion in the pyvips docs.
    """
    all_nicknames = []

    def add_nickname(gtype, a, b):
        nickname = nickname_find(gtype)
        try:
            # Only keep operators whose docs can actually be generated.
            Operation.generate_sphinx(nickname)
            all_nicknames.append(nickname)
        except Error:
            pass
        # Recurse into subtypes of this operator type.
        type_map(gtype, add_nickname)
        return ffi.NULL

    type_map(type_from_name('VipsOperation'), add_nickname)
    all_nicknames.sort()
    # These operators are wrapped by hand elsewhere, so skip them here.
    exclude = ['scale', 'ifthenelse', 'bandjoin', 'bandrank']
    all_nicknames = [x for x in all_nicknames if x not in exclude]
    print('.. class:: pyvips.Image\n')
    print(' .. rubric:: Methods\n')
    print(' .. autosummary::')
    print(' :nosignatures:\n')
    for nickname in all_nicknames:
        print(' ~{0}'.format(nickname))
    print()
    print()
    for nickname in all_nicknames:
        docstr = Operation.generate_sphinx(nickname)
        # Re-indent the docstring body to match the class block.
        docstr = docstr.replace('\n', '\n ')
        print(' ' + docstr)
Generate sphinx documentation .
54,723
def new(image):
    """Make a region on an image."""
    region_ptr = vips_lib.vips_region_new(image.pointer)
    if region_ptr == ffi.NULL:
        raise Error('unable to make region')
    return pyvips.Region(region_ptr)
Make a region on an image .
54,724
def fetch(self, x, y, w, h):
    """Fill the region with pixel data and return it as a buffer.

    Requires libvips >= 8.8.  The C memory is released automatically
    when the returned buffer is garbage-collected.
    """
    if not at_least_libvips(8, 8):
        raise Error('libvips too old')
    psize = ffi.new('size_t *')
    pointer = vips_lib.vips_region_fetch(self.pointer, x, y, w, h, psize)
    if pointer == ffi.NULL:
        raise Error('unable to fetch from region')
    # Attach g_free so the pixel memory follows the cffi object's life.
    pointer = ffi.gc(pointer, glib_lib.g_free)
    return ffi.buffer(pointer, psize[0])
Fill a region with pixel data .
54,725
def gtype_to_python(gtype):
    """Map a gtype to the name of the Python type we use to represent it."""
    fundamental = gobject_lib.g_type_fundamental(gtype)
    # Prefer an exact match, then fall back to the fundamental type.
    for candidate in (gtype, fundamental):
        if candidate in GValue._gtype_to_python:
            return GValue._gtype_to_python[candidate]
    return '<unknown type>'
Map a gtype to the name of the Python type we use to represent it .
54,726
def to_enum(gtype, value):
    """Turn a string into an enum value ready to be passed into libvips.

    Non-string values are returned unchanged.
    """
    if not isinstance(value, basestring if _is_PY2 else str):
        return value
    enum_value = vips_lib.vips_enum_from_nick(
        b'pyvips', gtype, _to_bytes(value))
    if enum_value < 0:
        raise Error('no value {0} in gtype {1} ({2})'.format(
            value, type_name(gtype), gtype))
    return enum_value
Turn a string into an enum value ready to be passed into libvips .
54,727
def from_enum(gtype, enum_value):
    """Turn an enum int back into its nickname string."""
    nick_ptr = vips_lib.vips_enum_nick(gtype, enum_value)
    if nick_ptr == ffi.NULL:
        raise Error('value not in enum')
    return _to_string(nick_ptr)
Turn an int back into an enum string .
54,728
def set(self, value):
    """Set a GValue from a Python value.

    Conversion is selected by the GValue's current type: scalars,
    strings, enums, flags, arrays, images and blobs are handled.
    """
    gtype = self.gvalue.g_type
    fundamental = gobject_lib.g_type_fundamental(gtype)
    if gtype == GValue.gbool_type:
        gobject_lib.g_value_set_boolean(self.gvalue, value)
    elif gtype == GValue.gint_type:
        gobject_lib.g_value_set_int(self.gvalue, int(value))
    elif gtype == GValue.guint64_type:
        gobject_lib.g_value_set_uint64(self.gvalue, value)
    elif gtype == GValue.gdouble_type:
        gobject_lib.g_value_set_double(self.gvalue, value)
    elif fundamental == GValue.genum_type:
        # Strings are translated to enum ints first.
        gobject_lib.g_value_set_enum(self.gvalue,
                                     GValue.to_enum(gtype, value))
    elif fundamental == GValue.gflags_type:
        gobject_lib.g_value_set_flags(self.gvalue, value)
    elif gtype == GValue.gstr_type:
        gobject_lib.g_value_set_string(self.gvalue, _to_bytes(value))
    elif gtype == GValue.refstr_type:
        vips_lib.vips_value_set_ref_string(self.gvalue, _to_bytes(value))
    elif fundamental == GValue.gobject_type:
        gobject_lib.g_value_set_object(self.gvalue, value.pointer)
    elif gtype == GValue.array_int_type:
        # Accept a bare number where an array is expected.
        if isinstance(value, numbers.Number):
            value = [value]
        array = ffi.new('int[]', value)
        vips_lib.vips_value_set_array_int(self.gvalue, array, len(value))
    elif gtype == GValue.array_double_type:
        if isinstance(value, numbers.Number):
            value = [value]
        array = ffi.new('double[]', value)
        vips_lib.vips_value_set_array_double(self.gvalue, array, len(value))
    elif gtype == GValue.array_image_type:
        if isinstance(value, pyvips.Image):
            value = [value]
        vips_lib.vips_value_set_array_image(self.gvalue, len(value))
        array = vips_lib.vips_value_get_array_image(self.gvalue, ffi.NULL)
        for i, image in enumerate(value):
            # The GValue holds a reference on every image it stores.
            gobject_lib.g_object_ref(image.pointer)
            array[i] = image.pointer
    elif gtype == GValue.blob_type:
        # Copy the bytes into glib-owned memory so libvips can free it.
        memory = glib_lib.g_malloc(len(value))
        ffi.memmove(memory, value, len(value))
        if at_least_libvips(8, 6):
            vips_lib.vips_value_set_blob_free(self.gvalue, memory,
                                              len(value))
        else:
            if pyvips.API_mode:
                vips_lib.vips_value_set_blob(self.gvalue, ffi.NULL,
                                             memory, len(value))
            else:
                vips_lib.vips_value_set_blob(self.gvalue, glib_lib.g_free,
                                             memory, len(value))
    else:
        raise Error('unsupported gtype for set {0}, fundamental {1}'.format(
            type_name(gtype), type_name(fundamental)))
Set a GValue .
54,729
def get(self):
    """Get the contents of a GValue as a Python value."""
    gtype = self.gvalue.g_type
    fundamental = gobject_lib.g_type_fundamental(gtype)
    result = None
    if gtype == GValue.gbool_type:
        result = bool(gobject_lib.g_value_get_boolean(self.gvalue))
    elif gtype == GValue.gint_type:
        result = gobject_lib.g_value_get_int(self.gvalue)
    elif gtype == GValue.guint64_type:
        result = gobject_lib.g_value_get_uint64(self.gvalue)
    elif gtype == GValue.gdouble_type:
        result = gobject_lib.g_value_get_double(self.gvalue)
    elif fundamental == GValue.genum_type:
        # Enums are returned as their string nickname.
        return GValue.from_enum(
            gtype, gobject_lib.g_value_get_enum(self.gvalue))
    elif fundamental == GValue.gflags_type:
        result = gobject_lib.g_value_get_flags(self.gvalue)
    elif gtype == GValue.gstr_type:
        pointer = gobject_lib.g_value_get_string(self.gvalue)
        if pointer != ffi.NULL:
            result = _to_string(pointer)
    elif gtype == GValue.refstr_type:
        psize = ffi.new('size_t *')
        pointer = vips_lib.vips_value_get_ref_string(self.gvalue, psize)
        result = _to_string(pointer)
    elif gtype == GValue.image_type:
        go = gobject_lib.g_value_get_object(self.gvalue)
        vi = ffi.cast('VipsImage *', go)
        # Take a reference for the new Python wrapper; the GValue drops
        # its own when it is unset.
        gobject_lib.g_object_ref(go)
        result = pyvips.Image(vi)
    elif gtype == GValue.array_int_type:
        pint = ffi.new('int *')
        array = vips_lib.vips_value_get_array_int(self.gvalue, pint)
        result = []
        for i in range(0, pint[0]):
            result.append(array[i])
    elif gtype == GValue.array_double_type:
        pint = ffi.new('int *')
        array = vips_lib.vips_value_get_array_double(self.gvalue, pint)
        result = []
        for i in range(0, pint[0]):
            result.append(array[i])
    elif gtype == GValue.array_image_type:
        pint = ffi.new('int *')
        array = vips_lib.vips_value_get_array_image(self.gvalue, pint)
        result = []
        for i in range(0, pint[0]):
            vi = array[i]
            # Reference each image for its Python wrapper.
            gobject_lib.g_object_ref(vi)
            image = pyvips.Image(vi)
            result.append(image)
    elif gtype == GValue.blob_type:
        psize = ffi.new('size_t *')
        array = vips_lib.vips_value_get_blob(self.gvalue, psize)
        buf = ffi.cast('char*', array)
        # Copy the bytes out of GValue-owned memory.
        result = ffi.unpack(buf, psize[0])
    else:
        raise Error('unsupported gtype for get {0}'.format(
            type_name(gtype)))
    return result
Get the contents of a GValue .
54,730
def to_polar(image):
    """Transform image coordinates to polar."""
    # Coordinate image centred on the input.
    coords = pyvips.Image.xyz(image.width, image.height)
    coords -= [image.width / 2.0, image.height / 2.0]
    scale = min(image.width, image.height) / float(image.width)
    coords *= 2.0 / scale
    index = coords.polar()
    index *= [1, image.height / 360.0]
    return image.mapim(index)
Transform image coordinates to polar .
54,731
def to_rectangular(image):
    """Transform image coordinates to rectangular."""
    coords = pyvips.Image.xyz(image.width, image.height)
    coords *= [1, 360.0 / image.height]
    index = coords.rect()
    scale = min(image.width, image.height) / float(image.width)
    index *= scale / 2.0
    index += [image.width / 2.0, image.height / 2.0]
    return image.mapim(index)
Transform image coordinates to rectangular .
54,732
def _to_string(x):
    """Convert a cffi char* to a unicode string ('NULL' for NULL)."""
    if x == ffi.NULL:
        return 'NULL'
    x = ffi.string(x)
    if isinstance(x, byte_type):
        x = x.decode('utf-8')
    return x
Convert to a unicode string .
54,733
def new(name):
    """Make a new interpolator by name."""
    pointer = vips_lib.vips_interpolate_new(_to_bytes(name))
    if pointer == ffi.NULL:
        raise Error('no such interpolator {0}'.format(name))
    return Interpolate(pointer)
Make a new interpolator by name .
54,734
def _run_cmplx(fn, image):
    """Run a complex function on a non-complex image.

    A float image with an even band count is viewed as complex, *fn*
    is applied, and the result is converted back to the original
    non-complex layout.
    """
    original_format = image.format
    if image.format != 'complex' and image.format != 'dpcomplex':
        if image.bands % 2 != 0:
            raise Error('not an even number of bands')
        if image.format != 'float' and image.format != 'double':
            image = image.cast('float')
        new_format = 'dpcomplex' if image.format == 'double' else 'complex'
        # Bug fix: use floor division -- plain / yields a float band
        # count under Python 3.
        image = image.copy(format=new_format, bands=image.bands // 2)
    image = fn(image)
    if original_format != 'complex' and original_format != 'dpcomplex':
        new_format = 'double' if image.format == 'dpcomplex' else 'float'
        image = image.copy(format=new_format, bands=image.bands * 2)
    return image
Run a complex function on a non - complex image .
54,735
def get_suffixes():
    """Get a list of all the filename suffixes supported by libvips.

    Requires libvips >= 8.8; returns an empty list on older versions.
    """
    names = []
    if at_least_libvips(8, 8):
        array = vips_lib.vips_foreign_get_suffixes()
        i = 0
        # NULL-terminated array of g_malloc'd strings: free each entry,
        # then the array itself.
        while array[i] != ffi.NULL:
            name = _to_string(array[i])
            if name not in names:
                names.append(name)
            glib_lib.g_free(array[i])
            i += 1
        glib_lib.g_free(array)
    return names
Get a list of all the filename suffixes supported by libvips .
54,736
def at_least_libvips(x, y):
    """Is this at least libvips x.y?"""
    major = version(0)
    minor = version(1)
    if major != x:
        return major > x
    return minor >= y
Is this at least libvips x . y?
54,737
def type_map(gtype, fn):
    """Map *fn* over all child types of *gtype*."""
    callback = ffi.callback('VipsTypeMap2Fn', fn)
    return vips_lib.vips_type_map(gtype, callback, ffi.NULL, ffi.NULL)
Map fn over all child types of gtype .
54,738
def basicConfig(**kwargs):
    """Call logging.basicConfig and override the formatter it creates."""
    logging.basicConfig(**kwargs)
    # Swap the root handler's formatter for a ColoredFormatter, inside
    # the logging module's lock so concurrent configuration is safe.
    logging._acquireLock()
    try:
        stream = logging.root.handlers[0]
        stream.setFormatter(ColoredFormatter(
            fmt=kwargs.get('format', BASIC_FORMAT),
            datefmt=kwargs.get('datefmt', None)))
    finally:
        logging._releaseLock()
Call logging . basicConfig and override the formatter it creates .
54,739
def ensure_configured(func):
    """Decorate *func* to call basicConfig first when no handlers exist."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not logging.root.handlers:
            basicConfig()
        return func(*args, **kwargs)
    return wrapper
Modify a function to call basicConfig first if no handlers exist .
54,740
def color(self, log_colors, level_name):
    """Only return colors if the attached stream is a TTY."""
    effective = log_colors if self.stream.isatty() else {}
    return ColoredFormatter.color(self, effective, level_name)
Only returns colors if STDOUT is a TTY .
54,741
def setup_logger():
    """Return a logger named 'example' with a default ColoredFormatter."""
    palette = {
        'DEBUG': 'cyan',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red',
    }
    formatter = ColoredFormatter(
        "%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors=palette,
    )
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger = logging.getLogger('example')
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger
Return a logger with a default ColoredFormatter .
54,742
def _extract_annotations_from_task ( self , task ) : annotations = list ( ) if 'annotations' in task : existing_annotations = task . pop ( 'annotations' ) for v in existing_annotations : if isinstance ( v , dict ) : annotations . append ( v [ 'description' ] ) else : annotations . append ( v ) for key in list ( task . keys ( ) ) : if key . startswith ( 'annotation_' ) : annotations . append ( task [ key ] ) del ( task [ key ] ) return annotations
Removes annotations from a task and returns a list of annotations
54,743
def task_done(self, **kw):
    """Mark a pending task as done.

    A completion date may be supplied via the ``end`` keyword.
    """
    def _check(task):
        if not Status.is_pending(task['status']):
            raise ValueError("Task is not pending.")

    return self._task_change_status(Status.COMPLETED, _check, **kw)
Marks a pending task as done , optionally specifying a completion date with the end argument .
54,744
def task_delete(self, **kw):
    """Mark a task as deleted.

    A completion date may be supplied via the ``end`` keyword.
    """
    def _check(task):
        if task['status'] == Status.DELETED:
            raise ValueError("Task is already deleted.")

    return self._task_change_status(Status.DELETED, _check, **kw)
Marks a task as deleted , optionally specifying a completion date with the end argument .
54,745
def _execute(self, *args):
    """Execute a taskwarrior command with arguments.

    Returns (stdout, stderr) decoded to text; raises TaskwarriorError
    on a non-zero exit status.
    """
    command = (
        ['task', 'rc:%s' % self.config_filename, ]
        + self.get_configuration_override_args()
        + [six.text_type(arg) for arg in args]
    )
    # Encode and scrub control characters before handing to Popen.
    for i in range(len(command)):
        if isinstance(command[i], six.text_type):
            command[i] = (
                taskw.utils.clean_ctrl_chars(command[i].encode('utf-8')))
    try:
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = proc.communicate()
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise OSError("Unable to find the 'task' command-line tool.")
        raise
    if proc.returncode != 0:
        raise TaskwarriorError(command, stderr, stdout, proc.returncode)
    # Decode with the configured encoding, falling back to kitchen's
    # permissive converter on failure.
    try:
        stdout = stdout.decode(self.config.get('encoding', 'utf-8'))
    except UnicodeDecodeError as e:
        stdout = kitchen.text.converters.to_unicode(stdout)
    try:
        stderr = stderr.decode(self.config.get('encoding', 'utf-8'))
    except UnicodeDecodeError as e:
        stderr = kitchen.text.converters.to_unicode(stderr)
    # Replace terminal bell/backspace/formfeed characters.
    # NOTE(review): the final '' entry looks like a control character
    # lost in transcoding; replacing '' would interleave '?' between
    # every character -- confirm against the original source.
    for c in ('\a', '\b', '\f', ''):
        stdout = stdout.replace(c, '?')
        stderr = stderr.replace(c, '?')
    return stdout, stderr
Execute a given taskwarrior command with arguments
54,746
def load_tasks(self, command='all'):
    """Return a dictionary of tasks keyed by the data files of *command*."""
    results = {}
    for db in Command.files(command):
        results[db] = self._get_task_objects('status:%s' % db, 'export')
    # Waiting tasks live alongside pending ones.
    if 'pending' in results:
        results['pending'].extend(
            self._get_task_objects('status:waiting', 'export'))
    return results
Returns a dictionary of tasks for a list of command .
54,747
def filter_tasks(self, filter_dict):
    """Return a filtered list of tasks from taskwarrior."""
    query = taskw.utils.encode_query(filter_dict, self.get_version())
    return self._get_task_objects('export', *query)
Return a filtered list of tasks from taskwarrior .
54,748
def task_annotate(self, task, annotation):
    """Annotate a task and return its refreshed state."""
    self._execute(task['uuid'], 'annotate', '--', annotation)
    _, refreshed = self.get_task(uuid=task[six.u('uuid')])
    return refreshed
Annotates a task .
54,749
def task_denotate(self, task, annotation):
    """Remove an annotation from a task and return its refreshed state."""
    self._execute(task['uuid'], 'denotate', '--', annotation)
    _, refreshed = self.get_task(uuid=task[six.u('uuid')])
    return refreshed
Removes an annotation from a task .
54,750
def task_delete(self, **kw):
    """Mark a task as deleted; raise ValueError if it already is."""
    task_id, task = self.get_task(**kw)
    if task['status'] == Status.DELETED:
        raise ValueError("Task is already deleted.")
    self._execute(task_id, 'delete')
    _, refreshed = self.get_task(uuid=task['uuid'])
    return refreshed
Marks a task as deleted .
54,751
def task_start(self, **kw):
    """Mark a task as started and return its refreshed state."""
    task_id, task = self.get_task(**kw)
    self._execute(task_id, 'start')
    _, refreshed = self.get_task(uuid=task['uuid'])
    return refreshed
Marks a task as started .
54,752
def task_stop(self, **kw):
    """Mark a task as stopped and return its refreshed state."""
    task_id, task = self.get_task(**kw)
    self._execute(task_id, 'stop')
    _, refreshed = self.get_task(uuid=task['uuid'])
    return refreshed
Marks a task as stopped .
54,753
def to_file(cls, status):
    """Return the data file in which a task with *status* is stored."""
    mapping = {
        Status.PENDING: DataFile.PENDING,
        Status.WAITING: DataFile.PENDING,
        Status.COMPLETED: DataFile.COMPLETED,
        Status.DELETED: DataFile.COMPLETED,
    }
    return mapping[status]
Returns the file in which this task is stored .
54,754
def from_stub(cls, data, udas=None):
    """Create a Task from an already-deserialized dict."""
    udas = udas or {}
    fields = cls.FIELDS.copy()
    fields.update(udas)
    processed = {k: cls._serialize(k, v, fields)
                 for k, v in six.iteritems(data)}
    return cls(processed, udas)
Create a Task from an already deserialized dict .
54,755
def from_input(cls, input_file=sys.stdin, modify=False, udas=None):
    """Create a Task from one line of JSON on *input_file*.

    With modify=True two lines are expected (the Taskwarrior hook
    protocol); the second, modified state is the one used.
    """
    line = input_file.readline().strip()
    if modify:
        line = input_file.readline().strip()
    return cls(json.loads(line), udas=udas)
Create a Task directly from stdin by reading one line . If modify = True two lines are expected which is consistent with the Taskwarrior hook system . The first line is interpreted as the original state of the Task and the second one as the new modified state .
54,756
def _deserialize(cls, key, value, fields):
    """Marshal incoming data into Python objects."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.deserialize(value)
Marshal incoming data into Python objects .
54,757
def _serialize(cls, key, value, fields):
    """Marshal outgoing data into Taskwarrior's JSON format."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.serialize(value)
Marshal outgoing data into Taskwarrior's JSON format .
54,758
def get_changes(self, serialized=False, keep=False):
    """Return a journal of changes as {key: [original, latest]}.

    serialized -- serialize the latest values to Taskwarrior format.
    keep -- keep the journal instead of clearing it after reading.
    """
    results = {}
    # Direct changes recorded on this object; the first 'from' value
    # is kept, the latest 'to' value wins.
    for k, f, t in self._changes:
        if k not in results:
            results[k] = [f, None]
        results[k][1] = (
            self._serialize(k, t, self._fields) if serialized else t)
    # Recurse into nested dirtyable values.
    for k, v in six.iteritems(self):
        if isinstance(v, Dirtyable):
            result = v.get_changes(keep=keep)
            if result:
                if not k in results:
                    results[k] = [result[0], None]
                results[k][1] = (
                    self._serialize(k, result[1], self._fields)
                    if serialized else result[1])
    if not keep:
        self._changes = []
    return results
Get a journal of changes that have occurred
54,759
def update(self, values, force=False):
    """Update this task dict, returning the per-key __setitem__ results."""
    return {k: self.__setitem__(k, v, force=force)
            for k, v in six.iteritems(values)}
Update this task dictionary
54,760
def set(self, key, value):
    """Set *key* unconditionally, even when no change would be detected."""
    return self.__setitem__(key, value, force=True)
Set a key's value regardless of whether a change is seen .
54,761
def serialized(self):
    """Return a serialized representation of this task."""
    return {k: self._serialize(k, v, self._fields)
            for k, v in six.iteritems(self)}
Returns a serialized representation of this task .
54,762
def encode_task_experimental(task):
    """Convert a dict-like task to the string form used by ``task add``."""
    task = task.copy()
    if 'tags' in task:
        task['tags'] = ','.join(task['tags'])
    encoded = {k: encode_task_value(k, v) for k, v in task.items()}
    return [
        "%s:\"%s\"" % (k, v) if v else "%s:" % (k,)
        for k, v in sorted(encoded.items(), key=itemgetter(0))
    ]
Convert a dict - like task to its string representation Used for adding a task via task add
54,763
def encode_task(task):
    """Convert a dict-like task to its taskwarrior string representation."""
    task = task.copy()
    if 'tags' in task:
        task['tags'] = ','.join(task['tags'])
    for k in task:
        # Escape characters taskwarrior treats specially.
        for unsafe, safe in six.iteritems(encode_replacements):
            if isinstance(task[k], six.string_types):
                task[k] = task[k].replace(unsafe, safe)
        if isinstance(task[k], datetime.datetime):
            # Bug fix: the original format string swapped minute and
            # hour ("%M%H"); taskwarrior expects YYYYMMDDTHHMMSSZ.
            task[k] = task[k].strftime("%Y%m%dT%H%M%SZ")
    return "[%s]\n" % " ".join([
        "%s:\"%s\"" % (k, v)
        for k, v in sorted(task.items(), key=itemgetter(0))
    ])
Convert a dict - like task to its string representation
54,764
def convert_dict_to_override_args(config, prefix=''):
    """Convert a (possibly nested) dict of overrides into CLI arguments."""
    args = []
    for k, v in six.iteritems(config):
        if isinstance(v, dict):
            # Recurse, extending the dotted prefix.
            nested_prefix = '.'.join([prefix, k]) if prefix else k
            args.extend(
                convert_dict_to_override_args(v, prefix=nested_prefix))
        else:
            v = six.text_type(v)
            left = 'rc' + (('.' + prefix) if prefix else '') + '.' + k
            right = v if ' ' not in v else '"%s"' % v
            args.append('='.join([left, right]))
    return args
Converts a dictionary of override arguments into CLI arguments .
54,765
def stats_per_chunk(chunk):
    """Tally the block types found in *chunk* into the block_counts mapping.

    NOTE(review): block_counts is not defined in this function -- it
    relies on a module-level global; confirm it is initialised before
    this is called.
    """
    for block_id in chunk.iter_block():
        try:
            block_counts[block_id] += 1
        except KeyError:
            block_counts[block_id] = 1
Given a chunk increment the block types with the number of blocks found
54,766
def bounded_stats_per_chunk(chunk, block_counts, start, stop):
    """Tally block types within the given selection into *block_counts*.

    start/stop are (x, y, z) corner coordinate triples, or None for an
    unbounded side.
    """
    chunk_z, chunk_x = chunk.get_coords()
    for z in range(16):
        world_z = z + chunk_z * 16
        if ((start != None and world_z < int(start[2])) or
                (stop != None and world_z > int(stop[2]))):
            # NOTE(review): 'break' abandons the remaining rows of this
            # chunk even though later z values might re-enter the box;
            # confirm whether 'continue' was intended.
            break
        for x in range(16):
            world_x = x + chunk_x * 16
            if ((start != None and world_x < int(start[0])) or
                    (stop != None and world_x > int(stop[0]))):
                break
            for y in range(chunk.get_max_height() + 1):
                if ((start != None and y < int(start[1])) or
                        (stop != None and y > int(stop[1]))):
                    break
                block_id = chunk.get_block(x, y, z)
                if block_id != None:
                    try:
                        block_counts[block_id] += 1
                    except KeyError:
                        block_counts[block_id] = 1
Given a chunk return the number of blocks types within the specified selection
54,767
def process_region_file(region, start, stop):
    """Count blocks of each ID in *region*, within the optional bounds.

    Regions and chunks lying wholly outside the start/stop selection
    are skipped entirely.
    """
    rx = region.loc.x
    rz = region.loc.z
    # Skip the whole region when it lies outside the selection.
    if (start != None):
        if ((rx + 1) * 512 - 1 < int(start[0]) or
                (rz + 1) * 512 - 1 < int(start[2])):
            return
    elif (stop != None):
        if (rx * 512 - 1 > int(stop[0]) or rz * 512 - 1 > int(stop[2])):
            return
    print("Parsing region %s..." % os.path.basename(region.filename))
    for c in region.iter_chunks_class():
        cx, cz = c.get_coords()
        # Skip chunks outside the selection.
        if (start != None):
            if ((cx + 1) * 16 + rx * 512 - 1 < int(start[0]) or
                    (cz + 1) * 16 + rz * 512 - 1 < int(start[2])):
                continue
        elif (stop != None):
            if (cx * 16 + rx * 512 - 1 > int(stop[0]) or
                    cz * 16 + rz * 512 - 1 > int(stop[2])):
                continue
        if (start == None and stop == None):
            stats_per_chunk(c)
        else:
            # Bug fix: bounded_stats_per_chunk takes block_counts as its
            # second argument; the original call omitted it, which would
            # raise TypeError.
            bounded_stats_per_chunk(c, block_counts, start, stop)
Given a region return the number of blocks of each ID in that region
54,768
def get_region(self, x, z):
    """Get a region by region coordinates (x, z), caching the result."""
    if (x, z) not in self.regions:
        filename = self.regionfiles.get((x, z))
        if filename is None:
            # Region not on disk yet: create an empty in-memory one.
            loaded = region.RegionFile()
        else:
            loaded = region.RegionFile(filename)
        loaded.loc = Location(x=x, z=z)
        self.regions[(x, z)] = loaded
    return self.regions[(x, z)]
Get a region using x z coordinates of a region . Cache results .
54,769
def iter_regions(self):
    """Yield every region file in this world, one at a time.

    Regions not already cached are opened temporarily and closed once the
    consumer is done with them; cached regions are yielded as-is and left
    open. Use this for a single pass without caching the results.
    """
    for coords in self.regionfiles.keys():
        x, z = coords
        cached = coords in self.regions
        if cached:
            regionfile = self.regions[coords]
        else:
            regionfile = region.RegionFile(self.regionfiles[coords],
                                           chunkclass=self.chunkclass)
            regionfile.loc = Location(x=x, z=z)
        try:
            yield regionfile
        finally:
            # Only close files we opened ourselves; cached ones stay open.
            if not cached:
                regionfile.close()
Return an iterable list of all region files . Use this function if you only want to loop through each region file once and do not want to cache the results .
54,770
def get_nbt(self, x, z):
    """Return the NBT data for the chunk at chunk coordinates (x, z).

    :raises InconceivedChunk: when the chunk has not been generated yet
    :returns: the chunk's NBT structure; use get_chunk() for a Chunk object
    """
    rx, cx = divmod(x, 32)  # region coordinate, chunk coordinate in region
    rz, cz = divmod(z, 32)
    if (rx, rz) not in self.regions and (rx, rz) not in self.regionfiles:
        raise InconceivedChunk("Chunk %s,%s is not present in world" % (x, z))
    nbt = self.get_region(rx, rz).get_nbt(cx, cz)
    assert nbt is not None
    return nbt
Return a NBT specified by the chunk coordinates x z . Raise InconceivedChunk if the NBT file is not yet generated . To get a Chunk object use get_chunk .
54,771
def get_chunk(self, x, z):
    """Return a Chunk object for chunk coordinates (x, z).

    Raises InconceivedChunk when the chunk is not yet generated; use
    get_nbt() if you want the raw NBT data instead.
    """
    nbt = self.get_nbt(x, z)
    return self.chunkclass(nbt)
Return a chunk specified by the chunk coordinates x z . Raise InconceivedChunk if the chunk is not yet generated . To get the raw NBT data use get_nbt .
54,772
def chunk_count(self):
    """Return the total number of chunks across all regions in this world."""
    return sum(r.chunk_count() for r in self.iter_regions())
Return a count of the chunks in this world folder .
54,773
def get_boundingbox(self):
    """Return a BoundingBox over the chunk x/z coordinates of every chunk
    present in this world save (y is left unbounded)."""
    box = BoundingBox()
    for rx, rz in self.regionfiles.keys():
        regionfile = self.get_region(rx, rz)
        # A region is 32x32 chunks; convert to absolute chunk coordinates.
        base_x, base_z = 32 * rx, 32 * rz
        for cc in regionfile.get_chunk_coords():
            box.expand(base_x + cc['x'], None, base_z + cc['z'])
    return box
Return minimum and maximum x and z coordinates of the chunks that make up this world save
54,774
def expand(self, x, y, z):
    """Grow the bounding box so it includes the point (x, y, z).

    Any coordinate passed as None leaves that axis unchanged; an axis that
    has never been expanded keeps its min/max as None.
    """
    if x is not None:
        if self.minx is None or x < self.minx:
            self.minx = x
        if self.maxx is None or x > self.maxx:
            self.maxx = x
    if y is not None:
        if self.miny is None or y < self.miny:
            self.miny = y
        if self.maxy is None or y > self.maxy:
            self.maxy = y
    if z is not None:
        if self.minz is None or z < self.minz:
            self.minz = z
        if self.maxz is None or z > self.maxz:
            self.maxz = z
Expands the bounding box to include the point ( x y z ) ; coordinates given as None are ignored
54,775
def unpack_nbt(tag):
    """Recursively convert an NBT tag tree into native Python data.

    TAG_List becomes a list, TAG_Compound a dict keyed by child tag name,
    and any other tag collapses to its plain .value.
    """
    if isinstance(tag, TAG_List):
        return [unpack_nbt(child) for child in tag.tags]
    if isinstance(tag, TAG_Compound):
        return {child.name: unpack_nbt(child) for child in tag.tags}
    return tag.value
Unpack an NBT tag into a native Python data structure .
54,776
def _init_file(self):
    """Write a blank two-sector header, erasing any previous file content."""
    header_length = 2 * SECTOR_LENGTH
    # Drop anything beyond the header before rewriting it with zeroes.
    if self.size > header_length:
        self.file.truncate(header_length)
    self.file.seek(0)
    self.file.write(b'\x00' * header_length)
    self.size = header_length
Initialise the file header . This will erase any data previously in the file .
54,777
def _sectors(self, ignore_chunk=None):
    """Return one entry per sector of the file describing its occupancy.

    Entries 0 and 1 (the header sectors) are True; every other entry is
    the list of chunk metadata records occupying that sector (an empty
    list means the sector is free). When ignore_chunk is given, sectors
    used by that chunk are reported as free.
    """
    sectorsize = self._bytes_to_sector(self.size)
    sectors = [[] for s in range(sectorsize)]
    # The two header sectors (locations + timestamps) are always in use.
    sectors[0] = True
    sectors[1] = True
    for m in self.metadata.values():
        if not m.is_created():
            continue
        if ignore_chunk == m:
            continue
        if m.blocklength and m.blockstart:
            # A chunk may need more sectors than it currently occupies.
            blockend = m.blockstart + max(m.blocklength, m.requiredblocks())
            # Clamp to just past the header and to the end of the file.
            for b in range(max(m.blockstart, 2), min(blockend, sectorsize)):
                sectors[b].append(m)
    return sectors
Return a list of all sectors each sector is a list of chunks occupying the block .
54,778
def _locate_free_sectors ( self , ignore_chunk = None ) : sectors = self . _sectors ( ignore_chunk = ignore_chunk ) return [ not i for i in sectors ]
Return a list of booleans indicating the free sectors .
54,779
def get_nbt(self, x, z):
    """Return an NBTFile for the chunk at (x, z) within this region.

    The chunk's absolute coordinates are recorded on the returned file's
    .loc attribute.

    :raises InconceivedChunk: when the chunk is not present (from
        get_blockdata)
    :raises ChunkDataError: when the chunk data cannot be parsed
    """
    data = BytesIO(self.get_blockdata(x, z))
    try:
        nbt = NBTFile(buffer=data)
    except MalformedFileError as e:
        # Re-raise as the region-level error type callers expect.
        raise ChunkDataError('%s' % e)
    # Translate chunk-local coordinates to absolute chunk coordinates
    # when this region knows its own location.
    if self.loc.x is not None:
        x += self.loc.x * 32
    if self.loc.z is not None:
        z += self.loc.z * 32
    nbt.loc = Location(x=x, z=z)
    return nbt
Return a NBTFile of the specified chunk . Raise InconceivedChunk if the chunk is not included in the file .
54,780
def write_chunk(self, x, z, nbt_file):
    """Serialize nbt_file and store it as the chunk at (x, z).

    The NBT data is packed to bytes here; write_blockdata() handles
    compression and sector allocation.
    """
    payload = BytesIO()
    nbt_file.write_file(buffer=payload)
    self.write_blockdata(x, z, payload.getvalue())
Pack the NBT file as binary data and write to file in a compressed format .
54,781
def unlink_chunk(self, x, z):
    """Remove the chunk at (x, z) from this region file.

    Zeroes the chunk's entries in the location and timestamp headers,
    truncates trailing free sectors from the file, wipes this chunk's
    freed sectors, and resets the in-memory metadata. Fragmentation is
    harmless: later writes reuse free sectors when possible.
    """
    if self.size < 2 * SECTOR_LENGTH:
        # No header present yet, so there is nothing to unlink.
        return
    # Zero the 4-byte location entry (3-byte sector offset + 1-byte count).
    self.file.seek(4 * (x + 32 * z))
    self.file.write(pack(">IB", 0, 0)[1:])
    # Zero the 4-byte timestamp entry in the second header sector.
    self.file.seek(SECTOR_LENGTH + 4 * (x + 32 * z))
    self.file.write(pack(">I", 0))
    current = self.metadata[x, z]
    # Free-sector map with this chunk's own sectors treated as free.
    free_sectors = self._locate_free_sectors(ignore_chunk=current)
    # Number of trailing free sectors that can simply be truncated away.
    truncate_count = list(reversed(free_sectors)).index(False)
    if truncate_count > 0:
        self.size = SECTOR_LENGTH * (len(free_sectors) - truncate_count)
        self.file.truncate(self.size)
        free_sectors = free_sectors[:-truncate_count]
    # Zero any of this chunk's sectors that are now free but not truncated.
    for s in range(current.blockstart,
                   min(current.blockstart + current.blocklength, len(free_sectors))):
        if free_sectors[s]:
            self.file.seek(SECTOR_LENGTH * s)
            self.file.write(SECTOR_LENGTH * b'\x00')
    self.metadata[x, z] = ChunkMetadata(x, z)
Remove a chunk from the header of the region file . Fragmentation is not a problem ; chunks are written to free sectors when possible .
54,782
def _classname ( self ) : if self . __class__ . __module__ in ( None , ) : return self . __class__ . __name__ else : return "%s.%s" % ( self . __class__ . __module__ , self . __class__ . __name__ )
Return the fully qualified class name .
54,783
def chests_per_chunk(chunk):
    """Return a list of Chest objects for every chest found in a chunk.

    Scans the chunk's entities for chest minecarts and its tile entities
    for placed chests; a chest without an Items tag is reported as empty.
    """
    def _items(entity):
        # Chests may lack an Items tag entirely; treat that as empty.
        try:
            return items_from_nbt(entity["Items"])
        except KeyError:
            return {}

    chests = []
    for entity in chunk['Entities']:
        eid = entity["id"].value
        if (eid == "Minecart" and entity["type"].value == 1) \
                or eid == "minecraft:chest_minecart":
            px, py, pz = (tag.value for tag in entity["Pos"])
            chests.append(Chest("Minecart with chest", (px, py, pz), _items(entity)))
    for entity in chunk['TileEntities']:
        eid = entity["id"].value
        if eid in ("Chest", "minecraft:chest"):
            pos = (entity["x"].value, entity["y"].value, entity["z"].value)
            chests.append(Chest("Chest", pos, _items(entity)))
    return chests
Find chests and get contents in a given chunk .
54,784
def get_block(self, x, y, z):
    """Return the block at chunk-relative (x, y, z), or None when the
    containing 16-block-high section does not exist."""
    sy, by = divmod(y, 16)  # section index, y within that section
    section = self.get_section(sy)
    if section is None:
        return None
    return section.get_block(x, by, z)
Get a block from relative x y z .
54,785
def get_blocks_byte_array(self, buffer=False):
    """Return this chunk's block ids packed as bytes.

    With buffer=True, return a BytesIO whose payload is prefixed with the
    big-endian 32-bit element count.
    """
    if buffer:
        length = len(self.blocksList)
        return BytesIO(pack(">i", length) + self.get_blocks_byte_array())
    # array.tostring() was removed in Python 3.9; tobytes() is the
    # byte-for-byte equivalent replacement.
    return array.array('B', self.blocksList).tobytes()
Return the ids of all blocks in this chunk packed as a byte array
54,786
def get_data_byte_array(self, buffer=False):
    """Return this chunk's per-block data values packed as bytes.

    With buffer=True, return a BytesIO whose payload is prefixed with the
    big-endian 32-bit element count.
    """
    if buffer:
        length = len(self.dataList)
        return BytesIO(pack(">i", length) + self.get_data_byte_array())
    # tobytes() replaces array.tostring(), which was removed in Python 3.9.
    return array.array('B', self.dataList).tobytes()
Return the data values for all blocks in this chunk packed as a byte array
54,787
def generate_heightmap(self, buffer=False, as_array=False):
    """Compute the heightmap: per column, 1 + the y of the highest solid
    block (minimum 1, even for all-air columns).

    Returns packed bytes by default, a plain list with as_array=True, or
    a length-prefixed BytesIO with buffer=True.
    """
    # Block ids that do not count as solid ground (air, water, lava,
    # flowers, tall grass, ...).
    non_solids = [0, 8, 9, 10, 11, 38, 37, 32, 31]
    if buffer:
        return BytesIO(pack(">i", 256) + self.generate_heightmap())
    heights = []  # renamed from `bytes`, which shadowed the builtin
    for z in range(16):
        for x in range(16):
            for y in range(127, -1, -1):
                offset = y + z * 128 + x * 128 * 16
                if self.blocksList[offset] not in non_solids or y == 0:
                    heights.append(y + 1)
                    break
    if as_array:
        return heights
    # tobytes() replaces array.tostring(), removed in Python 3.9.
    return array.array('B', heights).tobytes()
Return a heightmap representing the highest solid blocks in this chunk .
54,788
def set_blocks(self, list=None, dict=None, fill_air=False):
    """Replace this chunk's blocks from a flat list or a coordinate dict.

    With list, it is installed directly as the block list. With dict
    (keys are (x, y, z) tuples), blocks not present in the dict keep
    their current value unless fill_air is True, in which case they
    become air (0). Returns True on success, False when neither argument
    is given.

    The parameter names shadow builtins but are kept for compatibility.
    """
    if list:
        self.blocksList = list
        return True
    if not dict:
        return False
    new_blocks = []
    for x in range(16):
        for z in range(16):
            for y in range(128):
                coord = x, y, z
                offset = y + z * 128 + x * 128 * 16
                if coord in dict:
                    new_blocks.append(dict[coord])
                elif self.blocksList[offset] and not fill_air:
                    new_blocks.append(self.blocksList[offset])
                else:
                    new_blocks.append(0)
    self.blocksList = new_blocks
    return True
Sets all blocks in this chunk using either a list or dictionary . Blocks not explicitly set can be filled to air by setting fill_air to True .
54,789
def set_block(self, x, y, z, id, data=0):
    """Set the block at (x, y, z) to id and its 4-bit data value.

    Data values are packed two per byte in dataList: even offsets occupy
    the high nibble, odd offsets the low nibble.
    """
    offset = y + z * 128 + x * 128 * 16
    self.blocksList[offset] = id
    index, is_odd = divmod(offset, 2)
    current = self.dataList[index]
    if is_odd:
        # Odd offset: keep the high nibble, replace the low nibble.
        self.dataList[index] = (current & 0xF0) + (data & 0x0F)
    else:
        # Even offset: keep the low nibble, replace the high nibble.
        self.dataList[index] = (current & 0x0F) + ((data << 4) & 0xF0)
Sets the block a x y z to the specified id and optionally data .
54,790
def get_block(self, x, y, z, coord=False):
    """Return the id of the block at (x, y, z).

    If coord is given as an (x, y, z) tuple it overrides the positional
    arguments (kept for backward compatibility).
    """
    if coord == False:  # noqa: E712 -- equality kept to match legacy calls
        offset = y + z * 128 + x * 128 * 16
    else:
        cx, cy, cz = coord
        offset = cy + cz * 128 + cx * 128 * 16
    return self.blocksList[offset]
Return the id of the block at x y z .
54,791
def tag_info(self):
    """Return a one-line summary: class name, optional tag name, and value."""
    label = self.__class__.__name__
    if self.name:
        label += "(%r)" % self.name
    return label + ": " + self.valuestr()
Return Unicode string with class name and unnested value .
54,792
def parse_file(self, filename=None, buffer=None, fileobj=None):
    """Completely parse an NBT file, extracting all tags.

    Exactly one source should be given: filename (path to a gzipped NBT
    file), buffer (a file-like object used directly, i.e. not wrapped in
    GzipFile -- the caller handles decompression), or fileobj (an open
    file whose content is gzip-compressed).

    :raises MalformedFileError: when the first record is not a compound
        tag, or when the data is truncated (StructError while unpacking)
    :raises ValueError: when no source is given and none was recorded
    """
    if filename:
        self.file = GzipFile(filename, 'rb')
    elif buffer:
        # Remember the source name when the buffer exposes one.
        if hasattr(buffer, 'name'):
            self.filename = buffer.name
        self.file = buffer
    elif fileobj:
        if hasattr(fileobj, 'name'):
            self.filename = fileobj.name
        self.file = GzipFile(fileobj=fileobj)
    if self.file:
        try:
            # NOTE: `type` shadows the builtin within this scope.
            type = TAG_Byte(buffer=self.file)
            if type.value == self.id:
                # Top-level layout: tag id, name, then the payload.
                name = TAG_String(buffer=self.file).value
                self._parse_buffer(self.file)
                self.name = name
                self.file.close()
            else:
                raise MalformedFileError("First record is not a Compound Tag")
        except StructError as e:
            raise MalformedFileError("Partial File Parse: file possibly truncated.")
    else:
        raise ValueError("NBTFile.parse_file(): Need to specify either a "
                         "filename or a file object")
Completely parse a file extracting all tags .
54,793
def write_file(self, filename=None, buffer=None, fileobj=None):
    """Write this NBT file out, gzip-compressed unless buffer is given.

    Destination priority: buffer (written as-is and left open, the caller
    owns it), filename, fileobj, then the previously recorded
    self.filename or self.file.

    :raises ValueError: when no destination can be determined
    """
    closefile = True
    if buffer:
        self.filename = None
        self.file = buffer
        closefile = False  # caller owns the buffer; do not close it
    elif filename:
        self.filename = filename
        self.file = GzipFile(filename, "wb")
    elif fileobj:
        self.filename = None
        self.file = GzipFile(fileobj=fileobj, mode="wb")
    elif self.filename:
        self.file = GzipFile(self.filename, "wb")
    elif not self.file:
        raise ValueError("NBTFile.write_file(): Need to specify either a "
                         "filename or a file object")
    # Top-level layout: tag id, name, then the payload.
    TAG_Byte(self.id)._render_buffer(self.file)
    TAG_String(self.name)._render_buffer(self.file)
    self._render_buffer(self.file)
    try:
        self.file.flush()
    except (AttributeError, IOError):
        # Not every file-like object supports flush; best effort only.
        pass
    if closefile:
        try:
            self.file.close()
        except (AttributeError, IOError):
            pass
Write this NBT file to a file .
54,794
def loads(self, value):
    """Deserialize value with msgpack.loads; None passes through unchanged.

    Strings are decoded only when the serializer's encoding is utf-8
    (msgpack raw=False); any other encoding leaves them as bytes.
    """
    if value is None:
        return None
    # Replaces the `False if ... else True` anti-idiom.
    raw = self.encoding != "utf-8"
    return msgpack.loads(value, raw=raw, use_list=self.use_list)
Deserialize value using msgpack . loads .
54,795
def aiocache_enabled(cls, fake_return=None):
    """Return a decorator that can short-circuit the wrapped coroutine.

    When the AIOCACHE_DISABLE environment variable is "1" at call time,
    the decorated coroutine is not executed and fake_return is returned
    instead.
    """
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            if os.getenv("AIOCACHE_DISABLE") == "1":
                return fake_return
            return await func(*args, **kwargs)
        return wrapper
    return decorator
Use this decorator to be able to fake the return of the function by setting the AIOCACHE_DISABLE environment variable
54,796
async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None):
    """Store value under key, raising if the key already exists.

    :param key: cache key (combined with namespace via build_key)
    :param value: value to store, serialized with dumps_fn or the
        configured serializer
    :param ttl: expiration in seconds; SENTINEL means use the default
    :param dumps_fn: optional serializer override
    :param namespace: optional namespace override
    :param _conn: optional existing backend connection
    :returns: True when the value was stored
    """
    start = time.monotonic()
    dumps = dumps_fn or self._serializer.dumps
    ns_key = self.build_key(key, namespace=namespace)
    # The backend _add is expected to raise when the key already exists.
    await self._add(ns_key, dumps(value), ttl=self._get_ttl(ttl), _conn=_conn)
    logger.debug("ADD %s %s (%.4f)s", ns_key, True, time.monotonic() - start)
    return True
Stores the value in the given key with ttl if specified . Raises an error if the key already exists .
54,797
async def get(self, key, default=None, loads_fn=None, namespace=None, _conn=None):
    """Fetch a value from the cache, returning default when it is missing."""
    start = time.monotonic()
    deserialize = loads_fn or self._serializer.loads
    ns_key = self.build_key(key, namespace=namespace)
    raw = await self._get(ns_key, encoding=self.serializer.encoding, _conn=_conn)
    value = deserialize(raw)
    logger.debug("GET %s %s (%.4f)s", ns_key, value is not None, time.monotonic() - start)
    if value is None:
        return default
    return value
Get a value from the cache . Returns default if not found .
54,798
async def multi_get(self, keys, loads_fn=None, namespace=None, _conn=None):
    """Fetch several keys at once; entries not found come back as None."""
    start = time.monotonic()
    deserialize = loads_fn or self._serializer.loads
    ns_keys = [self.build_key(key, namespace=namespace) for key in keys]
    raw_values = await self._multi_get(
        ns_keys, encoding=self.serializer.encoding, _conn=_conn)
    values = [deserialize(raw) for raw in raw_values]
    hits = len([value for value in values if value is not None])
    logger.debug("MULTI_GET %s %d (%.4f)s", ns_keys, hits, time.monotonic() - start)
    return values
Get multiple values from the cache values not found are Nones .
54,799
async def set(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _cas_token=None, _conn=None):
    """Store value under key, overwriting any existing entry.

    :param key: cache key (combined with namespace via build_key)
    :param value: value to store, serialized with dumps_fn or the
        configured serializer
    :param ttl: expiration in seconds; SENTINEL means use the default
    :param dumps_fn: optional serializer override
    :param namespace: optional namespace override
    :param _cas_token: optional token passed through to the backend _set
        (presumably for compare-and-swap semantics -- confirm in backend)
    :param _conn: optional existing backend connection
    :returns: result of the backend _set call
    """
    start = time.monotonic()
    dumps = dumps_fn or self._serializer.dumps
    ns_key = self.build_key(key, namespace=namespace)
    res = await self._set(ns_key, dumps(value), ttl=self._get_ttl(ttl), _cas_token=_cas_token, _conn=_conn)
    logger.debug("SET %s %d (%.4f)s", ns_key, True, time.monotonic() - start)
    return res
Stores the value in the given key with ttl if specified