idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
60,700 | def _count_relevant_tb_levels ( tb ) : length = contiguous_unittest_frames = 0 while tb : length += 1 if _is_unittest_frame ( tb ) : contiguous_unittest_frames += 1 else : contiguous_unittest_frames = 0 tb = tb . tb_next return length - contiguous_unittest_frames | Return the number of frames in tb before all that s left is unittest frames . |
60,701 | def cmdloop ( self , * args , ** kwargs ) : def unwrapping_raw_input ( * args , ** kwargs ) : wrapped_stdout = sys . stdout sys . stdout = wrapped_stdout . stream ret = orig_raw_input ( * args , ** kwargs ) sys . stdout = wrapped_stdout return ret try : orig_raw_input = raw_input except NameError : orig_raw_input = input if hasattr ( sys . stdout , 'stream' ) : __builtin__ . raw_input = unwrapping_raw_input try : ret = cmd . Cmd . cmdloop ( self , * args , ** kwargs ) finally : __builtin__ . raw_input = orig_raw_input return ret | Call pdb s cmdloop making readline work . |
60,702 | def set_trace ( * args , ** kwargs ) : out = sys . stdout . stream if hasattr ( sys . stdout , 'stream' ) else None kwargs [ 'stdout' ] = out debugger = pdb . Pdb ( * args , ** kwargs ) debugger . use_rawinput = True debugger . set_trace ( sys . _getframe ( ) . f_back ) | Call pdb . set_trace making sure it receives the unwrapped stdout . |
60,703 | def begin ( self ) : self . _stderr . append ( sys . stderr ) sys . stderr = StreamWrapper ( sys . stderr , self ) self . _stdout . append ( sys . stdout ) sys . stdout = StreamWrapper ( sys . stdout , self ) self . _set_trace . append ( pdb . set_trace ) pdb . set_trace = set_trace self . _cmdloop . append ( pdb . Pdb . cmdloop ) pdb . Pdb . cmdloop = cmdloop self . _cwd = '' if self . conf . options . absolute_paths else getcwd ( ) | Make some monkeypatches to dodge progress bar . |
60,704 | def finalize ( self , result ) : sys . stderr = self . _stderr . pop ( ) sys . stdout = self . _stdout . pop ( ) pdb . set_trace = self . _set_trace . pop ( ) pdb . Pdb . cmdloop = self . _cmdloop . pop ( ) | Put monkeypatches back as we found them . |
60,705 | def configure ( self , options , conf ) : super ( ProgressivePlugin , self ) . configure ( options , conf ) if ( getattr ( options , 'verbosity' , 0 ) > 1 and getattr ( options , 'enable_plugin_id' , False ) ) : print ( 'Using --with-id and --verbosity=2 or higher with ' 'nose-progressive causes visualization errors. Remove one ' 'or the other to avoid a mess.' ) if options . with_bar : options . with_styling = True | Turn style - forcing on if bar - forcing is on . |
60,706 | def update ( self , test_path , number ) : GRAPH_WIDTH = 14 num_filled = int ( round ( min ( 1.0 , float ( number ) / self . max ) * GRAPH_WIDTH ) ) graph = '' . join ( [ self . _fill_cap ( ' ' * num_filled ) , self . _empty_cap ( self . _empty_char * ( GRAPH_WIDTH - num_filled ) ) ] ) cols_for_path = self . cols - GRAPH_WIDTH - 2 if len ( test_path ) > cols_for_path : test_path = test_path [ len ( test_path ) - cols_for_path : ] else : test_path += ' ' * ( cols_for_path - len ( test_path ) ) self . last = self . _term . bold ( test_path ) + ' ' + graph with self . _at_last_line ( ) : self . stream . write ( self . last ) self . stream . flush ( ) | Draw an updated progress bar . |
60,707 | def erase ( self ) : with self . _at_last_line ( ) : self . stream . write ( self . _term . clear_eol ) self . stream . flush ( ) | White out the progress bar . |
60,708 | def dodging ( bar ) : class ShyProgressBar ( object ) : def __enter__ ( self ) : bar . _is_dodging += 1 if bar . _is_dodging <= 1 : bar . erase ( ) def __exit__ ( self , type , value , tb ) : if bar . _is_dodging == 1 : with bar . _at_last_line ( ) : bar . stream . write ( bar . last ) bar . stream . flush ( ) bar . _is_dodging -= 1 return ShyProgressBar ( ) | Return a context manager which erases the bar lets you output things and then redraws the bar . |
60,709 | def _makeResult ( self ) : return ProgressiveResult ( self . _cwd , self . _totalTests , self . stream , config = self . config ) | Return a Result that doesn t print dots . |
60,710 | def run ( self , test ) : "Run the given test case or test suite...quietly." wrapper = self . config . plugins . prepareTest ( test ) if wrapper is not None : test = wrapper wrapped = self . config . plugins . setOutputStream ( self . stream ) if wrapped is not None : self . stream = wrapped result = self . _makeResult ( ) startTime = time ( ) try : test ( result ) except KeyboardInterrupt : pass stopTime = time ( ) self . config . plugins . report ( self . stream ) result . printSummary ( startTime , stopTime ) self . config . plugins . finalize ( result ) return result | Run the given test case or test suite ... quietly . |
60,711 | def _printTraceback ( self , test , err ) : exception_type , exception_value = err [ : 2 ] extracted_tb = extract_relevant_tb ( err [ 2 ] , exception_type , exception_type is test . failureException ) test_frame_index = index_of_test_frame ( extracted_tb , exception_type , exception_value , test ) if test_frame_index : extracted_tb = extracted_tb [ test_frame_index : ] with self . bar . dodging ( ) : self . stream . write ( '' . join ( format_traceback ( extracted_tb , exception_type , exception_value , self . _cwd , self . _term , self . _options . function_color , self . _options . dim_color , self . _options . editor , self . _options . editor_shortcut_template ) ) ) | Print a nicely formatted traceback . |
60,712 | def _printHeadline ( self , kind , test , is_failure = True ) : if is_failure or self . _options . show_advisories : with self . bar . dodging ( ) : self . stream . writeln ( '\n' + ( self . _term . bold if is_failure else '' ) + '%s: %s' % ( kind , nose_selector ( test ) ) + ( self . _term . normal if is_failure else '' ) ) | Output a 1 - line error summary to the stream if appropriate . |
60,713 | def _recordAndPrintHeadline ( self , test , error_class , artifact ) : is_error_class = False for cls , ( storage , label , is_failure ) in self . errorClasses . items ( ) : if isclass ( error_class ) and issubclass ( error_class , cls ) : if is_failure : test . passed = False storage . append ( ( test , artifact ) ) is_error_class = True if not is_error_class : self . errors . append ( ( test , artifact ) ) test . passed = False is_any_failure = not is_error_class or is_failure self . _printHeadline ( label if is_error_class else 'ERROR' , test , is_failure = is_any_failure ) return is_any_failure | Record that an error - like thing occurred and print a summary . |
60,714 | def addSkip ( self , test , reason ) : self . _recordAndPrintHeadline ( test , SkipTest , reason ) if isinstance ( reason , Exception ) : reason = getattr ( reason , 'message' , None ) or getattr ( reason , 'args' ) [ 0 ] if reason and self . _options . show_advisories : with self . bar . dodging ( ) : self . stream . writeln ( reason ) | Catch skipped tests in Python 2 . 7 and above . |
60,715 | def printSummary ( self , start , stop ) : def renderResultType ( type , number , is_failure ) : ret = '%s %s%s' % ( number , type , 's' if number != 1 else '' ) if is_failure and number : ret = self . _term . bold ( ret ) return ret counts = [ ( 'test' , self . testsRun , False ) , ( 'failure' , len ( self . failures ) , True ) , ( 'error' , len ( self . errors ) , True ) ] counts . extend ( [ ( label . lower ( ) if label . isupper ( ) else label , len ( storage ) , is_failure ) for ( storage , label , is_failure ) in self . errorClasses . values ( ) if len ( storage ) ] ) summary = ( ', ' . join ( renderResultType ( * a ) for a in counts ) + ' in %.1fs' % ( stop - start ) ) self . bar . erase ( ) self . stream . writeln ( ) if self . wasSuccessful ( ) : self . stream . write ( self . _term . bold_green ( 'OK! ' ) ) self . stream . writeln ( summary ) | As a final summary print number of tests broken down by result . |
60,716 | def nose_selector ( test ) : address = test_address ( test ) if address : file , module , rest = address if module : if rest : try : return '%s:%s%s' % ( module , rest , test . test . arg or '' ) except AttributeError : return '%s:%s' % ( module , rest ) else : return module return 'Unknown test' | Return the string you can pass to nose to run test including argument values if the test was made by a test generator . |
60,717 | def human_path ( path , cwd ) : path = abspath ( path ) if cwd and path . startswith ( cwd ) : path = path [ len ( cwd ) + 1 : ] return path | Return the most human - readable representation of the given path . |
60,718 | def know ( self , what , confidence ) : if confidence > self . confidence : self . best = what self . confidence = confidence return self | Know something with the given confidence and return self for chaining . |
60,719 | def _generate_arg_types ( coordlist_length , shape_name ) : from . ds9_region_parser import ds9_shape_defs from . ds9_attr_parser import ds9_shape_in_comment_defs if shape_name in ds9_shape_defs : shape_def = ds9_shape_defs [ shape_name ] else : shape_def = ds9_shape_in_comment_defs [ shape_name ] initial_arg_types = shape_def . args_list arg_repeats = shape_def . args_repeat if arg_repeats is None : return initial_arg_types n1 , n2 = arg_repeats arg_types = list ( initial_arg_types [ : n1 ] ) num_of_repeats = coordlist_length - ( len ( initial_arg_types ) - n2 ) arg_types . extend ( ( num_of_repeats - n1 ) // ( n2 - n1 ) * initial_arg_types [ n1 : n2 ] ) arg_types . extend ( initial_arg_types [ n2 : ] ) return arg_types | Find coordinate types based on shape name and coordlist length |
60,720 | def convert_to_imagecoord ( shape , header ) : arg_types = _generate_arg_types ( len ( shape . coord_list ) , shape . name ) new_coordlist = [ ] is_even_distance = True coord_list_iter = iter ( zip ( shape . coord_list , arg_types ) ) new_wcs = WCS ( header ) pixel_scales = proj_plane_pixel_scales ( new_wcs ) for coordinate , coordinate_type in coord_list_iter : if coordinate_type == CoordOdd : even_coordinate = next ( coord_list_iter ) [ 0 ] old_coordinate = SkyCoord ( coordinate , even_coordinate , frame = shape . coord_format , unit = 'degree' , obstime = 'J2000' ) new_coordlist . extend ( np . asscalar ( x ) for x in old_coordinate . to_pixel ( new_wcs , origin = 1 ) ) elif coordinate_type == Distance : if arg_types [ - 1 ] == Angle : degree_per_pixel = pixel_scales [ 0 if is_even_distance else 1 ] is_even_distance = not is_even_distance else : degree_per_pixel = np . sqrt ( proj_plane_pixel_area ( new_wcs ) ) new_coordlist . append ( coordinate / degree_per_pixel ) elif coordinate_type == Angle : new_angle = _estimate_angle ( coordinate , shape . coord_format , header ) new_coordlist . append ( new_angle ) else : new_coordlist . append ( coordinate ) return new_coordlist | Convert the coordlist of shape to image coordinates |
60,721 | def get_auth_info ( ) : env_username = os . environ . get ( 'JOTTACLOUD_USERNAME' ) env_password = os . environ . get ( 'JOTTACLOUD_PASSWORD' ) netrc_auth = None try : netrc_file = netrc . netrc ( ) netrc_auth = netrc_file . authenticators ( 'jottacloud.com' ) except IOError : pass netrc_username = None netrc_password = None if netrc_auth : netrc_username , _ , netrc_password = netrc_auth username = env_username or netrc_username password = env_password or netrc_password if not ( username and password ) : raise JFSError ( 'Could not find username and password in either env or ~/.netrc, ' 'you need to add one of these to use these tools' ) return ( username , password ) | Get authentication details to jottacloud . |
60,722 | def calculate_md5 ( fileobject , size = 2 ** 16 ) : fileobject . seek ( 0 ) md5 = hashlib . md5 ( ) for data in iter ( lambda : fileobject . read ( size ) , b'' ) : if not data : break if isinstance ( data , six . text_type ) : data = data . encode ( 'utf-8' ) md5 . update ( data ) fileobject . seek ( 0 ) return md5 . hexdigest ( ) | Utility function to calculate md5 hashes while being light on memory usage . |
60,723 | def deleted ( self ) : 'Return datetime.datetime or None if the file isnt deleted' _d = self . folder . attrib . get ( 'deleted' , None ) if _d is None : return None return dateutil . parser . parse ( str ( _d ) ) | Return datetime . datetime or None if the file isnt deleted |
60,724 | def sync ( self ) : 'Update state of folder from Jottacloud server' log . info ( "syncing %r" % self . path ) self . folder = self . jfs . get ( self . path ) self . synced = True | Update state of folder from Jottacloud server |
60,725 | def mkdir ( self , foldername ) : 'Create a new subfolder and return the new JFSFolder' url = posixpath . join ( self . path , foldername ) params = { 'mkDir' : 'true' } r = self . jfs . post ( url , params ) self . sync ( ) return r | Create a new subfolder and return the new JFSFolder |
60,726 | def delete ( self ) : 'Delete this folder and return a deleted JFSFolder' params = { 'dlDir' : 'true' } r = self . jfs . post ( self . path , params ) self . sync ( ) return r | Delete this folder and return a deleted JFSFolder |
60,727 | def hard_delete ( self ) : 'Deletes without possibility to restore' url = 'https://www.jottacloud.com/rest/webrest/%s/action/delete' % self . jfs . username data = { 'paths[]' : self . path . replace ( JFS_ROOT , '' ) , 'web' : 'true' , 'ts' : int ( time . time ( ) ) , 'authToken' : 0 } r = self . jfs . post ( url , content = data ) return r | Deletes without possibility to restore |
60,728 | def rename ( self , newpath ) : "Move folder to a new name, possibly a whole new path" params = { 'mvDir' : '/%s%s' % ( self . jfs . username , newpath ) } r = self . jfs . post ( self . path , extra_headers = { 'Content-Type' : 'application/octet-stream' } , params = params ) return r | Move folder to a new name possibly a whole new path |
60,729 | def up ( self , fileobj_or_path , filename = None , upload_callback = None ) : 'Upload a file to current folder and return the new JFSFile' close_on_done = False if isinstance ( fileobj_or_path , six . string_types ) : filename = filename or os . path . basename ( fileobj_or_path ) fileobj_or_path = open ( fileobj_or_path , 'rb' ) close_on_done = True elif hasattr ( fileobj_or_path , 'read' ) : pass else : raise JFSError ( "Need filename or file-like object" ) if filename is None : if hasattr ( fileobj_or_path , 'name' ) : filename = os . path . basename ( fileobj_or_path . name ) else : raise JFSError ( "Unable to guess filename" ) log . debug ( '.up %s -> %s %s' , repr ( fileobj_or_path ) , repr ( self . path ) , repr ( filename ) ) r = self . jfs . up ( posixpath . join ( self . path , filename ) , fileobj_or_path , upload_callback = upload_callback ) if close_on_done : fileobj_or_path . close ( ) self . sync ( ) return r | Upload a file to current folder and return the new JFSFile |
60,730 | def factory ( fileobject , jfs , parentpath ) : 'Class method to get the correct file class instatiated' if hasattr ( fileobject , 'currentRevision' ) : return JFSFile ( fileobject , jfs , parentpath ) elif str ( fileobject . latestRevision . state ) == ProtoFile . STATE_INCOMPLETE : return JFSIncompleteFile ( fileobject , jfs , parentpath ) elif str ( fileobject . latestRevision . state ) == ProtoFile . STATE_CORRUPT : return JFSCorruptFile ( fileobject , jfs , parentpath ) else : raise NotImplementedError ( 'No JFS*File support for state %r. Please file a bug!' % fileobject . latestRevision . state ) | Class method to get the correct file class instatiated |
60,731 | def resume ( self , data ) : 'Resume uploading an incomplete file, after a previous upload was interrupted. Returns new file object' if not hasattr ( data , 'read' ) : data = six . BytesIO ( data ) if self . size == - 1 : log . debug ( '%r is an incomplete file, but .size is unknown. Refreshing the file object from server' , self . path ) self . f = self . jfs . get ( self . path ) md5 = calculate_md5 ( data ) if md5 != self . md5 : raise JFSError ( ) log . debug ( 'Resuming %s from offset %s' , self . path , self . size ) return self . jfs . up ( self . path , data , resume_offset = self . size ) | Resume uploading an incomplete file after a previous upload was interrupted . Returns new file object |
60,732 | def size ( self ) : if hasattr ( self . f . latestRevision , 'size' ) : return int ( self . f . latestRevision . size ) return None | Bytes uploaded of the file so far . |
60,733 | def stream ( self , chunk_size = 64 * 1024 ) : 'Returns a generator to iterate over the file contents' return self . jfs . stream ( url = self . path , params = { 'mode' : 'bin' } , chunk_size = chunk_size ) | Returns a generator to iterate over the file contents |
60,734 | def restore ( self ) : 'Restore the file' if not self . deleted : raise JFSError ( 'Tried to restore a not deleted file' ) raise NotImplementedError ( 'Jottacloud has changed the restore API. Please use jottacloud.com in a browser, for now.' ) url = 'https://www.jottacloud.com/rest/webrest/%s/action/restore' % self . jfs . username data = { 'paths[]' : self . path . replace ( JFS_ROOT , '' ) , 'web' : 'true' , 'ts' : int ( time . time ( ) ) , 'authToken' : 0 } r = self . jfs . post ( url , content = data ) return r | Restore the file |
60,735 | def delete ( self ) : 'Delete this file and return the new, deleted JFSFile' r = self . jfs . post ( url = self . path , params = { 'dl' : 'true' } ) return r | Delete this file and return the new deleted JFSFile |
60,736 | def thumb ( self , size = BIGTHUMB ) : if not self . is_image ( ) : return None if not size in ( self . BIGTHUMB , self . MEDIUMTHUMB , self . SMALLTHUMB , self . XLTHUMB ) : raise JFSError ( 'Invalid thumbnail size: %s for image %s' % ( size , self . path ) ) return self . jfs . raw ( url = self . path , params = { 'mode' : 'thumb' , 'ts' : size } ) | Get a thumbnail as string or None if the file isnt an image |
60,737 | def new_mountpoint ( self , name ) : url = posixpath . join ( self . path , name ) r = self . _jfs . post ( url , extra_headers = { 'content-type' : 'application/x-www-form-urlencoded' } ) return r | Create a new mountpoint |
60,738 | def sharedFiles ( self ) : 'iterate over shared files and get their public URI' for f in self . sharing . files . iterchildren ( ) : yield ( f . attrib [ 'name' ] , f . attrib [ 'uuid' ] , 'https://www.jottacloud.com/p/%s/%s' % ( self . jfs . username , f . publicURI . text ) ) | iterate over shared files and get their public URI |
60,739 | def files ( self ) : 'iterate over found files' for _f in self . searchresult . files . iterchildren ( ) : yield ProtoFile . factory ( _f , jfs = self . jfs , parentpath = unicode ( _f . abspath ) ) | iterate over found files |
60,740 | def request ( self , url , extra_headers = None , params = None ) : 'Make a GET request for url, with or without caching' if not url . startswith ( 'http' ) : url = self . rootpath + url log . debug ( "getting url: %r, extra_headers=%r, params=%r" , url , extra_headers , params ) if extra_headers is None : extra_headers = { } r = self . session . get ( url , headers = extra_headers , params = params , timeout = 1800 ) if r . status_code in ( 500 , ) : raise JFSError ( r . reason ) return r | Make a GET request for url with or without caching |
60,741 | def raw ( self , url , extra_headers = None , params = None ) : 'Make a GET request for url and return whatever content we get' r = self . request ( url , extra_headers = extra_headers , params = params ) if not r . ok : o = lxml . objectify . fromstring ( r . content ) JFSError . raiseError ( o , url ) return r . content | Make a GET request for url and return whatever content we get |
60,742 | def get ( self , url , params = None ) : 'Make a GET request for url and return the response content as a generic lxml.objectify object' url = self . escapeUrl ( url ) content = six . BytesIO ( self . raw ( url , params = params ) ) content . seek ( 0 , 2 ) contentlen = content . tell ( ) content . seek ( 0 ) MAX_BUFFER_SIZE = 1024 * 1024 * 200 if contentlen > MAX_BUFFER_SIZE : contentfile = tempfile . NamedTemporaryFile ( ) contentfile . write ( content . read ( ) ) o = lxml . objectify . parse ( contentfile ) else : o = lxml . objectify . fromstring ( content . getvalue ( ) ) if o . tag == 'error' : JFSError . raiseError ( o , url ) return o | Make a GET request for url and return the response content as a generic lxml . objectify object |
60,743 | def devices ( self ) : 'return generator of configured devices' return self . fs is not None and [ JFSDevice ( d , self , parentpath = self . rootpath ) for d in self . fs . devices . iterchildren ( ) ] or [ x for x in [ ] ] | return generator of configured devices |
60,744 | def parse ( region_string ) : rp = RegionParser ( ) ss = rp . parse ( region_string ) sss1 = rp . convert_attr ( ss ) sss2 = _check_wcs ( sss1 ) shape_list , comment_list = rp . filter_shape2 ( sss2 ) return ShapeList ( shape_list , comment_list = comment_list ) | Parse DS9 region string into a ShapeList . |
60,745 | def open ( fname ) : with _builtin_open ( fname ) as fh : region_string = fh . read ( ) return parse ( region_string ) | Open read and parse DS9 region file . |
60,746 | def read_region ( s ) : rp = RegionParser ( ) ss = rp . parse ( s ) sss1 = rp . convert_attr ( ss ) sss2 = _check_wcs ( sss1 ) shape_list = rp . filter_shape ( sss2 ) return ShapeList ( shape_list ) | Read region . |
60,747 | def read_region_as_imagecoord ( s , header ) : rp = RegionParser ( ) ss = rp . parse ( s ) sss1 = rp . convert_attr ( ss ) sss2 = _check_wcs ( sss1 ) sss3 = rp . sky_to_image ( sss2 , header ) shape_list = rp . filter_shape ( sss3 ) return ShapeList ( shape_list ) | Read region as image coordinates . |
60,748 | def get_mask ( region , hdu , origin = 1 ) : from pyregion . region_to_filter import as_region_filter data = hdu . data region_filter = as_region_filter ( region , origin = origin ) mask = region_filter . mask ( data ) return mask | Get mask . |
60,749 | def as_imagecoord ( self , header ) : comment_list = self . _comment_list if comment_list is None : comment_list = cycle ( [ None ] ) r = RegionParser . sky_to_image ( zip ( self , comment_list ) , header ) shape_list , comment_list = zip ( * list ( r ) ) return ShapeList ( shape_list , comment_list = comment_list ) | New shape list in image coordinates . |
60,750 | def get_mask ( self , hdu = None , header = None , shape = None ) : if hdu and header is None : header = hdu . header if hdu and shape is None : shape = hdu . data . shape region_filter = self . get_filter ( header = header ) mask = region_filter . mask ( shape ) return mask | Create a 2 - d mask . |
60,751 | def write ( self , outfile ) : if len ( self ) < 1 : print ( "WARNING: The region list is empty. The region file " "'{:s}' will be empty." . format ( outfile ) ) try : outf = _builtin_open ( outfile , 'w' ) outf . close ( ) return except IOError as e : cmsg = "Unable to create region file '{:s}'." . format ( outfile ) if e . args : e . args = ( e . args [ 0 ] + '\n' + cmsg , ) + e . args [ 1 : ] else : e . args = ( cmsg , ) raise e prev_cs = self [ 0 ] . coord_format outf = None try : outf = _builtin_open ( outfile , 'w' ) attr0 = self [ 0 ] . attr [ 1 ] defaultline = " " . join ( [ "{:s}={:s}" . format ( a , attr0 [ a ] ) for a in attr0 if a != 'text' ] ) outf . write ( "global {0}\n" . format ( defaultline ) ) outf . write ( "{0}\n" . format ( prev_cs ) ) for shape in self : shape_attr = '' if prev_cs == shape . coord_format else shape . coord_format + "; " shape_excl = '-' if shape . exclude else '' text_coordlist = [ "{:f}" . format ( f ) for f in shape . coord_list ] shape_coords = "(" + "," . join ( text_coordlist ) + ")" shape_comment = " # " + shape . comment if shape . comment else '' shape_str = ( shape_attr + shape_excl + shape . name + shape_coords + shape_comment ) outf . write ( "{0}\n" . format ( shape_str ) ) except IOError as e : cmsg = "Unable to create region file \'{:s}\'." . format ( outfile ) if e . args : e . args = ( e . args [ 0 ] + '\n' + cmsg , ) + e . args [ 1 : ] else : e . args = ( cmsg , ) raise e finally : if outf : outf . close ( ) | Write this shape list to a region file . |
60,752 | def AppConfigFlagHandler ( feature = None ) : if not current_app : log . warn ( u"Got a request to check for {feature} but we're outside the request context. Returning False" . format ( feature = feature ) ) return False try : return current_app . config [ FEATURE_FLAGS_CONFIG ] [ feature ] except ( AttributeError , KeyError ) : raise NoFeatureFlagFound ( ) | This is the default handler . It checks for feature flags in the current app s configuration . |
60,753 | def is_active ( feature ) : if current_app : feature_flagger = current_app . extensions . get ( EXTENSION_NAME ) if feature_flagger : return feature_flagger . check ( feature ) else : raise AssertionError ( "Oops. This application doesn't have the Flask-FeatureFlag extention installed." ) else : log . warn ( u"Got a request to check for {feature} but we're running outside the request context. Check your setup. Returning False" . format ( feature = feature ) ) return False | Check if a feature is active |
60,754 | def is_active_feature ( feature , redirect_to = None , redirect = None ) : def _is_active_feature ( func ) : @ wraps ( func ) def wrapped ( * args , ** kwargs ) : if not is_active ( feature ) : url = redirect_to if redirect : url = url_for ( redirect ) if url : log . debug ( u'Feature {feature} is off, redirecting to {url}' . format ( feature = feature , url = url ) ) return _redirect ( url , code = 302 ) else : log . debug ( u'Feature {feature} is off, aborting request' . format ( feature = feature ) ) abort ( 404 ) return func ( * args , ** kwargs ) return wrapped return _is_active_feature | Decorator for Flask views . If a feature is off it can either return a 404 or redirect to a URL if you d rather . |
60,755 | def init_app ( self , app ) : app . config . setdefault ( FEATURE_FLAGS_CONFIG , { } ) app . config . setdefault ( RAISE_ERROR_ON_MISSING_FEATURES , False ) if hasattr ( app , "add_template_test" ) : app . add_template_test ( self . check , name = self . JINJA_TEST_NAME ) else : app . jinja_env . tests [ self . JINJA_TEST_NAME ] = self . check if not hasattr ( app , 'extensions' ) : app . extensions = { } app . extensions [ EXTENSION_NAME ] = self | Add ourselves into the app config and setup and add a jinja function test |
60,756 | def check ( self , feature ) : found = False for handler in self . handlers : try : if handler ( feature ) : return True except StopCheckingFeatureFlags : return False except NoFeatureFlagFound : pass else : found = True if not found : message = u"No feature flag defined for {feature}" . format ( feature = feature ) if current_app . debug and current_app . config . get ( RAISE_ERROR_ON_MISSING_FEATURES , False ) : raise KeyError ( message ) else : log . info ( message ) missing_feature . send ( self , feature = feature ) return False | Loop through all our feature flag checkers and return true if any of them are true . |
60,757 | def yank_path ( self , path ) : for func in self . _caches : cache = { } for key in self . _caches [ func ] . keys ( ) : log . debug ( "cache key %s for func %s" , key , func ) if path in key [ 0 ] : log . debug ( "del cache key %s" , key ) del self . _caches [ func ] [ key ] | Clear cache of results from a specific path |
60,758 | def release ( self , path , fh ) : "Run after a read or write operation has finished. This is where we upload on writes" try : f = self . __newfiles [ path ] f . seek ( 0 , os . SEEK_END ) if f . tell ( ) > 0 : self . client . up ( path , f ) del self . __newfiles [ path ] del f self . _dirty ( path ) except KeyError : pass return ESUCCESS | Run after a read or write operation has finished . This is where we upload on writes |
60,759 | def truncate ( self , path , length , fh = None ) : "Download existing path, truncate and reupload" try : f = self . _getpath ( path ) except JFS . JFSError : raise OSError ( errno . ENOENT , '' ) if isinstance ( f , ( JFS . JFSFile , JFS . JFSFolder ) ) and f . is_deleted ( ) : raise OSError ( errno . ENOENT ) data = StringIO ( f . read ( ) ) data . truncate ( length ) try : self . client . up ( path , data ) self . _dirty ( path ) return ESUCCESS except : raise OSError ( errno . ENOENT , '' ) | Download existing path truncate and reupload |
60,760 | def commandline_text ( bytestring ) : 'Convert bytestring from command line to unicode, using default file system encoding' if six . PY3 : return bytestring unicode_string = bytestring . decode ( sys . getfilesystemencoding ( ) ) return unicode_string | Convert bytestring from command line to unicode using default file system encoding |
60,761 | def sky_to_image ( shape_list , header ) : for shape , comment in shape_list : if isinstance ( shape , Shape ) and ( shape . coord_format not in image_like_coordformats ) : new_coords = convert_to_imagecoord ( shape , header ) l1n = copy . copy ( shape ) l1n . coord_list = new_coords l1n . coord_format = "image" yield l1n , comment elif isinstance ( shape , Shape ) and shape . coord_format == "physical" : if header is None : raise RuntimeError ( "Physical coordinate is not known." ) new_coordlist = convert_physical_to_imagecoord ( shape , header ) l1n = copy . copy ( shape ) l1n . coord_list = new_coordlist l1n . coord_format = "image" yield l1n , comment else : yield shape , comment | Converts a ShapeList into shapes with coordinates in image coordinates |
60,762 | def _new ( self , src_path , dry_run = False , remove_uploaded = False ) : 'Code to upload' if os . path . islink ( src_path ) : sourcefile = os . path . normpath ( os . path . join ( self . topdir , os . readlink ( src_path ) ) ) if not os . path . exists ( sourcefile ) : log . error ( "broken symlink %s->%s" , src_path , sourcefile ) raise IOError ( "broken symliknk %s->%s" , src_path , sourcefile ) jottapath = self . get_jottapath ( src_path , filename = os . path . basename ( sourcefile ) ) elif os . path . splitext ( src_path ) [ 1 ] . lower ( ) == '.lnk' : sourcefile = os . path . normpath ( readlnk ( src_path ) ) if not os . path . exists ( sourcefile ) : log . error ( "broken fat32lnk %s->%s" , src_path , sourcefile ) raise IOError ( "broken fat32lnk %s->%s" , src_path , sourcefile ) jottapath = self . get_jottapath ( src_path , filename = os . path . basename ( sourcefile ) ) else : sourcefile = src_path if not os . path . exists ( sourcefile ) : log . error ( "file does not exist: %s" , sourcefile ) raise IOError ( "file does not exist: %s" , sourcefile ) jottapath = self . get_jottapath ( src_path ) log . info ( 'Uploading file %s to %s' , sourcefile , jottapath ) if not dry_run : if not jottacloud . new ( sourcefile , jottapath , self . jfs ) : log . error ( 'Uploading file %s failed' , sourcefile ) raise if remove_uploaded : log . info ( 'Removing file after upload: %s' , src_path ) if not dry_run : os . remove ( src_path ) | Code to upload |
60,763 | def _estimate_angle ( angle , reg_coordinate_frame , header ) : y_axis_rot = _calculate_rotation_angle ( reg_coordinate_frame , header ) return angle - y_axis_rot | Transform an angle into a different frame |
60,764 | def _calculate_rotation_angle ( reg_coordinate_frame , header ) : new_wcs = WCS ( header ) region_frame = SkyCoord ( '0d 0d' , frame = reg_coordinate_frame , obstime = 'J2000' ) region_frame = SkyCoord ( '0d 0d' , frame = reg_coordinate_frame , obstime = 'J2000' , equinox = region_frame . equinox ) origin = SkyCoord . from_pixel ( header [ 'NAXIS1' ] / 2 , header [ 'NAXIS2' ] / 2 , wcs = new_wcs , origin = 1 ) . transform_to ( region_frame ) offset = proj_plane_pixel_scales ( new_wcs ) [ 1 ] origin_x , origin_y = origin . to_pixel ( new_wcs , origin = 1 ) origin_lon = origin . data . lon . degree origin_lat = origin . data . lat . degree offset_point = SkyCoord ( origin_lon , origin_lat + offset , unit = 'degree' , frame = origin . frame . name , obstime = 'J2000' ) offset_x , offset_y = offset_point . to_pixel ( new_wcs , origin = 1 ) north_rot = np . arctan2 ( offset_y - origin_y , offset_x - origin_x ) / np . pi * 180. cdelt = new_wcs . wcs . get_cdelt ( ) if ( cdelt > 0 ) . all ( ) or ( cdelt < 0 ) . all ( ) : return north_rot - 90 else : return - ( north_rot - 90 ) | Calculates the rotation angle from the region to the header s frame |
60,765 | def sf ( f , dirpath , jottapath ) : log . debug ( 'Create SyncFile from %s' , repr ( f ) ) log . debug ( 'Got encoded filename %r, joining with dirpath %r' , _encode_filename_to_filesystem ( f ) , dirpath ) return SyncFile ( localpath = os . path . join ( dirpath , _encode_filename_to_filesystem ( f ) ) , jottapath = posixpath . join ( _decode_filename_to_unicode ( jottapath ) , _decode_filename_to_unicode ( f ) ) ) | Create and return a SyncFile tuple from filename . |
60,766 | def get_jottapath ( localtopdir , dirpath , jottamountpoint ) : log . debug ( "get_jottapath %r %r %r" , localtopdir , dirpath , jottamountpoint ) normpath = posixpath . normpath ( posixpath . join ( jottamountpoint , posixpath . basename ( localtopdir ) , posixpath . relpath ( dirpath , localtopdir ) ) ) return _decode_filename_to_unicode ( normpath ) | Translate localtopdir to jottapath . Returns unicode string |
60,767 | def is_file ( jottapath , JFS ) : log . debug ( "is_file %r" , jottapath ) try : jf = JFS . getObject ( jottapath ) except JFSNotFoundError : return False return isinstance ( jf , JFSFile ) | Check if a file exists on jottacloud |
60,768 | def compare ( localtopdir , jottamountpoint , JFS , followlinks = False , exclude_patterns = None ) : def excluded ( unicodepath , fname ) : fpath = os . path . join ( unicodepath , _decode_filename_to_unicode ( fname ) ) mode = os . stat ( fpath ) . st_mode if not ( stat . S_ISREG ( mode ) or stat . S_ISLNK ( mode ) or stat . S_ISDIR ( mode ) ) : return True if exclude_patterns is None : return False for p in exclude_patterns : if p . search ( fpath ) : log . debug ( "%r excluded by pattern %r" , fpath , p . pattern ) return True return False bytestring_localtopdir = _encode_filename_to_filesystem ( localtopdir ) for dirpath , dirnames , filenames in os . walk ( bytestring_localtopdir , followlinks = followlinks ) : dirpath = _encode_filename_to_filesystem ( dirpath ) unicodepath = _decode_filename_to_unicode ( dirpath ) log . debug ( "compare walk: %r -> %s files " , unicodepath , len ( filenames ) ) localfiles = set ( [ f for f in filenames if not excluded ( unicodepath , f ) ] ) localfolders = set ( [ f for f in dirnames if not excluded ( unicodepath , f ) ] ) jottapath = get_jottapath ( localtopdir , unicodepath , jottamountpoint ) log . debug ( "compare jottapath: %r" , jottapath ) cloudfiles = filelist ( jottapath , JFS ) cloudfolders = folderlist ( jottapath , JFS ) log . debug ( "--cloudfiles: %r" , cloudfiles ) log . debug ( "--localfiles: %r" , localfiles ) log . debug ( "--cloudfolders: %r" , cloudfolders ) onlylocal = [ sf ( f , dirpath , jottapath ) for f in localfiles . difference ( cloudfiles ) ] onlyremote = [ sf ( f , dirpath , jottapath ) for f in cloudfiles . difference ( localfiles ) ] bothplaces = [ sf ( f , dirpath , jottapath ) for f in localfiles . intersection ( cloudfiles ) ] onlyremotefolders = [ sf ( f , dirpath , jottapath ) for f in cloudfolders . difference ( localfolders ) ] yield dirpath , onlylocal , onlyremote , bothplaces , onlyremotefolders | Make a tree of local files and folders and compare it with what s currently on JottaCloud . |
60,769 | def _decode_filename_to_unicode ( f ) : log . debug ( '_decode_filename_to_unicode(%s)' , repr ( f ) ) if isinstance ( f , unicode ) : return f try : return f . decode ( sys . getfilesystemencoding ( ) ) except UnicodeDecodeError : charguess = chardet . detect ( f ) log . debug ( "chardet filename: %r -> %r" , f , charguess ) if charguess [ 'encoding' ] is not None : try : return f . decode ( charguess [ 'encoding' ] ) except UnicodeDecodeError : pass log . warning ( 'Cannot understand decoding of this filename: %r (guessed %r, but was wrong)' , f , charguess ) log . debug ( 'Trying utf-8 to decode %r' , f ) try : return f . decode ( 'utf-8' ) except UnicodeDecodeError : pass log . debug ( 'Trying latin1 to decode %r' , f ) try : return f . decode ( 'latin1' ) except UnicodeDecodeError : log . warning ( 'Exhausted all options. Decoding %r to safe ascii' , f ) return f . decode ( 'ascii' , errors = 'ignore' ) | Get bytestring filename and return unicode . First try to decode from default file system encoding If that fails use chardet module to guess encoding . As a last resort try to decode as utf - 8 . |
60,770 | def _encode_filename_to_filesystem ( f ) : log . debug ( '_encode_filename_to_filesystem(%s)' , repr ( f ) ) if isinstance ( f , str ) : return f try : return f . encode ( sys . getfilesystemencoding ( ) ) except UnicodeEncodeError : raise | Get a unicode filename and return bytestring encoded to file system default . |
60,771 | def resume ( localfile , jottafile , JFS ) : with open ( localfile ) as lf : _complete = jottafile . resume ( lf ) return _complete | Continue uploading a new file from local file ( already exists on JottaCloud |
60,772 | def replace_if_changed ( localfile , jottapath , JFS ) : jf = JFS . getObject ( jottapath ) lf_hash = getxattrhash ( localfile ) if lf_hash is None : with open ( localfile ) as lf : lf_hash = calculate_md5 ( lf ) if type ( jf ) == JFSIncompleteFile : log . debug ( "Local file %s is incompletely uploaded, continue" , localfile ) return resume ( localfile , jf , JFS ) elif jf . md5 == lf_hash : log . debug ( "hash match (%s), file contents haven't changed" , lf_hash ) setxattrhash ( localfile , lf_hash ) return jf else : setxattrhash ( localfile , lf_hash ) return new ( localfile , jottapath , JFS ) | Compare md5 hash to determine if contents have changed . Upload a file from local disk and replace file on JottaCloud if the md5s differ or continue uploading if the file is incompletely uploaded . |
60,773 | def iter_tree ( jottapath , JFS ) : filedirlist = JFS . getObject ( '%s?mode=list' % jottapath ) log . debug ( "got tree: %s" , filedirlist ) if not isinstance ( filedirlist , JFSFileDirList ) : yield ( '' , tuple ( ) , tuple ( ) ) for path in filedirlist . tree : yield path | Get a tree of of files and folders . use as an iterator you get something like os . walk |
60,774 | def _query ( self , filename ) : log . Info ( 'Querying size of %s' % filename ) from jottalib . JFS import JFSNotFoundError , JFSIncompleteFile remote_path = posixpath . join ( self . folder . path , filename ) try : remote_file = self . client . getObject ( remote_path ) except JFSNotFoundError : return { 'size' : - 1 } return { 'size' : remote_file . size , } | Get size of filename |
60,775 | def parse_drawing ( document , container , elem ) : _blip = elem . xpath ( './/a:blip' , namespaces = NAMESPACES ) if len ( _blip ) > 0 : blip = _blip [ 0 ] _rid = blip . attrib [ _name ( '{{{r}}}embed' ) ] img = doc . Image ( _rid ) container . elements . append ( img ) | Parse drawing element . |
60,776 | def parse_footnote ( document , container , elem ) : "Parse the footnote element." _rid = elem . attrib [ _name ( '{{{w}}}id' ) ] foot = doc . Footnote ( _rid ) container . elements . append ( foot ) | Parse the footnote element . |
60,777 | def parse_text ( document , container , element ) : "Parse text element." txt = None alternate = element . find ( _name ( '{{{mc}}}AlternateContent' ) ) if alternate is not None : parse_alternate ( document , container , alternate ) br = element . find ( _name ( '{{{w}}}br' ) ) if br is not None : if _name ( '{{{w}}}type' ) in br . attrib : _type = br . attrib [ _name ( '{{{w}}}type' ) ] brk = doc . Break ( _type ) else : brk = doc . Break ( ) container . elements . append ( brk ) t = element . find ( _name ( '{{{w}}}t' ) ) if t is not None : txt = doc . Text ( t . text ) txt . parent = container container . elements . append ( txt ) rpr = element . find ( _name ( '{{{w}}}rPr' ) ) if rpr is not None : parse_previous_properties ( document , txt , rpr ) for r in element . findall ( _name ( '{{{w}}}r' ) ) : parse_text ( document , container , r ) foot = element . find ( _name ( '{{{w}}}footnoteReference' ) ) if foot is not None : parse_footnote ( document , container , foot ) end = element . find ( _name ( '{{{w}}}endnoteReference' ) ) if end is not None : parse_endnote ( document , container , end ) sym = element . find ( _name ( '{{{w}}}sym' ) ) if sym is not None : _font = sym . attrib [ _name ( '{{{w}}}font' ) ] _char = sym . attrib [ _name ( '{{{w}}}char' ) ] container . elements . append ( doc . Symbol ( font = _font , character = _char ) ) image = element . find ( _name ( '{{{w}}}drawing' ) ) if image is not None : parse_drawing ( document , container , image ) refe = element . find ( _name ( '{{{w}}}commentReference' ) ) if refe is not None : _m = doc . Comment ( refe . attrib [ _name ( '{{{w}}}id' ) ] , 'reference' ) container . elements . append ( _m ) return | Parse text element . |
60,778 | def parse_paragraph ( document , par ) : paragraph = doc . Paragraph ( ) paragraph . document = document for elem in par : if elem . tag == _name ( '{{{w}}}pPr' ) : parse_paragraph_properties ( document , paragraph , elem ) if elem . tag == _name ( '{{{w}}}r' ) : parse_text ( document , paragraph , elem ) if elem . tag == _name ( '{{{m}}}oMath' ) : _m = doc . Math ( ) paragraph . elements . append ( _m ) if elem . tag == _name ( '{{{m}}}oMathPara' ) : _m = doc . Math ( ) paragraph . elements . append ( _m ) if elem . tag == _name ( '{{{w}}}commentRangeStart' ) : _m = doc . Comment ( elem . attrib [ _name ( '{{{w}}}id' ) ] , 'start' ) paragraph . elements . append ( _m ) if elem . tag == _name ( '{{{w}}}commentRangeEnd' ) : _m = doc . Comment ( elem . attrib [ _name ( '{{{w}}}id' ) ] , 'end' ) paragraph . elements . append ( _m ) if elem . tag == _name ( '{{{w}}}hyperlink' ) : try : t = doc . Link ( elem . attrib [ _name ( '{{{r}}}id' ) ] ) parse_text ( document , t , elem ) paragraph . elements . append ( t ) except : logger . error ( 'Error with with hyperlink [%s].' , str ( elem . attrib . items ( ) ) ) if elem . tag == _name ( '{{{w}}}smartTag' ) : parse_smarttag ( document , paragraph , elem ) return paragraph | Parse paragraph element . |
60,779 | def parse_table_properties ( doc , table , prop ) : "Parse table properties." if not table : return style = prop . find ( _name ( '{{{w}}}tblStyle' ) ) if style is not None : table . style_id = style . attrib [ _name ( '{{{w}}}val' ) ] doc . add_style_as_used ( table . style_id ) | Parse table properties . |
60,780 | def parse_table_column_properties ( doc , cell , prop ) : "Parse table column properties." if not cell : return grid = prop . find ( _name ( '{{{w}}}gridSpan' ) ) if grid is not None : cell . grid_span = int ( grid . attrib [ _name ( '{{{w}}}val' ) ] ) vmerge = prop . find ( _name ( '{{{w}}}vMerge' ) ) if vmerge is not None : if _name ( '{{{w}}}val' ) in vmerge . attrib : cell . vmerge = vmerge . attrib [ _name ( '{{{w}}}val' ) ] else : cell . vmerge = "" | Parse table column properties . |
60,781 | def parse_table ( document , tbl ) : "Parse table element." def _change ( rows , pos_x ) : if len ( rows ) == 1 : return rows count_x = 1 for x in rows [ - 1 ] : if count_x == pos_x : x . row_span += 1 count_x += x . grid_span return rows table = doc . Table ( ) tbl_pr = tbl . find ( _name ( '{{{w}}}tblPr' ) ) if tbl_pr is not None : parse_table_properties ( document , table , tbl_pr ) for tr in tbl . xpath ( './w:tr' , namespaces = NAMESPACES ) : columns = [ ] pos_x = 0 for tc in tr . xpath ( './w:tc' , namespaces = NAMESPACES ) : cell = doc . TableCell ( ) tc_pr = tc . find ( _name ( '{{{w}}}tcPr' ) ) if tc_pr is not None : parse_table_column_properties ( doc , cell , tc_pr ) pos_x += cell . grid_span if cell . vmerge is not None and cell . vmerge == "" : table . rows = _change ( table . rows , pos_x ) else : for p in tc . xpath ( './w:p' , namespaces = NAMESPACES ) : cell . elements . append ( parse_paragraph ( document , p ) ) columns . append ( cell ) table . rows . append ( columns ) return table | Parse table element . |
60,782 | def parse_document ( xmlcontent ) : document = etree . fromstring ( xmlcontent ) body = document . xpath ( './/w:body' , namespaces = NAMESPACES ) [ 0 ] document = doc . Document ( ) for elem in body : if elem . tag == _name ( '{{{w}}}p' ) : document . elements . append ( parse_paragraph ( document , elem ) ) if elem . tag == _name ( '{{{w}}}tbl' ) : document . elements . append ( parse_table ( document , elem ) ) if elem . tag == _name ( '{{{w}}}sdt' ) : document . elements . append ( doc . TOC ( ) ) return document | Parse document with content . |
60,783 | def parse_relationship ( document , xmlcontent , rel_type ) : doc = etree . fromstring ( xmlcontent ) for elem in doc : if elem . tag == _name ( '{{{pr}}}Relationship' ) : rel = { 'target' : elem . attrib [ 'Target' ] , 'type' : elem . attrib [ 'Type' ] , 'target_mode' : elem . attrib . get ( 'TargetMode' , 'Internal' ) } document . relationships [ rel_type ] [ elem . attrib [ 'Id' ] ] = rel | Parse relationship document . |
60,784 | def parse_style ( document , xmlcontent ) : styles = etree . fromstring ( xmlcontent ) _r = styles . xpath ( './/w:rPrDefault' , namespaces = NAMESPACES ) if len ( _r ) > 0 : rpr = _r [ 0 ] . find ( _name ( '{{{w}}}rPr' ) ) if rpr is not None : st = doc . Style ( ) parse_previous_properties ( document , st , rpr ) document . default_style = st for style in styles . xpath ( './/w:style' , namespaces = NAMESPACES ) : st = doc . Style ( ) st . style_id = style . attrib [ _name ( '{{{w}}}styleId' ) ] style_type = style . attrib [ _name ( '{{{w}}}type' ) ] if style_type is not None : st . style_type = style_type if _name ( '{{{w}}}default' ) in style . attrib : is_default = style . attrib [ _name ( '{{{w}}}default' ) ] if is_default is not None : st . is_default = is_default == '1' name = style . find ( _name ( '{{{w}}}name' ) ) if name is not None : st . name = name . attrib [ _name ( '{{{w}}}val' ) ] based_on = style . find ( _name ( '{{{w}}}basedOn' ) ) if based_on is not None : st . based_on = based_on . attrib [ _name ( '{{{w}}}val' ) ] document . styles . styles [ st . style_id ] = st if st . is_default : document . styles . default_styles [ st . style_type ] = st . style_id rpr = style . find ( _name ( '{{{w}}}rPr' ) ) if rpr is not None : parse_previous_properties ( document , st , rpr ) ppr = style . find ( _name ( '{{{w}}}pPr' ) ) if ppr is not None : parse_paragraph_properties ( document , st , ppr ) | Parse styles document . |
60,785 | def parse_comments ( document , xmlcontent ) : comments = etree . fromstring ( xmlcontent ) document . comments = { } for comment in comments . xpath ( './/w:comment' , namespaces = NAMESPACES ) : comment_id = comment . attrib [ _name ( '{{{w}}}id' ) ] comm = doc . CommentContent ( comment_id ) comm . author = comment . attrib . get ( _name ( '{{{w}}}author' ) , None ) comm . date = comment . attrib . get ( _name ( '{{{w}}}date' ) , None ) comm . elements = [ parse_paragraph ( document , para ) for para in comment . xpath ( './/w:p' , namespaces = NAMESPACES ) ] document . comments [ comment_id ] = comm | Parse comments document . |
60,786 | def parse_footnotes ( document , xmlcontent ) : footnotes = etree . fromstring ( xmlcontent ) document . footnotes = { } for footnote in footnotes . xpath ( './/w:footnote' , namespaces = NAMESPACES ) : _type = footnote . attrib . get ( _name ( '{{{w}}}type' ) , None ) if _type in [ 'separator' , 'continuationSeparator' , 'continuationNotice' ] : continue paragraphs = [ parse_paragraph ( document , para ) for para in footnote . xpath ( './/w:p' , namespaces = NAMESPACES ) ] document . footnotes [ footnote . attrib [ _name ( '{{{w}}}id' ) ] ] = paragraphs | Parse footnotes document . |
60,787 | def parse_endnotes ( document , xmlcontent ) : endnotes = etree . fromstring ( xmlcontent ) document . endnotes = { } for note in endnotes . xpath ( './/w:endnote' , namespaces = NAMESPACES ) : paragraphs = [ parse_paragraph ( document , para ) for para in note . xpath ( './/w:p' , namespaces = NAMESPACES ) ] document . endnotes [ note . attrib [ _name ( '{{{w}}}id' ) ] ] = paragraphs | Parse endnotes document . |
60,788 | def parse_numbering ( document , xmlcontent ) : numbering = etree . fromstring ( xmlcontent ) document . abstruct_numbering = { } document . numbering = { } for abstruct_num in numbering . xpath ( './/w:abstractNum' , namespaces = NAMESPACES ) : numb = { } for lvl in abstruct_num . xpath ( './w:lvl' , namespaces = NAMESPACES ) : ilvl = int ( lvl . attrib [ _name ( '{{{w}}}ilvl' ) ] ) fmt = lvl . find ( _name ( '{{{w}}}numFmt' ) ) numb [ ilvl ] = { 'numFmt' : fmt . attrib [ _name ( '{{{w}}}val' ) ] } document . abstruct_numbering [ abstruct_num . attrib [ _name ( '{{{w}}}abstractNumId' ) ] ] = numb for num in numbering . xpath ( './/w:num' , namespaces = NAMESPACES ) : num_id = num . attrib [ _name ( '{{{w}}}numId' ) ] abs_num = num . find ( _name ( '{{{w}}}abstractNumId' ) ) if abs_num is not None : number_id = abs_num . attrib [ _name ( '{{{w}}}val' ) ] document . numbering [ int ( num_id ) ] = number_id | Parse numbering document . |
60,789 | def parse_from_file ( file_object ) : logger . info ( 'Parsing %s file.' , file_object . file_name ) doc_content = file_object . read_file ( 'document.xml' ) document = parse_document ( doc_content ) try : style_content = file_object . read_file ( 'styles.xml' ) parse_style ( document , style_content ) except KeyError : logger . warning ( 'Could not read styles.' ) try : doc_rel_content = file_object . read_file ( '_rels/document.xml.rels' ) parse_relationship ( document , doc_rel_content , 'document' ) except KeyError : logger . warning ( 'Could not read document relationships.' ) try : doc_rel_content = file_object . read_file ( '_rels/endnotes.xml.rels' ) parse_relationship ( document , doc_rel_content , 'endnotes' ) except KeyError : logger . warning ( 'Could not read endnotes relationships.' ) try : doc_rel_content = file_object . read_file ( '_rels/footnotes.xml.rels' ) parse_relationship ( document , doc_rel_content , 'footnotes' ) except KeyError : logger . warning ( 'Could not read footnotes relationships.' ) try : comments_content = file_object . read_file ( 'comments.xml' ) parse_comments ( document , comments_content ) except KeyError : logger . warning ( 'Could not read comments.' ) try : footnotes_content = file_object . read_file ( 'footnotes.xml' ) parse_footnotes ( document , footnotes_content ) except KeyError : logger . warning ( 'Could not read footnotes.' ) try : endnotes_content = file_object . read_file ( 'endnotes.xml' ) parse_endnotes ( document , endnotes_content ) except KeyError : logger . warning ( 'Could not read endnotes.' ) try : numbering_content = file_object . read_file ( 'numbering.xml' ) parse_numbering ( document , numbering_content ) except KeyError : logger . warning ( 'Could not read numbering.' ) return document | Parses existing OOXML file . |
60,790 | def get_by_name ( self , name , style_type = None ) : for st in self . styles . values ( ) : if st : if st . name == name : return st if style_type and not st : st = self . styles . get ( self . default_styles [ style_type ] , None ) return st | Find style by it s descriptive name . |
60,791 | def get_by_id ( self , style_id , style_type = None ) : for st in self . styles . values ( ) : if st : if st . style_id == style_id : return st if style_type : return self . styles . get ( self . default_styles [ style_type ] , None ) return None | Find style by it s unique identifier |
60,792 | def process_affinity ( affinity = None ) : if affinity is not None : affinity = CPUSet ( affinity ) if not affinity . issubset ( system_affinity ( ) ) : raise ValueError ( "unknown cpus: %s" % affinity ) return system_affinity ( ) | Get or set the CPU affinity set for the current process . |
60,793 | def acquire ( self , blocking = True , timeout = None ) : if timeout is None : return self . __lock . acquire ( blocking ) else : endtime = _time ( ) + timeout delay = 0.0005 while not self . __lock . acquire ( False ) : remaining = endtime - _time ( ) if remaining <= 0 : return False delay = min ( delay * 2 , remaining , 0.05 ) _sleep ( delay ) return True | Attempt to acquire this lock . |
60,794 | def from_thread ( cls , thread ) : new_classes = [ ] for new_cls in cls . __mro__ : if new_cls not in thread . __class__ . __mro__ : new_classes . append ( new_cls ) if isinstance ( thread , cls ) : pass elif issubclass ( cls , thread . __class__ ) : thread . __class__ = cls else : class UpgradedThread ( thread . __class__ , cls ) : pass thread . __class__ = UpgradedThread for new_cls in new_classes : if hasattr ( new_cls , "_upgrade_thread" ) : new_cls . _upgrade_thread ( thread ) return thread | Convert a vanilla thread object into an instance of this class . |
60,795 | def acquire ( self , blocking = True , timeout = None , shared = False ) : with self . _lock : if shared : self . _acquire_shared ( blocking , timeout ) else : self . _acquire_exclusive ( blocking , timeout ) assert not ( self . is_shared and self . is_exclusive ) | Acquire the lock in shared or exclusive mode . |
60,796 | def _get_font_size ( document , style ) : font_size = style . get_font_size ( ) if font_size == - 1 : if style . based_on : based_on = document . styles . get_by_id ( style . based_on ) if based_on : return _get_font_size ( document , based_on ) return font_size | Get font size defined for this style . |
60,797 | def _get_numbering ( document , numid , ilvl ) : try : abs_num = document . numbering [ numid ] return document . abstruct_numbering [ abs_num ] [ ilvl ] [ 'numFmt' ] except : return 'bullet' | Returns type for the list . |
60,798 | def _get_parent ( root ) : elem = root while True : elem = elem . getparent ( ) if elem . tag in [ 'ul' , 'ol' ] : return elem | Returns root element for a list . |
60,799 | def close_list ( ctx , root ) : try : n = len ( ctx . in_list ) if n <= 0 : return root elem = root while n > 0 : while True : if elem . tag in [ 'ul' , 'ol' , 'td' ] : elem = elem . getparent ( ) break elem = elem . getparent ( ) n -= 1 ctx . in_list = [ ] return elem except : return None | Close already opened list if needed . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.