idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
48,000
def _secured_storage_parameters ( self ) : parameters = self . _storage_parameters or dict ( ) if self . _unsecure : parameters = parameters . copy ( ) parameters [ 'protocol' ] = 'http' return parameters
Updates storage parameters with unsecure mode .
48,001
def _format_src_url ( self , path , caller_system ) : path = '%s/%s' % ( self . _endpoint , self . relpath ( path ) ) if caller_system is not self : try : path = '%s?%s' % ( path , self . _storage_parameters [ 'sas_token' ] ) except KeyError : pass return path
Ensure path is absolute and use the correct URL format for use with cross Azure storage account copy function .
48,002
def _update_listing_client_kwargs ( client_kwargs , max_request_entries ) : client_kwargs = client_kwargs . copy ( ) if max_request_entries : client_kwargs [ 'num_results' ] = max_request_entries return client_kwargs
Updates client kwargs for listing functions .
48,003
def _model_to_dict(obj):
    """Convert an object model to a dict, merging selected top-level
    attributes ('metadata', 'snapshot') into the properties dict."""
    result = _properties_model_to_dict(obj.properties)
    for name in ('metadata', 'snapshot'):
        try:
            attr_value = getattr(obj, name)
        except AttributeError:
            continue
        if attr_value:
            result[name] = attr_value
    return result
Convert object model to dict .
48,004
def _create(self):
    """Create the file if it does not exist.

    Delegates to the storage client with the configured initial content
    length; storage errors are converted by _handle_azure_exception.
    """
    with _handle_azure_exception():
        self._create_from_size(
            content_length=self._content_length, **self._client_kwargs)
Create the file if not exists .
48,005
def _flush(self, buffer, start, end):
    """Flush the write buffer of the stream if applicable.

    buffer: data to write (supports tobytes/slicing — presumably a
    memoryview; TODO confirm). start/end (int): absolute byte range
    covered. Buffers larger than MAX_FLUSH_SIZE are split into chunks
    uploaded concurrently via the worker pool.
    """
    buffer_size = len(buffer)
    if not buffer_size:
        return
    with self._size_lock:
        if end > self._size:
            # Grow the remote file before writing past its current end.
            with _handle_azure_exception():
                self._resize(content_length=end, **self._client_kwargs)
            # Cached header is stale after the resize.
            self._reset_head()
    if buffer_size > self.MAX_FLUSH_SIZE:
        futures = []
        for part_start in range(0, buffer_size, self.MAX_FLUSH_SIZE):
            buffer_part = buffer[part_start:part_start + self.MAX_FLUSH_SIZE]
            if not len(buffer_part):
                break
            start_range = start + part_start
            futures.append(self._workers.submit(
                self._update_range, data=buffer_part.tobytes(),
                start_range=start_range,
                end_range=start_range + len(buffer_part) - 1,
                **self._client_kwargs))
        # Wait for all chunk uploads, converting storage errors.
        with _handle_azure_exception():
            for future in _as_completed(futures):
                future.result()
    else:
        with _handle_azure_exception():
            self._update_range(
                data=buffer.tobytes(), start_range=start,
                end_range=end - 1, **self._client_kwargs)
Flush the write buffer of the stream if applicable .
48,006
def memoizedmethod(method):
    """Decorator caching a method's first result in ``self._cache``.

    The cache key is the method name, so arguments are ignored for
    caching purposes; later calls return the stored value.
    """
    key = method.__name__

    @wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            return self._cache[key]
        except KeyError:
            self._cache[key] = value = method(self, *args, **kwargs)
            return value
    return wrapper
Decorator that caches method result .
48,007
def _generate_async ( self , generator ) : first_value_future = self . _workers . submit ( next , generator ) def get_first_element ( future = first_value_future ) : try : yield future . result ( ) except StopIteration : return return chain ( get_first_element ( ) , generator )
Return a generator equivalent to the given one, with the evaluation of its first element started as a background task.
48,008
def _handle_client_exception ( ) : try : yield except _ClientException as exception : if exception . http_status in _ERROR_CODES : raise _ERROR_CODES [ exception . http_status ] ( exception . http_reason ) raise
Handle Swift exception and convert to class IO exceptions
48,009
def _update_seek ( self , offset , whence ) : with self . _seek_lock : if whence == SEEK_SET : self . _seek = offset elif whence == SEEK_CUR : self . _seek += offset elif whence == SEEK_END : self . _seek = offset + self . _size else : raise ValueError ( 'whence value %s unsupported' % whence ) return self . _seek
Update seek value .
48,010
def _handle_http_errors ( response ) : code = response . status_code if 200 <= code < 400 : return response elif code in ( 403 , 404 ) : raise { 403 : _ObjectPermissionError , 404 : _ObjectNotFoundError } [ code ] ( response . reason ) response . raise_for_status ( )
Check for HTTP errors and raise OSError if relevant .
48,011
def flush(self):
    """Flush the write buffer of the stream if applicable and save the
    object on the cloud. No-op when the stream is not writable.
    """
    if self._writable:
        with self._seek_lock:
            # Snapshot the buffer and its absolute byte range, then
            # reset it so new writes can proceed.
            buffer = self._get_buffer()
            end = self._seek
            start = end - len(buffer)
            self._write_buffer = bytearray()
        with handle_os_exceptions():
            self._flush(buffer, start, end)
Flush the write buffers of the stream if applicable and save the object on the cloud .
48,012
def _update_size ( self , size , future ) : with self . _size_lock : if size > self . _size and future . done : self . _size = size
Keep track of the file size during writing .
48,013
def _flush_range(self, buffer, start, end):
    """Flush a buffer to a range of the file.

    On the first flush, synchronize the local size with the raw
    object's size (0 when the object is missing or sizing is
    unsupported). Then wait until preceding ranges have been flushed so
    writes land in order.
    """
    with self._size_lock:
        if not self._size_synched:
            self._size_synched = True
            try:
                self._size = self.raw._size
            except (ObjectNotFoundError, UnsupportedOperation):
                self._size = 0
    # Busy-wait until the file has grown up to 'start' (i.e. previous
    # ranges are done); presumably _size is advanced by _raw_flush —
    # TODO confirm.
    while start > self._size:
        sleep(self._FLUSH_WAIT)
    self._raw_flush(buffer, start, end)
Flush a buffer to a range of the file .
48,014
def _update_range ( self , data , ** kwargs ) : self . _client . update_range ( data = data , ** kwargs )
Update range with data
48,015
def handle_bodhi(msg):
    """Given a bodhi message, return the FAS username it concerns."""
    topic = msg.topic
    if 'bodhi.update.comment' in topic:
        return msg.msg['comment']['author']
    if 'bodhi.buildroot_override' in topic:
        return msg.msg['override']['submitter']
    # Generic update messages: submitter may be absent (None).
    return msg.msg.get('update', {}).get('submitter')
Given a bodhi message return the FAS username .
48,016
def handle_wiki(msg):
    """Given a wiki message, return the FAS username it concerns.

    Raises ValueError for topics this handler does not know.
    """
    topic = msg.topic
    if 'wiki.article.edit' in topic:
        return msg.msg['user']
    if 'wiki.upload.complete' in topic:
        return msg.msg['user_text']
    raise ValueError("Unhandled topic.")
Given a wiki message return the FAS username .
48,017
def is_storage(url, storage=None):
    """Return True when *url* designates a storage object rather than a
    local file. An explicit *storage* name forces True."""
    if storage:
        return True
    if '://' not in url:
        return False
    # Any scheme other than 'file' is treated as remote storage.
    scheme = url.split('://', 1)[0]
    return scheme.lower() != 'file'
Check if file is a local file or a storage file .
48,018
def format_and_is_storage(path):
    """Normalize *path* separators and tell whether it is a storage path.

    File-like objects (anything with a 'read' attribute) are passed
    through unchanged and treated as storage.
    """
    if hasattr(path, 'read'):
        return path, True
    path = fsdecode(path).replace('\\', '/')
    return path, is_storage(path)
Checks if path is storage and format it .
48,019
def equivalent_to(std_function):
    """Decorator making a cloud-object function fall back to
    *std_function* when called on a local path."""
    def decorate(cos_function):
        @wraps(cos_function)
        def decorated(path, *args, **kwargs):
            path = fsdecode(path).replace('\\', '/')
            if not is_storage(path):
                return std_function(path, *args, **kwargs)
            with handle_os_exceptions():
                return cos_function(path, *args, **kwargs)
        return decorated
    return decorate
Decorates a cloud object compatible function to provides fall back to standard function if used on local files .
48,020
def _handle_oss_error ( ) : try : yield except _OssError as exception : if exception . status in _ERROR_CODES : raise _ERROR_CODES [ exception . status ] ( exception . details . get ( 'Message' , '' ) ) raise
Handle OSS exception and convert to class IO exceptions
48,021
def _get_client(self):
    """Return the OSS2 Auth client matching the configured parameters:
    StsAuth with a security token, Auth with credentials, anonymous
    otherwise."""
    params = self._storage_parameters
    if 'security_token' in params:
        auth_class = _oss.StsAuth
    elif params:
        auth_class = _oss.Auth
    else:
        auth_class = _oss.AnonymousAuth
    return auth_class(**params)
OSS2 Auth client
48,022
def _get_bucket(self, client_kwargs):
    """Return the OSS Bucket object for ``client_kwargs['bucket_name']``."""
    name = client_kwargs['bucket_name']
    return _oss.Bucket(self.client, endpoint=self._endpoint,
                       bucket_name=name)
Get bucket object .
48,023
def islink(self, path=None, header=None):
    """Return True if the object is a symbolic link.

    Fetches the header when not provided. The matching type key is
    consumed (popped) from the header dict.
    """
    if header is None:
        header = self._head(self.get_client_kwargs(path))
    for key in ('x-oss-object-type', 'type'):
        try:
            return header.pop(key) == 'Symlink'
        except KeyError:
            continue
    return False
Returns True if object is a symbolic link .
48,024
def _model_to_dict ( model , ignore ) : return { attr : value for attr , value in model . __dict__ . items ( ) if not attr . startswith ( '_' ) and attr not in ignore }
Convert OSS model to dict .
48,025
def cos_open(file, mode='r', buffering=-1, encoding=None, errors=None,
             newline=None, storage=None, storage_parameters=None,
             unsecure=None, **kwargs):
    """Open *file* and yield a corresponding file object
    (context-manager generator body).

    Accepts an already-open file-like object, a storage URL or a local
    path; local paths fall back to the built-in open.
    """
    # Already-open stream: only wrap it for text mode if needed.
    if hasattr(file, 'read'):
        with _text_io_wrapper(file, mode, encoding, errors,
                              newline) as wrapped:
            yield wrapped
        return
    file = fsdecode(file).replace('\\', '/')
    if is_storage(file, storage):
        # buffering == 0 selects the raw (unbuffered) storage class.
        with get_instance(
                name=file, cls='raw' if buffering == 0 else 'buffered',
                storage=storage, storage_parameters=storage_parameters,
                mode=mode, unsecure=unsecure, **kwargs) as stream:
            with _text_io_wrapper(stream, mode=mode, encoding=encoding,
                                  errors=errors, newline=newline) as wrapped:
                yield wrapped
    else:
        with io_open(file, mode, buffering, encoding, errors, newline,
                     **kwargs) as stream:
            yield stream
Open file and return a corresponding file object .
48,026
def _text_io_wrapper ( stream , mode , encoding , errors , newline ) : if "t" in mode and not hasattr ( stream , 'encoding' ) : text_stream = TextIOWrapper ( stream , encoding = encoding , errors = errors , newline = newline ) yield text_stream text_stream . flush ( ) else : yield stream
Wrap a binary stream to Text stream .
48,027
def hash_key(self, key):
    """Map *key* to the first destination key greater than it, so all
    keys in a time range hash to the same value; keys past the last
    destination map to themselves."""
    for destination_key in self._dict:
        if key < destination_key:
            return destination_key
    return key
Hash all keys in a timerange to the same value .
48,028
def _load_from_yaml(self, filename: str,
                    model_identifiers: Dict[str, List[str]]):
    """Load fixtures for one model class from a YAML template file.

    The class name is the filename without its extension. The template
    is rendered with jinja, post-processed to normalize identifiers,
    and the resulting fixtures and relationships are recorded.
    """
    class_name = filename[:filename.rfind('.')]
    rendered_yaml = self.env.get_template(filename).render(
        model_identifiers=model_identifiers)
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input — confirm fixture files are trusted.
    fixture_data, self.relationships[class_name] = \
        self._post_process_yaml_data(
            yaml.load(rendered_yaml),
            self.factory.get_relationships(class_name))
    for identifier_key, data in fixture_data.items():
        self.model_fixtures[class_name][identifier_key] = data
Load fixtures from the given filename
48,029
def _post_process_yaml_data(
        self, fixture_data: Dict[str, Dict[str, Any]],
        relationship_columns: Set[str],
) -> Tuple[Dict[str, Dict[str, Any]], List[str]]:
    """Convert and normalize identifier strings to Identifiers and
    collect the names of related classes.

    Returns (converted fixture data, related class names).
    """
    rv = {}
    relationships = set()
    if not fixture_data:
        # NOTE(review): returns the set itself here but a list below —
        # presumably callers only iterate the value.
        return rv, relationships
    for identifier_id, data in fixture_data.items():
        new_data = {}
        for col_name, value in data.items():
            if col_name not in relationship_columns:
                # Plain column: copy through unchanged.
                new_data[col_name] = value
                continue
            identifiers = normalize_identifiers(value)
            if identifiers:
                relationships.add(identifiers[0].class_name)
            # A scalar string with at most one identifier stays scalar;
            # everything else keeps the list form.
            if isinstance(value, str) and len(identifiers) <= 1:
                new_data[col_name] = identifiers[0] if identifiers else None
            else:
                new_data[col_name] = identifiers
        rv[identifier_id] = new_data
    return rv, list(relationships)
Convert and normalize identifier strings to Identifiers as well as determine class relationships .
48,030
def _ensure_env(self, env: Union[jinja2.Environment, None]):
    """Return a jinja environment with the minimal configuration the
    loader needs: a template loader, a seeded faker and the
    random-model helper globals."""
    if not env:
        env = jinja2.Environment()
    if not env.loader:
        # Templates are served from the in-memory cache.
        env.loader = jinja2.FunctionLoader(
            lambda filename: self._cache[filename])
    if 'faker' not in env.globals:
        # Fixed seed for reproducible generated data.
        faker = Faker()
        faker.seed(1234)
        env.globals['faker'] = faker
    if 'random_model' not in env.globals:
        env.globals['random_model'] = jinja2.contextfunction(random_model)
    if 'random_models' not in env.globals:
        env.globals['random_models'] = jinja2.contextfunction(random_models)
    return env
Make sure the jinja environment is minimally configured .
48,031
def _preloading_env(self):
    """Context-manager body yielding the environment with the
    random-model helpers stubbed to return None (for preloading, when
    models do not exist yet); the real helpers are restored on exit."""
    ctx = self.env.globals
    try:
        ctx['random_model'] = lambda *a, **kw: None
        ctx['random_models'] = lambda *a, **kw: None
        yield self.env
    finally:
        ctx['random_model'] = jinja2.contextfunction(random_model)
        ctx['random_models'] = jinja2.contextfunction(random_models)
A stripped jinja environment .
48,032
def get_instance(name, cls='system', storage=None, storage_parameters=None,
                 unsecure=None, *args, **kwargs):
    """Get a cloud object storage instance for *name*.

    cls selects the class role ('system', 'raw', 'buffered'). Reuses
    the cached system of an already mounted root when parameters
    match; otherwise mounts the storage or instantiates a new system
    with merged parameters.
    """
    system_parameters = _system_parameters(
        unsecure=unsecure, storage_parameters=storage_parameters)
    with _MOUNT_LOCK:
        # Find the mounted root matching the name (regex or prefix).
        for root in MOUNTED:
            if ((isinstance(root, Pattern) and root.match(name)) or
                    (not isinstance(root, Pattern) and
                     name.startswith(root))):
                info = MOUNTED[root]
                stored_parameters = info.get('system_parameters') or dict()
                if not system_parameters:
                    same_parameters = True
                    system_parameters = stored_parameters
                elif system_parameters == stored_parameters:
                    same_parameters = True
                else:
                    same_parameters = False
                    # Complete missing keys from the stored parameters.
                    system_parameters.update(
                        {key: value
                         for key, value in stored_parameters.items()
                         if key not in system_parameters})
                break
        else:
            # Not mounted yet: mount now and use the new entry.
            mount_info = mount(storage=storage, name=name,
                               **system_parameters)
            info = mount_info[tuple(mount_info)[0]]
            same_parameters = True
    if cls == 'system':
        if same_parameters:
            return info['system_cached']
        else:
            return info['system'](roots=info['roots'], **system_parameters)
    if same_parameters:
        # Share the cached system with the new IO object.
        if 'storage_parameters' not in system_parameters:
            system_parameters['storage_parameters'] = dict()
        system_parameters['storage_parameters'][
            'pycosio.system_cached'] = info['system_cached']
    kwargs.update(system_parameters)
    return info[cls](name=name, *args, **kwargs)
Get a cloud object storage instance .
48,033
def mount(storage=None, name='', storage_parameters=None, unsecure=None,
          extra_root=None):
    """Mount a new storage and register its roots.

    Returns a dict mapping storage name to its mount information.
    Raises ValueError when the storage cannot be inferred and
    ImportError when no matching storage module exists.
    """
    # Infer the storage from the URL scheme when not given.
    if storage is None:
        if '://' in name:
            storage = name.split('://', 1)[0].lower()
            storage = 'http' if storage == 'https' else storage
        else:
            raise ValueError(
                'No storage specified and unable to infer it from file name.')
    system_parameters = _system_parameters(
        unsecure=unsecure, storage_parameters=storage_parameters)
    storage_info = dict(storage=storage, system_parameters=system_parameters)
    # Import the first package providing a module for this storage.
    for package in STORAGE_PACKAGE:
        try:
            module = import_module('%s.%s' % (package, storage))
            break
        except ImportError:
            continue
    else:
        raise ImportError('No storage named "%s" found' % storage)
    # Some modules redirect to several real storages (aliases).
    if hasattr(module, 'MOUNT_REDIRECT'):
        if extra_root:
            raise ValueError((
                "Can't define extra_root with %s. "
                "%s can't have a common root.") % (
                    storage, ', '.join(extra_root)))
        result = dict()
        for storage in getattr(module, 'MOUNT_REDIRECT'):
            result[storage] = mount(
                storage=storage, storage_parameters=storage_parameters,
                unsecure=unsecure)
        return result
    # Find the concrete class for each base-class role in the module.
    classes_items = tuple(_BASE_CLASSES.items())
    for member_name in dir(module):
        member = getattr(module, member_name)
        for cls_name, cls in classes_items:
            try:
                if not issubclass(member, cls) or member is cls:
                    continue
            except TypeError:
                # Not a class at all.
                continue
            # A '__DEFAULT_CLASS' flag can force-include or exclude a
            # class; otherwise abstract classes are skipped.
            default_flag = '_%s__DEFAULT_CLASS' % member.__name__.strip('_')
            try:
                is_default = getattr(member, default_flag)
            except AttributeError:
                is_default = None
            if is_default is False:
                continue
            elif is_default is not True and member.__abstractmethods__:
                continue
            storage_info[cls_name] = member
            break
    storage_info['system_cached'] = storage_info['system'](
        **system_parameters)
    roots = storage_info['system_cached'].roots
    if extra_root:
        roots = list(roots)
        roots.append(extra_root)
        roots = tuple(roots)
    storage_info['system_cached'].roots = storage_info['roots'] = roots
    # Re-sort MOUNTED so the most specific roots are matched first.
    with _MOUNT_LOCK:
        for root in roots:
            MOUNTED[root] = storage_info
        items = OrderedDict(
            (key, MOUNTED[key])
            for key in reversed(sorted(MOUNTED, key=_compare_root)))
        MOUNTED.clear()
        MOUNTED.update(items)
    return {storage: storage_info}
Mount a new storage .
48,034
def _system_parameters ( ** kwargs ) : return { key : value for key , value in kwargs . items ( ) if ( value is not None or value == { } ) }
Returns system keyword arguments removing Nones .
48,035
def manual_argument_parsing(argv):
    """Parse patch arguments and the command separated by '--'.

    Parsed by hand because argparse cannot express this layout.
    """
    if not argv or argv == ['-h'] or argv == ['--help']:
        print_help_and_exit()
    try:
        dashdash_index = argv.index('--')
    except ValueError:
        print_std_err('Must separate command by `--`')
        print_help_and_exit()
    patches, cmd = argv[:dashdash_index], argv[dashdash_index + 1:]
    if '--help' in patches or '-h' in patches:
        print_help_and_exit()
    all_patches = '--all' in patches
    if all_patches:
        patches.remove('--all')
    unknown_options = [patch for patch in patches if patch.startswith('-')]
    if unknown_options:
        print_std_err('Unknown options: {!r}'.format(unknown_options))
        print_help_and_exit()
    if patches and all_patches:
        print_std_err('--all and patches specified: {!r}'.format(patches))
        print_help_and_exit()
    return Arguments(all=all_patches, patches=tuple(patches), cmd=tuple(cmd))
Parse arguments by hand, because argparse doesn't quite do what we want.
48,036
def exists(self, path=None, client_kwargs=None, assume_exists=None):
    """Return True if *path* refers to an existing object.

    On permission errors, return *assume_exists* when set, otherwise
    re-raise.
    """
    try:
        self.head(path, client_kwargs)
    except ObjectNotFoundError:
        return False
    except ObjectPermissionError:
        if assume_exists is None:
            raise
        return assume_exists
    return True
Return True if path refers to an existing path .
48,037
def getctime(self, path=None, client_kwargs=None, header=None):
    """Return the creation time of *path*, read from its header."""
    header = self.head(path, client_kwargs, header)
    return self._getctime_from_header(header)
Return the creation time of path .
48,038
def getmtime(self, path=None, client_kwargs=None, header=None):
    """Return the time of last modification of *path*, read from its
    header."""
    header = self.head(path, client_kwargs, header)
    return self._getmtime_from_header(header)
Return the time of last modification of path.
48,039
def getsize(self, path=None, client_kwargs=None, header=None):
    """Return the size in bytes of *path*, read from its header."""
    header = self.head(path, client_kwargs, header)
    return self._getsize_from_header(header)
Return the size in bytes of path .
48,040
def _getsize_from_header ( self , header ) : for key in self . _SIZE_KEYS : try : return int ( header . pop ( key ) ) except KeyError : continue else : raise UnsupportedOperation ( 'getsize' )
Return the size from header
48,041
def isfile(self, path=None, client_kwargs=None, assume_exists=None):
    """Return True if *path* is an existing regular file (not a root,
    not a directory path, not a locator)."""
    relative = self.relpath(path)
    if not relative:
        return False
    if path[-1] == '/' or self.is_locator(path, relative=True):
        return False
    return self.exists(path=path, client_kwargs=client_kwargs,
                       assume_exists=assume_exists)
Return True if path is an existing regular file .
48,042
def relpath(self, path):
    """Return *path* relative to the storage root, leading slashes
    stripped; unchanged when no root matches."""
    for root in self.roots:
        if isinstance(root, Pattern):
            matched = root.match(path)
            if not matched:
                continue
            root = matched.group(0)
        try:
            stripped = path.split(root, 1)[1]
        except IndexError:
            # Root not present in the path.
            continue
        return stripped.lstrip('/')
    return path
Get path relative to storage .
48,043
def is_locator(self, path, relative=False):
    """Return True if *path* refers to a locator (top-level container)."""
    if not relative:
        path = self.relpath(path)
    # A locator has no '/' once trailing separators are removed.
    return path and '/' not in path.rstrip('/')
Returns True if path refer to a locator .
48,044
def ensure_dir_path(self, path, relative=False):
    """Return *path* formatted as a directory path: locators lose any
    trailing '/', other non-empty paths end with exactly one '/'."""
    rel_path = path if relative else self.relpath(path)
    if self.is_locator(rel_path, relative=True):
        return path.rstrip('/')
    if rel_path:
        return path.rstrip('/') + '/'
    return path
Ensure the path is a dir path .
48,045
def stat(self, path=None, client_kwargs=None, header=None):
    """Get the status of an object as an os.stat_result-like named
    tuple.

    Standard fields default to 0; remaining header entries are
    appended as extra 'st_<sanitized key>' fields.
    """
    stat = OrderedDict((
        ("st_mode", 0), ("st_ino", 0), ("st_dev", 0), ("st_nlink", 0),
        ("st_uid", 0), ("st_gid", 0), ("st_size", 0), ("st_atime", 0),
        ("st_mtime", 0), ("st_ctime", 0)))
    header = self.head(path, client_kwargs, header)
    # Fill in whatever sizes/times the backend supports.
    for key, method in (('st_size', self._getsize_from_header),
                        ('st_ctime', self._getctime_from_header),
                        ('st_mtime', self._getmtime_from_header),):
        try:
            stat[key] = int(method(header))
        except UnsupportedOperation:
            continue
    # File-type bits: link, then directory (zero-size '/'-terminated
    # path or locator), else regular file.
    if self.islink(path=path, header=header):
        stat['st_mode'] = S_IFLNK
    elif ((not path or path[-1] == '/' or self.is_locator(path)) and
            not stat['st_size']):
        stat['st_mode'] = S_IFDIR
    else:
        stat['st_mode'] = S_IFREG
    # Leftover header entries become extra 'st_*' fields, with
    # disallowed characters filtered out of the key.
    sub = self._CHAR_FILTER.sub
    for key, value in tuple(header.items()):
        stat['st_' + sub('', key.lower())] = value
    stat_result = namedtuple('stat_result', tuple(stat))
    stat_result.__name__ = 'os.stat_result'
    stat_result.__module__ = 'pycosio'
    return stat_result(**stat)
Get the status of an object .
48,046
def init(uri=None, alembic_ini=None, engine=None, create=False):
    """Initialize the database session; optionally create tables.

    At most one of *uri*/*engine* may be given; defaults to a local
    SQLite file. Stamps the alembic head revision when *alembic_ini*
    is provided. Re-initialization is a warning no-op.
    """
    if uri and engine:
        raise ValueError("uri and engine cannot both be specified")
    if uri is None and not engine:
        uri = 'sqlite:////tmp/datanommer.db'
        log.warning("No db uri given. Using %r" % uri)
    if uri and not engine:
        engine = create_engine(uri)
    if 'sqlite' in engine.driver:
        # pysqlite needs manual transaction handling for SAVEPOINT
        # support (see SQLAlchemy's pysqlite driver notes).
        @event.listens_for(engine, "connect")
        def do_connect(dbapi_connection, connection_record):
            dbapi_connection.isolation_level = None

        @event.listens_for(engine, "begin")
        def do_begin(conn):
            conn.execute("BEGIN")
    if getattr(session, '_datanommer_initialized', None):
        log.warning("Session already initialized.  Bailing")
        return
    session._datanommer_initialized = True
    session.configure(bind=engine)
    DeclarativeBase.query = session.query_property()
    if alembic_ini is not None:
        # Local import: alembic is only needed when stamping.
        from alembic.config import Config
        from alembic import command
        alembic_cfg = Config(alembic_ini)
        command.stamp(alembic_cfg, "head")
    if create:
        DeclarativeBase.metadata.create_all(engine)
Initialize a connection . Create tables if requested .
48,047
def add(envelope):
    """Take a dict-like fedmsg envelope and store the headers and
    message in the table.

    Builds (or derives) a message id, persists the Message row, and
    records user and package associations; duplicate ids are skipped
    with a warning.
    """
    message = envelope['body']
    timestamp = message.get('timestamp', None)
    try:
        if timestamp:
            timestamp = datetime.datetime.utcfromtimestamp(timestamp)
        else:
            timestamp = datetime.datetime.utcnow()
    except Exception:
        # Best effort: keep the raw timestamp on conversion failure.
        pass
    headers = envelope.get('headers', None)
    msg_id = message.get('msg_id', None)
    if not msg_id and headers:
        msg_id = headers.get('message-id', None)
    if not msg_id:
        # Fall back to a generated '<year>-<uuid4>' identifier.
        msg_id = six.text_type(timestamp.year) + six.u('-') + \
            six.text_type(uuid.uuid4())
    obj = Message(
        i=message.get('i', 0),
        msg_id=msg_id,
        topic=message['topic'],
        timestamp=timestamp,
        username=message.get('username', None),
        crypto=message.get('crypto', None),
        certificate=message.get('certificate', None),
        signature=message.get('signature', None),
    )
    obj.msg = message['msg']
    obj.headers = headers
    try:
        session.add(obj)
        session.flush()
    except IntegrityError:
        # Duplicate message id: skip this message entirely.
        log.warning('Skipping message from %s with duplicate id: %s',
                    message['topic'], msg_id)
        session.rollback()
        return
    usernames = fedmsg.meta.msg2usernames(message)
    packages = fedmsg.meta.msg2packages(message)
    # Scrub None entries some meta plugins can produce.
    if None in usernames:
        log.error('NoneType found in usernames of %r' % msg_id)
        usernames = [name for name in usernames if name is not None]
    if None in packages:
        log.error('NoneType found in packages of %r' % msg_id)
        packages = [pkg for pkg in packages if pkg is not None]
    # Process-local caches avoid re-querying known users/packages.
    for username in usernames:
        if username not in _users_seen:
            User.get_or_create(username)
            _users_seen.add(username)
    for package in packages:
        if package not in _packages_seen:
            Package.get_or_create(package)
            _packages_seen.add(package)
    session.flush()
    values = [{'username': username, 'msg': obj.id}
              for username in usernames]
    if values:
        session.execute(user_assoc_table.insert(), values)
    values = [{'package': package, 'msg': obj.id}
              for package in packages]
    if values:
        session.execute(pack_assoc_table.insert(), values)
    session.flush()
    session.commit()
Take a dict - like fedmsg envelope and store the headers and message in the table .
48,048
def get_or_create(cls, name):
    """Return the instance of the class with the specified name,
    creating it if it does not already exist.

    A nested transaction guards against concurrent inserts; on an
    IntegrityError the now-existing row is fetched instead.
    """
    obj = cls.query.filter_by(name=name).one_or_none()
    if obj:
        return obj
    try:
        with session.begin_nested():
            obj = cls(name=name)
            session.add(obj)
            session.flush()
            return obj
    except IntegrityError:
        # Another transaction inserted the same name first.
        log.debug(
            'Collision when adding %s(name="%s"), returning existing object',
            cls.__name__, name)
        return cls.query.filter_by(name=name).one()
Return the instance of the class with the specified name. If it doesn't already exist, create it.
48,049
def relpath(path, start=None):
    """Return a storage-relative file path, optionally made relative to
    the *start* directory."""
    relative = get_instance(path).relpath(path)
    if not start:
        return relative
    return os_path_relpath(relative, start=start).replace('\\', '/')
Return a relative file path to path either from the current directory or from an optional start directory .
48,050
def samefile(path1, path2):
    """Return True when both pathname arguments refer to the same file
    or directory."""
    path1, path1_is_storage = format_and_is_storage(path1)
    path2, path2_is_storage = format_and_is_storage(path2)
    if not path1_is_storage and not path2_is_storage:
        return os_path_samefile(path1, path2)
    if not path1_is_storage or not path2_is_storage:
        # One local, one storage: never the same object.
        return False
    with handle_os_exceptions():
        system = get_instance(path1)
        if system is not get_instance(path2):
            return False
        if system.relpath(path1) != system.relpath(path2):
            return False
    return True
Return True if both pathname arguments refer to the same file or directory .
48,051
def handle_os_exceptions():
    """Context-manager body converting package exceptions (and any
    unexpected error) to standard OS exceptions; OS-level exceptions
    pass through unchanged."""
    try:
        yield
    except ObjectException:
        error_type, error_value, _ = exc_info()
        raise _OS_EXCEPTIONS.get(error_type, OSError)(error_value)
    except (OSError, same_file_error, UnsupportedOperation):
        raise
    except Exception:
        error_type, error_value, _ = exc_info()
        raise OSError('%s%s' % (
            error_type, (', %s' % error_value) if error_value else ''))
Handles pycosio exceptions and raise standard OS exceptions .
48,052
def listdir(path='.'):
    """Return a list of the names of the entries in the directory given
    by *path* (trailing slashes stripped)."""
    system = get_instance(path)
    return [name.rstrip('/')
            for name, _ in system.list_objects(path, first_level=True)]
Return a list containing the names of the entries in the directory given by path .
48,053
def mkdir(path, mode=0o777, dir_fd=None):
    """Create a directory named *path*.

    *mode* and *dir_fd* are accepted for os.mkdir compatibility and
    ignored. Raises ObjectNotFoundError when the parent is missing and
    ObjectExistsError when the directory already exists.
    """
    system = get_instance(path)
    relative = system.relpath(path)
    parent_dir = dirname(relative.rstrip('/'))
    if parent_dir:
        # Rebuild the parent's absolute path and check it exists.
        parent = path.rsplit(relative, 1)[0] + parent_dir + '/'
        if not system.isdir(parent):
            raise ObjectNotFoundError(
                "No such file or directory: '%s'" % parent)
    if system.isdir(system.ensure_dir_path(path)):
        raise ObjectExistsError("File exists: '%s'" % path)
    system.make_dir(relative, relative=True)
Create a directory named path with numeric mode mode .
48,054
def remove(path, dir_fd=None):
    """Remove the file *path* (*dir_fd* kept for os.remove
    compatibility and ignored); directories are rejected."""
    system = get_instance(path)
    if system.is_locator(path) or path[-1] == '/':
        raise is_a_directory_error("Is a directory: '%s'" % path)
    system.remove(path)
Remove a file .
48,055
def rmdir(path, dir_fd=None):
    """Remove the directory *path* (*dir_fd* kept for os.rmdir
    compatibility and ignored)."""
    system = get_instance(path)
    system.remove(system.ensure_dir_path(path))
Remove a directory .
48,056
def scandir(path='.'):
    """Return an iterator of os.DirEntry-like objects for the entries in
    *path*, in arbitrary order and excluding '.' and '..'; local paths
    fall back to os.scandir."""
    scandir_path = fsdecode(path).replace('\\', '/')
    if not is_storage(scandir_path):
        return os_scandir(scandir_path)
    # Preserve bytes vs str in yielded entry names.
    is_bytes = isinstance(fspath(path), (bytes, bytearray))
    return _scandir_generator(is_bytes=is_bytes, scandir_path=scandir_path,
                              system=get_instance(scandir_path))
Return an iterator of os . DirEntry objects corresponding to the entries in the directory given by path . The entries are yielded in arbitrary order and the special entries . and .. are not included .
48,057
def _flush_raw_or_buffered(self):
    """Flush using raw or buffered methods.

    When the buffer holds data and previous flushes already advanced
    the seek, use the buffered (multi-part) flush; otherwise hand the
    buffer to the raw object and flush it in one request.
    """
    if self._buffer_seek and self._seek:
        # NOTE(review): assumes _seek counts flushed buffer parts here
        # — confirm against this class's _flush implementation.
        self._seek += 1
        with handle_os_exceptions():
            self._flush()
    elif self._buffer_seek:
        # Nothing flushed yet: small object, single raw write.
        self._raw._write_buffer = self._get_buffer()
        self._raw._seek = self._buffer_seek
        self._raw.flush()
Flush using raw of buffered methods .
48,058
def _preload_range ( self ) : queue = self . _read_queue size = self . _buffer_size start = self . _seek end = int ( start + size * self . _max_buffers ) workers_submit = self . _workers . submit indexes = tuple ( range ( start , end , size ) ) for seek in tuple ( queue ) : if seek not in indexes : del queue [ seek ] read_range = self . _read_range for seek in indexes : if seek not in queue : queue [ seek ] = workers_submit ( read_range , seek , seek + size )
Preload data for reading
48,059
def _detect_content_type(self, content, encoding):
    """Auto-detect the QR data mode for *content*.

    Tries numeric (all decimal digits), then alphanumeric (the QR
    codes' restricted ASCII table), then kanji (Shift JIS double-byte
    ranges); anything else is binary. Returns (mode, encoding).
    """
    def two_bytes(c):
        # Yield the data as 16-bit big-endian values.
        def next_byte(b):
            # bytes indexing yields ints on py3, 1-char strings on py2.
            if not isinstance(b, int):
                return ord(b)
            else:
                return b
        for i in range(0, len(c), 2):
            yield (next_byte(c[i]) << 8) | next_byte(c[i + 1])
    # Numeric mode.
    try:
        if str(content).isdigit():
            return 'numeric', encoding
    except (TypeError, UnicodeError):
        pass
    # Alphanumeric mode: every character must be in the QR table.
    valid_characters = ''.join(tables.ascii_codes.keys())
    valid_characters = valid_characters.encode('ASCII')
    try:
        if isinstance(content, bytes):
            c = content.decode('ASCII')
        else:
            c = str(content).encode('ASCII')
        if all(map(lambda x: x in valid_characters, c)):
            return 'alphanumeric', 'ASCII'
    # Non-ASCII content falls through to the kanji/binary tests.
    except TypeError:
        pass
    except UnicodeError:
        pass
    # Kanji mode: each 16-bit value must fall in the Shift JIS ranges.
    try:
        if isinstance(content, bytes):
            if encoding is None:
                encoding = 'shiftjis'
            c = content.decode(encoding).encode('shiftjis')
        else:
            c = content.encode('shiftjis')
        if len(c) % 2 != 0:
            # Odd length cannot be all double-byte characters.
            return 'binary', encoding
        for asint in two_bytes(c):
            if not (0x8140 <= asint <= 0x9FFC or
                    0xE040 <= asint <= 0xEBBF):
                return 'binary', encoding
        return 'kanji', encoding
    except UnicodeError:
        pass
    return 'binary', encoding
This method tries to auto - detect the type of the data . It first tries to see if the data is a valid integer in which case it returns numeric . Next it tests the data to see if it is alphanumeric . QR Codes use a special table with very limited range of ASCII characters . The code s data is tested to make sure it fits inside this limited range . If all else fails the data is determined to be of type binary . Returns a tuple containing the detected mode and encoding .
48,060
def _pick_best_fit(self, content):
    """Return the smallest QR version (1-40) whose capacity fits
    *content* at the configured error level and mode.

    Raises ValueError when even version 40 is too small.
    """
    import math
    for version in range(1, 41):
        capacity = tables.data_capacity[version][self.error][self.mode_num]
        # Kanji characters occupy two bytes each in the capacity table.
        if (self.mode_num == tables.modes['kanji'] and
                capacity >= math.ceil(len(content) / 2)):
            return version
        if capacity >= len(content):
            return version
    raise ValueError('The data will not fit in any QR code version '
                     'with the given encoding and error level.')
This method return the smallest possible QR code version number that will fit the specified data with the given error level .
48,061
def show(self, wait=1.2, scale=10, module_color=(0, 0, 0, 255),
         background=(255, 255, 255, 255), quiet_zone=4):
    """Display this QR code in the default web browser.

    Renders a temporary PNG, opens it in a new tab, waits *wait*
    seconds for the browser to load it, then deletes the file.
    """
    import os
    import time
    import tempfile
    import webbrowser
    try:  # Python 2
        from urlparse import urljoin
        from urllib import pathname2url
    except ImportError:  # Python 3
        from urllib.parse import urljoin
        from urllib.request import pathname2url
    # delete=False so the browser can open the file after it is closed.
    f = tempfile.NamedTemporaryFile('wb', suffix='.png', delete=False)
    self.png(f, scale=scale, module_color=module_color,
             background=background, quiet_zone=quiet_zone)
    f.close()
    webbrowser.open_new_tab(urljoin('file:', pathname2url(f.name)))
    # Give the browser time to read the file before unlinking it.
    time.sleep(wait)
    os.unlink(f.name)
Displays this QR code .
48,062
def png(self, file, scale=1, module_color=(0, 0, 0, 255),
        background=(255, 255, 255, 255), quiet_zone=4):
    """Write this QR code to *file* (a path or a writable stream) as a
    PNG image with a bit depth of 1."""
    builder._png(self.code, self.version, file, scale,
                 module_color, background, quiet_zone)
This method writes the QR code out as an PNG image . The resulting PNG has a bit depth of 1 . The file parameter is used to specify where to write the image to . It can either be an writable stream or a file path .
48,063
def svg(self, file, scale=1, module_color='#000', background=None,
        quiet_zone=4, xmldecl=True, svgns=True, title=None,
        svgclass='pyqrcode', lineclass='pyqrline', omithw=False,
        debug=False):
    """Write this QR code to *file* as an SVG document.

    Only the dark (1) modules are drawn; contiguous dark modules in a
    row are merged into a single line element. All options are passed
    through to the builder unchanged.

    :param file: writable stream or a file path.
    :param scale: size of one module in SVG user units.
    :param module_color: CSS color of the dark modules.
    :param background: CSS color of the background, or None.
    :param quiet_zone: border width, in modules.
    """
    options = dict(scale=scale, module_color=module_color,
                   background=background, quiet_zone=quiet_zone,
                   xmldecl=xmldecl, svgns=svgns, title=title,
                   svgclass=svgclass, lineclass=lineclass,
                   omithw=omithw, debug=debug)
    builder._svg(self.code, self.version, file, **options)
This method writes the QR code out as an SVG document . The code is drawn by drawing only the modules corresponding to a 1 . They are drawn using a line such that contiguous modules in a row are drawn with a single line .
48,064
def eps(self, file, scale=1, module_color=(0, 0, 0), background=None,
        quiet_zone=4):
    """Write this QR code to *file* as an EPS document.

    Only the dark (1) modules are drawn; contiguous dark modules in a
    row are drawn with a single line.

    :param file: writable stream or a file path.
    :param scale: size of one module.
    :param module_color: RGB color of the dark modules.
    :param background: background color, or None for none.
    :param quiet_zone: border width, in modules.
    """
    # Rendering is delegated to the builder module.
    builder._eps(self.code, self.version, file, scale, module_color,
                 background, quiet_zone)
This method writes the QR code out as an EPS document . The code is drawn by only writing the data modules corresponding to a 1 . They are drawn using a line such that contiguous modules in a row are drawn with a single line .
48,065
def terminal(self, module_color='default', background='reverse',
             quiet_zone=4):
    """Return a string of ASCII escape codes that displays this QR
    code when printed on a compatible terminal.

    :param module_color: terminal color used for the data modules.
    :param background: terminal color used for the background.
    :param quiet_zone: border width, in modules.
    :returns: the escape-code string produced by the builder.
    """
    return builder._terminal(self.code, module_color, background,
                             quiet_zone)
This method returns a string containing ASCII escape codes, such that if printed to a compatible terminal, it will display a valid QR code. The code is drawn using ASCII escape codes that alter the coloring of the background.
48,066
def _get_writable ( stream_or_path , mode ) : is_stream = hasattr ( stream_or_path , 'write' ) if not is_stream : stream_or_path = open ( stream_or_path , mode ) return stream_or_path , not is_stream
This method returns a tuple containing the stream and a flag to indicate if the stream should be automatically closed .
48,067
def _text ( code , quiet_zone = 4 ) : buf = io . StringIO ( ) border_row = '0' * ( len ( code [ 0 ] ) + ( quiet_zone * 2 ) ) for b in range ( quiet_zone ) : buf . write ( border_row ) buf . write ( '\n' ) for row in code : for b in range ( quiet_zone ) : buf . write ( '0' ) for bit in row : if bit == 1 : buf . write ( '1' ) elif bit == 0 : buf . write ( '0' ) else : buf . write ( ' ' ) for b in range ( quiet_zone ) : buf . write ( '0' ) buf . write ( '\n' ) for b in range ( quiet_zone ) : buf . write ( border_row ) buf . write ( '\n' ) return buf . getvalue ( )
This method returns a text based representation of the QR code . This is useful for debugging purposes .
48,068
def _xbm ( code , scale = 1 , quiet_zone = 4 ) : try : str = unicode except NameError : str = __builtins__ [ 'str' ] buf = io . StringIO ( ) pixel_width = ( len ( code [ 0 ] ) + quiet_zone * 2 ) * scale buf . write ( '#define im_width ' ) buf . write ( str ( pixel_width ) ) buf . write ( '\n' ) buf . write ( '#define im_height ' ) buf . write ( str ( pixel_width ) ) buf . write ( '\n' ) buf . write ( 'static char im_bits[] = {\n' ) byte_width = int ( math . ceil ( pixel_width / 8.0 ) ) buf . write ( ( '0x00,' * byte_width + '\n' ) * quiet_zone * scale ) for row in code : row_bits = '0' * quiet_zone * scale for pixel in row : row_bits += str ( pixel ) * scale row_bits += '0' * quiet_zone * scale formated_row = '' for b in range ( byte_width ) : formated_row += '0x{0:02x},' . format ( int ( row_bits [ : 8 ] [ : : - 1 ] , 2 ) ) row_bits = row_bits [ 8 : ] formated_row += '\n' buf . write ( formated_row * scale ) buf . write ( ( '0x00,' * byte_width + '\n' ) * quiet_zone * scale ) buf . write ( '};' ) return buf . getvalue ( )
This function will format the QR code as a X BitMap . This can be used to display the QR code with Tkinter .
48,069
def _hex_to_rgb ( color ) : if color [ 0 ] == '#' : color = color [ 1 : ] if len ( color ) == 3 : color = color [ 0 ] * 2 + color [ 1 ] * 2 + color [ 2 ] * 2 if len ( color ) != 6 : raise ValueError ( 'Input #{0} is not in #RRGGBB format' . format ( color ) ) return [ int ( n , 16 ) for n in ( color [ : 2 ] , color [ 2 : 4 ] , color [ 4 : ] ) ]
Helper function to convert a color provided in hexadecimal format into an RGB triple.
48,070
def grouper(self, n, iterable, fillvalue=None):
    """Yield the elements of *iterable* in tuples of length *n*.

    If the iterable does not divide evenly, the final tuple is padded
    with *fillvalue*.
    """
    # Passing the same iterator n times makes zip consume n items per
    # output tuple.
    chunks = [iter(iterable)] * n
    try:
        zipper = itertools.zip_longest   # Python 3
    except AttributeError:
        zipper = itertools.izip_longest  # Python 2
    return zipper(*chunks, fillvalue=fillvalue)
This generator yields a set of tuples where the iterable is broken into n sized chunks . If the iterable is not evenly sized then fillvalue will be appended to the last tuple to make up the difference .
48,071
def get_data_length(self):
    """Build the character-count (data length) field for the header.

    The field's bit width depends on the version bracket (1-9, 10-26,
    27-40) and the encoding mode. In kanji mode the count is in
    characters, i.e. half the number of Shift JIS bytes.

    :returns: the length encoded as a binary string of the proper width.
    :raises ValueError: if the data is too long for this version.
    """
    # The length field width is defined per version bracket.
    if 1 <= self.version <= 9:
        max_version = 9
    elif 10 <= self.version <= 26:
        max_version = 26
    elif 27 <= self.version <= 40:
        max_version = 40

    data_length = tables.data_length_field[max_version][self.mode]

    if self.mode != tables.modes['kanji']:
        length_string = self.binary_string(len(self.data), data_length)
    else:
        # Each kanji character occupies two Shift JIS bytes. Use floor
        # division: plain `/ 2` produced a float under Python 3's true
        # division (byte counts are even, so the value is unchanged).
        length_string = self.binary_string(len(self.data) // 2, data_length)

    if len(length_string) > data_length:
        raise ValueError('The supplied data will not fit '
                         'within this version of a QRCode.')
    return length_string
QR codes contain a data length field . This method creates this field . A binary string representing the appropriate length is returned .
48,072
def encode(self):
    """Encode self.data into a '0'/'1' string using the algorithm
    appropriate for this code's mode.
    """
    if self.mode == tables.modes['alphanumeric']:
        return self.encode_alphanumeric()
    elif self.mode == tables.modes['numeric']:
        return self.encode_numeric()
    elif self.mode == tables.modes['binary']:
        return self.encode_bytes()
    elif self.mode == tables.modes['kanji']:
        return self.encode_kanji()
This method encodes the data into a binary string using the appropriate algorithm specified by the mode .
48,073
def encode_alphanumeric(self):
    """Encode self.data in alphanumeric mode; returns a bit string.

    Characters are translated through the QR alphanumeric code table,
    then packed in pairs of eleven bits (45 * first + second); a lone
    trailing character takes six bits.
    """
    # The alphanumeric table only covers upper-case letters.
    self.data = self.data.upper()

    # Translate each character (byte or str) to its table value.
    codes = [tables.ascii_codes[chr(c) if isinstance(c, int) else c]
             for c in self.data]

    with io.StringIO() as buf:
        for first, second in self.grouper(2, codes):
            if second is not None:
                buf.write(self.binary_string((45 * first) + second, 11))
            else:
                # Odd trailing character stands alone in six bits.
                buf.write(self.binary_string(first, 6))
        return buf.getvalue()
This method encodes the QR code s data if its mode is alphanumeric . It returns the data encoded as a binary string .
48,074
def encode_numeric(self):
    """Encode self.data in numeric mode; returns a bit string.

    Digits are taken three at a time; a full triple packs into ten
    bits, two digits into seven, and a single digit into four.
    """
    with io.StringIO() as buf:
        for triple in self.grouper(3, self.data):
            digits = ''
            for d in triple:
                if isinstance(d, int):
                    d = chr(d)
                if not d:
                    break  # fill value: the final group was short
                digits = ''.join([digits, d])
            # Bit width by group length: 1 -> 4, 2 -> 7, 3 -> 10.
            width = {1: 4, 2: 7}.get(len(digits), 10)
            buf.write(self.binary_string(digits, width))
        return buf.getvalue()
This method encodes the QR code s data if its mode is numeric . It returns the data encoded as a binary string .
48,075
def encode_bytes(self):
    """Encode self.data in 8-bit byte mode; returns a bit string.

    Each character (or int byte value) becomes eight binary digits.
    """
    with io.StringIO() as buf:
        for ch in self.data:
            # Python 3 iterates bytes as ints; str iterates as chars.
            value = ch if isinstance(ch, int) else ord(ch)
            buf.write('{0:08b}'.format(value))
        return buf.getvalue()
This method encodes the QR code s data if its mode is 8 bit mode . It returns the data encoded as a binary string .
48,076
def encode_kanji(self):
    """Encode self.data in kanji mode; returns a '0'/'1' bit string.

    The data is re-encoded as Shift JIS, taken two bytes at a time,
    remapped into a 13-bit value per the QR specification, and each
    value is written as a 13-character binary string.
    """
    def two_bytes(data):
        """Yield each consecutive byte pair of *data* as a 16-bit int."""
        def next_byte(b):
            # Python 2 iterates bytes as 1-char strings, Python 3 as ints.
            if not isinstance(b, int):
                return ord(b)
            else:
                return b
        for i in range(0, len(data), 2):
            yield (next_byte(data[i]) << 8) | next_byte(data[i + 1])

    # Normalize the input to Shift JIS bytes; round-tripping a bytes
    # input validates that it really is Shift JIS encoded.
    if isinstance(self.data, bytes):
        data = self.data.decode('shiftjis').encode('shiftjis')
    else:
        data = self.data.encode('shiftjis')

    with io.StringIO() as buf:
        for asint in two_bytes(data):
            # Subtract the range-dependent offset defined by the standard.
            if 0x8140 <= asint <= 0x9FFC:
                difference = asint - 0x8140
            elif 0xE040 <= asint <= 0xEBBF:
                difference = asint - 0xC140
            # NOTE(review): a value outside both ranges would leave
            # `difference` unset and raise UnboundLocalError below.
            msb = (difference >> 8)
            lsb = (difference & 0x00FF)
            # Pack as msb * 0xC0 + lsb in exactly 13 bits.
            buf.write('{0:013b}'.format((msb * 0xC0) + lsb))
        return buf.getvalue()
This method encodes the QR code s data if its mode is kanji . It returns the data encoded as a binary string .
48,077
def add_data(self):
    """Construct the QR code's complete data string in self.buffer.

    Writes the mode indicator, length field, encoded payload,
    terminator, alignment bits and pad codewords, then splits the
    codewords into the error-correction blocks required by this
    version/error level and interleaves data and error codewords as
    the standard's interleaving pattern requires.
    """
    # Header: 4-bit mode indicator, then the character-count field.
    self.buffer.write(self.binary_string(self.mode, 4))
    self.buffer.write(self.get_data_length())
    self.buffer.write(self.encode())

    # Terminator (up to four zero bits).
    bits = self.terminate_bits(self.buffer.getvalue())
    if bits is not None:
        self.buffer.write(bits)

    # Pad to a whole number of 8-bit codewords.
    add_bits = self.delimit_words()
    if add_bits:
        self.buffer.write(add_bits)

    # Fill any remaining capacity with the standard pad codewords.
    fill_bytes = self.add_words()
    if fill_bytes:
        self.buffer.write(fill_bytes)

    # Regroup the bit stream into 8-bit codeword values.
    data = [int(''.join(x), 2)
            for x in self.grouper(8, self.buffer.getvalue())]

    # Block layout parameters for this version/error level; indices
    # presumably are (EC words per block, group-1 block count, group-1
    # block size, group-2 block count, group-2 block size) -- TODO
    # confirm against the tables module.
    error_info = tables.eccwbi[self.version][self.error]

    data_blocks = []
    error_blocks = []

    # Sizes of every data block: group 1, then optional group 2.
    data_block_sizes = [error_info[2]] * error_info[1]
    if error_info[3] != 0:
        data_block_sizes.extend([error_info[4]] * error_info[3])

    # Slice the codewords into their blocks.
    current_byte = 0
    for n_data_blocks in data_block_sizes:
        data_blocks.append(data[current_byte:current_byte + n_data_blocks])
        current_byte += n_data_blocks
    if current_byte < len(data):
        raise ValueError('Too much data for this code version.')

    # One error-correction block per data block.
    for n, block in enumerate(data_blocks):
        error_blocks.append(self.make_error_block(block, n))

    # Interleave: byte i of every data block, then byte i of every
    # error block.
    data_buffer = io.StringIO()
    largest_block = max(error_info[2], error_info[4]) + error_info[0]
    for i in range(largest_block):
        for block in data_blocks:
            if i < len(block):
                data_buffer.write(self.binary_string(block[i], 8))
    for i in range(error_info[0]):
        for block in error_blocks:
            data_buffer.write(self.binary_string(block[i], 8))
    self.buffer = data_buffer
This function properly constructs a QR code s data string . It takes into account the interleaving pattern required by the standard .
48,078
def terminate_bits(self, payload):
    """Return the terminator bits to append to *payload*.

    The terminator is up to four zero bits -- fewer when less room
    remains, and None when the payload already fills the capacity.

    :raises ValueError: if the payload exceeds this version's capacity.
    """
    capacity = tables.data_capacity[self.version][self.error][0]
    if len(payload) > capacity:
        raise ValueError('The supplied data will not fit '
                         'within this version of a QR code.')
    if len(payload) == capacity:
        return None  # already full; no terminator needed
    # Four zero bits, clipped to whatever room is left.
    return self.binary_string(0, min(4, capacity - len(payload)))
This method adds zeros to the end of the encoded data so that the encoded data is of the correct length . It returns a binary string containing the bits to be added .
48,079
def delimit_words(self):
    """Return zero bits padding the buffer to a whole number of bytes,
    or None when it is already byte-aligned.
    """
    remainder = len(self.buffer.getvalue()) % 8
    if remainder == 0:
        return None
    return self.binary_string(0, 8 - remainder)
This method takes the existing encoded binary string and returns a binary string that will pad it such that the encoded string contains only full bytes .
48,080
def add_words(self):
    """Return the pad codewords needed to fill the code's data
    capacity, or None when the buffer is already full.

    The standard specifies alternating pad codewords 0xEC (11101100)
    and 0x11 (00010001).
    """
    used_bytes = len(self.buffer.getvalue()) // 8
    total_bytes = tables.data_capacity[self.version][self.error][0] // 8
    needed_bytes = total_bytes - used_bytes
    if needed_bytes == 0:
        return None
    pads = itertools.cycle(['11101100', '00010001'])
    return ''.join(next(pads) for _ in range(needed_bytes))
The data block must fill the entire data capacity of the QR code . If we fall short then we must add bytes to the end of the encoded data field . The value of these bytes are specified in the standard .
48,081
def make_code(self):
    """Build the final QR code matrix and store it in self.code.

    Creates a blank template, draws every fixed pattern (detection,
    position adjustment, version), generates all masked candidates,
    and keeps the one selected by choose_best_mask().
    """
    matrix_size = tables.version_size[self.version]

    # Blank template; ' ' marks modules not yet assigned. A nested
    # comprehension builds independent rows directly -- the original
    # deep-copied a prototype row per row, which is unnecessary for a
    # list of immutable one-character strings.
    template = [[' ' for _ in range(matrix_size)]
                for _ in range(matrix_size)]

    # Draw the fixed patterns required by every QR code.
    self.add_detection_pattern(template)
    self.add_position_pattern(template)
    self.add_version_pattern(template)

    # Generate every masked candidate and keep the best one.
    self.masks = self.make_masks(template)
    self.best_mask = self.choose_best_mask()
    self.code = self.masks[self.best_mask]
This method returns the best possible QR code .
48,082
def add_detection_pattern(self, m):
    """Add the finder patterns, timing pattern, and the single dark
    module to the matrix *m* (modified in place).

    The three 7x7 finder patterns sit in the upper-left, upper-right
    and lower-left corners and let scanners orient the code; two
    alternating timing lines run between them.
    """
    # Outer dark square of each finder pattern. All four corners are
    # written; the lower-right one is blanked out again further down.
    for i in range(7):
        inv = -(i + 1)
        for j in [0, 6, -1, -7]:
            m[j][i] = 1
            m[i][j] = 1
            m[inv][j] = 1
            m[j][inv] = 1

    # Light ring inside each square.
    for i in range(1, 6):
        inv = -(i + 1)
        for j in [1, 5, -2, -6]:
            m[j][i] = 0
            m[i][j] = 0
            m[inv][j] = 0
            m[j][inv] = 0

    # 3x3 dark center of each square.
    for i in range(2, 5):
        for j in range(2, 5):
            inv = -(i + 1)
            m[i][j] = 1
            m[inv][j] = 1
            m[j][inv] = 1

    # Light separator border around the finder patterns.
    for i in range(8):
        inv = -(i + 1)
        for j in [7, -8]:
            m[i][j] = 0
            m[j][i] = 0
            m[inv][j] = 0
            m[j][inv] = 0

    # The lower-right corner carries no finder pattern: reset it.
    for i in range(-8, 0):
        for j in range(-8, 0):
            m[i][j] = ' '

    # Timing pattern: alternating dark/light modules along row and
    # column 6, between the finder patterns.
    bit = itertools.cycle([1, 0])
    for i in range(8, (len(m) - 8)):
        b = next(bit)
        m[i][6] = b
        m[6][i] = b

    # The single dark module required above the lower-left finder.
    m[-8][8] = 1
This method adds the detection patterns to the QR code, which let the scanner orient the pattern. They are required for all QR codes. The detection pattern consists of three boxes located at the upper-left, upper-right, and lower-left corners of the matrix. Two special lines, called the timing pattern, are also necessary. Finally, a single black pixel is added just above the lower-left black box.
48,083
def add_position_pattern(self, m):
    """Draw the position adjustment (alignment) patterns onto *m*.

    Required for all versions greater than 1; version 1 has none. The
    center coordinates come from the standard's table, and patterns
    that would overlap a finder pattern are skipped.
    """
    # Version 1 QR codes have no alignment patterns.
    if self.version == 1:
        return

    # Center coordinates of the alignment patterns for this version.
    coordinates = tables.position_adjustment[self.version]
    min_coord = coordinates[0]
    max_coord = coordinates[-1]

    for i in coordinates:
        for j in coordinates:
            # Skip the three corners occupied by finder patterns.
            if (i == min_coord and j == min_coord) or (i == min_coord and j == max_coord) or (i == max_coord and j == min_coord):
                continue

            # Center dark module.
            m[i][j] = 1

            # Ring of light modules around the center.
            for x in [-1, 1]:
                m[i + x][j + x] = 0
                m[i + x][j] = 0
                m[i][j + x] = 0
                m[i - x][j + x] = 0
                m[i + x][j - x] = 0

            # Outer ring of dark modules.
            for x in [-2, 2]:
                for y in [0, -1, 1]:
                    m[i + x][j + x] = 1
                    m[i + x][j + y] = 1
                    m[i + y][j + x] = 1
                    m[i - x][j + x] = 1
                    m[i + x][j - x] = 1
This method draws the position adjustment patterns onto the QR Code . All QR code versions larger than one require these special boxes called position adjustment patterns .
48,084
def add_version_pattern(self, m):
    """Draw the version information pattern onto *m*.

    Only codes of version 7 or higher carry this pattern; it is
    written in two mirrored 6x3 blocks near the finder patterns.
    """
    if self.version < 7:
        return

    # The version bit field, consumed least-significant bit first.
    field = iter(tables.version_pattern[self.version][::-1])

    start = len(m) - 11
    for row in range(6):
        for col in range(start, start + 3):
            bit = int(next(field))
            # Write the bit and its mirror image.
            m[row][col] = bit
            m[col][row] = bit
For QR codes with a version 7 or higher a special pattern specifying the code s version is required .
48,085
def make_masks(self, template):
    """Return one masked code matrix per mask pattern, so the best
    mask can be chosen later.

    Each candidate starts as a deep copy of *template*; the format
    (type) information is drawn, then the data bits are laid out in
    the standard's two-column serpentine order, inverting each bit
    wherever the mask pattern function is true.
    """
    from copy import deepcopy

    nmasks = len(tables.mask_patterns)
    masks = [''] * nmasks
    count = 0  # NOTE(review): assigned but never used

    for n in range(nmasks):
        cur_mask = deepcopy(template)
        masks[n] = cur_mask

        # Draw the format bits for this error level and mask number.
        self.add_type_pattern(cur_mask, tables.type_bits[self.error][n])

        # Mask function deciding which data modules get inverted.
        pattern = tables.mask_patterns[n]

        bits = iter(self.buffer.getvalue())

        # Columns are filled alternately bottom-to-top and
        # top-to-bottom; these cycles supply the range parameters.
        row_start = itertools.cycle([len(cur_mask) - 1, 0])
        row_stop = itertools.cycle([-1, len(cur_mask)])
        direction = itertools.cycle([-1, 1])

        # Data is laid out in two-module-wide column pairs, moving
        # right to left across the matrix.
        for column in range(len(cur_mask) - 1, 0, -2):
            # Shift left past the vertical timing pattern at column 6.
            if column <= 6:
                column = column - 1

            column_pair = itertools.cycle([column, column - 1])

            for row in range(next(row_start), next(row_stop),
                             next(direction)):
                for i in range(2):
                    col = next(column_pair)

                    # Skip modules already reserved for fixed patterns.
                    if cur_mask[row][col] != ' ':
                        continue

                    # Once the data runs out, pad with zero bits.
                    try:
                        bit = int(next(bits))
                    except:
                        bit = 0

                    # Apply the mask: invert where the pattern is true.
                    if pattern(row, col):
                        cur_mask[row][col] = bit ^ 1
                    else:
                        cur_mask[row][col] = bit
    return masks
This method generates all seven masks so that the best mask can be determined. The template parameter is a code matrix that will serve as the base for all the generated masks.
48,086
def add_type_pattern(self, m, type_bits):
    """Write the format information (*type_bits*) into matrix *m*.

    These bits encode the error level and the mask pattern in use;
    they are placed around the upper-left finder pattern and mirrored
    beside the other two finder patterns.
    """
    field = iter(type_bits)
    for i in range(7):
        bit = int(next(field))

        # Skip the timing pattern module at column 6.
        if i < 6:
            m[8][i] = bit
        else:
            m[8][i + 1] = bit
        # Mirrored copy along the right/bottom.
        # NOTE(review): this condition is always true for i in 0..6.
        if -8 < -(i + 1):
            m[-(i + 1)][8] = bit
    for i in range(-8, 0):
        bit = int(next(field))
        m[8][i] = bit

        i = -i
        # Skip the timing pattern module at row 6.
        if i > 6:
            m[i][8] = bit
        else:
            m[i - 1][8] = bit
This will add the pattern to the QR code that represents the error level and the type of mask used to make the code .
48,087
def split_pulls(all_issues, project="arokem/python-matlab-bridge"):
    """Split a list of closed issues into plain issues and pull requests.

    Pull requests are re-fetched individually (authenticated) so the
    returned objects carry full PR details.

    :returns: tuple ``(issues, pulls)``.
    """
    issues, pulls = [], []
    for issue in all_issues:
        if is_pull_request(issue):
            pulls.append(get_pull_request(project, issue['number'],
                                          auth=True))
        else:
            issues.append(issue)
    return issues, pulls
split a list of closed issues into non - PR Issues and Pull Requests
48,088
def issues_closed_since(period=timedelta(days=365),
                        project="arokem/python-matlab-bridge",
                        pulls=False):
    """Get all issues closed since a particular point in time.

    *period* may be a datetime (issues closed after it) or a timedelta
    (a window before now, normalized with round_hour). With pulls=True,
    only pull requests merged into master are returned instead of
    plain issues.
    """
    which = 'pulls' if pulls else 'issues'

    if isinstance(period, timedelta):
        since = round_hour(datetime.utcnow() - period)
    else:
        since = period

    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
    allclosed = get_paged_request(url, headers=make_auth_header())

    # The API's `since` filters on update time; re-filter on the close
    # timestamp we actually care about.
    closed = [i for i in allclosed
              if _parse_datetime(i['closed_at']) > since]
    if pulls:
        # Keep only PRs merged within the window, targeting master.
        closed = [i for i in closed
                  if _parse_datetime(i['merged_at']) > since]
        closed = [i for i in closed if i['base']['ref'] == 'master']
    else:
        # The issues endpoint also returns PRs; drop them.
        closed = [i for i in closed if not is_pull_request(i)]

    return closed
Get all issues closed since a particular point in time . period can either be a datetime object or a timedelta object . In the latter case it is used as a time before the present .
48,089
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return *issues* sorted by *field* (closing date by default)."""
    def key(issue):
        return issue[field]
    return sorted(issues, key=key, reverse=reverse)
Return a list of issues sorted by closing date.
48,090
def report(issues, show_urls=False):
    """Print a one-line summary (number and title) for each issue.

    Backticks in titles are doubled so the output is safe to paste
    into reStructuredText/Markdown.
    """
    template = u'#%d: %s' if show_urls else u'* %d: %s'
    for issue in issues:
        title = issue['title'].replace(u'`', u'``')
        print(template % (issue['number'], title))
Summary report about a list of issues printing number and title .
48,091
def encode_ndarray(obj):
    """Serialize a numpy array as a (base64 text, shape) pair.

    1-D arrays are reported with a single-row (1, n) shape. The array
    is transposed -- and made Fortran-ordered when it is neither C- nor
    F-contiguous -- before being flattened to float64 bytes.

    :returns: tuple ``(data, shape)`` where *data* is a base64 string.
    """
    shape = obj.shape
    if len(shape) == 1:
        shape = (1, shape[0])

    transposed = obj.T
    if not (obj.flags.c_contiguous or obj.flags.f_contiguous):
        transposed = asfortranarray(transposed)

    values = transposed.astype(float64)
    try:
        raw = values.tobytes()
    except AttributeError:
        # Very old numpy releases spell it tostring().
        raw = values.tostring()

    return base64.b64encode(raw).decode('utf-8'), shape
Write a numpy array and its shape to base64 buffers
48,092
def decode_arr(data):
    """Decode a base64 text buffer into a 1-D float64 numpy array."""
    raw = base64.b64decode(data.encode('utf-8'))
    return frombuffer(raw, float64)
Extract a numpy array from a base64 buffer
48,093
def run_func(self, func_path, *func_args, **kwargs):
    """Run a Matlab function and return its decoded result.

    :param func_path: path to the .m file (or bare function name).
    :param func_args: positional arguments forwarded to the function.
    :param kwargs: keyword arguments, flattened into name/value pairs
        and appended to the positional arguments (Matlab-style);
        ``nargout`` is consumed here and controls the number of
        expected outputs.
    :raises ValueError: if the session has not been started.
    :raises TypeError: if *func_path* has an extension other than .m.
    """
    if not self.started:
        raise ValueError('Session not started, use start()')

    nargout = kwargs.pop('nargout', 1)

    # Flatten remaining keyword arguments into name/value pairs.
    for name, value in kwargs.items():
        func_args += (name, value)

    dname, fname = os.path.split(func_path)
    func_name, ext = os.path.splitext(fname)
    if ext and ext != '.m':
        raise TypeError('Need to give path to .m file')

    return self._json_response(cmd='eval', func_name=func_name,
                               func_args=func_args or '', dname=dname,
                               nargout=nargout)
Run a function in Matlab and return the result .
48,094
def _bind_method(self, name, unconditionally=False):
    """Generate a Matlab function proxy and bind it to this instance.

    :param name: name of the Matlab function to bind.
    :param unconditionally: when True, skip the existence check and
        bind regardless.
    :returns: the newly bound method.
    :raises AttributeError: if the function does not exist in Matlab
        and *unconditionally* is False.
    """
    # Matlab's `exist` result codes 2/3/5 presumably mean file,
    # MEX-file and built-in function -- verify against Matlab docs.
    exists = self.run_func('exist', name)['result'] in [2, 3, 5]
    if not unconditionally and not exists:
        raise AttributeError("'Matlab' object has no attribute '%s'" % name)

    # A weak reference avoids a reference cycle between the session
    # and its bound methods.
    method_instance = MatlabFunction(weakref.ref(self), name)
    method_instance.__name__ = name

    # Python 3's types.MethodType dropped the class argument.
    if sys.version.startswith('3'):
        method = types.MethodType(method_instance, weakref.ref(self))
    else:
        method = types.MethodType(method_instance, weakref.ref(self),
                                  _Session)

    setattr(self, name, method)
    return getattr(self, name)
Generate a Matlab function and bind it to the instance
48,095
def format_line(line):
    """Classify one Matlab source line for notebook conversion.

    Lines opening with '%%' start a new markdown cell, lines opening
    with '%' continue a markdown cell, and anything else is code
    passed through unchanged.

    :returns: tuple ``(new_cell, is_markdown, source)``.
    """
    if line.startswith('%%'):
        return True, True, line.split('%%')[1] + '\n'
    if line.startswith('%'):
        return False, True, line.split('%')[1] + '\n'
    return False, False, line
Format a line of Matlab into either a markdown line or a code line .
48,096
def lines_to_notebook(lines, name=None):
    """Convert the lines of a Matlab m-file into an IPython notebook.

    :param lines: list of strings, the lines of the m-file.
    :param name: accepted but never used.
    :returns: an nbformat notebook whose first cell loads pymatbridge
        and whose remaining cells alternate between markdown cells
        (from %-comments) and %%matlab code cells.
    """
    source = []
    md = np.empty(len(lines), dtype=object)
    new_cell = np.empty(len(lines), dtype=object)
    for idx, l in enumerate(lines):
        new_cell[idx], md[idx], this_source = format_line(l)

        # A transition between markdown and code also forces a new cell.
        if idx > 1 and not new_cell[idx]:
            if md[idx] != md[idx - 1]:
                new_cell[idx] = True

        source.append(this_source)

    # Cell boundaries; the trailing -1 closes the final cell.
    new_cell_idx = np.hstack([np.where(new_cell)[0], -1])
    cell_source = [source[new_cell_idx[i]:new_cell_idx[i + 1]]
                   for i in range(len(new_cell_idx) - 1)]
    cell_md = [md[new_cell_idx[i]] for i in range(len(new_cell_idx) - 1)]

    cells = []

    # Header cell that initializes the Matlab bridge in the notebook.
    notebook_head = "import pymatbridge as pymat\n" + "ip = get_ipython()\n" + "pymat.load_ipython_extension(ip)"
    cells.append(nbformat.new_code_cell(notebook_head))

    for cell_idx, cell_s in enumerate(cell_source):
        if cell_md[cell_idx]:
            cells.append(nbformat.new_markdown_cell(cell_s))
        else:
            # Code cells are executed through the %%matlab cell magic.
            cell_s.insert(0, '%%matlab\n')
            cells.append(nbformat.new_code_cell(cell_s))

    notebook = nbformat.new_notebook(cells=cells)
    return notebook
Convert the lines of an m file into an IPython notebook
48,097
def convert_mfile(mfile, outfile=None):
    """Convert a Matlab m-file into an IPython notebook (.ipynb).

    :param mfile: path of the m-file to convert.
    :param outfile: destination path; defaults to the input name with
        its extension replaced by .ipynb.
    """
    notebook = lines_to_notebook(mfile_to_lines(mfile))
    if outfile is None:
        outfile = mfile.split('.m')[0] + '.ipynb'
    with open(outfile, 'w') as fid:
        nbwrite(notebook, fid)
Convert a Matlab m - file into a Matlab notebook in ipynb format
48,098
def post_gist(content, description='', filename='file', auth=False):
    """Post some text to a GitHub Gist and return its HTML URL.

    :param content: the text to post.
    :param description: gist description.
    :param filename: name of the single file in the gist.
    :param auth: when True, send the authentication header.
    """
    payload = json.dumps({
        "description": description,
        "public": True,
        "files": {
            filename: {"content": content}
        },
    }).encode('utf-8')

    headers = make_auth_header() if auth else {}
    response = requests.post("https://api.github.com/gists",
                             data=payload, headers=headers)
    response.raise_for_status()

    return json.loads(response.text)['html_url']
Post some text to a Gist and return the URL .
48,099
def get_pull_request(project, num, auth=False):
    """Fetch pull request *num* of *project* from the GitHub API.

    :param project: 'owner/repo' slug.
    :param num: pull request number.
    :param auth: when True, send the authentication header.
    :returns: the decoded JSON payload (attribute-style objects via
        the Obj hook).
    """
    url = "https://api.github.com/repos/{project}/pulls/{num}".format(
        project=project, num=num)
    header = make_auth_header() if auth else None
    response = requests.get(url, headers=header)
    response.raise_for_status()
    return json.loads(response.text, object_hook=Obj)
get pull request info by number