Dataset schema (from the viewer): idx — int64, 0 to 63k; question — string, lengths 61 to 4.03k; target — string, lengths 6 to 1.23k.
5,700
def assert_unordered_list_eq ( expected , actual , message = None ) : missing_in_actual = [ ] missing_in_expected = list ( actual ) for x in expected : try : missing_in_expected . remove ( x ) except ValueError : missing_in_actual . append ( x ) if missing_in_actual or missing_in_expected : if not message : message = ( "%r not equal to %r; missing items: %r in expected, %r in actual." % ( expected , actual , missing_in_expected , missing_in_actual ) ) assert False , message
Raises an AssertionError if the objects contained in expected are not equal to the objects contained in actual, without regard to their order.
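A minimal usage sketch for this helper (hypothetical values); the comparison treats the lists as multisets, so order is ignored but multiplicity matters:

    assert_unordered_list_eq([1, 2, 2, 3], [3, 2, 1, 2])  # passes: same items, any order
    assert_unordered_list_eq([1, 2, 2], [1, 2, 3])        # raises AssertionError: multisets differ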
5,701
def _execute_if_not_empty ( func ) : def wrapper ( * args , ** kwargs ) : if any ( args [ 1 : ] ) or any ( kwargs . items ( ) ) : return func ( * args , ** kwargs ) return wrapper
Execute the function only if at least one of its input parameters is non-empty.
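A usage sketch with a hypothetical class. Note that any(kwargs.items()) is truthy whenever any keyword argument is passed at all, even with a falsy value, because dict items are non-empty tuples:

    class Indexer(object):  # hypothetical example class
        @_execute_if_not_empty
        def update(self, items=None):
            print('updating %r' % items)

    Indexer().update()           # skipped: no non-empty inputs, wrapper returns None
    Indexer().update([1, 2])     # runs: a positional argument is non-empty
    Indexer().update(items=[])   # also runs: the ('items', []) tuple is truthy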
5,702
def prepare_search_body ( self , should_terms = None , must_terms = None , must_not_terms = None , search_text = '' , start = None , end = None ) : self . body = self . SearchBody ( ) self . body . set_should_terms ( should_terms ) self . body . set_must_terms ( must_terms ) self . body . set_must_not_terms ( must_not_terms ) self . body . set_search_text ( search_text ) self . body . set_timestamp_filter ( start , end ) self . body . prepare ( )
Prepare the body for an Elasticsearch query.
5,703
def execute ( cls , instance , async = True , countdown = 2 , is_heavy_task = False , ** kwargs ) : cls . pre_apply ( instance , async = async , ** kwargs ) result = cls . apply_signature ( instance , async = async , countdown = countdown , is_heavy_task = is_heavy_task , ** kwargs ) cls . post_apply ( instance , async = async , ** kwargs ) return result
Execute a high-level operation.
5,704
def apply_signature ( cls , instance , async = True , countdown = None , is_heavy_task = False , ** kwargs ) : serialized_instance = utils . serialize_instance ( instance ) signature = cls . get_task_signature ( instance , serialized_instance , ** kwargs ) link = cls . get_success_signature ( instance , serialized_instance , ** kwargs ) link_error = cls . get_failure_signature ( instance , serialized_instance , ** kwargs ) if async : return signature . apply_async ( link = link , link_error = link_error , countdown = countdown , queue = is_heavy_task and 'heavy' or None ) else : result = signature . apply ( ) callback = link if not result . failed ( ) else link_error if callback is not None : cls . _apply_callback ( callback , result ) return result . get ( )
Serialize the input data and apply the signature.
5,705
def _apply_callback ( cls , callback , result ) : if not callback . immutable : callback . args = ( result . id , ) + callback . args callback . apply ( )
Synchronously execute the callback.
5,706
def is_disabled_action ( view ) : if not isinstance ( view , core_views . ActionsViewSet ) : return False action = getattr ( view , 'action' , None ) return action in view . disabled_actions if action is not None else False
Checks whether the Link action is disabled.
5,707
def get_allowed_methods ( self , callback ) : if hasattr ( callback , 'actions' ) : return [ method . upper ( ) for method in callback . actions . keys ( ) if method != 'head' ] return [ method for method in callback . cls ( ) . allowed_methods if method not in ( 'OPTIONS' , 'HEAD' ) ]
Return a list of the valid HTTP methods for this endpoint.
5,708
def create_view ( self , callback , method , request = None ) : view = super ( WaldurSchemaGenerator , self ) . create_view ( callback , method , request ) if is_disabled_action ( view ) : view . exclude_from_schema = True return view
Given a callback, return an actual view instance.
5,709
def get_description ( self , path , method , view ) : description = super ( WaldurSchemaGenerator , self ) . get_description ( path , method , view ) permissions_description = get_permissions_description ( view , method ) if permissions_description : description += '\n\n' + permissions_description if description else permissions_description if isinstance ( view , core_views . ActionsViewSet ) : validators_description = get_validators_description ( view ) if validators_description : description += '\n\n' + validators_description if description else validators_description validation_description = get_validation_description ( view , method ) if validation_description : description += '\n\n' + validation_description if description else validation_description return description
Determine a link description.
5,710
def delete_error_message ( sender , instance , name , source , target , ** kwargs ) : if source != StateMixin . States . ERRED : return instance . error_message = '' instance . save ( update_fields = [ 'error_message' ] )
Delete the error message if the instance state changed from erred.
5,711
def _make_value ( self , value ) : member = self . __new__ ( self , value ) member . __init__ ( value ) return member
Instantiates an enum with an arbitrary value.
5,712
def parse ( cls , value , default = _no_default ) : if isinstance ( value , cls ) : return value elif isinstance ( value , six . integer_types ) and not isinstance ( value , EnumBase ) : e = cls . _value_to_member . get ( value , _no_default ) else : e = cls . _name_to_member . get ( value , _no_default ) if e is _no_default or not e . is_valid ( ) : if default is _no_default : raise _create_invalid_value_error ( cls , value ) return default return e
Parses an enum member name or value into an enum member.
5,713
def parse ( cls , value , default = _no_default ) : if isinstance ( value , cls ) : return value elif isinstance ( value , int ) : e = cls . _make_value ( value ) else : if not value : e = cls . _make_value ( 0 ) else : r = 0 for k in value . split ( "," ) : v = cls . _name_to_member . get ( k , _no_default ) if v is _no_default : if default is _no_default : raise _create_invalid_value_error ( cls , value ) else : return default r |= v . value e = cls . _make_value ( r ) if not e . is_valid ( ) : if default is _no_default : raise _create_invalid_value_error ( cls , value ) return default return e
Parses a flag integer or string into a Flags instance.
5,714
def get_permission_checks ( self , request , view ) : if view . action is None : return [ ] if hasattr ( view , view . action + '_permissions' ) : return getattr ( view , view . action + '_permissions' ) extra_permissions = getattr ( view , view . action + 'extra_permissions' , [ ] ) if request . method in SAFE_METHODS : return getattr ( view , 'safe_methods_permissions' , [ ] ) + extra_permissions else : return getattr ( view , 'unsafe_methods_permissions' , [ ] ) + extra_permissions
Get the permission checks that will be executed for the current action.
5,715
def add_function ( self , function ) : if not len ( self . settings . FUNCTION_MANAGERS ) : raise ConfigurationError ( 'Where have the default function manager gone?!' ) self . settings . FUNCTION_MANAGERS [ 0 ] . add_function ( function )
Registers the function with the server's default fixed function manager.
5,716
def format_raw_field ( key ) : subfield = django_settings . WALDUR_CORE . get ( 'ELASTICSEARCH' , { } ) . get ( 'raw_subfield' , 'keyword' ) return '%s.%s' % ( camel_case_to_underscore ( key ) , subfield )
When Elasticsearch analyzes a string, it breaks it into parts. In order to query for non-analyzed exact string values, we should use the raw subfield instead.
5,717
def decorate ( decorator_cls , * args , ** kwargs ) : global _wrappers wrapper_cls = _wrappers . get ( decorator_cls , None ) if wrapper_cls is None : class PythonWrapper ( decorator_cls ) : pass wrapper_cls = PythonWrapper wrapper_cls . __name__ = decorator_cls . __name__ + "PythonWrapper" _wrappers [ decorator_cls ] = wrapper_cls def decorator ( fn ) : wrapped = wrapper_cls ( fn , * args , ** kwargs ) _update_wrapper ( wrapped , fn ) return wrapped return decorator
Creates a decorator function that applies the decorator_cls that was passed in.
5,718
def deprecated ( replacement_description ) : def decorate ( fn_or_class ) : if isinstance ( fn_or_class , type ) : pass else : try : fn_or_class . __doc__ = "This API point is obsolete. %s\n\n%s" % ( replacement_description , fn_or_class . __doc__ , ) except AttributeError : pass return fn_or_class return decorate
States that the method is deprecated.
5,719
def convert_result ( converter ) : def decorate ( fn ) : @ inspection . wraps ( fn ) def new_fn ( * args , ** kwargs ) : return converter ( fn ( * args , ** kwargs ) ) return new_fn return decorate
Decorator that can convert the result of a function call.
5,720
def retry ( exception_cls , max_tries = 10 , sleep = 0.05 ) : assert max_tries > 0 def with_max_retries_call ( delegate ) : for i in xrange ( 0 , max_tries ) : try : return delegate ( ) except exception_cls : if i + 1 == max_tries : raise time . sleep ( sleep ) def outer ( fn ) : is_generator = inspect . isgeneratorfunction ( fn ) @ functools . wraps ( fn ) def retry_fun ( * args , ** kwargs ) : return with_max_retries_call ( lambda : fn ( * args , ** kwargs ) ) @ functools . wraps ( fn ) def retry_generator_fun ( * args , ** kwargs ) : def get_first_item ( ) : results = fn ( * args , ** kwargs ) for first_result in results : return [ first_result ] , results return [ ] , results cache , generator = with_max_retries_call ( get_first_item ) for item in cache : yield item for item in generator : yield item if not is_generator : retry_fun . fn = fn retry_fun . __reduce__ = lambda : fn . __name__ return retry_fun else : retry_generator_fun . fn = fn retry_generator_fun . __reduce__ = lambda : fn . __name__ return retry_generator_fun return outer
Decorator for retrying a function if it throws an exception.
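A usage sketch (the module targets Python 2, hence xrange inside the decorator); a hypothetical flaky function succeeds on its third attempt:

    attempts = {'n': 0}

    @retry(ValueError, max_tries=3, sleep=0)
    def flaky():
        attempts['n'] += 1
        if attempts['n'] < 3:
            raise ValueError('transient failure')
        return 'ok'

    assert flaky() == 'ok' and attempts['n'] == 3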
5,721
def decorator_of_context_manager ( ctxt ) : def decorator_fn ( * outer_args , ** outer_kwargs ) : def decorator ( fn ) : @ functools . wraps ( fn ) def wrapper ( * args , ** kwargs ) : with ctxt ( * outer_args , ** outer_kwargs ) : return fn ( * args , ** kwargs ) return wrapper return decorator if getattr ( ctxt , "__doc__" , None ) is None : msg = "Decorator that runs the inner function in the context of %s" decorator_fn . __doc__ = msg % ctxt else : decorator_fn . __doc__ = ctxt . __doc__ return decorator_fn
Converts a context manager into a decorator.
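A sketch of the conversion with a hypothetical context manager; any extra arguments are forwarded to the context manager's constructor:

    class suppress_value_error(object):  # hypothetical context manager
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_value, tb):
            return exc_type is ValueError  # swallow only ValueError

    with_suppression = decorator_of_context_manager(suppress_value_error)

    @with_suppression()
    def parse(text):
        return int(text)

    parse('not a number')  # ValueError is suppressed; the wrapper returns None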
5,722
def get_error ( self , error ) : error_type = type ( error ) if error . error_type == ET_CLIENT : error_type_name = 'Client' else : error_type_name = 'Server' return { 'type' : error_type_name , 'name' : error_type . __name__ , 'prefix' : getattr ( error_type , '__module__' , '' ) , 'message' : unicode ( error ) , 'params' : error . args , }
A helper function that gets standard information from the error.
5,723
def validate_quota_change ( self , quota_deltas , raise_exception = False ) : errors = [ ] for name , delta in six . iteritems ( quota_deltas ) : quota = self . quotas . get ( name = name ) if quota . is_exceeded ( delta ) : errors . append ( '%s quota limit: %s, requires %s (%s)\n' % ( quota . name , quota . limit , quota . usage + delta , quota . scope ) ) if not raise_exception : return errors else : if errors : raise exceptions . QuotaExceededException ( _ ( 'One or more quotas were exceeded: %s' ) % ';' . join ( errors ) )
Get error messages about the object's quotas and its ancestors' quotas that would be exceeded if quota_deltas were added.
5,724
def get_sum_of_quotas_as_dict ( cls , scopes , quota_names = None , fields = [ 'usage' , 'limit' ] ) : if not scopes : return { } if quota_names is None : quota_names = cls . get_quotas_names ( ) scope_models = set ( [ scope . _meta . model for scope in scopes ] ) if len ( scope_models ) > 1 : raise exceptions . QuotaError ( _ ( 'All scopes have to be instances of the same model.' ) ) filter_kwargs = { 'content_type' : ct_models . ContentType . objects . get_for_model ( scopes [ 0 ] ) , 'object_id__in' : [ scope . id for scope in scopes ] , 'name__in' : quota_names } result = { } if 'usage' in fields : items = Quota . objects . filter ( ** filter_kwargs ) . values ( 'name' ) . annotate ( usage = Sum ( 'usage' ) ) for item in items : result [ item [ 'name' ] + '_usage' ] = item [ 'usage' ] if 'limit' in fields : unlimited_quotas = Quota . objects . filter ( limit = - 1 , ** filter_kwargs ) unlimited_quotas = list ( unlimited_quotas . values_list ( 'name' , flat = True ) ) for quota_name in unlimited_quotas : result [ quota_name ] = - 1 items = Quota . objects . filter ( ** filter_kwargs ) . exclude ( name__in = unlimited_quotas ) . values ( 'name' ) . annotate ( limit = Sum ( 'limit' ) ) for item in items : result [ item [ 'name' ] ] = item [ 'limit' ] return result
Return a dictionary with the sum of all scopes' quotas.
5,725
def scope_types ( self , request , * args , ** kwargs ) : return response . Response ( utils . get_scope_types_mapping ( ) . keys ( ) )
Returns a list of scope types accepted by the events filter.
5,726
def import_from_file ( self , index , filename ) : with open ( filename , 'r' ) as fp : self . _import_from_struct ( index , json . load ( fp ) )
Import this instrument's settings from the given file. Will automatically add the instrument's synth and table to the song's synths and tables if needed.
5,727
def load_lsdsng ( filename ) : with open ( filename , 'rb' ) as fp : preamble_data = bread . parse ( fp , spec . lsdsng_preamble ) with open ( filename , 'rb' ) as fp : fp . seek ( int ( len ( preamble_data ) / 8 ) ) factory = BlockFactory ( ) while True : block_data = bytearray ( fp . read ( blockutils . BLOCK_SIZE ) ) if len ( block_data ) == 0 : break block = factory . new_block ( ) block . data = block_data remapped_blocks = filepack . renumber_block_keys ( factory . blocks ) reader = BlockReader ( ) compressed_data = reader . read ( remapped_blocks ) raw_data = filepack . decompress ( compressed_data ) name = preamble_data . name version = preamble_data . version size_blks = int ( math . ceil ( float ( len ( compressed_data ) ) / blockutils . BLOCK_SIZE ) ) return Project ( name , version , size_blks , raw_data )
Load a Project from a .lsdsng file.
5,728
def load_srm ( filename ) : with open ( filename , 'rb' ) as fp : raw_data = fp . read ( ) compressed_data = filepack . compress ( raw_data ) factory = BlockFactory ( ) writer = BlockWriter ( ) writer . write ( compressed_data , factory ) size_in_blocks = len ( factory . blocks ) name = "SRMLOAD" version = 0 return Project ( name , version , size_in_blocks , raw_data )
Load a Project from an .srm file.
5,729
def song ( self ) : if self . _song is None : self . _song = Song ( self . _song_data ) return self . _song
the song associated with the project
5,730
def save ( self , filename ) : with open ( filename , 'wb' ) as fp : writer = BlockWriter ( ) factory = BlockFactory ( ) preamble_dummy_bytes = bytearray ( [ 0 ] * 9 ) preamble = bread . parse ( preamble_dummy_bytes , spec . lsdsng_preamble ) preamble . name = self . name preamble . version = self . version preamble_data = bread . write ( preamble ) raw_data = self . get_raw_data ( ) compressed_data = filepack . compress ( raw_data ) writer . write ( compressed_data , factory ) fp . write ( preamble_data ) for key in sorted ( factory . blocks . keys ( ) ) : fp . write ( bytearray ( factory . blocks [ key ] . data ) )
Save a project in .lsdsng format to the target file.
5,731
def save_srm ( self , filename ) : with open ( filename , 'wb' ) as fp : raw_data = bread . write ( self . _song_data , spec . song ) fp . write ( raw_data )
Save a project in .srm format to the target file.
5,732
def phase_type ( self , value ) : self . _params . phase_type = value self . _overwrite_lock . disable ( )
Compresses the waveform horizontally; one of 'normal', 'resync', 'resync2'.
5,733
def save ( self , filename , callback = _noop_callback ) : with open ( filename , 'wb' ) as fp : self . _save ( fp , callback )
Save this file.
5,734
def split ( compressed_data , segment_size , block_factory ) : segments = [ ] current_segment_start = 0 index = 0 data_size = len ( compressed_data ) while index < data_size : current_byte = compressed_data [ index ] if index < data_size - 1 : next_byte = compressed_data [ index + 1 ] else : next_byte = None jump_size = 1 if current_byte == RLE_BYTE : assert next_byte is not None , "Expected a command to follow " "RLE byte" if next_byte == RLE_BYTE : jump_size = 2 else : jump_size = 3 elif current_byte == SPECIAL_BYTE : assert next_byte is not None , "Expected a command to follow " "special byte" if next_byte == SPECIAL_BYTE : jump_size = 2 elif next_byte == DEFAULT_INSTR_BYTE or next_byte == DEFAULT_WAVE_BYTE : jump_size = 3 else : assert False , "Encountered unexpected EOF or block " "switch while segmenting" if index - current_segment_start + jump_size > segment_size - 2 : segments . append ( compressed_data [ current_segment_start : index ] ) current_segment_start = index else : index += jump_size if current_segment_start != index : segments . append ( compressed_data [ current_segment_start : current_segment_start + index ] ) total_segment_length = sum ( map ( len , segments ) ) assert total_segment_length == len ( compressed_data ) , "Lost %d bytes of " "data while segmenting" % ( len ( compressed_data ) - total_segment_length ) block_ids = [ ] for segment in segments : block = block_factory . new_block ( ) block_ids . append ( block . id ) for ( i , segment ) in enumerate ( segments ) : block = block_factory . blocks [ block_ids [ i ] ] assert len ( block . data ) == 0 , "Encountered a block with " "pre-existing data while writing" if i == len ( segments ) - 1 : add_eof ( segment ) else : add_block_switch ( segment , block_ids [ i + 1 ] ) pad ( segment , segment_size ) block . data = segment return block_ids
Splits compressed data into blocks.
5,735
def renumber_block_keys ( blocks ) : byte_switch_keys = [ 0 ] block_keys = list ( blocks . keys ( ) ) for block in list ( blocks . values ( ) ) : i = 0 while i < len ( block . data ) - 1 : current_byte = block . data [ i ] next_byte = block . data [ i + 1 ] if current_byte == RLE_BYTE : if next_byte == RLE_BYTE : i += 2 else : i += 3 elif current_byte == SPECIAL_BYTE : if next_byte in SPECIAL_DEFAULTS : i += 3 elif next_byte == SPECIAL_BYTE : i += 2 else : if next_byte != EOF_BYTE : byte_switch_keys . append ( next_byte ) break else : i += 1 byte_switch_keys . sort ( ) block_keys . sort ( ) assert len ( byte_switch_keys ) == len ( block_keys ) , ( "Number of blocks that are target of block switches (%d) " % ( len ( byte_switch_keys ) ) + "does not equal number of blocks in the song (%d)" % ( len ( block_keys ) ) + "; possible corruption" ) if byte_switch_keys == block_keys : return blocks new_block_map = { } for block_key , byte_switch_key in zip ( block_keys , byte_switch_keys ) : new_block_map [ byte_switch_key ] = blocks [ block_key ] return new_block_map
Renumber a block map's indices so that they match the blocks' block-switch statements.
5,736
def merge ( blocks ) : current_block = blocks [ sorted ( blocks . keys ( ) ) [ 0 ] ] compressed_data = [ ] eof = False while not eof : data_size_to_append = None next_block = None i = 0 while i < len ( current_block . data ) - 1 : current_byte = current_block . data [ i ] next_byte = current_block . data [ i + 1 ] if current_byte == RLE_BYTE : if next_byte == RLE_BYTE : i += 2 else : i += 3 elif current_byte == SPECIAL_BYTE : if next_byte in SPECIAL_DEFAULTS : i += 3 elif next_byte == SPECIAL_BYTE : i += 2 else : data_size_to_append = i if next_byte == EOF_BYTE : eof = True else : next_block = blocks [ next_byte ] break else : i += 1 assert data_size_to_append is not None , "Ran off the end of a " "block without encountering a block switch or EOF" compressed_data . extend ( current_block . data [ 0 : data_size_to_append ] ) if not eof : assert next_block is not None , "Switched blocks, but did " "not provide the next block to switch to" current_block = next_block return compressed_data
Merge the given blocks into a contiguous block of compressed data.
5,737
def pad ( segment , size ) : for i in range ( size - len ( segment ) ) : segment . append ( 0 ) assert len ( segment ) == size
Add zeroes to a segment until it reaches a certain size.
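A quick illustration; the segment is mutated in place:

    segment = [1, 2, 3]
    pad(segment, 8)
    assert segment == [1, 2, 3, 0, 0, 0, 0, 0]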
5,738
def decompress ( compressed_data ) : raw_data = [ ] index = 0 while index < len ( compressed_data ) : current = compressed_data [ index ] index += 1 if current == RLE_BYTE : directive = compressed_data [ index ] index += 1 if directive == RLE_BYTE : raw_data . append ( RLE_BYTE ) else : count = compressed_data [ index ] index += 1 raw_data . extend ( [ directive ] * count ) elif current == SPECIAL_BYTE : directive = compressed_data [ index ] index += 1 if directive == SPECIAL_BYTE : raw_data . append ( SPECIAL_BYTE ) elif directive == DEFAULT_WAVE_BYTE : count = compressed_data [ index ] index += 1 raw_data . extend ( DEFAULT_WAVE * count ) elif directive == DEFAULT_INSTR_BYTE : count = compressed_data [ index ] index += 1 raw_data . extend ( DEFAULT_INSTRUMENT_FILEPACK * count ) elif directive == EOF_BYTE : assert False , ( "Unexpected EOF command encountered while " "decompressing" ) else : assert False , "Countered unexpected sequence 0x%02x 0x%02x" % ( current , directive ) else : raw_data . append ( current ) return raw_data
Decompress data that has been compressed by the filepack algorithm.
5,739
def compress ( raw_data ) : raw_data = bytearray ( raw_data ) compressed_data = [ ] data_size = len ( raw_data ) index = 0 next_bytes = [ - 1 , - 1 , - 1 ] def is_default_instrument ( index ) : if index + len ( DEFAULT_INSTRUMENT_FILEPACK ) > len ( raw_data ) : return False instr_bytes = raw_data [ index : index + len ( DEFAULT_INSTRUMENT_FILEPACK ) ] if instr_bytes [ 0 ] != 0xa8 or instr_bytes [ 1 ] != 0 : return False return instr_bytes == DEFAULT_INSTRUMENT_FILEPACK def is_default_wave ( index ) : return ( index + len ( DEFAULT_WAVE ) <= len ( raw_data ) and raw_data [ index : index + len ( DEFAULT_WAVE ) ] == DEFAULT_WAVE ) while index < data_size : current_byte = raw_data [ index ] for i in range ( 3 ) : if index < data_size - ( i + 1 ) : next_bytes [ i ] = raw_data [ index + ( i + 1 ) ] else : next_bytes [ i ] = - 1 if current_byte == RLE_BYTE : compressed_data . append ( RLE_BYTE ) compressed_data . append ( RLE_BYTE ) index += 1 elif current_byte == SPECIAL_BYTE : compressed_data . append ( SPECIAL_BYTE ) compressed_data . append ( SPECIAL_BYTE ) index += 1 elif is_default_instrument ( index ) : counter = 1 index += len ( DEFAULT_INSTRUMENT_FILEPACK ) while ( is_default_instrument ( index ) and counter < 0x100 ) : counter += 1 index += len ( DEFAULT_INSTRUMENT_FILEPACK ) compressed_data . append ( SPECIAL_BYTE ) compressed_data . append ( DEFAULT_INSTR_BYTE ) compressed_data . append ( counter ) elif is_default_wave ( index ) : counter = 1 index += len ( DEFAULT_WAVE ) while is_default_wave ( index ) and counter < 0xff : counter += 1 index += len ( DEFAULT_WAVE ) compressed_data . append ( SPECIAL_BYTE ) compressed_data . append ( DEFAULT_WAVE_BYTE ) compressed_data . append ( counter ) elif ( current_byte == next_bytes [ 0 ] and next_bytes [ 0 ] == next_bytes [ 1 ] and next_bytes [ 1 ] == next_bytes [ 2 ] ) : compressed_data . append ( RLE_BYTE ) compressed_data . append ( current_byte ) counter = 0 while ( index < data_size and raw_data [ index ] == current_byte and counter < 0xff ) : index += 1 counter += 1 compressed_data . append ( counter ) else : compressed_data . append ( current_byte ) index += 1 return compressed_data
Compress raw bytes with the filepack algorithm.
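A round-trip sketch for the filepack pair above (the raw bytes here are chosen to avoid the module's marker constants such as RLE_BYTE and SPECIAL_BYTE):

    raw = [0x17] * 10 + [0x42]        # a run of repeated bytes plus a literal
    packed = compress(raw)            # run-length encodes the 0x17 run
    assert decompress(packed) == raw  # lossless round trip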
5,740
def name_without_zeroes ( name ) : first_zero = name . find ( b'\0' ) if first_zero == - 1 : return name else : return str ( name [ : first_zero ] )
Return a human-readable name without LSDJ's trailing zeroes.
5,741
def table ( self ) : if hasattr ( self . data , 'table_on' ) and self . data . table_on : assert_index_sane ( self . data . table , len ( self . song . tables ) ) return self . song . tables [ self . data . table ]
A pylsdj.Table referencing the instrument's table, or None if the instrument doesn't have a table.
5,742
def import_lsdinst ( self , struct_data ) : self . name = struct_data [ 'name' ] self . automate = struct_data [ 'data' ] [ 'automate' ] self . pan = struct_data [ 'data' ] [ 'pan' ] if self . table is not None : self . table . import_lsdinst ( struct_data )
Import from an lsdinst struct.
5,743
def export_to_file ( self , filename ) : instr_json = self . export_struct ( ) with open ( filename , 'w' ) as fp : json . dump ( instr_json , fp , indent = 2 )
Export this instrument's settings to a file.
5,744
def write_wav ( self , filename ) : wave_output = None try : wave_output = wave . open ( filename , 'w' ) wave_output . setparams ( WAVE_PARAMS ) frames = bytearray ( [ x << 4 for x in self . sample_data ] ) wave_output . writeframes ( frames ) finally : if wave_output is not None : wave_output . close ( )
Write this sample to a WAV file.
5,745
def read_wav ( self , filename ) : wave_input = None try : wave_input = wave . open ( filename , 'r' ) wave_frames = bytearray ( wave_input . readframes ( wave_input . getnframes ( ) ) ) self . sample_data = [ x >> 4 for x in wave_frames ] finally : if wave_input is not None : wave_input . close ( )
Read sample data for this sample from a WAV file.
5,746
def get_device_address ( device ) : if device is None : return None command = [ 'ip' , 'route' , 'list' , 'dev' , device ] ip_routes = subprocess . check_output ( command ) . strip ( ) for line in ip_routes . split ( '\n' ) : seen = '' for a in line . split ( ) : if seen == 'src' : return a seen = a return None
Find the local IP address on the given device.
5,747
def get_default_net_device ( ) : with open ( '/proc/net/route' ) as fh : for line in fh : iface , dest , _ = line . split ( None , 2 ) if dest == '00000000' : return iface return None
Find the device that holds the default route.
5,748
def add_missing_optional_args_with_value_none ( args , optional_args ) : for name in optional_args : if not name in args . keys ( ) : args [ name ] = None return args
Adds key-value pairs to the passed dictionary so that afterwards the dictionary can be used without needing to check for KeyErrors.
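A usage sketch with hypothetical argument names:

    args = {'url': 'https://hdl.example.org'}
    args = add_missing_optional_args_with_value_none(args, ['overwrite', 'ttl'])
    assert args == {'url': 'https://hdl.example.org', 'overwrite': None, 'ttl': None}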
5,749
def check_presence_of_mandatory_args ( args , mandatory_args ) : missing_args = [ ] for name in mandatory_args : if name not in args . keys ( ) : missing_args . append ( name ) if len ( missing_args ) > 0 : raise ValueError ( 'Missing mandatory arguments: ' + ', ' . join ( missing_args ) ) else : return True
Checks whether all mandatory arguments are passed.
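And its counterpart for mandatory arguments (hypothetical names):

    check_presence_of_mandatory_args({'handle': 'foo/bar'}, ['handle'])  # returns True
    check_presence_of_mandatory_args({}, ['handle', 'url'])
    # ValueError: Missing mandatory arguments: handle, url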
5,750
def monkey_patch_migration_template ( self , app , fixture_path ) : self . _MIGRATION_TEMPLATE = writer . MIGRATION_TEMPLATE module_split = app . module . __name__ . split ( '.' ) if len ( module_split ) == 1 : module_import = "import %s\n" % module_split [ 0 ] else : module_import = "from %s import %s\n" % ( '.' . join ( module_split [ : - 1 ] ) , module_split [ - 1 : ] [ 0 ] , ) writer . MIGRATION_TEMPLATE = writer . MIGRATION_TEMPLATE . replace ( '%(imports)s' , "%(imports)s" + "\nfrom django_migration_fixture import fixture\n%s" % module_import ) . replace ( '%(operations)s' , " migrations.RunPython(**fixture(%s, ['%s'])),\n" % ( app . label , os . path . basename ( fixture_path ) ) + "%(operations)s\n" )
Monkey-patch django.db.migrations.writer.MIGRATION_TEMPLATE.
5,751
def migration_exists ( self , app , fixture_path ) : base_name = os . path . basename ( fixture_path ) for migration_path in glob . glob ( os . path . join ( app . path , 'migrations' , '*.py' ) ) : if base_name in open ( migration_path ) . read ( ) : return True return False
Return True if it looks like a migration already exists.
5,752
def create_migration ( self , app , fixture_path ) : self . monkey_patch_migration_template ( app , fixture_path ) out = StringIO ( ) management . call_command ( 'makemigrations' , app . label , empty = True , stdout = out ) self . restore_migration_template ( ) self . stdout . write ( out . getvalue ( ) )
Create a data migration for app that uses fixture_path.
5,753
def instantiate_for_read_and_search ( handle_server_url , reverselookup_username , reverselookup_password , ** config ) : if handle_server_url is None and 'reverselookup_baseuri' not in config . keys ( ) : raise TypeError ( 'You must specify either "handle_server_url" or "reverselookup_baseuri".' + ' Searching not possible without the URL of a search servlet.' ) inst = EUDATHandleClient ( handle_server_url , reverselookup_username = reverselookup_username , reverselookup_password = reverselookup_password , ** config ) return inst
Initialize the client with read access and the search function.
5,754
def get_value_from_handle ( self , handle , key , handlerecord_json = None ) : LOGGER . debug ( 'get_value_from_handle...' ) handlerecord_json = self . __get_handle_record_if_necessary ( handle , handlerecord_json ) if handlerecord_json is None : raise HandleNotFoundException ( handle = handle ) list_of_entries = handlerecord_json [ 'values' ] indices = [ ] for i in xrange ( len ( list_of_entries ) ) : if list_of_entries [ i ] [ 'type' ] == key : indices . append ( i ) if len ( indices ) == 0 : return None else : if len ( indices ) > 1 : LOGGER . debug ( 'get_value_from_handle: The handle ' + handle + ' contains several entries of type "' + key + '". Only the first one is returned.' ) return list_of_entries [ indices [ 0 ] ] [ 'data' ] [ 'value' ]
Retrieve a single value from a single Handle. If several entries with this key exist, the method returns the first one. If the handle does not exist, the method raises a HandleNotFoundException.
5,755
def delete_handle_value ( self , handle , key ) : LOGGER . debug ( 'delete_handle_value...' ) handlerecord_json = self . retrieve_handle_record_json ( handle ) if handlerecord_json is None : msg = 'Cannot modify unexisting handle' raise HandleNotFoundException ( handle = handle , msg = msg ) list_of_entries = handlerecord_json [ 'values' ] keys = None indices = [ ] if type ( key ) != type ( [ ] ) : keys = [ key ] else : keys = key keys_done = [ ] for key in keys : if key == 'HS_ADMIN' : op = 'deleting "HS_ADMIN"' raise IllegalOperationException ( operation = op , handle = handle ) if key not in keys_done : indices_onekey = self . get_handlerecord_indices_for_key ( key , list_of_entries ) indices = indices + indices_onekey keys_done . append ( key ) if not len ( indices ) > 0 : LOGGER . debug ( 'delete_handle_value: No values for key(s) ' + str ( keys ) ) return None else : op = 'deleting "' + str ( keys ) + '"' resp = self . __send_handle_delete_request ( handle , indices = indices , op = op ) if hsresponses . handle_success ( resp ) : LOGGER . debug ( "delete_handle_value: Deleted handle values " + str ( keys ) + "of handle " + handle ) elif hsresponses . values_not_found ( resp ) : pass else : raise GenericHandleError ( operation = op , handle = handle , response = resp )
Delete a key-value pair from a handle record. If the key exists more than once, all key-value pairs with this key are deleted.
5,756
def delete_handle ( self , handle , * other ) : LOGGER . debug ( 'delete_handle...' ) utilhandle . check_handle_syntax ( handle ) if len ( other ) > 0 : message = 'You specified more than one argument. If you wanted' + ' to delete just some values from a handle, please use the' + ' new method "delete_handle_value()".' raise TypeError ( message ) op = 'deleting handle' resp = self . __send_handle_delete_request ( handle , op = op ) if hsresponses . handle_success ( resp ) : LOGGER . info ( 'Handle ' + handle + ' deleted.' ) elif hsresponses . handle_not_found ( resp ) : msg = ( 'delete_handle: Handle ' + handle + ' did not exist, ' 'so it could not be deleted.' ) LOGGER . debug ( msg ) raise HandleNotFoundException ( msg = msg , handle = handle , response = resp ) else : raise GenericHandleError ( op = op , handle = handle , response = resp )
Delete the handle and its handle record. If the handle is not found, an exception is raised.
5,757
def register_handle ( self , handle , location , checksum = None , additional_URLs = None , overwrite = False , ** extratypes ) : LOGGER . debug ( 'register_handle...' ) if overwrite == False : handlerecord_json = self . retrieve_handle_record_json ( handle ) if handlerecord_json is not None : msg = 'Could not register handle' LOGGER . error ( msg + ', as it already exists.' ) raise HandleAlreadyExistsException ( handle = handle , msg = msg ) list_of_entries = [ ] adminentry = self . __create_admin_entry ( self . __handleowner , self . __HS_ADMIN_permissions , self . __make_another_index ( list_of_entries , hs_admin = True ) , handle ) list_of_entries . append ( adminentry ) entry_URL = self . __create_entry ( 'URL' , location , self . __make_another_index ( list_of_entries , url = True ) ) list_of_entries . append ( entry_URL ) if checksum is not None : entryChecksum = self . __create_entry ( 'CHECKSUM' , checksum , self . __make_another_index ( list_of_entries ) ) list_of_entries . append ( entryChecksum ) if extratypes is not None : for key , value in extratypes . items ( ) : entry = self . __create_entry ( key , value , self . __make_another_index ( list_of_entries ) ) list_of_entries . append ( entry ) if additional_URLs is not None and len ( additional_URLs ) > 0 : for url in additional_URLs : self . __add_URL_to_10320LOC ( url , list_of_entries , handle ) op = 'registering handle' resp , put_payload = self . __send_handle_put_request ( handle , list_of_entries , overwrite = overwrite , op = op ) resp_content = decoded_response ( resp ) if hsresponses . was_handle_created ( resp ) or hsresponses . handle_success ( resp ) : LOGGER . info ( "Handle registered: " + handle ) return json . loads ( resp_content ) [ 'handle' ] elif hsresponses . is_temporary_redirect ( resp ) : oldurl = resp . url newurl = resp . headers [ 'location' ] raise GenericHandleError ( operation = op , handle = handle , response = resp , payload = put_payload , msg = 'Temporary redirect from ' + oldurl + ' to ' + newurl + '.' ) elif hsresponses . handle_not_found ( resp ) : raise GenericHandleError ( operation = op , handle = handle , response = resp , payload = put_payload , msg = 'Could not create handle. Possibly you used HTTP instead of HTTPS?' ) else : raise GenericHandleError ( operation = op , handle = handle , reponse = resp , payload = put_payload )
Registers a new Handle with the given name. If the handle already exists and overwrite is not set to True, the method raises an exception.
5,758
def get_handlerecord_indices_for_key ( self , key , list_of_entries ) : LOGGER . debug ( 'get_handlerecord_indices_for_key...' ) indices = [ ] for entry in list_of_entries : if entry [ 'type' ] == key : indices . append ( entry [ 'index' ] ) return indices
Finds the Handle entry indices of all entries that have a specific type.
5,759
def __get_handle_record_if_necessary ( self , handle , handlerecord_json ) : if handlerecord_json is None : handlerecord_json = self . retrieve_handle_record_json ( handle ) else : if handle != handlerecord_json [ 'handle' ] : handlerecord_json = self . retrieve_handle_record_json ( handle ) return handlerecord_json
Retrieves the handle record if the given one is None or if its handle is not the same as the specified handle.
5,760
def __create_entry ( self , entrytype , data , index , ttl = None ) : if entrytype == 'HS_ADMIN' : op = 'creating HS_ADMIN entry' msg = 'This method can not create HS_ADMIN entries.' raise IllegalOperationException ( operation = op , msg = msg ) entry = { 'index' : index , 'type' : entrytype , 'data' : data } if ttl is not None : entry [ 'ttl' ] = ttl return entry
Create an entry of any type except HS_ADMIN.
5,761
def __create_admin_entry ( self , handleowner , permissions , index , handle , ttl = None ) : if handleowner is None : adminindex = '200' prefix = handle . split ( '/' ) [ 0 ] adminhandle = '0.NA/' + prefix else : adminindex , adminhandle = utilhandle . remove_index_from_handle ( handleowner ) data = { 'value' : { 'index' : adminindex , 'handle' : adminhandle , 'permissions' : permissions } , 'format' : 'admin' } entry = { 'index' : index , 'type' : 'HS_ADMIN' , 'data' : data } if ttl is not None : entry [ 'ttl' ] = ttl return entry
Create an entry of type HS_ADMIN.
5,762
def auth_access ( self , auth_code ) : data = { 'client_id' : self . client_id , 'client_secret' : self . client_secret , 'grant_type' : 'authorization_code' , 'code' : auth_code , 'redirect_uri' : self . redirect_url } return self . request ( "post" , "access_token" , data = data )
Verify the code from the first authorization response URL.
5,763
def check_if_username_exists ( self , username ) : LOGGER . debug ( 'check_if_username_exists...' ) _ , handle = b2handle . utilhandle . remove_index_from_handle ( username ) resp = self . send_handle_get_request ( handle ) resp_content = decoded_response ( resp ) if b2handle . hsresponses . does_handle_exist ( resp ) : handlerecord_json = json . loads ( resp_content ) if not handlerecord_json [ 'handle' ] == handle : raise GenericHandleError ( operation = 'Checking if username exists' , handle = handle , reponse = resp , msg = 'The check returned a different handle than was asked for.' ) return True elif b2handle . hsresponses . handle_not_found ( resp ) : msg = 'The username handle does not exist' raise HandleNotFoundException ( handle = handle , msg = msg , response = resp ) else : op = 'checking if handle exists' msg = 'Checking if username exists went wrong' raise GenericHandleError ( operation = op , handle = handle , response = resp , msg = msg )
Check if the username's handle exists.
5,764
def publish_metric ( self , metric_name , metric_value , epoch_seconds = None ) : if epoch_seconds is None : epoch_seconds = self . _reactor . seconds ( ) self . _client_factory . publish_metric ( metric_name , metric_value , int ( epoch_seconds ) )
Record a single hit on a given metric.
5,765
def register_repeating_metric ( self , metric_name , frequency , getter ) : l = task . LoopingCall ( self . _publish_repeating_metric , metric_name , getter ) repeating_metric_handle = RepeatingMetricHandle ( l , frequency ) self . _repeating_metric_handles . append ( repeating_metric_handle ) if self . running : repeating_metric_handle . start ( ) return repeating_metric_handle
Record hits to a metric at a specified interval.
5,766
def show ( ) : parent = None current = QtWidgets . QApplication . activeWindow ( ) while current : parent = current current = parent . parent ( ) window = ( _discover_gui ( ) or _show_no_gui ) ( parent ) return window
Try showing the most desirable GUI
5,767
def dock ( window ) : if self . _dock : print ( "Deleting existing dock..." ) parent = self . _dock dialog = None stacked_widget = None main_windows = [ ] while parent : if isinstance ( parent , QtWidgets . QDialog ) : dialog = parent if isinstance ( parent , QtWidgets . QStackedWidget ) : stacked_widget = parent if isinstance ( parent , QtWidgets . QMainWindow ) : main_windows . append ( parent ) parent = parent . parent ( ) dialog . deleteLater ( ) if len ( main_windows ) > 1 : if stacked_widget . count ( ) == 1 : main_windows [ 0 ] . deleteLater ( ) pane = nuke . getPaneFor ( "Properties.1" ) widget_path = "pyblish_nuke.lib.pyblish_nuke_dockwidget" panel = nukescripts . panels . registerWidgetAsPanel ( widget_path , window . windowTitle ( ) , "pyblish_nuke.dock" , True ) . addToPane ( pane ) panel_widget = panel . customKnob . getObject ( ) . widget panel_widget . layout ( ) . addWidget ( window ) _nuke_set_zero_margins ( panel_widget ) self . _dock = panel_widget return self . _dock
Expects a window to parent into a dockable Nuke panel.
5,768
def remove_index_from_handle ( handle_with_index ) : split = handle_with_index . split ( ':' ) if len ( split ) == 2 : split [ 0 ] = int ( split [ 0 ] ) return split elif len ( split ) == 1 : return ( None , handle_with_index ) elif len ( split ) > 2 : raise handleexceptions . HandleSyntaxError ( msg = 'Too many colons' , handle = handle_with_index , expected_syntax = 'index:prefix/suffix' )
Returns the index and the handle separately, in a tuple.
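A usage sketch with a hypothetical handle; note the asymmetry in the return types, a list when an index is present and a tuple when it is absent:

    assert remove_index_from_handle('300:myprefix/mysuffix') == [300, 'myprefix/mysuffix']
    assert remove_index_from_handle('myprefix/mysuffix') == (None, 'myprefix/mysuffix')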
5,769
def create_authentication_string ( username , password ) : username_utf8 = username . encode ( 'utf-8' ) userpw_utf8 = password . encode ( 'utf-8' ) username_perc = quote ( username_utf8 ) userpw_perc = quote ( userpw_utf8 ) authinfostring = username_perc + ':' + userpw_perc authinfostring_base64 = base64 . b64encode ( authinfostring . encode ( 'utf-8' ) ) . decode ( 'utf-8' ) return authinfostring_base64
Creates an authentication string from the username and password.
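A sketch of the encoding steps with hypothetical credentials; the username and password are percent-encoded before the 'user:pass' pair is base64-encoded:

    import base64
    auth = create_authentication_string('user', 'p@ss:word')
    assert base64.b64decode(auth).decode('utf-8') == 'user:p%40ss%3Aword'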
5,770
def make_request_log_message ( ** args ) : mandatory_args = [ 'op' , 'handle' , 'url' , 'headers' , 'verify' , 'resp' ] optional_args = [ 'payload' ] util . check_presence_of_mandatory_args ( args , mandatory_args ) util . add_missing_optional_args_with_value_none ( args , optional_args ) space = '\n ' message = '' message += '\n' + args [ 'op' ] + ' ' + args [ 'handle' ] message += space + 'URL: ' + args [ 'url' ] message += space + 'HEADERS: ' + str ( args [ 'headers' ] ) message += space + 'VERIFY: ' + str ( args [ 'verify' ] ) if 'payload' in args . keys ( ) : message += space + 'PAYLOAD:' + space + str ( args [ 'payload' ] ) message += space + 'RESPONSECODE: ' + str ( args [ 'resp' ] . status_code ) message += space + 'RESPONSE:' + space + str ( args [ 'resp' ] . content ) return message
Creates a string containing all relevant information about a request made to the Handle System for logging purposes.
5,771
def find_module ( fdr , fqname , path = None ) : if fqname in fdr . aliases : return Loader ( fqname , fdr . aliases [ fqname ] ) return None
Find a loader for the module or package fqname.
5,772
def load_module ( ldr , fqname ) : scope = ldr . scope . split ( '.' ) modpath = fqname . split ( '.' ) if scope != modpath [ 0 : len ( scope ) ] : raise AssertionError ( "%s responsible for %s got request for %s" % ( ldr . __class__ . __name__ , ldr . scope , fqname , ) ) if fqname in sys . modules : mod = sys . modules [ fqname ] else : mod = sys . modules . setdefault ( fqname , types . ModuleType ( fqname ) ) mod . __loader__ = ldr fspath = ldr . path_to ( fqname ) mod . __file__ = str ( fspath ) if fs . is_package ( fspath ) : mod . __path__ = [ ldr . fspath ] mod . __package__ = str ( fqname ) else : mod . __package__ = str ( fqname . rpartition ( '.' ) [ 0 ] ) exec ( fs . get_code ( fspath ) , mod . __dict__ ) return mod
Load fqname from under ldr.fspath.
5,773
def zthread_fork ( ctx , func , * args , ** kwargs ) : a = ctx . socket ( zmq . PAIR ) a . setsockopt ( zmq . LINGER , 0 ) a . setsockopt ( zmq . RCVHWM , 100 ) a . setsockopt ( zmq . SNDHWM , 100 ) a . setsockopt ( zmq . SNDTIMEO , 5000 ) a . setsockopt ( zmq . RCVTIMEO , 5000 ) b = ctx . socket ( zmq . PAIR ) b . setsockopt ( zmq . LINGER , 0 ) b . setsockopt ( zmq . RCVHWM , 100 ) b . setsockopt ( zmq . SNDHWM , 100 ) b . setsockopt ( zmq . SNDTIMEO , 5000 ) a . setsockopt ( zmq . RCVTIMEO , 5000 ) iface = "inproc://%s" % binascii . hexlify ( os . urandom ( 8 ) ) a . bind ( iface ) b . connect ( iface ) thread = threading . Thread ( target = func , args = ( ( ctx , b ) + args ) , kwargs = kwargs ) thread . daemon = False thread . start ( ) return a
Create an attached thread. An attached thread gets a ctx and a PAIR pipe back to its parent. It must monitor its pipe and exit if the pipe becomes unreadable. Returns the pipe, or NULL if there was an error.
5,774
def _remap ( object , name , value , safe = True ) : if os . getenv ( "QT_TESTING" ) is not None and safe : if hasattr ( object , name ) : raise AttributeError ( "Cannot override existing name: " "%s.%s" % ( object . __name__ , name ) ) if type ( object ) . __name__ != "module" : raise AttributeError ( "%s != 'module': Cannot alter " "anything but modules" % object ) elif hasattr ( object , name ) : self . __modified__ . append ( name ) self . __remapped__ . append ( name ) setattr ( object , name , value )
Prevent accidental assignment of existing members
5,775
def log_instantiation ( LOGGER , classname , args , forbidden , with_date = False ) : if with_date : LOGGER . info ( 'Instantiating ' + classname + ' at ' + datetime . datetime . now ( ) . strftime ( '%Y-%m-%d_%H:%M' ) ) else : LOGGER . info ( 'Instantiating ' + classname ) for argname in args : if args [ argname ] is not None : if argname in forbidden : LOGGER . debug ( 'Param ' + argname + '*******' ) else : LOGGER . debug ( 'Param ' + argname + '=' + str ( args [ argname ] ) )
Log the instantiation of an object to the given logger.
5,776
def filter_params ( params ) : if params is not None : new_params = copy . deepcopy ( params ) new_params = dict ( ( k , v ) for k , v in new_params . items ( ) if v is not None ) for key , value in new_params . items ( ) : if isinstance ( value , bool ) : new_params [ key ] = "true" if value else "false" return new_params
Convert dict values if a value is of bool type: False -> 'false', True -> 'true'.
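A usage sketch; None values are dropped and booleans are stringified, while other values pass through untouched:

    params = {'page': 1, 'active': True, 'q': None, 'deep': False}
    assert filter_params(params) == {'page': 1, 'active': 'true', 'deep': 'false'}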
5,777
def __set_revlookup_auth_string ( self , username , password ) : auth = b2handle . utilhandle . create_authentication_string ( username , password ) self . __revlookup_auth_string = auth
Creates and sets the authentication string for accessing the reverse lookup servlet. No return value; the string is set as an attribute on the client instance.
5,778
def load_from_JSON ( json_filename ) : try : jsonfilecontent = json . loads ( open ( json_filename , 'r' ) . read ( ) ) except ValueError as exc : raise CredentialsFormatError ( msg = "Invalid JSON syntax: " + str ( exc ) ) instance = PIDClientCredentials ( credentials_filename = json_filename , ** jsonfilecontent ) return instance
Create a new instance of PIDClientCredentials with information read from a local JSON file.
5,779
def fixture ( app , fixtures , fixtures_dir = 'fixtures' , raise_does_not_exist = False , reversible = True , models = [ ] ) : fixture_path = os . path . join ( app . __path__ [ 0 ] , fixtures_dir ) if isinstance ( fixtures , string_types ) : fixtures = [ fixtures ] def get_format ( fixture ) : return os . path . splitext ( fixture ) [ 1 ] [ 1 : ] def get_objects ( ) : for fixture in fixtures : with open ( os . path . join ( fixture_path , fixture ) , 'rb' ) as f : objects = serializers . deserialize ( get_format ( fixture ) , f , ignorenonexistent = True ) for obj in objects : yield obj def patch_apps ( func ) : @ wraps ( func ) def inner ( apps , schema_editor ) : try : original_apps = django . core . serializers . python . apps django . core . serializers . python . apps = apps return func ( apps , schema_editor ) finally : django . core . serializers . python . apps = original_apps return inner @ patch_apps def load_fixture ( apps , schema_editor ) : for obj in get_objects ( ) : obj . save ( ) @ patch_apps def unload_fixture ( apps , schema_editor ) : for obj in get_objects ( ) : model = apps . get_model ( app . __name__ , obj . object . __class__ . __name__ ) kwargs = dict ( ) if 'id' in obj . object . __dict__ : kwargs . update ( id = obj . object . __dict__ . get ( 'id' ) ) elif 'slug' in obj . object . __dict__ : kwargs . update ( slug = obj . object . __dict__ . get ( 'slug' ) ) else : kwargs . update ( ** obj . object . __dict__ ) try : model . objects . get ( ** kwargs ) . delete ( ) except model . DoesNotExist : if not raise_does_not_exist : raise FixtureObjectDoesNotExist ( ( "Model %s instance with " "kwargs %s does not exist." % ( model , kwargs ) ) ) kwargs = dict ( code = load_fixture ) if reversible : kwargs [ 'reverse_code' ] = unload_fixture return kwargs
Load fixtures using a data migration.
5,780
def nonzero ( self ) : return [ i for i in xrange ( self . size ( ) ) if self . test ( i ) ]
Get all non-zero bits.
5,781
def tohexstring ( self ) : val = self . tostring ( ) st = "{0:0x}" . format ( int ( val , 2 ) ) return st . zfill ( len ( self . bitmap ) * 2 )
Returns a hexadecimal string
5,782
def fromhexstring ( cls , hexstring ) : bitstring = format ( int ( hexstring , 16 ) , "0" + str ( len ( hexstring ) / 4 ) + "b" ) return cls . fromstring ( bitstring )
Construct BitMap from hex string
5,783
def fromstring ( cls , bitstring ) : nbits = len ( bitstring ) bm = cls ( nbits ) for i in xrange ( nbits ) : if bitstring [ - i - 1 ] == '1' : bm . set ( i ) elif bitstring [ - i - 1 ] != '0' : raise Exception ( "Invalid bit string!" ) return bm
Construct BitMap from string
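A sketch tying the BitMap entries above together (Python 2, given xrange in the class); bit 0 corresponds to the rightmost character of the string:

    bm = BitMap.fromstring('10100')
    assert bm.nonzero() == [2, 4]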
5,784
def get_valid_https_verify ( value ) : http_verify_value = value bool_values = { 'false' : False , 'true' : True } if isinstance ( value , bool ) : http_verify_value = value elif ( isinstance ( value , str ) or isinstance ( value , unicode ) ) and value . lower ( ) in bool_values . keys ( ) : http_verify_value = bool_values [ value . lower ( ) ] return http_verify_value
Take a value that can be a boolean or the string representation of a boolean and return it as a boolean. If it is neither, return it as a string.
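A usage sketch (the unicode check marks this as Python 2 code); values that are neither booleans nor boolean-like strings, such as a CA bundle path, pass through unchanged:

    assert get_valid_https_verify(True) is True
    assert get_valid_https_verify('False') is False
    assert get_valid_https_verify('/path/to/ca_bundle.pem') == '/path/to/ca_bundle.pem'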
5,785
def setup ( self ) : super ( CleanCSSFilter , self ) . setup ( ) self . root = current_app . config . get ( 'COLLECT_STATIC_ROOT' )
Initialize the filter just before it is used.
5,786
def rebase_opt ( self ) : if not hasattr ( self , '_rebase_opt' ) : out , err = Popen ( [ 'cleancss' , '--version' ] , stdout = PIPE ) . communicate ( ) ver = int ( out [ : out . index ( b'.' ) ] ) self . _rebase_opt = [ '--root' , self . root ] if ver == 3 else [ ] return self . _rebase_opt
Determine which option name to use.
5,787
def input ( self , _in , out , ** kw ) : args = [ self . binary or 'cleancss' ] + self . rebase_opt if self . extra_args : args . extend ( self . extra_args ) self . subprocess ( args , out , _in )
Input filtering.
5,788
def output ( self , _in , out , ** kwargs ) : out . write ( 'angular.module("{0}", ["gettext"]).run(' '["gettextCatalog", function (gettextCatalog) {{' . format ( self . catalog_name ) ) out . write ( _in . read ( ) ) out . write ( '}]);' )
Wrap the translation in an Angular module.
5,789
def input ( self , _in , out , ** kwargs ) : language_code = _re_language_code . search ( _in . read ( ) ) . group ( 'language_code' ) _in . seek ( 0 ) catalog = read_po ( _in ) out . write ( 'gettextCatalog.setStrings("{0}", ' . format ( language_code ) ) out . write ( json . dumps ( { key : value . string for key , value in catalog . _messages . items ( ) if key and value . string } ) ) out . write ( ');' )
Process an individual translation file.
5,790
def retrieve_info ( self ) : path = urlparse ( self . url ) . path path = path . split ( '/' ) [ 1 : ] sanity_filter = re . compile ( '[\da-z-_]+' , re . IGNORECASE ) self . product = sanity_filter . match ( path [ 0 ] ) . group ( 0 ) self . component = sanity_filter . match ( path [ 1 ] ) . group ( 0 ) self . issue_id = int ( path [ 3 ] ) github_url = '%s/%s/%s/issues/%s' % ( _URL_BASE , self . product , self . component , self . issue_id ) result = requests . get ( github_url ) self . status_code = result . status_code if result . status_code == 200 : result = result . json ( ) self . title = result [ 'title' ] self . reporter = result [ 'user' ] [ 'login' ] if result [ 'assignee' ] is not None : self . assignee = result [ 'assignee' ] [ 'login' ] self . status = result [ 'state' ] self . created_at = result [ 'created_at' ] self . updated_at = result [ 'updated_at' ] self . closed_at = result [ 'closed_at' ] elif result . status_code == 404 : self . title = 'private issue'
Query the GitHub API to retrieve the needed info.
5,791
def disk_cache ( cls , basename , function , * args , method = True , ** kwargs ) : @ utility . disk_cache ( basename , cls . directory ( ) , method = method ) def wrapper ( * args , ** kwargs ) : return function ( * args , ** kwargs ) return wrapper ( * args , ** kwargs )
Cache the return value in the correct cache directory. Set method to False for static methods.
5,792
def download ( cls , url , filename = None ) : return utility . download ( url , cls . directory ( ) , filename )
Download a file into the correct cache directory.
5,793
def directory ( cls , prefix = None ) : prefix = prefix or utility . read_config ( ) . directory name = cls . __name__ . lower ( ) directory = os . path . expanduser ( os . path . join ( prefix , name ) ) utility . ensure_directory ( directory ) return directory
Path that should be used for caching. Different for all subclasses.
5,794
def get_last_rconfiguration_id ( topic_id , remoteci_id , db_conn = None ) : db_conn = db_conn or flask . g . db_conn __TABLE = models . JOBS query = sql . select ( [ __TABLE . c . rconfiguration_id ] ) . order_by ( sql . desc ( __TABLE . c . created_at ) ) . where ( sql . and_ ( __TABLE . c . topic_id == topic_id , __TABLE . c . remoteci_id == remoteci_id ) ) . limit ( 1 ) rconfiguration_id = db_conn . execute ( query ) . fetchone ( ) if rconfiguration_id is not None : return str ( rconfiguration_id [ 0 ] ) else : return None
Get the rconfiguration_id of the last job run by the remoteci.
5,795
def get_remoteci_configuration ( topic_id , remoteci_id , db_conn = None ) : db_conn = db_conn or flask . g . db_conn last_rconfiguration_id = get_last_rconfiguration_id ( topic_id , remoteci_id , db_conn = db_conn ) _RCONFIGURATIONS = models . REMOTECIS_RCONFIGURATIONS _J_RCONFIGURATIONS = models . JOIN_REMOTECIS_RCONFIGURATIONS query = sql . select ( [ _RCONFIGURATIONS ] ) . select_from ( _J_RCONFIGURATIONS . join ( _RCONFIGURATIONS ) ) . where ( _J_RCONFIGURATIONS . c . remoteci_id == remoteci_id ) query = query . where ( sql . and_ ( _RCONFIGURATIONS . c . state != 'archived' , _RCONFIGURATIONS . c . topic_id == topic_id ) ) query = query . order_by ( sql . desc ( _RCONFIGURATIONS . c . created_at ) ) query = query . order_by ( sql . asc ( _RCONFIGURATIONS . c . name ) ) all_rconfigurations = db_conn . execute ( query ) . fetchall ( ) if len ( all_rconfigurations ) > 0 : for i in range ( len ( all_rconfigurations ) ) : if str ( all_rconfigurations [ i ] [ 'id' ] ) == last_rconfiguration_id : return all_rconfigurations [ i - 1 ] return all_rconfigurations [ 0 ] else : return None
Get a remoteci configuration. This iterates over each configuration in a round-robin manner, depending on the last rconfiguration used by the remoteci.
5,796
def ignore_whitespace_text_nodes ( cls , wrapped_node ) : for child in wrapped_node . children : if child . is_text and child . value . strip ( ) == '' : child . delete ( ) else : cls . ignore_whitespace_text_nodes ( child )
Find and delete any text nodes containing nothing but whitespace in the given node and its descendants.
5,797
def verify_existence_and_get ( id , table , name = None , get_id = False ) : where_clause = table . c . id == id if name : where_clause = table . c . name == name if 'state' in table . columns : where_clause = sql . and_ ( table . c . state != 'archived' , where_clause ) query = sql . select ( [ table ] ) . where ( where_clause ) result = flask . g . db_conn . execute ( query ) . fetchone ( ) if result is None : raise dci_exc . DCIException ( 'Resource "%s" not found.' % id , status_code = 404 ) if get_id : return result . id return result
Verify the existence of a resource in the database and return it if it exists according to the condition, or raise an exception otherwise.
5,798
def user_topic_ids ( user ) : if user . is_super_admin ( ) or user . is_read_only_user ( ) : query = sql . select ( [ models . TOPICS ] ) else : query = ( sql . select ( [ models . JOINS_TOPICS_TEAMS . c . topic_id ] ) . select_from ( models . JOINS_TOPICS_TEAMS . join ( models . TOPICS , sql . and_ ( models . JOINS_TOPICS_TEAMS . c . topic_id == models . TOPICS . c . id , models . TOPICS . c . state == 'active' ) ) ) . where ( sql . or_ ( models . JOINS_TOPICS_TEAMS . c . team_id . in_ ( user . teams_ids ) , models . JOINS_TOPICS_TEAMS . c . team_id . in_ ( user . child_teams_ids ) ) ) ) rows = flask . g . db_conn . execute ( query ) . fetchall ( ) return [ str ( row [ 0 ] ) for row in rows ]
Retrieve the list of topic IDs a user has access to.
5,799
def verify_team_in_topic ( user , topic_id ) : if user . is_super_admin ( ) or user . is_read_only_user ( ) : return if str ( topic_id ) not in user_topic_ids ( user ) : raise dci_exc . Unauthorized ( )
Verify that the user's team belongs to the given topic. If the user is an admin or read-only user, then it belongs to all topics.