idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
48,200 | def quattodcm ( quat ) : dcm = ( quat [ - 1 ] ** 2 - np . inner ( quat [ 0 : 3 ] , quat [ 0 : 3 ] ) ) * np . eye ( 3 , 3 ) + 2 * np . outer ( quat [ 0 : 3 ] , quat [ 0 : 3 ] ) + 2 * quat [ - 1 ] * hat_map ( quat [ 0 : 3 ] ) return dcm | Convert quaternion to DCM |
48,201 | def dcmdottoang_vel ( R , Rdot ) : w = vee_map ( Rdot . dot ( R . T ) ) Omega = vee_map ( R . T . dot ( Rdot ) ) return ( w , Omega ) | Convert a rotation matrix to angular velocity w - angular velocity in inertial frame Omega - angular velocity in body frame |
48,202 | def ang_veltoaxisangledot ( angle , axis , Omega ) : angle_dot = axis . dot ( Omega ) axis_dot = 1 / 2 * ( hat_map ( axis ) - 1 / np . tan ( angle / 2 ) * hat_map ( axis ) . dot ( hat_map ( axis ) ) ) . dot ( Omega ) return angle_dot , axis_dot | Compute kinematics for axis angle representation |
48,203 | def axisangledottoang_vel ( angle , axis , angle_dot , axis_dot ) : Omega = angle_dot * axis + np . sin ( angle ) * axis_dot - ( 1 - np . cos ( angle ) ) * hat_map ( axis ) . dot ( axis_dot ) return Omega | Convert axis angle represetnation to angular velocity in body frame |
48,204 | def list_repos ( owner = None , ** kwargs ) : client = get_repos_api ( ) api_kwargs = { } api_kwargs . update ( utils . get_page_kwargs ( ** kwargs ) ) repos_list = client . repos_list_with_http_info if owner is not None : api_kwargs [ "owner" ] = owner if hasattr ( client , "repos_list0_with_http_info" ) : repos_list = client . repos_list0_with_http_info else : if hasattr ( client , "repos_all_list_with_http_info" ) : repos_list = client . repos_all_list_with_http_info with catch_raise_api_exception ( ) : res , _ , headers = repos_list ( ** api_kwargs ) ratelimits . maybe_rate_limit ( client , headers ) page_info = PageInfo . from_headers ( headers ) return [ x . to_dict ( ) for x in res ] , page_info | List repositories in a namespace . |
48,205 | def rm_file_or_dir ( path , ignore_errors = True ) : if os . path . exists ( path ) : if os . path . isdir ( path ) : if os . path . islink ( path ) : os . unlink ( path ) else : shutil . rmtree ( path , ignore_errors = ignore_errors ) else : if os . path . islink ( path ) : os . unlink ( path ) else : os . remove ( path ) | Helper function to clean a certain filepath |
48,206 | def unique ( self , col_or_col_list ) : if isinstance ( col_or_col_list , list ) : col_is_list = True col_list = col_or_col_list else : col_is_list = False col_list = [ col_or_col_list ] output = [ ] for col in col_list : if self . auto_cache or self . cache_valid ( col ) : if not self . cache_valid ( col ) : self . cache_factor ( [ col ] ) col_values_rootdir = self [ col ] . rootdir + '.values' carray_values = bcolz . carray ( rootdir = col_values_rootdir , mode = 'r' ) values = list ( carray_values ) else : _ , values = ctable_ext . factorize ( self [ col ] ) values = values . values ( ) output . append ( values ) if not col_is_list : output = output [ 0 ] return output | Return a list of unique values of a column or a list of lists of column list |
48,207 | def aggregate_groups ( self , ct_agg , nr_groups , skip_key , carray_factor , groupby_cols , agg_ops , dtype_dict , bool_arr = None ) : for col in groupby_cols : result_array = ctable_ext . groupby_value ( self [ col ] , carray_factor , nr_groups , skip_key ) if bool_arr is not None : result_array = np . delete ( result_array , skip_key ) ct_agg . addcol ( result_array , name = col ) del result_array for input_col_name , output_col_name , agg_op in agg_ops : input_col = self [ input_col_name ] output_col_dtype = dtype_dict [ output_col_name ] input_buffer = np . empty ( input_col . chunklen , dtype = input_col . dtype ) output_buffer = np . zeros ( nr_groups , dtype = output_col_dtype ) if agg_op == 'sum' : ctable_ext . aggregate_sum ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) elif agg_op == 'mean' : ctable_ext . aggregate_mean ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) elif agg_op == 'std' : ctable_ext . aggregate_std ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) elif agg_op == 'count' : ctable_ext . aggregate_count ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) elif agg_op == 'count_distinct' : ctable_ext . aggregate_count_distinct ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) elif agg_op == 'sorted_count_distinct' : ctable_ext . aggregate_sorted_count_distinct ( input_col , carray_factor , nr_groups , skip_key , input_buffer , output_buffer ) else : raise KeyError ( 'Unknown aggregation operation ' + str ( agg_op ) ) if bool_arr is not None : output_buffer = np . delete ( output_buffer , skip_key ) ct_agg . addcol ( output_buffer , name = output_col_name ) del output_buffer ct_agg . delcol ( 'tmp_col_bquery__' ) | Perform aggregation and place the result in the given ctable . |
48,208 | def factorize_groupby_cols ( self , groupby_cols ) : factor_list = [ ] values_list = [ ] for col in groupby_cols : if self . auto_cache or self . cache_valid ( col ) : if not self . cache_valid ( col ) : self . cache_factor ( [ col ] ) col_rootdir = self [ col ] . rootdir col_factor_rootdir = col_rootdir + '.factor' col_values_rootdir = col_rootdir + '.values' col_carray_factor = bcolz . carray ( rootdir = col_factor_rootdir , mode = 'r' ) col_carray_values = bcolz . carray ( rootdir = col_values_rootdir , mode = 'r' ) else : col_carray_factor , values = ctable_ext . factorize ( self [ col ] ) col_carray_values = bcolz . carray ( np . fromiter ( values . values ( ) , dtype = self [ col ] . dtype ) ) factor_list . append ( col_carray_factor ) values_list . append ( col_carray_values ) return factor_list , values_list | factorizes all columns that are used in the groupby it will use cache carrays if available if not yet auto_cache is valid it will create cache carrays |
48,209 | def _int_array_hash ( input_list ) : list_len = len ( input_list ) arr_len = len ( input_list [ 0 ] ) mult_arr = np . full ( arr_len , 1000003 , dtype = np . long ) value_arr = np . full ( arr_len , 0x345678 , dtype = np . long ) for i , current_arr in enumerate ( input_list ) : index = list_len - i - 1 value_arr ^= current_arr value_arr *= mult_arr mult_arr += ( 82520 + index + index ) value_arr += 97531 result_carray = bcolz . carray ( value_arr ) del value_arr return result_carray | A function to calculate a hash value of multiple integer values not used at the moment |
48,210 | def create_group_column_factor ( self , factor_list , groupby_cols , cache = False ) : if not self . rootdir : input_rootdir = None col_rootdir = None col_factor_rootdir = None col_values_rootdir = None col_factor_rootdir_tmp = None col_values_rootdir_tmp = None else : input_rootdir = tempfile . mkdtemp ( prefix = 'bcolz-' ) col_factor_rootdir_tmp = tempfile . mkdtemp ( prefix = 'bcolz-' ) col_values_rootdir_tmp = tempfile . mkdtemp ( prefix = 'bcolz-' ) group_array = bcolz . zeros ( 0 , dtype = np . int64 , expectedlen = len ( self ) , rootdir = input_rootdir , mode = 'w' ) factor_table = bcolz . ctable ( factor_list , names = groupby_cols ) ctable_iter = factor_table . iter ( outcols = groupby_cols , out_flavor = tuple ) ctable_ext . create_group_index ( ctable_iter , len ( groupby_cols ) , group_array ) carray_factor = bcolz . carray ( [ ] , dtype = 'int64' , expectedlen = self . size , rootdir = col_factor_rootdir_tmp , mode = 'w' ) carray_factor , values = ctable_ext . factorize ( group_array , labels = carray_factor ) carray_factor . flush ( ) carray_values = bcolz . carray ( np . fromiter ( values . values ( ) , dtype = np . int64 ) , rootdir = col_values_rootdir_tmp , mode = 'w' ) carray_values . flush ( ) del group_array if cache : rm_file_or_dir ( input_rootdir , ignore_errors = True ) if cache : col_rootdir = os . path . join ( self . rootdir , self . create_group_base_name ( groupby_cols ) ) col_factor_rootdir = col_rootdir + '.factor' col_values_rootdir = col_rootdir + '.values' lock_file = col_rootdir + '.lock' if not os . path . exists ( lock_file ) : uid = str ( uuid . uuid4 ( ) ) try : with open ( lock_file , 'a+' ) as fn : fn . write ( uid + '\n' ) with open ( lock_file , 'r' ) as fn : temp = fn . read ( ) . splitlines ( ) if temp [ 0 ] == uid : lock = True else : lock = False del temp except : lock = False else : lock = False if lock : rm_file_or_dir ( col_factor_rootdir , ignore_errors = False ) shutil . 
move ( col_factor_rootdir_tmp , col_factor_rootdir ) carray_factor = bcolz . carray ( rootdir = col_factor_rootdir , mode = 'r' ) rm_file_or_dir ( col_values_rootdir , ignore_errors = False ) shutil . move ( col_values_rootdir_tmp , col_values_rootdir ) carray_values = bcolz . carray ( rootdir = col_values_rootdir , mode = 'r' ) else : self . _dir_clean_list . append ( col_factor_rootdir ) self . _dir_clean_list . append ( col_values_rootdir ) return carray_factor , carray_values | Create a unique factorized column out of several individual columns |
48,211 | def make_group_index ( self , groupby_cols , bool_arr ) : factor_list , values_list = self . factorize_groupby_cols ( groupby_cols ) if len ( factor_list ) == 0 : tmp_rootdir = self . create_tmp_rootdir ( ) carray_factor = bcolz . zeros ( len ( self ) , dtype = 'int64' , rootdir = tmp_rootdir , mode = 'w' ) carray_values = [ 'Total' ] elif len ( factor_list ) == 1 : carray_factor = factor_list [ 0 ] carray_values = values_list [ 0 ] else : if self . group_cache_valid ( col_list = groupby_cols ) : col_rootdir = os . path . join ( self . rootdir , self . create_group_base_name ( groupby_cols ) ) col_factor_rootdir = col_rootdir + '.factor' carray_factor = bcolz . carray ( rootdir = col_factor_rootdir ) col_values_rootdir = col_rootdir + '.values' carray_values = bcolz . carray ( rootdir = col_values_rootdir ) else : carray_factor , carray_values = self . create_group_column_factor ( factor_list , groupby_cols , cache = self . auto_cache ) nr_groups = len ( carray_values ) skip_key = None if bool_arr is not None : tmp_rootdir = self . create_tmp_rootdir ( ) carray_factor = bcolz . eval ( '(factor + 1) * bool - 1' , user_dict = { 'factor' : carray_factor , 'bool' : bool_arr } , rootdir = tmp_rootdir , mode = 'w' ) tmp_rootdir = self . create_tmp_rootdir ( ) labels = bcolz . carray ( [ ] , dtype = 'int64' , expectedlen = len ( carray_factor ) , rootdir = tmp_rootdir , mode = 'w' ) carray_factor , values = ctable_ext . factorize ( carray_factor , labels ) filter_check = [ key for key , value in values . items ( ) if value == - 1 ] if filter_check : skip_key = filter_check [ 0 ] nr_groups = len ( values ) if skip_key is None : skip_key = nr_groups return carray_factor , nr_groups , skip_key | Create unique groups for groupby loop |
48,212 | def create_tmp_rootdir ( self ) : if self . rootdir : tmp_rootdir = tempfile . mkdtemp ( prefix = 'bcolz-' ) self . _dir_clean_list . append ( tmp_rootdir ) else : tmp_rootdir = None return tmp_rootdir | create a rootdir that we can destroy later again |
48,213 | def clean_tmp_rootdir ( self ) : for tmp_rootdir in list ( self . _dir_clean_list ) : rm_file_or_dir ( tmp_rootdir ) self . _dir_clean_list . remove ( tmp_rootdir ) | clean up all used temporary rootdirs |
48,214 | def create_agg_ctable ( self , groupby_cols , agg_list , expectedlen , rootdir ) : dtype_dict = { } for col in groupby_cols : dtype_dict [ col ] = self [ col ] . dtype agg_ops_list = [ 'sum' , 'count' , 'count_distinct' , 'sorted_count_distinct' , 'mean' , 'std' ] agg_ops = [ ] for agg_info in agg_list : if not isinstance ( agg_info , list ) : output_col_name = agg_info input_col_name = agg_info agg_op = 'sum' else : input_col_name = agg_info [ 0 ] agg_op = agg_info [ 1 ] if len ( agg_info ) == 2 : output_col_name = input_col_name else : output_col_name = agg_info [ 2 ] if agg_op not in agg_ops_list : raise NotImplementedError ( 'Unknown Aggregation Type: ' + str ( agg_op ) ) if agg_op in ( 'count' , 'count_distinct' , 'sorted_count_distinct' ) : output_col_dtype = np . dtype ( np . int64 ) elif agg_op in ( 'mean' , 'std' ) : output_col_dtype = np . dtype ( np . float64 ) else : output_col_dtype = self [ input_col_name ] . dtype dtype_dict [ output_col_name ] = output_col_dtype agg_ops . append ( ( input_col_name , output_col_name , agg_op ) ) ct_agg = bcolz . ctable ( np . zeros ( expectedlen , [ ( 'tmp_col_bquery__' , np . bool ) ] ) , expectedlen = expectedlen , rootdir = rootdir ) return ct_agg , dtype_dict , agg_ops | Create a container for the output table a dictionary describing it s columns and a list of tuples describing aggregation operations to perform . |
48,215 | def is_in_ordered_subgroups ( self , basket_col = None , bool_arr = None , _max_len_subgroup = 1000 ) : assert basket_col is not None if bool_arr is None : return None if self . auto_cache and bool_arr . rootdir is not None : rootdir = self . create_tmp_rootdir ( ) else : rootdir = None return ctable_ext . is_in_ordered_subgroups ( self [ basket_col ] , bool_arr = bool_arr , rootdir = rootdir , _max_len_subgroup = _max_len_subgroup ) | Expands the filter using a specified column |
48,216 | def with_metaclass ( Type , skip_attrs = set ( ( '__dict__' , '__weakref__' ) ) ) : def _clone_with_metaclass ( Class ) : attrs = dict ( ( key , value ) for key , value in items ( vars ( Class ) ) if key not in skip_attrs ) return Type ( Class . __name__ , Class . __bases__ , attrs ) return _clone_with_metaclass | Class decorator to set metaclass . |
48,217 | def ensure_tuple ( length , tuples ) : for elem in tuples : if not isinstance ( elem , ( tuple , list ) ) : yield ( elem , ) * length continue l = len ( elem ) if l == length : yield elem elif l > length : yield tuple ( elem [ : length ] ) elif l < length : yield ( elem [ 0 ] , ) * ( length - l ) + tuple ( elem ) | Yield length - sized tuples from the given collection . Will truncate longer tuples to the desired length and pad using the leading element if shorter . |
48,218 | def print_version ( ) : click . echo ( "Versions:" ) click . secho ( "CLI Package Version: %(version)s" % { "version" : click . style ( get_cli_version ( ) , bold = True ) } ) click . secho ( "API Package Version: %(version)s" % { "version" : click . style ( get_api_version ( ) , bold = True ) } ) | Print the environment versions . |
48,219 | def main ( ctx , opts , version ) : if version : print_version ( ) elif ctx . invoked_subcommand is None : click . echo ( ctx . get_help ( ) ) | Handle entrypoint to CLI . |
48,220 | def upload_file ( ctx , opts , owner , repo , filepath , skip_errors , md5_checksum ) : filename = click . format_filename ( filepath ) basename = os . path . basename ( filename ) click . echo ( "Requesting file upload for %(filename)s ... " % { "filename" : click . style ( basename , bold = True ) } , nl = False , ) context_msg = "Failed to request file upload!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg , reraise_on_error = skip_errors ) : with maybe_spinner ( opts ) : identifier , upload_url , upload_fields = request_file_upload ( owner = owner , repo = repo , filepath = filename , md5_checksum = md5_checksum ) click . secho ( "OK" , fg = "green" ) context_msg = "Failed to upload file!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : filesize = utils . get_file_size ( filepath = filename ) label = "Uploading %(filename)s:" % { "filename" : click . style ( basename , bold = True ) } with click . progressbar ( length = filesize , label = label , fill_char = click . style ( "#" , fg = "green" ) , empty_char = click . style ( "-" , fg = "red" ) , ) as pb : def progress_callback ( monitor ) : pb . update ( monitor . bytes_read ) api_upload_file ( upload_url = upload_url , upload_fields = upload_fields , filepath = filename , callback = progress_callback , ) return identifier | Upload a package file via the API . |
48,221 | def validate_create_package ( ctx , opts , owner , repo , package_type , skip_errors , ** kwargs ) : click . echo ( "Checking %(package_type)s package upload parameters ... " % { "package_type" : click . style ( package_type , bold = True ) } , nl = False , ) context_msg = "Failed to validate upload parameters!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg , reraise_on_error = skip_errors ) : with maybe_spinner ( opts ) : api_validate_create_package ( package_format = package_type , owner = owner , repo = repo , ** kwargs ) click . secho ( "OK" , fg = "green" ) return True | Check new package parameters via the API . |
48,222 | def create_package ( ctx , opts , owner , repo , package_type , skip_errors , ** kwargs ) : click . echo ( "Creating a new %(package_type)s package ... " % { "package_type" : click . style ( package_type , bold = True ) } , nl = False , ) context_msg = "Failed to create package!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg , reraise_on_error = skip_errors ) : with maybe_spinner ( opts ) : slug_perm , slug = api_create_package ( package_format = package_type , owner = owner , repo = repo , ** kwargs ) click . secho ( "OK" , fg = "green" ) click . echo ( "Created: %(owner)s/%(repo)s/%(slug)s (%(slug_perm)s)" % { "owner" : click . style ( owner , fg = "magenta" ) , "repo" : click . style ( repo , fg = "magenta" ) , "slug" : click . style ( slug , fg = "green" ) , "slug_perm" : click . style ( slug_perm , bold = True ) , } ) return slug_perm , slug | Create a new package via the API . |
48,223 | def upload_files_and_create_package ( ctx , opts , package_type , owner_repo , dry_run , no_wait_for_sync , wait_interval , skip_errors , sync_attempts , ** kwargs ) : owner , repo = owner_repo validate_create_package ( ctx = ctx , opts = opts , owner = owner , repo = repo , package_type = package_type , skip_errors = skip_errors , ** kwargs ) md5_checksums = { } for k , v in kwargs . items ( ) : if not v or not k . endswith ( "_file" ) : continue md5_checksums [ k ] = validate_upload_file ( ctx = ctx , opts = opts , owner = owner , repo = repo , filepath = v , skip_errors = skip_errors , ) if dry_run : click . echo ( ) click . secho ( "You requested a dry run so skipping upload." , fg = "yellow" ) return for k , v in kwargs . items ( ) : if not v or not k . endswith ( "_file" ) : continue kwargs [ k ] = upload_file ( ctx = ctx , opts = opts , owner = owner , repo = repo , filepath = v , skip_errors = skip_errors , md5_checksum = md5_checksums [ k ] , ) _ , slug = create_package ( ctx = ctx , opts = opts , owner = owner , repo = repo , package_type = package_type , skip_errors = skip_errors , ** kwargs ) if no_wait_for_sync : return wait_for_package_sync ( ctx = ctx , opts = opts , owner = owner , repo = repo , slug = slug , wait_interval = wait_interval , skip_errors = skip_errors , attempts = sync_attempts , ) | Upload package files and create a new package . |
48,224 | def memoize ( func ) : func . _result_cache = { } @ wraps ( func ) def _memoized_func ( * args , ** kwargs ) : key = ( args , tuple ( sorted ( kwargs . items ( ) ) ) ) if key in func . _result_cache : return func . _result_cache [ key ] else : result = func ( * args , ** kwargs ) if isinstance ( result , GeneratorType ) or not isinstance ( result , Hashable ) : raise TypeError ( "Can't memoize a generator or non-hashable object!" ) func . _result_cache [ key ] = result return result return _memoized_func | Decorator to cause a function to cache it s results for each combination of inputs and return the cached result on subsequent calls . Does not support named arguments or arg values that are not hashable . |
48,225 | def aes_encrypt ( base64_encryption_key , data ) : if isinstance ( data , text_type ) : data = data . encode ( "UTF-8" ) aes_key_bytes , hmac_key_bytes = _extract_keys ( base64_encryption_key ) data = _pad ( data ) iv_bytes = os . urandom ( AES_BLOCK_SIZE ) cipher = AES . new ( aes_key_bytes , mode = AES . MODE_CBC , IV = iv_bytes ) data = iv_bytes + cipher . encrypt ( data ) hmac_signature = hmac . new ( hmac_key_bytes , data , hashlib . sha256 ) . digest ( ) return as_base64 ( data + hmac_signature ) | Encrypt data with AES - CBC and sign it with HMAC - SHA256 |
48,226 | def aes_decrypt ( base64_encryption_key , base64_data ) : data = from_base64 ( base64_data ) aes_key_bytes , hmac_key_bytes = _extract_keys ( base64_encryption_key ) data , hmac_signature = data [ : - HMAC_SIG_SIZE ] , data [ - HMAC_SIG_SIZE : ] if hmac . new ( hmac_key_bytes , data , hashlib . sha256 ) . digest ( ) != hmac_signature : raise AuthenticationError ( "HMAC authentication failed" ) iv_bytes , data = data [ : AES_BLOCK_SIZE ] , data [ AES_BLOCK_SIZE : ] cipher = AES . new ( aes_key_bytes , AES . MODE_CBC , iv_bytes ) data = cipher . decrypt ( data ) return _unpad ( data ) | Verify HMAC - SHA256 signature and decrypt data with AES - CBC |
48,227 | def whoami ( ctx , opts ) : click . echo ( "Retrieving your authentication status from the API ... " , nl = False ) context_msg = "Failed to retrieve your authentication status!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : is_auth , username , email , name = get_user_brief ( ) click . secho ( "OK" , fg = "green" ) click . echo ( "You are authenticated as:" ) if not is_auth : click . secho ( "Nobody (i.e. anonymous user)" , fg = "yellow" ) else : click . secho ( "%(name)s (slug: %(username)s, email: %(email)s)" % { "name" : click . style ( name , fg = "cyan" ) , "username" : click . style ( username , fg = "magenta" ) , "email" : click . style ( email , fg = "green" ) , } ) | Retrieve your current authentication status . |
48,228 | def _clean ( self , value ) : value = ( str ( v ) for v in value ) if self . strip : value = ( v . strip ( ) for v in value ) if not self . empty : value = ( v for v in value if v ) return value | Perform a standardized pipline of operations across an iterable . |
48,229 | def native ( self , value , context = None ) : separator = self . separator . strip ( ) if self . strip and hasattr ( self . separator , 'strip' ) else self . separator value = super ( ) . native ( value , context ) if value is None : return self . cast ( ) if hasattr ( value , 'split' ) : value = value . split ( separator ) value = self . _clean ( value ) try : return self . cast ( value ) if self . cast else value except Exception as e : raise Concern ( "{0} caught, failed to perform array transform: {1}" , e . __class__ . __name__ , str ( e ) ) | Convert the given string into a list of substrings . |
48,230 | def foreign ( self , value , context = None ) : if self . separator is None : separator = ' ' else : separator = self . separator . strip ( ) if self . strip and hasattr ( self . separator , 'strip' ) else self . separator value = self . _clean ( value ) try : value = separator . join ( value ) except Exception as e : raise Concern ( "{0} caught, failed to convert to string: {1}" , e . __class__ . __name__ , str ( e ) ) return super ( ) . foreign ( value ) | Construct a string - like representation for an iterable of string - like objects . |
48,231 | def tob32 ( val ) : ret = bytearray ( 4 ) ret [ 0 ] = ( val >> 24 ) & M8 ret [ 1 ] = ( val >> 16 ) & M8 ret [ 2 ] = ( val >> 8 ) & M8 ret [ 3 ] = val & M8 return ret | Return provided 32 bit value as a string of four bytes . |
48,232 | def bintoihex ( buf , spos = 0x0000 ) : c = 0 olen = len ( buf ) ret = "" while ( c + 0x10 ) <= olen : adr = c + spos l = ':10{0:04X}00' . format ( adr ) sum = 0x10 + ( ( adr >> 8 ) & M8 ) + ( adr & M8 ) for j in range ( 0 , 0x10 ) : nb = buf [ c + j ] l += '{0:02X}' . format ( nb ) sum = ( sum + nb ) & M8 l += '{0:02X}' . format ( ( ~ sum + 1 ) & M8 ) ret += l + '\n' c += 0x10 if c < olen : rem = olen - c sum = rem adr = c + spos l = ':{0:02X}{1:04X}00' . format ( rem , adr ) sum += ( ( adr >> 8 ) & M8 ) + ( adr & M8 ) for j in range ( 0 , rem ) : nb = buf [ c + j ] l += '{0:02X}' . format ( nb ) sum = ( sum + nb ) & M8 l += '{0:02X}' . format ( ( ~ sum + 1 ) & M8 ) ret += l + '\n' ret += ':00000001FF\n' return ret | Convert binary buffer to ihex and return as string . |
48,233 | def op_gen ( mcode ) : gen = op_tbl [ mcode [ 0 ] ] ret = gen [ 0 ] nargs = len ( gen ) i = 1 while i < nargs : if i < len ( mcode ) : ret |= ( mcode [ i ] & gen [ i ] [ 0 ] ) << gen [ i ] [ 1 ] i += 1 return ret | Generate a machine instruction using the op gen table . |
48,234 | def scanerror ( self , msg ) : error ( 'scan error: ' + msg + ' on line {}' . format ( self . sline ) ) sys . exit ( - 1 ) | Emit scan error and abort assembly . |
48,235 | def parsewarn ( self , msg , line = None ) : if line is None : line = self . sline self . dowarn ( 'warning: ' + msg + ' on line {}' . format ( line ) ) | Emit parse warning . |
48,236 | def parseerror ( self , msg , line = None ) : if line is None : line = self . sline error ( 'parse error: ' + msg + ' on line {}' . format ( line ) ) sys . exit ( - 2 ) | Emit parse error and abort assembly . |
48,237 | def parse ( self ) : self . __next__ ( ) while self . sym [ 'type' ] != 'EOF' : if self . sym [ 'type' ] == 'LABEL' : self . __label__ ( ) elif self . sym [ 'type' ] == 'MNEMONIC' : self . __instruction__ ( ) elif self . sym [ 'type' ] == 'NAME' or self . sym [ 'type' ] == 'ASSEMBLER' : self . __assembler__ ( ) else : self . parseerror ( 'Unexpected input {}/{}' . format ( self . sym [ 'type' ] , repr ( self . sym [ 'txt' ] ) ) ) for i in self . pl : if i [ 'cmd' ] [ 0 ] == 'SKP' : if i [ 'target' ] is not None : if i [ 'target' ] in self . jmptbl : iloc = i [ 'addr' ] dest = self . jmptbl [ i [ 'target' ] ] if dest > iloc : oft = dest - iloc - 1 if oft > M6 : self . parseerror ( 'Offset from SKP to ' + repr ( i [ 'target' ] ) + ' (' + hex ( oft ) + ') too large' , i [ 'line' ] ) else : i [ 'cmd' ] [ 2 ] = oft else : self . parseerror ( 'Target ' + repr ( i [ 'target' ] ) + ' does not follow SKP' , i [ 'line' ] ) else : self . parseerror ( 'Undefined target for SKP ' + repr ( i [ 'target' ] ) , i [ 'line' ] ) else : pass self . __mkopcodes__ ( ) | Parse input . |
48,238 | def resync ( ctx , opts , owner_repo_package , skip_errors , wait_interval , no_wait_for_sync , sync_attempts , ) : owner , source , slug = owner_repo_package resync_package ( ctx = ctx , opts = opts , owner = owner , repo = source , slug = slug , skip_errors = skip_errors ) if no_wait_for_sync : return wait_for_package_sync ( ctx = ctx , opts = opts , owner = owner , repo = source , slug = slug , wait_interval = wait_interval , skip_errors = skip_errors , attempts = sync_attempts , ) | Resynchronise a package in a repository . |
48,239 | def resync_package ( ctx , opts , owner , repo , slug , skip_errors ) : click . echo ( "Resynchonising the %(slug)s package ... " % { "slug" : click . style ( slug , bold = True ) } , nl = False , ) context_msg = "Failed to resynchronise package!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg , reraise_on_error = skip_errors ) : with maybe_spinner ( opts ) : api_resync_package ( owner = owner , repo = repo , identifier = slug ) click . secho ( "OK" , fg = "green" ) | Resynchronise a package . |
48,240 | def create_package ( package_format , owner , repo , ** kwargs ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : upload = getattr ( client , "packages_upload_%s_with_http_info" % package_format ) data , _ , headers = upload ( owner = owner , repo = repo , data = make_create_payload ( ** kwargs ) ) ratelimits . maybe_rate_limit ( client , headers ) return data . slug_perm , data . slug | Create a new package in a repository . |
48,241 | def validate_create_package ( package_format , owner , repo , ** kwargs ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : check = getattr ( client , "packages_validate_upload_%s_with_http_info" % package_format ) _ , _ , headers = check ( owner = owner , repo = repo , data = make_create_payload ( ** kwargs ) ) ratelimits . maybe_rate_limit ( client , headers ) return True | Validate parameters for creating a package . |
48,242 | def move_package ( owner , repo , identifier , destination ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : data , _ , headers = client . packages_move_with_http_info ( owner = owner , repo = repo , identifier = identifier , data = { "destination" : destination } , ) ratelimits . maybe_rate_limit ( client , headers ) return data . slug_perm , data . slug | Move a package to another repository . |
48,243 | def delete_package ( owner , repo , identifier ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : _ , _ , headers = client . packages_delete_with_http_info ( owner = owner , repo = repo , identifier = identifier ) ratelimits . maybe_rate_limit ( client , headers ) return True | Delete a package in a repository . |
48,244 | def resync_package ( owner , repo , identifier ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : data , _ , headers = client . packages_resync_with_http_info ( owner = owner , repo = repo , identifier = identifier ) ratelimits . maybe_rate_limit ( client , headers ) return data . slug_perm , data . slug | Resync a package in a repository . |
48,245 | def get_package_status ( owner , repo , identifier ) : client = get_packages_api ( ) with catch_raise_api_exception ( ) : data , _ , headers = client . packages_status_with_http_info ( owner = owner , repo = repo , identifier = identifier ) ratelimits . maybe_rate_limit ( client , headers ) return ( data . is_sync_completed , data . is_sync_failed , data . sync_progress , data . status_str , data . stage_str , data . status_reason , ) | Get the status for a package in a repository . |
48,246 | def get_package_formats ( ) : def get_parameters ( cls ) : params = { } dummy_kwargs = { k : "dummy" for k in cls . swagger_types } instance = cls ( ** dummy_kwargs ) for k , v in six . iteritems ( cls . swagger_types ) : attr = getattr ( cls , k ) docs = attr . __doc__ . strip ( ) . split ( "\n" ) doc = ( docs [ 1 ] if docs [ 1 ] else docs [ 0 ] ) . strip ( ) try : setattr ( instance , k , None ) required = False except ValueError : required = True params [ cls . attribute_map . get ( k ) ] = { "type" : v , "help" : doc , "required" : required , } return params return { key . replace ( "PackagesUpload" , "" ) . lower ( ) : get_parameters ( cls ) for key , cls in inspect . getmembers ( cloudsmith_api . models ) if key . startswith ( "PackagesUpload" ) } | Get the list of available package formats and parameters . |
48,247 | def get_package_format_names ( predicate = None ) : return [ k for k , v in six . iteritems ( get_package_formats ( ) ) if not predicate or predicate ( k , v ) ] | Get names for available package formats . |
48,248 | def catch_raise_api_exception ( ) : try : yield except _ApiException as exc : detail = None fields = None if exc . body : try : data = json . loads ( exc . body ) detail = data . get ( "detail" , None ) fields = data . get ( "fields" , None ) except ValueError : pass detail = detail or exc . reason raise ApiException ( exc . status , detail = detail , headers = exc . headers , body = exc . body , fields = fields ) | Context manager that translates upstream API exceptions . |
48,249 | def get_default_filepath ( cls ) : if not cls . config_files : return None if not cls . config_searchpath : return None filename = cls . config_files [ 0 ] filepath = cls . config_searchpath [ 0 ] return os . path . join ( filepath , filename ) | Get the default filepath for the configuratin file . |
48,250 | def create_default_file ( cls , data = None , mode = None ) : filepath = cls . get_default_filepath ( ) if not filepath : return False filename = os . path . basename ( filepath ) config = read_file ( get_data_path ( ) , filename ) data = data or { } for k , v in six . iteritems ( data ) : v = v or "" config = re . sub ( r"^(%(key)s) =[ ]*$" % { "key" : k } , "%(key)s = %(value)s" % { "key" : k , "value" : v } , config , flags = re . MULTILINE , ) dirpath = os . path . dirname ( filepath ) if not os . path . exists ( dirpath ) : os . makedirs ( dirpath ) with click . open_file ( filepath , "w+" ) as f : f . write ( config ) if mode is not None : os . chmod ( filepath , mode ) return True | Create a config file and override data if specified . |
48,251 | def has_default_file ( cls ) : for filename in cls . config_files : for searchpath in cls . config_searchpath : path = os . path . join ( searchpath , filename ) if os . path . exists ( path ) : return True return False | Check if a configuration file exists . |
48,252 | def load_config ( cls , opts , path = None , profile = None ) : if path and os . path . exists ( path ) : if os . path . isdir ( path ) : cls . config_searchpath . insert ( 0 , path ) else : cls . config_files . insert ( 0 , path ) config = cls . read_config ( ) values = config . get ( "default" , { } ) cls . _load_values_into_opts ( opts , values ) if profile and profile != "default" : values = config . get ( "profile:%s" % profile , { } ) cls . _load_values_into_opts ( opts , values ) return values | Load a configuration file into an options object . |
48,253 | def load_config_file ( self , path , profile = None ) : config_cls = self . get_config_reader ( ) return config_cls . load_config ( self , path , profile = profile ) | Load the standard config file . |
48,254 | def load_creds_file ( self , path , profile = None ) : config_cls = self . get_creds_reader ( ) return config_cls . load_config ( self , path , profile = profile ) | Load the credentials config file . |
48,255 | def api_headers ( self , value ) : value = validators . validate_api_headers ( "api_headers" , value ) self . _set_option ( "api_headers" , value ) | Set value for API headers . |
48,256 | def error_retry_codes ( self , value ) : if isinstance ( value , six . string_types ) : value = [ int ( x ) for x in value . split ( "," ) ] self . _set_option ( "error_retry_codes" , value ) | Set value for error_retry_codes . |
48,257 | def _set_option ( self , name , value , allow_clear = False ) : if not allow_clear : try : current_value = self . _get_option ( name ) if value is None and current_value is not None : return except AttributeError : pass self . opts [ name ] = value | Set value for an option . |
48,258 | def loads ( self , value , context = None ) : if value == '' or ( hasattr ( value , 'strip' ) and value . strip ( ) == '' ) : return None return self . native ( value ) | Attempt to load a string - based value into the native representation . Empty strings are treated as None values . |
48,259 | def dump ( self , fh , value , context = None ) : value = self . dumps ( value ) fh . write ( value ) return len ( value ) | Attempt to transform and write a string - based foreign value to the given file - like object . Returns the length written . |
48,260 | def rates ( ctx , opts ) : click . echo ( "Retrieving rate limits ... " , nl = False ) context_msg = "Failed to retrieve status!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : resources_limits = get_rate_limits ( ) click . secho ( "OK" , fg = "green" ) headers = [ "Resource" , "Throttled" , "Remaining" , "Interval (Seconds)" , "Reset" ] rows = [ ] for resource , limits in six . iteritems ( resources_limits ) : rows . append ( [ click . style ( resource , fg = "cyan" ) , click . style ( "Yes" if limits . throttled else "No" , fg = "red" if limits . throttled else "green" , ) , "%(remaining)s/%(limit)s" % { "remaining" : click . style ( six . text_type ( limits . remaining ) , fg = "yellow" ) , "limit" : click . style ( six . text_type ( limits . limit ) , fg = "yellow" ) , } , click . style ( six . text_type ( limits . interval ) , fg = "blue" ) , click . style ( six . text_type ( limits . reset ) , fg = "magenta" ) , ] ) if resources_limits : click . echo ( ) utils . pretty_print_table ( headers , rows ) click . echo ( ) num_results = len ( resources_limits ) list_suffix = "resource%s" % ( "s" if num_results != 1 else "" ) utils . pretty_print_list_info ( num_results = num_results , suffix = list_suffix ) | Check current API rate limits . |
48,261 | def service ( ctx , opts ) : click . echo ( "Retrieving service status ... " , nl = False ) context_msg = "Failed to retrieve status!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : status , version = get_status ( with_version = True ) click . secho ( "OK" , fg = "green" ) config = cloudsmith_api . Configuration ( ) click . echo ( ) click . echo ( "The service endpoint is: %(endpoint)s" % { "endpoint" : click . style ( config . host , bold = True ) } ) click . echo ( "The service status is: %(status)s" % { "status" : click . style ( status , bold = True ) } ) click . echo ( "The service version is: %(version)s " % { "version" : click . style ( version , bold = True ) } , nl = False , ) api_version = get_api_version_info ( ) if semver . compare ( version , api_version ) > 0 : click . secho ( "(maybe out-of-date)" , fg = "yellow" ) click . echo ( ) click . secho ( "The API library used by this CLI tool is built against " "service version: %(version)s" % { "version" : click . style ( api_version , bold = True ) } , fg = "yellow" , ) else : click . secho ( "(up-to-date)" , fg = "green" ) click . echo ( ) click . secho ( "The API library used by this CLI tool seems to be up-to-date." , fg = "green" ) | Check the status of the Cloudsmith service . |
48,262 | def _get_package_status ( package ) : status = package [ "status_str" ] or "Unknown" stage = package [ "stage_str" ] or "Unknown" if stage == "Fully Synchronised" : return status return "%(status)s / %(stage)s" % { "status" : status , "stage" : stage } | Get the status for a package . |
48,263 | def boolify ( value , nullable = False , return_string = False ) : if isinstance ( value , BOOL_COERCEABLE_TYPES ) : return bool ( value ) val = text_type ( value ) . strip ( ) . lower ( ) . replace ( '.' , '' , 1 ) if val . isnumeric ( ) : return bool ( float ( val ) ) elif val in BOOLISH_TRUE : return True elif nullable and val in NULL_STRINGS : return None elif val in BOOLISH_FALSE : return False else : try : return bool ( complex ( val ) ) except ValueError : if isinstance ( value , string_types ) and return_string : return value raise TypeCoercionError ( value , "The value %r cannot be boolified." % value ) | Convert a number string or sequence type into a pure boolean . |
48,264 | def typify ( value , type_hint = None ) : if isinstance ( value , string_types ) : value = value . strip ( ) elif type_hint is None : return value if isiterable ( type_hint ) : if isinstance ( type_hint , type ) and issubclass ( type_hint , Enum ) : try : return type_hint ( value ) except ValueError : return type_hint [ value ] type_hint = set ( type_hint ) if not ( type_hint - NUMBER_TYPES_SET ) : return numberify ( value ) elif not ( type_hint - STRING_TYPES_SET ) : return text_type ( value ) elif not ( type_hint - { bool , NoneType } ) : return boolify ( value , nullable = True ) elif not ( type_hint - ( STRING_TYPES_SET | { bool } ) ) : return boolify ( value , return_string = True ) elif not ( type_hint - ( STRING_TYPES_SET | { NoneType } ) ) : value = text_type ( value ) return None if value . lower ( ) == 'none' else value elif not ( type_hint - { bool , int } ) : return typify_str_no_hint ( text_type ( value ) ) else : raise NotImplementedError ( ) elif type_hint is not None : try : return boolify ( value ) if type_hint == bool else type_hint ( value ) except ValueError as e : raise TypeCoercionError ( value , text_type ( e ) ) else : return typify_str_no_hint ( value ) | Take a primitive value usually a string and try to make a more relevant type out of it . An optional type_hint will try to coerce the value to that type . |
48,265 | def delete ( ctx , opts , owner_repo_package , yes ) : owner , repo , slug = owner_repo_package delete_args = { "owner" : click . style ( owner , bold = True ) , "repo" : click . style ( repo , bold = True ) , "package" : click . style ( slug , bold = True ) , } prompt = "delete the %(package)s from %(owner)s/%(repo)s" % delete_args if not utils . confirm_operation ( prompt , assume_yes = yes ) : return click . echo ( "Deleting %(package)s from %(owner)s/%(repo)s ... " % delete_args , nl = False ) context_msg = "Failed to delete the package!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : delete_package ( owner = owner , repo = repo , identifier = slug ) click . secho ( "OK" , fg = "green" ) | Delete a package from a repository . |
48,266 | def common_entitlements_options ( f ) : @ click . option ( "--show-tokens" , default = False , is_flag = True , help = "Show entitlement token string contents in output." , ) @ click . pass_context @ functools . wraps ( f ) def wrapper ( ctx , * args , ** kwargs ) : return ctx . invoke ( f , * args , ** kwargs ) return wrapper | Add common options for entitlement commands . |
48,267 | def list_entitlements_options ( f ) : @ common_entitlements_options @ decorators . common_cli_config_options @ decorators . common_cli_list_options @ decorators . common_cli_output_options @ decorators . common_api_auth_options @ decorators . initialise_api @ click . argument ( "owner_repo" , metavar = "OWNER/REPO" , callback = validators . validate_owner_repo ) @ click . pass_context @ functools . wraps ( f ) def wrapper ( ctx , * args , ** kwargs ) : return ctx . invoke ( f , * args , ** kwargs ) return wrapper | Options for list entitlements subcommand . |
48,268 | def list_entitlements ( ctx , opts , owner_repo , page , page_size , show_tokens ) : owner , repo = owner_repo use_stderr = opts . output != "pretty" click . echo ( "Getting list of entitlements for the %(repository)s " "repository ... " % { "repository" : click . style ( repo , bold = True ) } , nl = False , err = use_stderr , ) context_msg = "Failed to get list of entitlements!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : entitlements_ , page_info = api . list_entitlements ( owner = owner , repo = repo , page = page , page_size = page_size , show_tokens = show_tokens , ) click . secho ( "OK" , fg = "green" , err = use_stderr ) print_entitlements ( opts = opts , data = entitlements_ , page_info = page_info ) | List entitlements for a repository . |
48,269 | def print_entitlements ( opts , data , page_info = None , show_list_info = True ) : if utils . maybe_print_as_json ( opts , data , page_info ) : return headers = [ "Name" , "Token" , "Created / Updated" , "Identifier" ] rows = [ ] for entitlement in sorted ( data , key = itemgetter ( "name" ) ) : rows . append ( [ click . style ( "%(name)s (%(type)s)" % { "name" : click . style ( entitlement [ "name" ] , fg = "cyan" ) , "type" : "user" if entitlement [ "user" ] else "token" , } ) , click . style ( entitlement [ "token" ] , fg = "yellow" ) , click . style ( entitlement [ "updated_at" ] , fg = "blue" ) , click . style ( entitlement [ "slug_perm" ] , fg = "green" ) , ] ) if data : click . echo ( ) utils . pretty_print_table ( headers , rows ) if not show_list_info : return click . echo ( ) num_results = len ( data ) list_suffix = "entitlement%s" % ( "s" if num_results != 1 else "" ) utils . pretty_print_list_info ( num_results = num_results , suffix = list_suffix ) | Print entitlements as a table or output in another format . |
48,270 | def create ( ctx , opts , owner_repo , show_tokens , name , token ) : owner , repo = owner_repo use_stderr = opts . output != "pretty" click . secho ( "Creating %(name)s entitlement for the %(repository)s " "repository ... " % { "name" : click . style ( name , bold = True ) , "repository" : click . style ( repo , bold = True ) , } , nl = False , err = use_stderr , ) context_msg = "Failed to create the entitlement!" with handle_api_exceptions ( ctx , opts = opts , context_msg = context_msg ) : with maybe_spinner ( opts ) : entitlement = api . create_entitlement ( owner = owner , repo = repo , name = name , token = token , show_tokens = show_tokens ) click . secho ( "OK" , fg = "green" , err = use_stderr ) print_entitlements ( opts = opts , data = [ entitlement ] , show_list_info = False ) | Create a new entitlement in a repository . |
48,271 | def maybe_rate_limit ( client , headers , atexit = False ) : global LAST_CLIENT , LAST_HEADERS if LAST_CLIENT and LAST_HEADERS : rate_limit ( LAST_CLIENT , LAST_HEADERS , atexit = atexit ) LAST_CLIENT = copy . copy ( client ) LAST_HEADERS = copy . copy ( headers ) | Optionally pause the process based on suggested rate interval . |
48,272 | def rate_limit ( client , headers , atexit = False ) : if not client or not headers : return False if not getattr ( client . config , "rate_limit" , False ) : return False rate_info = RateLimitsInfo . from_headers ( headers ) if not rate_info or not rate_info . interval : return False if rate_info . interval : cb = getattr ( client . config , "rate_limit_callback" , None ) if cb and callable ( cb ) : cb ( rate_info , atexit = atexit ) time . sleep ( rate_info . interval ) return True | Pause the process based on suggested rate interval . |
48,273 | def first ( seq , key = lambda x : bool ( x ) , default = None , apply = lambda x : x ) : return next ( ( apply ( x ) for x in seq if key ( x ) ) , default ( ) if callable ( default ) else default ) | Give the first value that satisfies the key test . |
48,274 | def call_each ( seq ) : try : reduce ( lambda _ , y : y ( ) , seq ) except TypeError as e : if text_type ( e ) != "reduce() of empty sequence with no initial value" : raise | Calls each element of sequence to invoke the side effect . |
48,275 | def validate_api_headers ( param , value ) : if not value : return None headers = { } for kv in value . split ( "," ) : try : k , v = kv . split ( "=" , 1 ) k = k . strip ( ) for bad_header in BAD_API_HEADERS : if bad_header == k : raise click . BadParameter ( "%(key)s is not an allowed header" % { "key" : bad_header } , param = param , ) if k in API_HEADER_TRANSFORMS : transform_func = API_HEADER_TRANSFORMS [ k ] v = transform_func ( param , v ) except ValueError : raise click . BadParameter ( "Values need to be a CSV of key=value pairs" , param = param ) headers [ k ] = v return headers | Validate that API headers is a CSV of k = v pairs . |
48,276 | def validate_slashes ( param , value , minimum = 2 , maximum = None , form = None ) : try : value = value . split ( "/" ) except ValueError : value = None if value : if len ( value ) < minimum : value = None elif maximum and len ( value ) > maximum : value = None if not value : form = form or "/" . join ( "VALUE" for _ in range ( minimum ) ) raise click . BadParameter ( "Must be in the form of %(form)s" % { "form" : form } , param = param ) value = [ v . strip ( ) for v in value ] if not all ( value ) : raise click . BadParameter ( "Individual values cannot be blank" , param = param ) return value | Ensure that parameter has slashes and minimum parts . |
48,277 | def validate_page ( ctx , param , value ) : if value == 0 : raise click . BadParameter ( "Page is not zero-based, please set a value to 1 or higher." , param = param ) return value | Ensure that a valid value for page is chosen . |
48,278 | def validate_page_size ( ctx , param , value ) : if value == 0 : raise click . BadParameter ( "Page size must be non-zero or unset." , param = param ) return value | Ensure that a valid value for page size is chosen . |
48,279 | def getbalance ( self , ** params ) : if not all ( [ self . api_key , self . api_secret ] ) : raise AuthException ( ) endpoint = "/v1/me/getbalance" return self . request ( endpoint , params = params ) | Get Account Asset Balance |
48,280 | def tidy_all_the_variables ( host , inventory_mgr ) : global _vars _vars = dict ( ) _vars [ host ] = inventory_mgr . inventory . get_host_vars ( host ) for group in host . get_groups ( ) : remove_inherited_and_overridden_vars ( _vars [ host ] , group , inventory_mgr ) remove_inherited_and_overridden_group_vars ( group , inventory_mgr ) return _vars | removes all overridden and inherited variables from hosts and groups |
48,281 | def _plugins_inventory ( self , entities ) : import os from ansible . plugins . loader import vars_loader from ansible . utils . vars import combine_vars data = { } for inventory_dir in self . variable_manager . _inventory . _sources : if ',' in inventory_dir : continue elif not os . path . isdir ( inventory_dir ) : inventory_dir = os . path . dirname ( inventory_dir ) for plugin in vars_loader . all ( ) : data = combine_vars ( data , self . _get_plugin_vars ( plugin , inventory_dir , entities ) ) return data | merges all entities by inventory source |
48,282 | def handle ( self , * args , ** options ) : if 'username' in options : self . username = options [ 'username' ] else : self . username = None if 'password' in options : self . password = options [ 'password' ] else : self . password = None self . xml_path = options . get ( 'xml' ) self . url = options . get ( 'url' ) try : blog_index = BlogIndexPage . objects . get ( title__icontains = options [ 'blog_index' ] ) except BlogIndexPage . DoesNotExist : raise CommandError ( "Incorrect blog index title - have you created it?" ) if self . url == "just_testing" : with open ( 'test-data.json' ) as test_json : posts = json . load ( test_json ) elif self . xml_path : try : import lxml from blog . wp_xml_parser import XML_parser except ImportError as e : print ( "You must have lxml installed to run xml imports." " Run `pip install lxml`." ) raise e self . xml_parser = XML_parser ( self . xml_path ) posts = self . xml_parser . get_posts_data ( ) else : posts = self . get_posts_data ( self . url ) self . should_import_comments = options . get ( 'import_comments' ) self . create_blog_pages ( posts , blog_index ) | gets data from WordPress site |
48,283 | def create_images_from_urls_in_content ( self , body ) : soup = BeautifulSoup ( body , "html5lib" ) for img in soup . findAll ( 'img' ) : old_url = img [ 'src' ] if 'width' in img : width = img [ 'width' ] if 'height' in img : height = img [ 'height' ] else : width = 100 height = 100 path , file_ = os . path . split ( img [ 'src' ] ) if not img [ 'src' ] : continue if img [ 'src' ] . startswith ( 'data:' ) : continue try : remote_image = urllib . request . urlretrieve ( self . prepare_url ( img [ 'src' ] ) ) except ( urllib . error . HTTPError , urllib . error . URLError , UnicodeEncodeError , ValueError ) : print ( "Unable to import " + img [ 'src' ] ) continue image = Image ( title = file_ , width = width , height = height ) try : image . file . save ( file_ , File ( open ( remote_image [ 0 ] , 'rb' ) ) ) image . save ( ) new_url = image . file . url body = body . replace ( old_url , new_url ) body = self . convert_html_entities ( body ) except TypeError : print ( "Unable to import image {}" . format ( remote_image [ 0 ] ) ) return body | create Image objects and transfer image files to media root |
48,284 | def lookup_comment_by_wordpress_id ( self , comment_id , comments ) : for comment in comments : if comment . wordpress_id == comment_id : return comment | Returns Django comment object with this wordpress id |
48,285 | def create_blog_pages ( self , posts , blog_index , * args , ** options ) : for post in posts : post_id = post . get ( 'ID' ) title = post . get ( 'title' ) if title : new_title = self . convert_html_entities ( title ) title = new_title slug = post . get ( 'slug' ) description = post . get ( 'description' ) if description : description = self . convert_html_entities ( description ) body = post . get ( 'content' ) if not "<p>" in body : body = linebreaks ( body ) body = self . create_images_from_urls_in_content ( body ) author = post . get ( 'author' ) user = self . create_user ( author ) categories = post . get ( 'terms' ) date = post . get ( 'date' ) [ : 10 ] try : new_entry = BlogPage . objects . get ( slug = slug ) new_entry . title = title new_entry . body = body new_entry . owner = user new_entry . save ( ) except BlogPage . DoesNotExist : new_entry = blog_index . add_child ( instance = BlogPage ( title = title , slug = slug , search_description = "description" , date = date , body = body , owner = user ) ) featured_image = post . get ( 'featured_image' ) if featured_image is not None : title = post [ 'featured_image' ] [ 'title' ] source = post [ 'featured_image' ] [ 'source' ] path , file_ = os . path . split ( source ) source = source . replace ( 'stage.swoon' , 'swoon' ) try : remote_image = urllib . request . urlretrieve ( self . prepare_url ( source ) ) width = 640 height = 290 header_image = Image ( title = title , width = width , height = height ) header_image . file . save ( file_ , File ( open ( remote_image [ 0 ] , 'rb' ) ) ) header_image . save ( ) except UnicodeEncodeError : header_image = None print ( 'unable to set header image {}' . format ( source ) ) else : header_image = None new_entry . header_image = header_image new_entry . save ( ) if categories : self . create_categories_and_tags ( new_entry , categories ) if self . should_import_comments : self . import_comments ( post_id , slug ) | create Blog post entries from wordpress data
48,286 | def unique_slugify ( instance , value , slug_field_name = 'slug' , queryset = None , slug_separator = '-' ) : slug_field = instance . _meta . get_field ( slug_field_name ) slug = getattr ( instance , slug_field . attname ) slug_len = slug_field . max_length slug = slugify ( value ) if slug_len : slug = slug [ : slug_len ] slug = _slug_strip ( slug , slug_separator ) original_slug = slug if queryset is None : queryset = instance . __class__ . _default_manager . all ( ) if instance . pk : queryset = queryset . exclude ( pk = instance . pk ) next = 2 while not slug or queryset . filter ( ** { slug_field_name : slug } ) : slug = original_slug end = '%s%s' % ( slug_separator , next ) if slug_len and len ( slug ) + len ( end ) > slug_len : slug = slug [ : slug_len - len ( end ) ] slug = _slug_strip ( slug , slug_separator ) slug = '%s%s' % ( slug , end ) next += 1 setattr ( instance , slug_field . attname , slug ) | Calculates and stores a unique slug of value for an instance . |
48,287 | def _slug_strip ( value , separator = '-' ) : separator = separator or '' if separator == '-' or not separator : re_sep = '-' else : re_sep = '(?:-|%s)' % re . escape ( separator ) if separator != re_sep : value = re . sub ( '%s+' % re_sep , separator , value ) if separator : if separator != '-' : re_sep = re . escape ( separator ) value = re . sub ( r'^%s+|%s+$' % ( re_sep , re_sep ) , '' , value ) return value | Cleans up a slug by removing slug separator characters that occur at the beginning or end of a slug . |
48,288 | def limit_author_choices ( ) : LIMIT_AUTHOR_CHOICES = getattr ( settings , 'BLOG_LIMIT_AUTHOR_CHOICES_GROUP' , None ) if LIMIT_AUTHOR_CHOICES : if isinstance ( LIMIT_AUTHOR_CHOICES , str ) : limit = Q ( groups__name = LIMIT_AUTHOR_CHOICES ) else : limit = Q ( ) for s in LIMIT_AUTHOR_CHOICES : limit = limit | Q ( groups__name = s ) if getattr ( settings , 'BLOG_LIMIT_AUTHOR_CHOICES_ADMIN' , False ) : limit = limit | Q ( is_staff = True ) else : limit = { 'is_staff' : True } return limit | Limit choices in blog author field based on config settings |
48,289 | def get_blog_context ( context ) : context [ 'authors' ] = get_user_model ( ) . objects . filter ( owned_pages__live = True , owned_pages__content_type__model = 'blogpage' ) . annotate ( Count ( 'owned_pages' ) ) . order_by ( '-owned_pages__count' ) context [ 'all_categories' ] = BlogCategory . objects . all ( ) context [ 'root_categories' ] = BlogCategory . objects . filter ( parent = None , ) . prefetch_related ( 'children' , ) . annotate ( blog_count = Count ( 'blogpage' ) , ) return context | Get context data useful on all blog related pages |
48,290 | def item_dict ( self , item ) : ret_dict = { "terms" : { "category" : [ ] , "post_tag" : [ ] } } for e in item : if "category" in e . tag : slug = e . attrib [ "nicename" ] name = htmlparser . unescape ( e . text ) cat_dict = self . category_dict . get ( slug ) or { "slug" : slug , "name" : name , "taxonomy" : "category" } ret_dict [ 'terms' ] [ 'category' ] . append ( cat_dict ) elif e . tag [ - 3 : ] == 'tag' : slug = e . attrib . get ( "tag_slug" ) name = htmlparser . unescape ( e . text ) tag_dict = self . tags_dict . get ( slug ) or { "slug" : slug , "name" : name , "taxonomy" : "post_tag" } ret_dict [ 'terms' ] [ 'post_tag' ] . append ( tag_dict ) else : ret_dict [ e . tag ] = e . text empty_keys = [ k for k , v in ret_dict [ "terms" ] . items ( ) if not v ] for k in empty_keys : ret_dict [ "terms" ] . pop ( k ) return ret_dict | create a default dict of values including category and tag lookup |
48,291 | def convert_date ( d , custom_date_string = None , fallback = None ) : if d == 'Mon, 30 Nov -0001 00:00:00 +0000' and fallback : d = fallback try : date = time . strftime ( "%Y-%m-%d" , time . strptime ( d , '%a, %d %b %Y %H:%M:%S %z' ) ) except ValueError : date = time . strftime ( "%Y-%m-%d" , time . strptime ( d , '%Y-%m-%d %H:%M:%S' ) ) except ValueError : date = custom_date_string or datetime . datetime . today ( ) . strftime ( "%Y-%m-%d" ) return date | for whatever reason sometimes WP XML has unintelligible datetime strings for pubDate . In this case default to custom_date_string or today Use fallback in case a secondary date string is available . |
48,292 | def translate_item ( self , item_dict ) : if not item_dict . get ( 'title' ) : return None if item_dict . get ( '{wp}post_type' , None ) == 'attachment' : return None ret_dict = { } ret_dict [ 'slug' ] = item_dict . get ( '{wp}post_name' ) or re . sub ( item_dict [ 'title' ] , ' ' , '-' ) ret_dict [ 'ID' ] = item_dict [ 'guid' ] ret_dict [ 'title' ] = item_dict [ 'title' ] ret_dict [ 'description' ] = item_dict [ 'description' ] ret_dict [ 'content' ] = item_dict [ '{content}encoded' ] ret_dict [ 'author' ] = { 'username' : item_dict [ '{dc}creator' ] , 'first_name' : '' , 'last_name' : '' } ret_dict [ 'terms' ] = item_dict . get ( 'terms' ) ret_dict [ 'date' ] = self . convert_date ( item_dict [ 'pubDate' ] , fallback = item_dict . get ( '{wp}post_date' , '' ) ) return ret_dict | cleanup item keys to match API json format |
48,293 | def _format_content ( self , content ) : content = re . sub ( r'\n' , '<br/>\n' , content ) content = re . sub ( r' ' , '  ' , content ) content = re . sub ( r'  ' , '  ' , content ) return content | Returns content with consecutive spaces converted to non - break spaces and linebreak converted into HTML br elements . |
48,294 | def _prepare_text ( self , text ) : text = re . sub ( r'[<>&]' , '' , text ) pattern = r'({})' . format ( self . _tokenizer . pattern ) return re . sub ( pattern , self . _base_token_markup , text ) | Returns text with each constituent token wrapped in HTML markup for later match annotation .
48,295 | def generate ( self , output_dir , work , ngrams , labels , minus_ngrams ) : template = self . _get_template ( ) colours = generate_colours ( len ( ngrams ) ) for siglum in self . _corpus . get_sigla ( work ) : ngram_data = zip ( labels , ngrams ) content = self . _generate_base ( work , siglum ) for ngrams_group in ngrams : content = self . _highlight ( content , ngrams_group , True ) content = self . _highlight ( content , minus_ngrams , False ) self . _ngrams_count = 1 content = self . _format_content ( content ) report_name = '{}-{}.html' . format ( work , siglum ) self . _write ( work , siglum , content , output_dir , report_name , template , ngram_data = ngram_data , minus_ngrams = minus_ngrams , colours = colours ) | Generates HTML reports for each witness to work showing its text with the n - grams in ngrams highlighted . |
48,296 | def generate ( self , output_dir , work , matches_filename ) : template = self . _get_template ( ) matches = pd . read_csv ( matches_filename ) for siglum in self . _corpus . get_sigla ( work ) : subm = matches [ ( matches [ constants . WORK_FIELDNAME ] != work ) | ( matches [ constants . SIGLUM_FIELDNAME ] != siglum ) ] content = self . _generate_base ( work , siglum ) content = self . _highlight ( content , subm ) content = self . _format_content ( content ) text_list = self . _generate_text_list ( subm ) report_name = '{}-{}.html' . format ( work , siglum ) self . _write ( work , siglum , content , output_dir , report_name , template , True , text_list = text_list ) | Generates HTML reports showing the text of each witness to work with its matches in matches highlighted . |
48,297 | def print_flow_info ( flow ) : print ( 'Flow %s (%s)-- Packets:%d Bytes:%d Payload: %s...' % ( flow [ 'flow_id' ] , flow [ 'direction' ] , len ( flow [ 'packet_list' ] ) , len ( flow [ 'payload' ] ) , repr ( flow [ 'payload' ] ) [ : 30 ] ) ) | Print a summary of the flow information |
48,298 | def packets_to_flows ( self ) : for packet in self . input_stream : flow_id = flow_utils . flow_tuple ( packet ) self . _flows [ flow_id ] . add_packet ( packet ) for flow in list ( self . _flows . values ( ) ) : if flow . ready ( ) : flow_info = flow . get_flow ( ) yield flow_info del self . _flows [ flow_info [ 'flow_id' ] ] print ( '---- NO MORE INPUT ----' ) for flow in sorted ( self . _flows . values ( ) , key = lambda x : x . meta [ 'start' ] ) : yield flow . get_flow ( ) | Combine packets into flows |
48,299 | def _copy_static_assets ( self , output_dir ) : base_directory = 'assets/{}' . format ( self . _report_name ) for asset in resource_listdir ( self . _package_name , base_directory ) : filename = resource_filename ( self . _package_name , '{}/{}' . format ( base_directory , asset ) ) shutil . copy2 ( filename , output_dir ) | Copy assets for the report to output_dir . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.