Columns:
idx: int64 (0 to 63k)
question: string (lengths 53 to 5.28k)
target: string (lengths 5 to 805)
1,600
def SAS ( self ) : if self . x is None : self . x = np . arange ( self . dx / 2. , self . dx * self . qs . shape [ 0 ] , self . dx ) if self . filename : self . Te = self . configGet ( "float" , "input" , "ElasticThickness" ) self . qs = self . q0 . copy ( ) del self . q0 if self . dimension == 2 : if self . y is None : self . y = np . arange ( self . dy / 2. , self . dy * self . qs . shape [ 0 ] , self . dy ) try : self . qs except : self . qs = self . q0 . copy ( ) del self . q0 from scipy . special import kei
Set-up for the rectangularly-gridded superposition of analytical solutions method for solving flexure
1,601
def SAS_NG ( self ) : if self . filename : self . Te = self . configGet ( "float" , "input" , "ElasticThickness" ) self . latlon = self . configGet ( "string" , "numerical2D" , "latlon" , optional = True ) self . PlanetaryRadius = self . configGet ( "float" , "numerical2D" , "PlanetaryRadius" , optional = True ) if self . dimension == 2 : from scipy . special import kei if self . dimension == 1 : try : self . x self . q except : if self . q0 . shape [ 1 ] == 2 : self . x = self . q0 [ : , 0 ] self . q = self . q0 [ : , 1 ] else : sys . exit ( "For 1D (ungridded) SAS_NG configuration file, need [x,w] array. Your dimensions are: " + str ( self . q0 . shape ) ) else : try : self . x self . u self . q except : if self . q0 . shape [ 1 ] == 3 : self . x = self . q0 [ : , 0 ] self . y = self . q0 [ : , 1 ] self . q = self . q0 [ : , 2 ] else : sys . exit ( "For 2D (ungridded) SAS_NG configuration file, need [x,y,w] array. Your dimensions are: " + str ( self . q0 . shape ) ) self . x = self . x if self . dimension == 2 : self . y = self . y del self . q0 try : self . xw except : try : self . xw = self . configGet ( 'string' , "input" , "xw" , optional = True ) if self . xw == '' : self . xw = None except : self . xw = None if type ( self . xw ) == str : self . xw = self . loadFile ( self . xw ) if self . dimension == 2 : try : self . yw except : try : self . yw = self . configGet ( 'string' , "input" , "yw" , optional = True ) if self . yw == '' : self . yw = None except : self . yw = None if ( self . xw is not None and self . yw is None ) or ( self . xw is None and self . yw is not None ) : sys . exit ( "SAS_NG output at specified points requires both xw and yw to be defined" ) if type ( self . yw ) == str : self . yw = self . loadFile ( self . yw ) elif self . yw is None : self . yw = self . y . copy ( ) if self . xw is None : self . xw = self . x . copy ( )
Set-up for the ungridded superposition of analytical solutions method for solving flexure
1,602
def _c3_mro ( cls , abcs = None ) : for i , base in enumerate ( reversed ( cls . __bases__ ) ) : if hasattr ( base , '__abstractmethods__' ) : boundary = len ( cls . __bases__ ) - i break else : boundary = 0 abcs = list ( abcs ) if abcs else [ ] explicit_bases = list ( cls . __bases__ [ : boundary ] ) abstract_bases = [ ] other_bases = list ( cls . __bases__ [ boundary : ] ) for base in abcs : if issubclass ( cls , base ) and not any ( issubclass ( b , base ) for b in cls . __bases__ ) : abstract_bases . append ( base ) for base in abstract_bases : abcs . remove ( base ) explicit_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in explicit_bases ] abstract_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in abstract_bases ] other_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in other_bases ] return _c3_merge ( [ [ cls ] ] + explicit_c3_mros + abstract_c3_mros + other_c3_mros + [ explicit_bases ] + [ abstract_bases ] + [ other_bases ] )
Computes the method resolution order using extended C3 linearization.
1,603
def singledispatch ( function ) : registry = { } dispatch_cache = WeakKeyDictionary ( ) def ns ( ) : pass ns . cache_token = None def dispatch ( cls ) : if ns . cache_token is not None : current_token = get_cache_token ( ) if ns . cache_token != current_token : dispatch_cache . clear ( ) ns . cache_token = current_token try : impl = dispatch_cache [ cls ] except KeyError : try : impl = registry [ cls ] except KeyError : impl = _find_impl ( cls , registry ) dispatch_cache [ cls ] = impl return impl def register ( cls , func = None ) : if func is None : return lambda f : register ( cls , f ) registry [ cls ] = func if ns . cache_token is None and hasattr ( cls , '__abstractmethods__' ) : ns . cache_token = get_cache_token ( ) dispatch_cache . clear ( ) return func def wrapper ( * args , ** kw ) : return dispatch ( args [ 0 ] . __class__ ) ( * args , ** kw ) registry [ object ] = function wrapper . register = register wrapper . dispatch = dispatch wrapper . registry = MappingProxyType ( registry ) wrapper . _clear_cache = dispatch_cache . clear update_wrapper ( wrapper , function ) return wrapper
Single-dispatch generic function decorator.
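The decorator above mirrors the functools singledispatch backport, so the standard-library version shows the same dispatch behavior; a minimal usage sketch:

from functools import singledispatch

@singledispatch
def describe(obj):
    return 'object'  # fallback, registered for object

@describe.register(int)
def _(obj):
    return 'int'

@describe.register(list)
def _(obj):
    return 'list'

print(describe(3))       # int
print(describe([1, 2]))  # list
print(describe('hi'))    # object (falls back to the object registration)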
1,604
def _parse_args ( self , args ) : parser = ArgumentParser ( description = "Runs pylint recursively on a directory" ) parser . add_argument ( "-v" , "--verbose" , dest = "verbose" , action = "store_true" , default = False , help = "Verbose mode (report which files were found for testing)." , ) parser . add_argument ( "--rcfile" , dest = "rcfile" , action = "store" , default = ".pylintrc" , help = "A relative or absolute path to your pylint rcfile. Defaults to\ `.pylintrc` at the current working directory" , ) parser . add_argument ( "-V" , "--version" , action = "version" , version = "%(prog)s ({0}) for Python {1}" . format ( __version__ , PYTHON_VERSION ) , ) options , _ = parser . parse_known_args ( args ) self . verbose = options . verbose if options . rcfile : if not os . path . isfile ( options . rcfile ) : options . rcfile = os . getcwd ( ) + "/" + options . rcfile self . rcfile = options . rcfile return options
Parses any supplied command-line args and provides help text.
1,605
def _parse_ignores ( self ) : error_message = ( colorama . Fore . RED + "{} does not appear to be a valid pylintrc file" . format ( self . rcfile ) + colorama . Fore . RESET ) if not os . path . isfile ( self . rcfile ) : if not self . _is_using_default_rcfile ( ) : print ( error_message ) sys . exit ( 1 ) else : return config = configparser . ConfigParser ( ) try : config . read ( self . rcfile ) except configparser . MissingSectionHeaderError : print ( error_message ) sys . exit ( 1 ) if config . has_section ( "MASTER" ) and config . get ( "MASTER" , "ignore" ) : self . ignore_folders += config . get ( "MASTER" , "ignore" ) . split ( "," )
Parse the ignores setting from the pylintrc file if available.
1,606
def run ( self , output = None , error = None ) : pylint_output = output if output is not None else sys . stdout pylint_error = error if error is not None else sys . stderr savedout , savederr = sys . __stdout__ , sys . __stderr__ sys . stdout = pylint_output sys . stderr = pylint_error pylint_files = self . get_files_from_dir ( os . curdir ) self . _print_line ( "Using pylint " + colorama . Fore . RED + pylint . __version__ + colorama . Fore . RESET + " for python " + colorama . Fore . RED + PYTHON_VERSION + colorama . Fore . RESET ) self . _print_line ( "pylint running on the following files:" ) for pylint_file in pylint_files : split_file = str ( pylint_file ) . split ( "/" ) split_file [ - 1 ] = colorama . Fore . CYAN + split_file [ - 1 ] + colorama . Fore . RESET pylint_file = "/" . join ( split_file ) self . _print_line ( "- " + pylint_file ) self . _print_line ( "----" ) if not self . _is_using_default_rcfile ( ) : self . args += [ "--rcfile={}" . format ( self . rcfile ) ] exit_kwarg = { "do_exit" : False } run = pylint . lint . Run ( self . args + pylint_files , ** exit_kwarg ) sys . stdout = savedout sys . stderr = savederr sys . exit ( run . linter . msg_status )
Runs pylint on all Python files in the current directory.
1,607
def strict ( * types ) : def decorate ( func ) : @ wraps ( func ) def wrapper ( self , p ) : func ( self , p ) if not isinstance ( p [ 0 ] , types ) : raise YAMLStrictTypeError ( p [ 0 ] , types , func ) wrapper . co_firstlineno = func . __code__ . co_firstlineno return wrapper return decorate
Decorator to type-check production rule output.
1,608
def find_column ( t ) : pos = t . lexer . lexpos data = t . lexer . lexdata last_cr = data . rfind ( '\n' , 0 , pos ) if last_cr < 0 : last_cr = - 1 column = pos - last_cr return column
Get cursor position based on previous newline
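A self-contained sketch of the same column arithmetic, without the ply-style lexer object (data and pos stand in for t.lexer.lexdata and t.lexer.lexpos); columns are 1-based, counted from the last newline before pos:

def find_column_in(data, pos):
    last_cr = data.rfind('\n', 0, pos)
    if last_cr < 0:
        last_cr = -1
    return pos - last_cr

text = 'key: value\n  nested: 1'
print(find_column_in(text, 0))   # 1 (first column of line 1)
print(find_column_in(text, 13))  # 3 ('n' of 'nested', third column of line 2)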
1,609
def no_sleep ( ) : mode = power . ES . continuous | power . ES . system_required handle_nonzero_success ( power . SetThreadExecutionState ( mode ) ) try : yield finally : handle_nonzero_success ( power . SetThreadExecutionState ( power . ES . continuous ) )
Context manager that prevents the computer from going to sleep.
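Hypothetical usage; this assumes no_sleep is decorated with contextlib.contextmanager in its defining module (the yield inside try/finally implies it), and it is Windows-only since SetThreadExecutionState is a Win32 call:

with no_sleep():
    copy_large_files()  # hypothetical long-running task; the machine stays awake
# On exit the continuous flag is restored, so the system may sleep again.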
1,610
def selfoss ( reset_password = False ) : hostname = re . sub ( r'^[^@]+@' , '' , env . host ) sitename = query_input ( question = '\nEnter site-name of Your trac web service' , default = flo ( 'selfoss.{hostname}' ) ) username = env . user site_dir = flo ( '/home/{username}/sites/{sitename}' ) checkout_latest_release_of_selfoss ( ) create_directory_structure ( site_dir ) restored = install_selfoss ( sitename , site_dir , username ) nginx_site_config ( username , sitename , hostname ) enable_php5_socket_file ( ) if not restored or reset_password : setup_selfoss_user ( username , sitename , site_dir ) print_msg ( '\n## reload nginx and restart php\n' ) run ( 'sudo service nginx reload' ) run ( 'sudo service php5-fpm restart' )
Install, update, and set up selfoss.
1,611
def get_cache_path ( filename ) : cwd = os . path . dirname ( os . path . realpath ( __file__ ) ) return os . path . join ( cwd , filename )
Get the path of a cache file relative to this module's directory.
1,612
def get_process_token ( ) : token = wintypes . HANDLE ( ) res = process . OpenProcessToken ( process . GetCurrentProcess ( ) , process . TOKEN_ALL_ACCESS , token ) if not res > 0 : raise RuntimeError ( "Couldn't get process token" ) return token
Get the current process token
1,613
def get_symlink_luid ( ) : symlink_luid = privilege . LUID ( ) res = privilege . LookupPrivilegeValue ( None , "SeCreateSymbolicLinkPrivilege" , symlink_luid ) if not res > 0 : raise RuntimeError ( "Couldn't lookup privilege value" ) return symlink_luid
Get the LUID for the SeCreateSymbolicLinkPrivilege
1,614
def get_privilege_information ( ) : return_length = wintypes . DWORD ( ) params = [ get_process_token ( ) , privilege . TOKEN_INFORMATION_CLASS . TokenPrivileges , None , 0 , return_length , ] res = privilege . GetTokenInformation ( * params ) buffer = ctypes . create_string_buffer ( return_length . value ) params [ 2 ] = buffer params [ 3 ] = return_length . value res = privilege . GetTokenInformation ( * params ) assert res > 0 , "Error in second GetTokenInformation (%d)" % res privileges = ctypes . cast ( buffer , ctypes . POINTER ( privilege . TOKEN_PRIVILEGES ) ) . contents return privileges
Get all privileges associated with the current process.
1,615
def enable_symlink_privilege ( ) : size = ctypes . sizeof ( privilege . TOKEN_PRIVILEGES ) size += ctypes . sizeof ( privilege . LUID_AND_ATTRIBUTES ) buffer = ctypes . create_string_buffer ( size ) tp = ctypes . cast ( buffer , ctypes . POINTER ( privilege . TOKEN_PRIVILEGES ) ) . contents tp . count = 1 tp . get_array ( ) [ 0 ] . enable ( ) tp . get_array ( ) [ 0 ] . LUID = get_symlink_luid ( ) token = get_process_token ( ) res = privilege . AdjustTokenPrivileges ( token , False , tp , 0 , None , None ) if res == 0 : raise RuntimeError ( "Error in AdjustTokenPrivileges" ) ERROR_NOT_ALL_ASSIGNED = 1300 return ctypes . windll . kernel32 . GetLastError ( ) != ERROR_NOT_ALL_ASSIGNED
Try to assign the symlink privilege to the current process token. Return True if the assignment is successful.
1,616
def grant_symlink_privilege ( who , machine = '' ) : flags = security . POLICY_CREATE_ACCOUNT | security . POLICY_LOOKUP_NAMES policy = OpenPolicy ( machine , flags ) return policy
Grant the create symlink privilege to who.
1,617
def add_tasks_r ( addon_module , package_module , package_name ) : module_dict = package_module . __dict__ for attr_name , attr_val in module_dict . items ( ) : if isinstance ( attr_val , fabric . tasks . WrappedCallableTask ) : addon_module . __dict__ [ attr_name ] = attr_val elif attr_name != package_name and isinstance ( attr_val , types . ModuleType ) and attr_val . __name__ . startswith ( 'fabsetup_' ) and attr_name . split ( '.' ) [ - 1 ] != package_name : submodule_name = flo ( '{addon_module.__name__}.{attr_name}' ) submodule = get_or_create_module_r ( submodule_name ) package_module = attr_val add_tasks_r ( submodule , package_module , package_name ) addon_module . __dict__ [ attr_name ] = submodule
Recursively iterate through package_module and add every fabric task to the addon_module, keeping the task hierarchy.
1,618
def load_addon ( username , package_name , _globals ) : addon_module = get_or_create_module_r ( username ) package_module = __import__ ( package_name ) add_tasks_r ( addon_module , package_module , package_name ) _globals . update ( { username : addon_module } ) del package_module del addon_module
Load a fabsetup addon given by package_name and hook it into the base task namespace username.
1,619
def load_pip_addons ( _globals ) : for package_name in known_pip_addons : _ , username = package_username ( package_name ) try : load_addon ( username , package_name . replace ( '-' , '_' ) , _globals ) except ImportError : pass
Load all known fabsetup addons which are installed as PyPI pip packages.
1,620
def find_lib ( lib ) : if isinstance ( lib , str ) : lib = getattr ( ctypes . windll , lib ) size = 1024 result = ctypes . create_unicode_buffer ( size ) library . GetModuleFileName ( lib . _handle , result , size ) return result . value
Find the DLL for a given library.
1,621
def getScienceMetadataRDF ( self , pid ) : url = "{url_base}/scimeta/{pid}/" . format ( url_base = self . url_base , pid = pid ) r = self . _request ( 'GET' , url ) if r . status_code != 200 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'GET' , url ) ) elif r . status_code == 404 : raise HydroShareNotFound ( ( pid , ) ) else : raise HydroShareHTTPException ( ( url , 'GET' , r . status_code ) ) return str ( r . content )
Get science metadata for a resource in XML+RDF format.
1,622
def getResource ( self , pid , destination = None , unzip = False , wait_for_bag_creation = True ) : stream = self . _getBagStream ( pid , wait_for_bag_creation ) if destination : self . _storeBagOnFilesystem ( stream , pid , destination , unzip ) return None else : return stream
Get a resource in BagIt format
1,623
def getResourceTypes ( self ) : url = "{url_base}/resource/types" . format ( url_base = self . url_base ) r = self . _request ( 'GET' , url ) if r . status_code != 200 : raise HydroShareHTTPException ( ( url , 'GET' , r . status_code ) ) resource_types = r . json ( ) return set ( [ t [ 'resource_type' ] for t in resource_types ] )
Get the list of resource types supported by the HydroShare server
1,624
def createResource ( self , resource_type , title , resource_file = None , resource_filename = None , abstract = None , keywords = None , edit_users = None , view_users = None , edit_groups = None , view_groups = None , metadata = None , extra_metadata = None , progress_callback = None ) : url = "{url_base}/resource/" . format ( url_base = self . url_base ) close_fd = False if resource_type not in self . resource_types : raise HydroShareArgumentException ( "Resource type {0} is not among known resources: {1}" . format ( resource_type , ", " . join ( [ r for r in self . resource_types ] ) ) ) params = { 'resource_type' : resource_type , 'title' : title } if abstract : params [ 'abstract' ] = abstract if keywords : for ( i , kw ) in enumerate ( keywords ) : key = "keywords[{index}]" . format ( index = i ) params [ key ] = kw if edit_users : params [ 'edit_users' ] = edit_users if view_users : params [ 'view_users' ] = view_users if edit_groups : params [ 'edit_groups' ] = edit_groups if view_groups : params [ 'view_groups' ] = view_groups if metadata : params [ 'metadata' ] = metadata if extra_metadata : params [ 'extra_metadata' ] = extra_metadata if resource_file : close_fd = self . _prepareFileForUpload ( params , resource_file , resource_filename ) encoder = MultipartEncoder ( params ) if progress_callback is None : progress_callback = default_progress_callback monitor = MultipartEncoderMonitor ( encoder , progress_callback ) r = self . _request ( 'POST' , url , data = monitor , headers = { 'Content-Type' : monitor . content_type } ) if close_fd : fd = params [ 'file' ] [ 1 ] fd . close ( ) if r . status_code != 201 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'POST' , url ) ) else : raise HydroShareHTTPException ( ( url , 'POST' , r . status_code , params ) ) response = r . json ( ) new_resource_id = response [ 'resource_id' ] return new_resource_id
Create a new resource.
1,625
def setAccessRules ( self , pid , public = False ) : url = "{url_base}/resource/accessRules/{pid}/" . format ( url_base = self . url_base , pid = pid ) params = { 'public' : public } r = self . _request ( 'PUT' , url , data = params ) if r . status_code != 200 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'PUT' , url ) ) elif r . status_code == 404 : raise HydroShareNotFound ( ( pid , ) ) else : raise HydroShareHTTPException ( ( url , 'PUT' , r . status_code , params ) ) resource = r . json ( ) assert ( resource [ 'resource_id' ] == pid ) return resource [ 'resource_id' ]
Set access rules for a resource. Currently only allows setting the public or private setting.
1,626
def addResourceFile ( self , pid , resource_file , resource_filename = None , progress_callback = None ) : url = "{url_base}/resource/{pid}/files/" . format ( url_base = self . url_base , pid = pid ) params = { } close_fd = self . _prepareFileForUpload ( params , resource_file , resource_filename ) encoder = MultipartEncoder ( params ) if progress_callback is None : progress_callback = default_progress_callback monitor = MultipartEncoderMonitor ( encoder , progress_callback ) r = self . _request ( 'POST' , url , data = monitor , headers = { 'Content-Type' : monitor . content_type } ) if close_fd : fd = params [ 'file' ] [ 1 ] fd . close ( ) if r . status_code != 201 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'POST' , url ) ) elif r . status_code == 404 : raise HydroShareNotFound ( ( pid , ) ) else : raise HydroShareHTTPException ( ( url , 'POST' , r . status_code ) ) response = r . json ( ) return response
Add a new file to an existing resource
1,627
def getResourceFile ( self , pid , filename , destination = None ) : url = "{url_base}/resource/{pid}/files/{filename}" . format ( url_base = self . url_base , pid = pid , filename = filename ) if destination : if not os . path . isdir ( destination ) : raise HydroShareArgumentException ( "{0} is not a directory." . format ( destination ) ) if not os . access ( destination , os . W_OK ) : raise HydroShareArgumentException ( "You do not have write permissions to directory '{0}'." . format ( destination ) ) r = self . _request ( 'GET' , url , stream = True ) if r . status_code != 200 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'GET' , url ) ) elif r . status_code == 404 : raise HydroShareNotFound ( ( pid , filename ) ) else : raise HydroShareHTTPException ( ( url , 'GET' , r . status_code ) ) if destination is None : return r . iter_content ( STREAM_CHUNK_SIZE ) else : filepath = os . path . join ( destination , filename ) with open ( filepath , 'wb' ) as fd : for chunk in r . iter_content ( STREAM_CHUNK_SIZE ) : fd . write ( chunk ) return filepath
Get a file within a resource.
1,628
def deleteResourceFile ( self , pid , filename ) : url = "{url_base}/resource/{pid}/files/{filename}" . format ( url_base = self . url_base , pid = pid , filename = filename ) r = self . _request ( 'DELETE' , url ) if r . status_code != 200 : if r . status_code == 403 : raise HydroShareNotAuthorized ( ( 'DELETE' , url ) ) elif r . status_code == 404 : raise HydroShareNotFound ( ( pid , filename ) ) else : raise HydroShareHTTPException ( ( url , 'DELETE' , r . status_code ) ) response = r . json ( ) assert ( response [ 'resource_id' ] == pid ) return response [ 'resource_id' ]
Delete a resource file
1,629
def getResourceFileList ( self , pid ) : url = "{url_base}/resource/{pid}/files/" . format ( url_base = self . url_base , pid = pid ) return resultsListGenerator ( self , url )
Get a listing of files within a resource.
1,630
def get_ssm_parameter ( parameter_name ) : try : response = boto3 . client ( 'ssm' ) . get_parameters ( Names = [ parameter_name ] , WithDecryption = True ) return response . get ( 'Parameters' , None ) [ 0 ] . get ( 'Value' , '' ) except Exception : pass return ''
Get the decrypted value of an SSM parameter
1,631
def powerline ( ) : bindings_dir , scripts_dir = install_upgrade_powerline ( ) set_up_powerline_fonts ( ) set_up_powerline_daemon ( scripts_dir ) powerline_for_vim ( bindings_dir ) powerline_for_bash_or_powerline_shell ( bindings_dir ) powerline_for_tmux ( bindings_dir ) powerline_for_i3 ( bindings_dir ) print ( '\nYou may have to reboot for make changes take effect' )
Install and set up powerline for vim, bash, tmux, and i3.
1,632
def ifancestor ( parser , token ) : contents = parser . parse ( ( 'endifancestor' , ) ) parser . delete_first_token ( ) bits = token . split_contents ( ) if len ( bits ) == 2 : arg = parser . compile_filter ( bits [ 1 ] ) else : arg = None url_node = url ( parser , token ) return AncestorNode ( url_node , arg = arg , contents = contents )
Returns the contents of the tag if the provided path constitutes the base of the current page's path.
1,633
def exit_statistics ( hostname , start_time , count_sent , count_received , min_time , avg_time , max_time , deviation ) : end_time = datetime . datetime . now ( ) duration = end_time - start_time duration_sec = float ( duration . seconds * 1000 ) duration_ms = float ( duration . microseconds / 1000 ) duration = duration_sec + duration_ms print ( f'\b\b--- {hostname} ping statistics ---' ) try : package_loss = 100 - ( ( float ( count_received ) / float ( count_sent ) ) * 100 ) print ( f'{count_sent} packages transmitted, {count_received} received, {package_loss}% package loss, time {duration}ms' ) except ZeroDivisionError : print ( f'{count_sent} packets transmitted, {count_received} received, 100% packet loss, time {duration}ms' ) print ( 'rtt min/avg/max/dev = %.2f/%.2f/%.2f/%.2f ms' % ( min_time . seconds * 1000 + float ( min_time . microseconds ) / 1000 , float ( avg_time ) / 1000 , max_time . seconds * 1000 + float ( max_time . microseconds ) / 1000 , float ( deviation ) ) )
Print ping exit statistics
1,634
def _filtered_walk ( path , file_filter ) : for root , dirs , files in os . walk ( path ) : log . debug ( 'looking in %s' , root ) log . debug ( 'files is %s' , files ) file_filter . set_root ( root ) files = filter ( file_filter , files ) log . debug ( 'filtered files is %s' , files ) yield ( root , dirs , files )
Static method that calls os.walk but filters out anything that doesn't match the filter.
1,635
def cache_control_expires ( num_hours ) : num_seconds = int ( num_hours * 60 * 60 ) def decorator ( func ) : @ wraps ( func ) def inner ( request , * args , ** kwargs ) : response = func ( request , * args , ** kwargs ) patch_response_headers ( response , num_seconds ) return response return inner return decorator
Set the appropriate Cache-Control and Expires headers for the given number of hours.
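Hypothetical usage on a Django view; the arithmetic is deterministic: 6 hours becomes int(6 * 60 * 60) = 21600 seconds passed to patch_response_headers:

@cache_control_expires(6)  # Cache-Control/Expires set for 21600 seconds
def article_detail(request, pk):  # hypothetical view
    ...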
1,636
def upsert ( self ) : required_parameters = [ ] self . _stackParameters = [ ] try : self . _initialize_upsert ( ) except Exception : return False try : available_parameters = self . _parameters . keys ( ) for parameter_name in self . _template . get ( 'Parameters' , { } ) : required_parameters . append ( str ( parameter_name ) ) logging . info ( ' required parameters: ' + str ( required_parameters ) ) logging . info ( 'available parameters: ' + str ( available_parameters ) ) parameters = [ ] for required_parameter in required_parameters : parameter = { } parameter [ 'ParameterKey' ] = str ( required_parameter ) required_parameter = str ( required_parameter ) if required_parameter in self . _parameters : parameter [ 'ParameterValue' ] = self . _parameters [ required_parameter ] else : parameter [ 'ParameterValue' ] = self . _parameters [ required_parameter . lower ( ) ] parameters . append ( parameter ) if not self . _analyze_stuff ( ) : sys . exit ( 1 ) if self . _config . get ( 'dryrun' , False ) : logging . info ( 'Generating change set' ) set_id = self . _generate_change_set ( parameters ) if set_id : self . _describe_change_set ( set_id ) logging . info ( 'This was a dryrun' ) sys . exit ( 0 ) self . _tags . append ( { "Key" : "CODE_VERSION_SD" , "Value" : self . _config . get ( 'codeVersion' ) } ) self . _tags . append ( { "Key" : "ANSWER" , "Value" : str ( 42 ) } ) if self . _updateStack : stack = self . _cloudFormation . update_stack ( StackName = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) , TemplateURL = self . _templateUrl , Parameters = parameters , Capabilities = [ 'CAPABILITY_IAM' , 'CAPABILITY_NAMED_IAM' ] , Tags = self . _tags , ClientRequestToken = str ( uuid . uuid4 ( ) ) ) logging . info ( 'existing stack ID: {}' . format ( stack . get ( 'StackId' , 'unknown' ) ) ) else : stack = self . _cloudFormation . create_stack ( StackName = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) , TemplateURL = self . _templateUrl , Parameters = parameters , Capabilities = [ 'CAPABILITY_IAM' , 'CAPABILITY_NAMED_IAM' ] , Tags = self . _tags , ClientRequestToken = str ( uuid . uuid4 ( ) ) ) logging . info ( 'new stack ID: {}' . format ( stack . get ( 'StackId' , 'unknown' ) ) ) except Exception as x : if self . _verbose : logging . error ( x , exc_info = True ) else : logging . error ( x , exc_info = False ) return False return True
The main event of the utility. Create or update a CloudFormation stack, injecting properties where needed.
1,637
def list ( self ) : self . _initialize_list ( ) interested = True response = self . _cloudFormation . list_stacks ( ) print ( 'Stack(s):' ) while interested : if 'StackSummaries' in response : for stack in response [ 'StackSummaries' ] : stack_status = stack [ 'StackStatus' ] if stack_status != 'DELETE_COMPLETE' : print ( ' [{}] - {}' . format ( stack [ 'StackStatus' ] , stack [ 'StackName' ] ) ) next_token = response . get ( 'NextToken' , None ) if next_token : response = self . _cloudFormation . list_stacks ( NextToken = next_token ) else : interested = False return True
List the existing stacks in the indicated region
1,638
def smash ( self ) : self . _initialize_smash ( ) try : stack_name = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) response = self . _cloudFormation . describe_stacks ( StackName = stack_name ) logging . debug ( 'smash pre-flight returned: {}' . format ( json . dumps ( response , indent = 4 , default = json_util . default ) ) ) except ClientError as wtf : logging . warning ( 'your stack is in another castle [0].' ) return False except Exception as wtf : logging . error ( 'failed to find intial status of smash candidate: {}' . format ( wtf ) ) return False response = self . _cloudFormation . delete_stack ( StackName = stack_name ) logging . info ( 'delete started for stack: {}' . format ( stack_name ) ) logging . debug ( 'delete_stack returned: {}' . format ( json . dumps ( response , indent = 4 ) ) ) return self . poll_stack ( )
Smash the given stack
1,639
def _init_boto3_clients ( self ) : try : profile = self . _config . get ( 'environment' , { } ) . get ( 'profile' ) region = self . _config . get ( 'environment' , { } ) . get ( 'region' ) if profile : self . _b3Sess = boto3 . session . Session ( profile_name = profile ) else : self . _b3Sess = boto3 . session . Session ( ) self . _s3 = self . _b3Sess . client ( 's3' ) self . _cloudFormation = self . _b3Sess . client ( 'cloudformation' , region_name = region ) self . _ssm = self . _b3Sess . client ( 'ssm' , region_name = region ) return True except Exception as wtf : logging . error ( 'Exception caught in intialize_session(): {}' . format ( wtf ) ) traceback . print_exc ( file = sys . stdout ) return False
The utility requires boto3 clients for CloudFormation and S3. Here is where we make them.
1,640
def _get_ssm_parameter ( self , p ) : try : response = self . _ssm . get_parameter ( Name = p , WithDecryption = True ) return response . get ( 'Parameter' , { } ) . get ( 'Value' , None ) except Exception as ruh_roh : logging . error ( ruh_roh , exc_info = False ) return None
Get parameters from Simple Systems Manager
1,641
def _fill_parameters ( self ) : self . _parameters = self . _config . get ( 'parameters' , { } ) self . _fill_defaults ( ) for k in self . _parameters . keys ( ) : try : if self . _parameters [ k ] . startswith ( self . SSM ) and self . _parameters [ k ] . endswith ( ']' ) : parts = self . _parameters [ k ] . split ( ':' ) tmp = parts [ 1 ] . replace ( ']' , '' ) val = self . _get_ssm_parameter ( tmp ) if val : self . _parameters [ k ] = val else : logging . error ( 'SSM parameter {} not found' . format ( tmp ) ) return False elif self . _parameters [ k ] == self . ASK : val = None a1 = '__x ' a2 = '__y ' prompt1 = "Enter value for '{}': " . format ( k ) prompt2 = "Confirm value for '{}': " . format ( k ) while a1 != a2 : a1 = getpass . getpass ( prompt = prompt1 ) a2 = getpass . getpass ( prompt = prompt2 ) if a1 == a2 : val = a1 else : print ( 'values do not match, try again' ) self . _parameters [ k ] = val except : pass return True
Fill in the _parameters dict from the properties file.
1,642
def _read_tags ( self ) : tags = self . _config . get ( 'tags' , { } ) logging . info ( 'Tags:' ) for tag_name in tags . keys ( ) : tag = { } tag [ 'Key' ] = tag_name tag [ 'Value' ] = tags [ tag_name ] self . _tags . append ( tag ) logging . info ( '{} = {}' . format ( tag_name , tags [ tag_name ] ) ) logging . debug ( json . dumps ( self . _tags , indent = 2 , sort_keys = True ) ) return True
Fill in the _tags dict from the tags file.
1,643
def _set_update ( self ) : try : self . _updateStack = False stack_name = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) response = self . _cloudFormation . describe_stacks ( StackName = stack_name ) stack = response [ 'Stacks' ] [ 0 ] if stack [ 'StackStatus' ] == 'ROLLBACK_COMPLETE' : logging . info ( 'stack is in ROLLBACK_COMPLETE status and should be deleted' ) del_stack_resp = self . _cloudFormation . delete_stack ( StackName = stack_name ) logging . info ( 'delete started for stack: {}' . format ( stack_name ) ) logging . debug ( 'delete_stack returned: {}' . format ( json . dumps ( del_stack_resp , indent = 4 ) ) ) stack_delete = self . poll_stack ( ) if not stack_delete : return False if stack [ 'StackStatus' ] in [ 'CREATE_COMPLETE' , 'UPDATE_COMPLETE' , 'UPDATE_ROLLBACK_COMPLETE' ] : self . _updateStack = True except : self . _updateStack = False logging . info ( 'update_stack: ' + str ( self . _updateStack ) ) return True
Determine if we are creating a new stack or updating an existing one. The update member is set, as you would expect, at the end of this query.
1,644
def _craft_s3_keys ( self ) : now = time . gmtime ( ) stub = "templates/{stack_name}/{version}" . format ( stack_name = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) , version = self . _config . get ( 'codeVersion' ) ) stub = stub + "/" + str ( now . tm_year ) stub = stub + "/" + str ( '%02d' % now . tm_mon ) stub = stub + "/" + str ( '%02d' % now . tm_mday ) stub = stub + "/" + str ( '%02d' % now . tm_hour ) stub = stub + ":" + str ( '%02d' % now . tm_min ) stub = stub + ":" + str ( '%02d' % now . tm_sec ) if self . _yaml : template_key = stub + "/stack.yaml" else : template_key = stub + "/stack.json" property_key = stub + "/stack.properties" return template_key , property_key
We are putting stuff into S3; we're supplied the bucket. Here we craft the keys of the elements we are putting up there in the internet clouds.
1,645
def poll_stack ( self ) : logging . info ( 'polling stack status, POLL_INTERVAL={}' . format ( POLL_INTERVAL ) ) time . sleep ( POLL_INTERVAL ) completed_states = [ 'CREATE_COMPLETE' , 'UPDATE_COMPLETE' , 'DELETE_COMPLETE' ] stack_name = self . _config . get ( 'environment' , { } ) . get ( 'stack_name' , None ) while True : try : response = self . _cloudFormation . describe_stacks ( StackName = stack_name ) stack = response [ 'Stacks' ] [ 0 ] current_status = stack [ 'StackStatus' ] logging . info ( 'current status of {}: {}' . format ( stack_name , current_status ) ) if current_status . endswith ( 'COMPLETE' ) or current_status . endswith ( 'FAILED' ) : if current_status in completed_states : return True else : return False time . sleep ( POLL_INTERVAL ) except ClientError as wtf : if str ( wtf ) . find ( 'does not exist' ) == - 1 : logging . error ( 'Exception caught in wait_for_stack(): {}' . format ( wtf ) ) traceback . print_exc ( file = sys . stdout ) return False else : logging . info ( '{} is gone' . format ( stack_name ) ) return True except Exception as wtf : logging . error ( 'Exception caught in wait_for_stack(): {}' . format ( wtf ) ) traceback . print_exc ( file = sys . stdout ) return False
Spin in a loop until the CloudFormation process either fails or succeeds.
1,646
def setup_desktop ( ) : run ( 'sudo apt-get update' ) install_packages ( packages_desktop ) execute ( custom . latex ) execute ( setup . ripping_of_cds ) execute ( setup . regex_repl ) execute ( setup . i3 ) execute ( setup . solarized ) execute ( setup . vim ) execute ( setup . tmux ) execute ( setup . pyenv ) from fabfile import dfh , check_reboot dfh ( ) check_reboot ( )
Run setup tasks to set up a nicely configured desktop PC.
1,647
def setup_webserver ( ) : run ( 'sudo apt-get update' ) install_packages ( packages_webserver ) execute ( custom . latex ) execute ( setup . solarized ) execute ( setup . vim ) execute ( setup . tmux ) checkup_git_repo_legacy ( url = 'git@github.com:letsencrypt/letsencrypt.git' ) execute ( setup . service . fdroid ) execute ( setup . service . owncloud ) from fabfile import dfh , check_reboot dfh ( ) check_reboot ( )
Run setup tasks to set up a nicely configured webserver.
1,648
def start_recv ( sockfile = None ) : if sockfile is not None : SOCKFILE = sockfile else : SOCKFILE = "/tmp/snort_alert" if os . path . exists ( SOCKFILE ) : os . unlink ( SOCKFILE ) unsock = socket . socket ( socket . AF_UNIX , socket . SOCK_DGRAM ) unsock . bind ( SOCKFILE ) logging . warning ( 'Unix socket start listening...' ) while True : data = unsock . recv ( BUFSIZE ) parsed_msg = alert . AlertPkt . parser ( data ) if parsed_msg : yield parsed_msg
Open a server on a Unix domain socket.
1,649
def dump ( obj , fp = None , indent = None , sort_keys = False , ** kw ) : if fp : iterable = YAMLEncoder ( indent = indent , sort_keys = sort_keys , ** kw ) . iterencode ( obj ) for chunk in iterable : fp . write ( chunk ) else : return dumps ( obj , indent = indent , sort_keys = sort_keys , ** kw )
Dump an object to a file-like object or string.
1,650
def dumps ( obj , indent = None , default = None , sort_keys = False , ** kw ) : return YAMLEncoder ( indent = indent , default = default , sort_keys = sort_keys , ** kw ) . encode ( obj )
Dump an object to a string.
1,651
def load ( s , ** kwargs ) : try : return loads ( s , ** kwargs ) except TypeError : return loads ( s . read ( ) , ** kwargs )
Load YAML from a string or file-like object.
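A usage sketch of the fallback above, assuming loads() raises TypeError for non-string input (which is what the except branch relies on); file-like objects are then read and parsed:

import io
print(load('a: 1'))               # parsed directly from the string
print(load(io.StringIO('a: 1')))  # parsed after falling back to .read()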
1,652
def address ( self ) : "The address in big-endian" _ = struct . pack ( 'L' , self . address_num ) return struct . unpack ( '!L' , _ ) [ 0 ]
The address in big-endian.
1,653
def validate_currency ( * currencies ) : validated_currency = [ ] if not currencies : raise CurrencyException ( 'My function need something to run, duh' ) for currency in currencies : if not isinstance ( currency , str ) : raise TypeError ( 'Currency code should be a string: ' + repr ( currency ) ) currency = currency . upper ( ) if currency not in _currencies : raise CurrencyException ( 'Currency code not found: ' + repr ( currency ) ) validated_currency . append ( currency ) return validated_currency [ 0 ] if len ( validated_currency ) == 1 else validated_currency
Run some validation checks on the currency codes before doing anything.
1,654
def validate_price ( price ) : if isinstance ( price , str ) : try : price = int ( price ) except ValueError : price = float ( price ) if not isinstance ( price , ( int , float ) ) : raise TypeError ( 'Price should be a number: ' + repr ( price ) ) return price
Validation checks for the price argument.
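Deterministic examples against the function above; note that a non-numeric string raises ValueError from float(), so the TypeError branch only triggers for non-string, non-number inputs:

print(validate_price('10'))    # 10 (int, via the int() attempt)
print(validate_price('10.5'))  # 10.5 (float, after int() fails)
print(validate_price(3.25))    # 3.25 (already a number, passed through)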
1,655
def name ( currency , * , plural = False ) : currency = validate_currency ( currency ) if plural : return _currencies [ currency ] [ 'name_plural' ] return _currencies [ currency ] [ 'name' ]
Return the name of a currency.
1,656
def symbol ( currency , * , native = True ) : currency = validate_currency ( currency ) if native : return _currencies [ currency ] [ 'symbol_native' ] return _currencies [ currency ] [ 'symbol' ]
Return the symbol of a currency.
1,657
def rounding ( price , currency ) : currency = validate_currency ( currency ) price = validate_price ( price ) if decimals ( currency ) == 0 : return round ( int ( price ) , decimals ( currency ) ) return round ( price , decimals ( currency ) )
Round a currency value based on its maximum number of decimal digits.
1,658
def check_update ( from_currency , to_currency ) : if from_currency not in ccache : ccache [ from_currency ] = { } if ccache [ from_currency ] . get ( to_currency ) is None : ccache [ from_currency ] [ to_currency ] = { 'last_update' : 0 } last_update = float ( ccache [ from_currency ] [ to_currency ] [ 'last_update' ] ) if time . time ( ) - last_update >= 30 * 60 : return True return False
Check if the last update was over 30 minutes ago. If so, return True (an update is needed), else False.
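A minimal, self-contained sketch of the same time-to-live check, with a plain dict standing in for ccache (hypothetical names):

import time

TTL_SECONDS = 30 * 60

def is_stale(cache, pair):
    last_update = cache.get(pair, {}).get('last_update', 0)
    return time.time() - last_update >= TTL_SECONDS

cache = {('USD', 'EUR'): {'last_update': time.time()}}
print(is_stale(cache, ('USD', 'EUR')))  # False: just updated
print(is_stale(cache, ('USD', 'JPY')))  # True: never updated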
1,659
def update_cache ( from_currency , to_currency ) : if check_update ( from_currency , to_currency ) is True : ccache [ from_currency ] [ to_currency ] [ 'value' ] = convert_using_api ( from_currency , to_currency ) ccache [ from_currency ] [ to_currency ] [ 'last_update' ] = time . time ( ) cache . write ( ccache )
Update the from_currency/to_currency pair in the cache by requesting API info, if the last update for that pair was over 30 minutes ago.
1,660
def convert_using_api ( from_currency , to_currency ) : convert_str = from_currency + '_' + to_currency options = { 'compact' : 'ultra' , 'q' : convert_str } api_url = 'https://free.currencyconverterapi.com/api/v5/convert' result = requests . get ( api_url , params = options ) . json ( ) return result [ convert_str ]
Convert from from_currency to to_currency by requesting the API.
1,661
def convert ( from_currency , to_currency , from_currency_price = 1 ) : get_cache ( ) from_currency , to_currency = validate_currency ( from_currency , to_currency ) update_cache ( from_currency , to_currency ) return ccache [ from_currency ] [ to_currency ] [ 'value' ] * from_currency_price
Convert from from_currency to to_currency using cached info.
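Hypothetical usage; on a cache miss this calls through to the third-party converter API named above, so it needs network access and a working endpoint:

eur_price = convert('USD', 'EUR', 100)  # value of 100 USD in EUR, from cache if fresh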
1,662
def wait_for_signal ( self , timeout = None ) : timeout_ms = int ( timeout * 1000 ) if timeout else win32event . INFINITE win32event . WaitForSingleObject ( self . signal_event , timeout_ms )
Wait for the signal; return after the signal has occurred or the timeout in seconds elapses.
1,663
def ip_geoloc ( ip , hit_api = True ) : from . . logs . models import IPInfoCheck try : obj = IPInfoCheck . objects . get ( ip_address = ip ) . ip_info except IPInfoCheck . DoesNotExist : if hit_api : try : obj = IPInfoCheck . check_ip ( ip ) except RateExceededError : return None else : return None return obj . latitude , obj . longitude
Get IP geolocation.
1,664
def google_maps_geoloc_link ( data ) : if isinstance ( data , str ) : lat_lon = ip_geoloc ( data ) if lat_lon is None : return '' lat , lon = lat_lon else : lat , lon = data loc = '%s,%s' % ( lat , lon ) return 'https://www.google.com/maps/place/@%s,17z/' 'data=!3m1!4b1!4m5!3m4!1s0x0:0x0!8m2!3d%s!4d%s' % ( loc , lat , lon )
Get a link to Google Maps pointing at this IP's geolocation.
1,665
def open_street_map_geoloc_link ( data ) : if isinstance ( data , str ) : lat_lon = ip_geoloc ( data ) if lat_lon is None : return '' lat , lon = lat_lon else : lat , lon = data return 'https://www.openstreetmap.org/search' '?query=%s%%2C%s#map=7/%s/%s' % ( lat , lon , lat , lon )
Get a link to OpenStreetMap pointing at this IP's geolocation.
1,666
def status_codes_chart ( ) : stats = status_codes_stats ( ) chart_options = { 'chart' : { 'type' : 'pie' } , 'title' : { 'text' : '' } , 'subtitle' : { 'text' : '' } , 'tooltip' : { 'formatter' : "return this.y + '/' + this.total + ' (' + " "Highcharts.numberFormat(this.percentage, 1) + '%)';" } , 'legend' : { 'enabled' : True , } , 'plotOptions' : { 'pie' : { 'allowPointSelect' : True , 'cursor' : 'pointer' , 'dataLabels' : { 'enabled' : True , 'format' : '<b>{point.name}</b>: {point.y}/{point.total} ' '({point.percentage:.1f}%)' } , 'showInLegend' : True } } , 'series' : [ { 'name' : _ ( 'Status Codes' ) , 'colorByPoint' : True , 'data' : sorted ( [ { 'name' : '%s %s' % ( k , STATUS_CODES [ int ( k ) ] [ 'name' ] ) , 'y' : v } for k , v in stats . items ( ) ] , key = lambda x : x [ 'y' ] , reverse = True ) } ] } return chart_options
Chart for status codes.
1,667
def most_visited_pages_legend_chart ( ) : return { 'chart' : { 'type' : 'bar' , 'height' : 200 , } , 'title' : { 'text' : _ ( 'Legend' ) } , 'xAxis' : { 'categories' : [ _ ( 'Project URL' ) , _ ( 'Old project URL' ) , _ ( 'Asset URL' ) , _ ( 'Old asset URL' ) , _ ( 'Common asset URL' ) , _ ( 'False-negative project URL' ) , _ ( 'Suspicious URL (potential attack)' ) ] , 'title' : { 'text' : None } } , 'yAxis' : { 'title' : { 'text' : None , 'align' : 'high' } , 'labels' : { 'overflow' : 'justify' } } , 'tooltip' : { 'enabled' : False } , 'legend' : { 'enabled' : False } , 'credits' : { 'enabled' : False } , 'series' : [ { 'name' : _ ( 'Legend' ) , 'data' : [ { 'color' : URL_TYPE_COLOR [ PROJECT ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ OLD_PROJECT ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ ASSET ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ OLD_ASSET ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ COMMON_ASSET ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ FALSE_NEGATIVE ] , 'y' : 1 } , { 'color' : URL_TYPE_COLOR [ SUSPICIOUS ] , 'y' : 1 } , ] } ] }
Chart for the most-visited-pages legend.
1,668
def add_settings ( mod , allow_extras = True , settings = django_settings ) : extras = { } for setting in dir ( mod ) : if setting == setting . upper ( ) : setting_value = getattr ( mod , setting ) if setting in TUPLE_SETTINGS and type ( setting_value ) == str : setting_value = ( setting_value , ) if setting . startswith ( 'EXTRA_' ) : base_setting = setting . split ( 'EXTRA_' , 1 ) [ - 1 ] if isinstance ( getattr ( settings , base_setting ) , ( list , tuple ) ) : extras [ base_setting ] = setting_value continue setattr ( settings , setting , setting_value ) for key , value in extras . items ( ) : curval = getattr ( settings , key ) setattr ( settings , key , curval + type ( curval ) ( value ) )
Adds all settings that are part of mod to the global settings object.
1,669
def get_urls ( self ) : urls = super ( DashboardSite , self ) . get_urls ( ) custom_urls = [ url ( r'^$' , self . admin_view ( HomeView . as_view ( ) ) , name = 'index' ) , url ( r'^logs/' , include ( logs_urlpatterns ( self . admin_view ) ) ) , ] custom_urls += get_realtime_urls ( self . admin_view ) del urls [ 0 ] return custom_urls + urls
Get URLs method.
1,670
def add_form_widget_attr ( field , attr_name , attr_value , replace = 0 ) : if not replace : attr = field . field . widget . attrs . get ( attr_name , '' ) attr += force_text ( attr_value ) field . field . widget . attrs [ attr_name ] = attr return field else : field . field . widget . attrs [ attr_name ] = attr_value return field
Adds widget attributes to a bound form field.
1,671
def block_anyfilter ( parser , token ) : bits = token . contents . split ( ) nodelist = parser . parse ( ( 'endblockanyfilter' , ) ) parser . delete_first_token ( ) return BlockAnyFilterNode ( nodelist , bits [ 1 ] , * bits [ 2 : ] )
Turn any template filter into a blocktag.
1,672
def calculate_dimensions ( image , long_side , short_side ) : if image . width >= image . height : return '{0}x{1}' . format ( long_side , short_side ) return '{0}x{1}' . format ( short_side , long_side )
Returns the thumbnail dimensions depending on the image's format.
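A deterministic example against the function above, with a namedtuple standing in for a PIL-style image object (hypothetical stand-in):

from collections import namedtuple

Image = namedtuple('Image', 'width height')
print(calculate_dimensions(Image(800, 600), 640, 480))  # '640x480' (landscape)
print(calculate_dimensions(Image(600, 800), 640, 480))  # '480x640' (portrait)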
1,673
def call ( obj , method , * args , ** kwargs ) : function_or_dict_or_member = getattr ( obj , method ) if callable ( function_or_dict_or_member ) : return function_or_dict_or_member ( * args , ** kwargs ) if not len ( args ) : return function_or_dict_or_member return function_or_dict_or_member [ args [ 0 ] ]
Allows calling any method of any object with parameters.
1,674
def concatenate ( * args , ** kwargs ) : divider = kwargs . get ( 'divider' , '' ) result = '' for arg in args : if result == '' : result += arg else : result += '{0}{1}' . format ( divider , arg ) return result
Concatenates the given strings.
1,675
def get_content_type ( obj , field_name = False ) : content_type = ContentType . objects . get_for_model ( obj ) if field_name : return getattr ( content_type , field_name , '' ) return content_type
Returns the content type of an object.
1,676
def get_verbose ( obj , field_name = "" ) : if hasattr ( obj , "_meta" ) and hasattr ( obj . _meta , "get_field_by_name" ) : try : return obj . _meta . get_field ( field_name ) . verbose_name except FieldDoesNotExist : pass return ""
Returns the verbose name of an object's field.
1,677
def get_query_params ( request , * args ) : query = request . GET . copy ( ) index = 1 key = '' for arg in args : if index % 2 != 0 : key = arg else : if arg == "!remove" : try : query . pop ( key ) except KeyError : pass else : query [ key ] = arg index += 1 return query . urlencode ( )
Allows changing one of the URL GET parameters while keeping all the others.
1,678
def navactive ( request , url , exact = 0 , use_resolver = 1 ) : if use_resolver : try : if url == resolve ( request . path ) . url_name : return 'active' elif url == request . path : match = request . path else : return '' except Resolver404 : match = request . path else : match = request . path if exact and url == match : return 'active' elif not exact and url in request . path : return 'active' return ''
Returns 'active' if the given URL is in the URL path, an empty string otherwise.
1,679
def get_range_around ( range_value , current_item , padding ) : total_items = 1 + padding * 2 left_bound = padding right_bound = range_value - padding if range_value <= total_items : range_items = range ( 1 , range_value + 1 ) return { 'range_items' : range_items , 'left_padding' : False , 'right_padding' : False , } if current_item <= left_bound : range_items = range ( 1 , range_value + 1 ) [ : total_items ] return { 'range_items' : range_items , 'left_padding' : range_items [ 0 ] > 1 , 'right_padding' : range_items [ - 1 ] < range_value , } if current_item >= right_bound : range_items = range ( 1 , range_value + 1 ) [ - total_items : ] return { 'range_items' : range_items , 'left_padding' : range_items [ 0 ] > 1 , 'right_padding' : range_items [ - 1 ] < range_value , } range_items = range ( current_item - padding , current_item + padding + 1 ) return { 'range_items' : range_items , 'left_padding' : True , 'right_padding' : True , }
Returns a range of numbers around the given number.
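Deterministic examples against the function above; the window holds 2 * padding + 1 items and clamps at the range edges:

print(get_range_around(10, 5, 2))
# {'range_items': range(3, 8), 'left_padding': True, 'right_padding': True}
print(get_range_around(10, 1, 2))
# {'range_items': range(1, 6), 'left_padding': False, 'right_padding': True}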
1,680
def sum ( context , key , value , multiplier = 1 ) : if key not in context . dicts [ 0 ] : context . dicts [ 0 ] [ key ] = 0 context . dicts [ 0 ] [ key ] += value * multiplier return ''
Adds the given value to the total value currently held in key.
1,681
def verbatim ( parser , token ) : text = [ ] while 1 : token = parser . tokens . pop ( 0 ) if token . contents == 'endverbatim' : break if token . token_type == TOKEN_VAR : text . append ( '{{ ' ) elif token . token_type == TOKEN_BLOCK : text . append ( '{%' ) text . append ( token . contents ) if token . token_type == TOKEN_VAR : text . append ( ' }}' ) elif token . token_type == TOKEN_BLOCK : if not text [ - 1 ] . startswith ( '=' ) : text [ - 1 : - 1 ] = [ ' ' ] text . append ( ' %}' ) return VerbatimNode ( '' . join ( text ) )
Tag to render x-tmpl templates with Django template code.
1,682
def append_s ( value ) : if value . endswith ( 's' ) : return u"{0}'" . format ( value ) else : return u"{0}'s" . format ( value )
Adds the possessive 's after a string.
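Deterministic examples against the filter above:

print(append_s('Susan'))  # Susan's
print(append_s('Jens'))   # Jens'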
1,683
def logs_urlpatterns ( admin_view = lambda x : x ) : return [ url ( r'^$' , admin_view ( LogsMenu . as_view ( ) ) , name = 'logs' ) , url ( r'^status_codes$' , admin_view ( LogsStatusCodes . as_view ( ) ) , name = 'logs_status_codes' ) , url ( r'^status_codes_by_date$' , admin_view ( LogsStatusCodesByDate . as_view ( ) ) , name = 'logs_status_codes_by_date' ) , url ( r'^most_visited_pages$' , admin_view ( LogsMostVisitedPages . as_view ( ) ) , name = 'logs_most_visited_pages' ) ]
Return the URL patterns for the logs views.
1,684
def _get ( self , ip ) : retries = 10 for retry in range ( retries ) : try : response = requests . get ( 'http://ipinfo.io/%s/json' % ip , verify = False , timeout = 1 ) if response . status_code == 429 : raise RateExceededError return response . json ( ) except ( requests . ReadTimeout , requests . ConnectTimeout ) : pass return { }
Get information about an IP.
1,685
def parse ( data ) : data = data . decode ( "ascii" ) if len ( data ) == 2 and data == "A5" : return AckMessage ( ) raw = [ data [ i : i + 2 ] for i in range ( len ( data ) ) if i % 2 == 0 ] if len ( raw ) != 7 : return UnknownMessage ( raw ) if raw [ 1 ] == "B8" : return StateMessage ( raw ) elif raw [ 3 ] == "12" : return CommandMessage ( raw ) elif raw [ 3 ] == "14" : return ScenarioTriggeredMessage ( raw ) elif raw [ 3 ] == "15" : return RequestStatusMessage ( raw ) else : return UnknownMessage ( raw )
Parses a raw datagram and returns the right type of message.
1,686
def checksum_bytes ( data ) : int_values = [ int ( x , 16 ) for x in data ] int_xor = reduce ( lambda x , y : x ^ y , int_values ) hex_xor = "{:X}" . format ( int_xor ) if len ( hex_xor ) % 2 != 0 : hex_xor = "0" + hex_xor return str . encode ( hex_xor )
Returns the XOR of all the bytes in the given list.
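A worked example against the function above (which relies on functools.reduce being imported in its module): 0xA8 ^ 0x18 = 0xB0 and 0xB0 ^ 0x12 = 0xA2, so:

print(checksum_bytes(['A8', '18', '12']))  # b'A2'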
1,687
def compose_telegram ( body ) : msg = [ b"A8" ] + body + [ checksum_bytes ( body ) ] + [ b"A3" ] return str . encode ( "" . join ( [ x . decode ( ) for x in msg ] ) )
Compose an SCS message.
1,688
def send_email ( request , context , subject_template , body_template , from_email , recipients , priority = "medium" , reply_to = None , headers = None , cc = None , bcc = None ) : headers = headers or { } if not reply_to : reply_to = from_email if hasattr ( settings , 'DJANGO_LIBS_EMAIL_CONTEXT' ) : context_fn = load_member_from_setting ( 'DJANGO_LIBS_EMAIL_CONTEXT' ) context . update ( context_fn ( request ) ) if request and request . get_host ( ) : domain = request . get_host ( ) protocol = 'https://' if request . is_secure ( ) else 'http://' else : domain = getattr ( settings , 'DOMAIN' , Site . objects . get_current ( ) . domain ) protocol = getattr ( settings , 'PROTOCOL' , 'http://' ) context . update ( { 'domain' : domain , 'protocol' : protocol , } ) subject = render_to_string ( template_name = subject_template , context = context , request = request ) subject = '' . join ( subject . splitlines ( ) ) message_html = render_to_string ( template_name = body_template , context = context , request = request ) message_plaintext = html_to_plain_text ( message_html ) subject = force_text ( subject ) message = force_text ( message_plaintext ) email = EmailMultiAlternatives ( subject = subject , body = message , from_email = from_email , to = recipients , cc = cc , bcc = bcc , headers = headers , reply_to = [ reply_to ] , ) email . attach_alternative ( message_html , "text/html" ) if settings . EMAIL_BACKEND == 'mailer.backend.DbBackend' : priority = mailer . get_priority ( priority ) msg = make_message ( subject = subject , body = message , from_email = from_email , to = recipients , priority = priority , ) msg . email = email msg . save ( ) else : email . send ( )
Sends an email based on templates for subject and body.
1,689
def url_is_project ( url , default = 'not_a_func' ) : try : u = resolve ( url ) if u and u . func != default : return True except Resolver404 : static_url = settings . STATIC_URL static_url_wd = static_url . lstrip ( '/' ) if url . startswith ( static_url ) : url = url [ len ( static_url ) : ] elif url . startswith ( static_url_wd ) : url = url [ len ( static_url_wd ) : ] else : return False if finders . find ( url ) : return True return False
Check if a URL is part of the current project's URLs.
1,690
def url_is ( white_list ) : def func ( url ) : prefixes = white_list . get ( 'PREFIXES' , ( ) ) for prefix in prefixes : if url . startswith ( prefix ) : return True constants = white_list . get ( 'CONSTANTS' , ( ) ) for exact_url in constants : if url == exact_url : return True return False return func
Function generator: builds a predicate that checks whether a URL matches the given white list.
1,691
def save_records ( self , records ) : for record in records : if not isinstance ( record , Record ) : record = Record ( * record ) self . save_record ( * record )
Save a collection of records
1,692
def save_record ( self , agent_id , t_step , key , value ) : value = self . convert ( key , value ) self . _tups . append ( Record ( agent_id = agent_id , t_step = t_step , key = key , value = value ) ) if len ( self . _tups ) > 100 : self . flush_cache ( )
Save a single record to the database. Database writes are cached.
1,693
def convert ( self , key , value ) : if key not in self . _dtypes : self . read_types ( ) if key not in self . _dtypes : name = utils . name ( value ) serializer = utils . serializer ( name ) deserializer = utils . deserializer ( name ) self . _dtypes [ key ] = ( name , serializer , deserializer ) with self . db : self . db . execute ( "replace into value_types (key, value_type) values (?, ?)" , ( key , name ) ) return self . _dtypes [ key ] [ 1 ] ( value )
Get the serialized value for a given key.
1,694
def recover ( self , key , value ) : if key not in self . _dtypes : self . read_types ( ) if key not in self . _dtypes : raise ValueError ( "Unknown datatype for {} and {}" . format ( key , value ) ) return self . _dtypes [ key ] [ 2 ] ( value )
Get the deserialized value for a given key from its serialized version.
1,695
def flush_cache ( self ) : logger . debug ( 'Flushing cache {}' . format ( self . db_path ) ) with self . db : for rec in self . _tups : self . db . execute ( "replace into history(agent_id, t_step, key, value) values (?, ?, ?, ?)" , ( rec . agent_id , rec . t_step , rec . key , rec . value ) ) self . _tups = list ( )
Use a cache to save state changes, to avoid opening a session for every change. The cache will be flushed at the end of the simulation and when history is accessed.
1,696
def records ( ) : import pkg_resources import uuid from dojson . contrib . marc21 import marc21 from dojson . contrib . marc21 . utils import create_record , split_blob from invenio_pidstore import current_pidstore from invenio_records . api import Record data_path = pkg_resources . resource_filename ( 'invenio_records' , 'data/marc21/bibliographic.xml' ) with open ( data_path ) as source : indexer = RecordIndexer ( ) with db . session . begin_nested ( ) : for index , data in enumerate ( split_blob ( source . read ( ) ) , start = 1 ) : rec_uuid = uuid . uuid4 ( ) record = marc21 . do ( create_record ( data ) ) current_pidstore . minters [ 'recid' ] ( rec_uuid , record ) indexer . index ( Record . create ( record , id_ = rec_uuid ) ) db . session . commit ( )
Load records .
1,697
def run_trial_exceptions ( self , * args , ** kwargs ) : try : return self . run_trial ( * args , ** kwargs ) except Exception as ex : c = ex . __cause__ c . message = '' . join ( traceback . format_exception ( type ( c ) , c , c . __traceback__ ) [ : ] ) return c
A wrapper for run_trial that catches exceptions and returns them. It is meant for async simulations.
1,698
def search_orcid ( orcid ) : url = 'https://pub.orcid.org/v2.1/{orcid}/person' . format ( orcid = orcid ) r = requests . get ( url , headers = headers ) if r . status_code != 200 : r . raise_for_status ( ) return r . json ( )
Search the ORCID public API
1,699
def daterange ( start_date , end_date ) : for n in range ( int ( ( end_date - start_date ) . days ) ) : yield start_date + timedelta ( n )
Yield one date per day from starting date to ending date.
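A usage example against the generator above (its module needs datetime.timedelta imported); like range(), the end date is excluded:

from datetime import date
for d in daterange(date(2024, 1, 1), date(2024, 1, 4)):
    print(d)  # 2024-01-01, then 2024-01-02, then 2024-01-03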