idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
54,800 | def authenticated_get ( username , password , url , verify = True ) : try : response = requests . get ( url , auth = ( username , password ) , verify = verify ) if response . status_code == 401 : raise BadCredentialsException ( "Unable to authenticate user %s to %s with password provided!" % ( username , url ) ) except requests . exceptions . SSLError : raise CertificateException ( "Unable to verify certificate at %s!" % url ) return response . content | Perform an authorized query to the url and return the result |
54,801 | def cleaned_request ( request_type , * args , ** kwargs ) : s = requests . Session ( ) s . trust_env = False return s . request ( request_type , * args , ** kwargs ) | Perform a cleaned requests request |
54,802 | def download_to_bytesio ( url ) : logger . info ( "Downloading url: {0}" . format ( url ) ) r = cleaned_request ( 'get' , url , stream = True ) stream = io . BytesIO ( ) total_length = int ( r . headers . get ( 'content-length' ) ) for chunk in progress . bar ( r . iter_content ( chunk_size = 1024 ) , expected_size = ( total_length / 1024 ) + 1 ) : if chunk : stream . write ( chunk ) stream . seek ( 0 ) return stream | Return a bytesio object with a download bar |
54,803 | def add ( one , two = 4 , three = False ) : s = str ( int ( one ) + int ( two ) ) logging . debug ( 'logging sum from hello.py:' + s ) print 'printing sum from hello.py:' , s return s | This function adds two number . |
54,804 | def queue ( self ) : with self . connection_pool . acquire ( block = True ) as conn : return Q ( self . routing_key , exchange = self . exchange , routing_key = self . routing_key ) ( conn ) | Message queue queue . |
54,805 | def exists ( self ) : try : queue = self . queue queue . queue_declare ( passive = True ) except NotFound : return False except ChannelError as e : if e . reply_code == '404' : return False raise e return True | Test if this queue exists in the AMQP store . |
54,806 | def create_consumer ( self ) : with self . connection_pool . acquire ( block = True ) as conn : yield self . consumer ( conn ) | Context manager that yields an instance of Consumer . |
54,807 | def publish ( self , events ) : assert len ( events ) > 0 with self . create_producer ( ) as producer : for event in events : producer . publish ( event ) | Publish events . |
54,808 | def consume ( self , payload = True ) : with self . create_consumer ( ) as consumer : for msg in consumer . iterqueue ( ) : yield msg . payload if payload else msg | Consume events . |
54,809 | def get_initial ( self , * args , ** kwargs ) : initial = { } for field in self . fields : value = None if hasattr ( self . user , field ) : value = getattr ( self . user , field ) if hasattr ( self . profile , field ) : value = getattr ( self . profile , field ) if value : initial . update ( { field : value } ) if hasattr ( self . profile , 'dob' ) : dob = self . profile . dob if dob : if 'dob_day' in self . fields : initial . update ( { 'dob_day' : dob . day } ) if 'dob_month' in self . fields : initial . update ( { 'dob_month' : dob . month } ) if 'dob_year' in self . fields : initial . update ( { 'dob_year' : dob . year } ) return initial | Gathers initial form values from user and profile objects suitable for using as form s initial data . |
54,810 | def save ( self , * args , ** kwargs ) : for key , value in self . cleaned_data . items ( ) : if value != None : if hasattr ( self . user , key ) : setattr ( self . user , key , value ) if hasattr ( self . profile , key ) : setattr ( self . profile , key , value ) if 'password1' in self . cleaned_data : if self . cleaned_data [ 'password1' ] : self . user . set_password ( self . cleaned_data [ 'password1' ] ) if 'dob_day' in self . cleaned_data and 'dob_month' in self . cleaned_data and 'dob_year' in self . cleaned_data : self . profile . dob = self . _gen_dob ( ) self . user . save ( ) self . profile . save ( ) | This method should be called when is_valid is true to save relevant fields to user and profile models . |
54,811 | def clean_username ( self ) : user = None try : user = User . objects . get ( username__iexact = self . cleaned_data [ 'username' ] ) except User . DoesNotExist : return self . cleaned_data [ 'username' ] if user : if user . username == self . user . username : return self . cleaned_data [ 'username' ] raise forms . ValidationError ( _ ( "A user with that username already exists." ) ) | Validate that the username is alphanumeric and is not already in use . Don t fail if users username is provided . |
54,812 | def add_body_part ( self , key , data , mime_type , size = None ) : if isinstance ( data , str ) : size = len ( data ) if hasattr ( data , "fileno" ) : size = os . fstat ( data . fileno ( ) ) [ stat . ST_SIZE ] if size is None : raise UnknownSize ( 'Each part of the body must have a known size.' ) if 'Content-Length' in self . headers : content_length = int ( self . headers [ 'Content-Length' ] ) else : content_length = 0 boundary_string = '\r\n--%s\r\n' % ( MIME_BOUNDARY , ) self . _body_parts . append ( boundary_string ) content_length += len ( boundary_string ) + size cd = 'Content-Disposition: form-data; name="%s"' % key mt = mime_type if hasattr ( data , "fileno" ) : cd += '; filename="%s"' % data . name . split ( '/' ) [ - 1 ] mt = mimetypes . guess_type ( data . name ) [ 0 ] or 'application/octet-stream' cd += '\r\n' type_string = 'Content-Type: %s\r\n\r\n' % ( mt ) self . _body_parts . append ( cd ) self . _body_parts . append ( type_string ) content_length += len ( type_string ) + len ( cd ) self . _body_parts . append ( data ) self . headers [ 'Content-Length' ] = str ( content_length ) | Adds data to the HTTP request body . If more than one part is added this is assumed to be a mime - multipart request . This method is designed to create MIME 1 . 0 requests as specified in RFC 1341 . |
54,813 | def _copy ( self ) : copied_uri = Uri ( self . uri . scheme , self . uri . host , self . uri . port , self . uri . path , self . uri . query . copy ( ) ) new_request = HttpRequest ( uri = copied_uri , method = self . method , headers = self . headers . copy ( ) ) new_request . _body_parts = self . _body_parts [ : ] return new_request | Creates a deep copy of this request . |
54,814 | def _get_relative_path ( self ) : param_string = self . _get_query_string ( ) if self . path is None : path = '/' else : path = self . path if param_string : return '?' . join ( [ path , param_string ] ) else : return path | Returns the path with the query parameters escaped and appended . |
54,815 | def modify_request ( self , http_request = None ) : if http_request is None : http_request = HttpRequest ( ) if http_request . uri is None : http_request . uri = Uri ( ) if self . scheme : http_request . uri . scheme = self . scheme if self . port : http_request . uri . port = self . port if self . host : http_request . uri . host = self . host if self . path : http_request . uri . path = self . path if self . query : http_request . uri . query = self . query . copy ( ) return http_request | Sets HTTP request components based on the URI . |
54,816 | def parse_uri ( uri_string ) : parts = urlparse . urlparse ( uri_string ) uri = Uri ( ) if parts [ 0 ] : uri . scheme = parts [ 0 ] if parts [ 1 ] : host_parts = parts [ 1 ] . split ( ':' ) if host_parts [ 0 ] : uri . host = host_parts [ 0 ] if len ( host_parts ) > 1 : uri . port = int ( host_parts [ 1 ] ) if parts [ 2 ] : uri . path = parts [ 2 ] if parts [ 4 ] : param_pairs = parts [ 4 ] . split ( '&' ) for pair in param_pairs : pair_parts = pair . split ( '=' ) if len ( pair_parts ) > 1 : uri . query [ urllib . unquote_plus ( pair_parts [ 0 ] ) ] = ( urllib . unquote_plus ( pair_parts [ 1 ] ) ) elif len ( pair_parts ) == 1 : uri . query [ urllib . unquote_plus ( pair_parts [ 0 ] ) ] = None return uri | Creates a Uri object which corresponds to the URI string . This method can accept partial URIs but it will leave missing members of the Uri unset . |
54,817 | def next ( self ) : " Move on to the next character in the text. " char = self . char if char == '\n' : self . lineno += 1 self . colno = 0 else : self . colno += 1 self . index += 1 return self . char | Move on to the next character in the text . |
54,818 | def readline ( self ) : " Reads a full line from the scanner and returns it. " start = end = self . index while end < len ( self . text ) : if self . text [ end ] == '\n' : end += 1 break end += 1 result = self . text [ start : end ] self . index = end if result . endswith ( '\n' ) : self . colno = 0 self . lineno += 1 else : self . colno += end - start return result | Reads a full line from the scanner and returns it . |
54,819 | def accept ( self , * names , ** kwargs ) : return self . next ( * names , as_accept = True , ** kwargs ) | Extracts a token of one of the specified rule names and doesn t error if unsuccessful . Skippable tokens might still be skipped by this method . |
54,820 | def append ( self , event , help = "" ) : if isinstance ( event , str ) : self . _events [ event ] = HookList ( is_waterfall = self . is_waterfall ) self . _help [ event ] = ( help , getframeinfo ( stack ( ) [ 1 ] [ 0 ] ) ) if not help : logger . warning ( "Great, don't say anything about your hooks and \ wait for plugin creators to figure it out." ) elif isinstance ( event , Iterable ) : for name in event : self . append ( name ) else : raise TypeError ( "Invalid event name!" ) | Creates a new event . event may be iterable or string |
54,821 | def hook ( self , function , event , dependencies ) : if event is None : for e in self . _events . keys ( ) : self . hook ( function , e , dependencies ) return if not isinstance ( event , str ) and isinstance ( event , Iterable ) : for e in event : self . hook ( function , e , dependencies ) return event_list = self . _events . get ( event , None ) if event_list is None : raise NameError ( "Invalid key provided '%s'. Valid options: %s" % ( event , ", " . join ( self . _events . keys ( ) ) ) ) return return event_list . hook ( function , dependencies ) | Tries to load the hook to the event |
54,822 | def print_message ( self , message , verbosity_needed = 1 ) : if self . args . verbosity >= verbosity_needed : print ( message ) | Prints the message if verbosity is high enough . |
54,823 | def error ( self , message , code = 1 ) : sys . stderr . write ( message ) sys . exit ( code ) | Prints the error and exits with the given code . |
54,824 | def parse_db_settings ( self , settings ) : if settings == 'DJANGO_SETTINGS_MODULE' : django_settings = os . environ . get ( 'DJANGO_SETTINGS_MODULE' ) self . print_message ( "Getting settings file from DJANGO_SETTINGS_MODULE=%s" % django_settings ) path_pieces = django_settings . split ( '.' ) path_pieces [ - 1 ] = '%s.py' % path_pieces [ - 1 ] settings = os . path . join ( * path_pieces ) self . print_message ( "Parsing settings from settings file '%s'" % settings ) parser = DatabaseSettingsParser ( ) with open ( settings ) as settings_file : settings_ast = ast . parse ( settings_file . read ( ) ) parser . visit ( settings_ast ) try : return parser . database_settings [ 'default' ] except KeyError as e : self . error ( "Missing key or value for: %s\nSettings must be of the form: %s" % ( e , self . settings_format ) ) | Parse out database settings from filename or DJANGO_SETTINGS_MODULE . |
54,825 | def initialize_db_args ( self , settings , db_key ) : self . print_message ( "Initializing database settings for %s" % db_key , verbosity_needed = 2 ) db_member = self . databases [ db_key ] db_name = settings . get ( 'NAME' ) if db_name and not db_member [ 'name' ] : db_member [ 'name' ] = db_name db_member [ 'password' ] = settings . get ( 'PASSWORD' ) args = [ ] for key in [ 'USER' , 'HOST' , 'PORT' ] : value = settings . get ( key ) if value : self . print_message ( "Adding parameter %s" % key . lower , verbosity_needed = 2 ) args . append ( '--%s=%s' % ( key . lower ( ) , value ) ) db_member [ 'args' ] = args | Initialize connection arguments for postgres commands . |
54,826 | def download_file ( self , url , filename ) : self . print_message ( "Downloading to file '%s' from URL '%s'" % ( filename , url ) ) try : db_file = urllib2 . urlopen ( url ) with open ( filename , 'wb' ) as output : output . write ( db_file . read ( ) ) db_file . close ( ) except Exception as e : self . error ( str ( e ) ) self . print_message ( "File downloaded" ) | Download file from url to filename . |
54,827 | def unzip_file_if_necessary ( self , source_file ) : if source_file . endswith ( ".gz" ) : self . print_message ( "Decompressing '%s'" % source_file ) subprocess . check_call ( [ "gunzip" , "--force" , source_file ] ) source_file = source_file [ : - len ( ".gz" ) ] return source_file | Unzip file if zipped . |
54,828 | def download_file_from_url ( self , source_app , url ) : if source_app : source_name = source_app else : source_name = urlparse . urlparse ( url ) . netloc . replace ( '.' , '_' ) filename = self . create_file_name ( source_name ) self . download_file ( url , filename ) return filename | Download file from source app or url and return local filename . |
54,829 | def dump_database ( self ) : db_file = self . create_file_name ( self . databases [ 'source' ] [ 'name' ] ) self . print_message ( "Dumping postgres database '%s' to file '%s'" % ( self . databases [ 'source' ] [ 'name' ] , db_file ) ) self . export_pgpassword ( 'source' ) args = [ "pg_dump" , "-Fc" , "--no-acl" , "--no-owner" , "--dbname=%s" % self . databases [ 'source' ] [ 'name' ] , "--file=%s" % db_file , ] args . extend ( self . databases [ 'source' ] [ 'args' ] ) subprocess . check_call ( args ) return db_file | Create dumpfile from postgres database and return filename . |
54,830 | def drop_database ( self ) : self . print_message ( "Dropping database '%s'" % self . databases [ 'destination' ] [ 'name' ] ) self . export_pgpassword ( 'destination' ) args = [ "dropdb" , "--if-exists" , self . databases [ 'destination' ] [ 'name' ] , ] args . extend ( self . databases [ 'destination' ] [ 'args' ] ) subprocess . check_call ( args ) | Drop postgres database . |
54,831 | def create_database ( self ) : self . print_message ( "Creating database '%s'" % self . databases [ 'destination' ] [ 'name' ] ) self . export_pgpassword ( 'destination' ) args = [ "createdb" , self . databases [ 'destination' ] [ 'name' ] , ] args . extend ( self . databases [ 'destination' ] [ 'args' ] ) for arg in self . databases [ 'destination' ] [ 'args' ] : if arg [ : 7 ] == '--user=' : args . append ( '--owner=%s' % arg [ 7 : ] ) subprocess . check_call ( args ) | Create postgres database . |
54,832 | def replace_postgres_db ( self , file_url ) : self . print_message ( "Replacing postgres database" ) if file_url : self . print_message ( "Sourcing data from online backup file '%s'" % file_url ) source_file = self . download_file_from_url ( self . args . source_app , file_url ) elif self . databases [ 'source' ] [ 'name' ] : self . print_message ( "Sourcing data from database '%s'" % self . databases [ 'source' ] [ 'name' ] ) source_file = self . dump_database ( ) else : self . print_message ( "Sourcing data from local backup file %s" % self . args . file ) source_file = self . args . file self . drop_database ( ) self . create_database ( ) source_file = self . unzip_file_if_necessary ( source_file ) self . print_message ( "Importing '%s' into database '%s'" % ( source_file , self . databases [ 'destination' ] [ 'name' ] ) ) args = [ "pg_restore" , "--no-acl" , "--no-owner" , "--dbname=%s" % self . databases [ 'destination' ] [ 'name' ] , source_file , ] args . extend ( self . databases [ 'destination' ] [ 'args' ] ) subprocess . check_call ( args ) | Replace postgres database with database from specified source . |
54,833 | def capture_heroku_database ( self ) : self . print_message ( "Capturing database backup for app '%s'" % self . args . source_app ) args = [ "heroku" , "pg:backups:capture" , "--app=%s" % self . args . source_app , ] if self . args . use_pgbackups : args = [ "heroku" , "pgbackups:capture" , "--app=%s" % self . args . source_app , "--expire" , ] subprocess . check_call ( args ) | Capture Heroku database backup . |
54,834 | def reset_heroku_database ( self ) : self . print_message ( "Resetting database for app '%s'" % self . args . destination_app ) args = [ "heroku" , "pg:reset" , "--app=%s" % self . args . destination_app , "DATABASE_URL" , ] subprocess . check_call ( args ) | Reset Heroku database . |
54,835 | def replace_heroku_db ( self , file_url ) : self . print_message ( "Replacing database for Heroku app '%s'" % self . args . destination_app ) self . reset_heroku_database ( ) if file_url : self . print_message ( "Restoring from URL '%s'" % file_url ) args = [ "heroku" , "pg:backups:restore" , file_url , "--app=%s" % self . args . destination_app , "DATABASE" , "--confirm" , self . args . destination_app , ] if self . args . use_pgbackups : args = [ "heroku" , "pgbackups:restore" , "--app=%s" % self . args . destination_app , "DATABASE_URL" , "--confirm" , self . args . destination_app , file_url , ] subprocess . check_call ( args ) else : self . print_message ( "Pushing data from database '%s'" % self . databases [ 'source' ] [ 'name' ] ) self . print_message ( "NOTE: Any postgres authentication settings you passed to paragres " "will be ignored.\nIf desired, you can export PG* variables.\n" "You will be prompted for your psql password." ) args = [ "heroku" , "pg:push" , self . databases [ 'source' ] [ 'name' ] , "DATABASE_URL" , "--app=%s" % self . args . destination_app , ] subprocess . check_call ( args ) | Replace Heroku database with database from specified source . |
54,836 | def run ( self ) : self . print_message ( "\nBeginning database replacement process.\n" ) if self . args . source_settings : settings = self . parse_db_settings ( self . args . source_settings ) self . initialize_db_args ( settings , 'source' ) if self . args . settings : settings = self . parse_db_settings ( self . args . settings ) self . initialize_db_args ( settings , 'destination' ) if self . args . capture : self . capture_heroku_database ( ) file_url = self . args . url if self . args . source_app : self . print_message ( "Sourcing data from backup for Heroku app '%s'" % self . args . source_app ) file_url = self . get_file_url_for_heroku_app ( self . args . source_app ) if self . args . destination_app : self . replace_heroku_db ( file_url ) elif self . databases [ 'destination' ] [ 'name' ] : self . replace_postgres_db ( file_url ) self . print_message ( "\nDone.\n\nDon't forget to update the Django Site entry if necessary!" ) | Replace a database with the data from the specified source . |
54,837 | def import_task_modules ( ) : top_level_modules = settings . INSTALLED_APPS module_names = [ ] for module in top_level_modules : mod = import_module ( module ) for loader , module_name , is_pkg in pkgutil . walk_packages ( mod . __path__ ) : if not module_name . startswith ( "__" ) : submod_name = "{0}.{1}" . format ( module , module_name ) module_names . append ( submod_name ) modules = map ( import_module , module_names ) return modules | Import all installed apps and add modules to registry |
54,838 | def list ( self , service_rec = None , host_rec = None , hostfilter = None ) : return self . send . service_list ( service_rec , host_rec , hostfilter ) | List a specific service or all services |
54,839 | def info ( self , svc_rec = None , ipaddr = None , proto = None , port = None ) : return self . send . service_info ( svc_rec , ipaddr , proto , port ) | Information about a service . |
54,840 | def add ( self , ipaddr = None , proto = None , port = None , fields = None ) : return self . send . service_add ( ipaddr , proto , port , fields ) | Add a service record |
54,841 | def delete ( self , svc_rec = None , ipaddr = None , proto = None , port = None ) : return self . send . service_del ( svc_rec , ipaddr , proto , port ) | Delete a t_services record |
54,842 | def vulns_list ( self , service_id = None , service_port = None , hostfilter = None ) : return self . send . service_vulns_list ( service_id , service_port , hostfilter ) | List of vulnerabilities for a service |
54,843 | def connect ( nodes ) : for a , b in zip ( nodes [ : - 1 ] , nodes [ 1 : ] ) : a . output = b b . output = queues . Queue ( ) | Connect a list of nodes . |
54,844 | def render_layout ( layout_name , content , ** context ) : layout_block = "content" if ":" in layout_name : layout_name , layout_block = layout_name . split ( ":" ) tpl = '{%% extends "%s" %%}{%% block %s %%}%s{%% endblock %%}' % ( layout_name , layout_block , content ) return render_template_string ( tpl , ** context ) | Uses a jinja template to wrap the content inside a layout . Wraps the content inside a block and adds the extend statement before rendering it with jinja . The block name can be specified in the layout_name after the filename separated by a colon . The default block name is content . |
54,845 | def parse_template ( app , filename ) : if not hasattr ( parse_template , "cache" ) : parse_template . cache = { } if filename not in parse_template . cache : source = get_template_source ( app , filename ) parse_template . cache [ filename ] = app . jinja_env . parse ( source , filename = filename ) return parse_template . cache [ filename ] | Parses the given template using the jinja environment of the given app and returns the AST . ASTs are cached in parse_template . cache |
54,846 | def jinja_node_to_python ( node ) : if isinstance ( node , nodes . Const ) : return node . value if isinstance ( node , nodes . Neg ) : return - jinja_node_to_python ( node . node ) if isinstance ( node , nodes . Name ) : return node . name if isinstance ( node , ( nodes . List , nodes . Tuple ) ) : value = [ ] for i in node . items : value . append ( jinja_node_to_python ( i ) ) return value if isinstance ( node , nodes . Dict ) : value = { } for pair in node . items : value [ pair . key . value ] = jinja_node_to_python ( pair . value ) return value if isinstance ( node , nodes . Call ) : if not isinstance ( node . node , nodes . Name ) or node . node . name not in ( "_" , "translate" , "gettext" ) : raise FormDefinitionError ( "Cannot convert function calls from jinja to python other than translation calls" ) return lazy_translate ( jinja_node_to_python ( node . args [ 0 ] ) ) raise Exception ( "Cannot convert jinja nodes to python" ) | Converts a Jinja2 node to its python equivalent |
54,847 | def in_group ( self , group , dn = False ) : if dn : return group in self . groups ( ) return group . check_member ( self ) | Get whether or not the bound CSH LDAP member object is part of a group . |
54,848 | def savgol_filter ( x , window_length , polyorder , deriv = 0 , delta = 1.0 , axis = - 1 , mode = 'interp' , cval = 0.0 ) : x = np . ma . masked_invalid ( pd . Series ( x ) . interpolate ( ) ) try : ind = np . isfinite ( x ) . nonzero ( ) [ 0 ] [ 0 ] x [ ind : ] = signal . savgol_filter ( x [ ind : ] , window_length , polyorder , deriv , delta , axis , mode , cval ) except IndexError : pass return np . ma . masked_invalid ( x ) | Wrapper for the scipy . signal . savgol_filter function that handles Nan values . |
54,849 | def feedback_results_to_measurements_frame ( feedback_result ) : index = pd . Index ( feedback_result . time * 1e-3 , name = 'seconds' ) df_feedback = pd . DataFrame ( np . column_stack ( [ feedback_result . V_fb , feedback_result . V_hv , feedback_result . fb_resistor , feedback_result . hv_resistor ] ) , columns = [ 'V_fb' , 'V_hv' , 'fb_resistor' , 'hv_resistor' ] , index = index ) df_feedback . insert ( 0 , 'frequency' , feedback_result . frequency ) return df_feedback | Extract measured data from FeedbackResults instance into pandas . DataFrame . |
54,850 | def feedback_results_to_impedance_frame ( feedback_result ) : index = pd . Index ( feedback_result . time * 1e-3 , name = 'seconds' ) df_feedback = pd . DataFrame ( np . column_stack ( [ feedback_result . V_actuation ( ) . filled ( np . NaN ) , feedback_result . capacitance ( ) . filled ( np . NaN ) , feedback_result . Z_device ( ) . filled ( np . NaN ) ] ) , columns = [ 'V_actuation' , 'capacitance' , 'impedance' ] , index = index ) df_feedback . insert ( 0 , 'frequency' , feedback_result . frequency ) df_feedback . insert ( 1 , 'voltage' , feedback_result . voltage ) return df_feedback | Extract computed impedance data from FeedbackResults instance into pandas . DataFrame . |
54,851 | def get_firmwares ( ) : return OrderedDict ( [ ( board_dir . name , [ f . abspath ( ) for f in board_dir . walkfiles ( '*.hex' ) ] ) for board_dir in package_path ( ) . joinpath ( 'firmware' ) . dirs ( ) ] ) | Return dmf_control_board compiled Arduino hex file paths . |
54,852 | def remote_command ( function , self , * args , ** kwargs ) : try : return function ( self , * args , ** kwargs ) except RuntimeError , exception : error_message = str ( exception ) match = CRE_REMOTE_ERROR . match ( error_message ) if match : command_code = int ( match . group ( 'command_int' ) ) return_code = int ( match . group ( 'return_code_int' ) ) raise FirmwareError ( command_code , return_code ) match = CRE_REMOTE_COMMAND_ERROR . match ( error_message ) if match : command_code = int ( match . group ( 'command_int' ) ) command_name = NAMES_BY_COMMAND_CODE [ command_code ] raise RuntimeError ( CRE_REMOTE_COMMAND_ERROR . sub ( command_name , error_message ) ) raise | Catch RuntimeError exceptions raised by remote control board firmware commands and re - raise as more specific FirmwareError exception type which includes command code and return code . |
54,853 | def to_frame ( self , filter_order = 3 ) : window_size = self . _get_window_size ( ) L = np . sqrt ( self . area ) velocity_results = self . mean_velocity ( Lx = L ) mean_velocity = None peak_velocity = None dx = 0 dt = 0 dxdt = np . zeros ( len ( self . time ) ) dxdt_filtered = np . zeros ( len ( self . time ) ) if filter_order and window_size and window_size < filter_order + 2 : filter_order = None if velocity_results and velocity_results [ 'dx' ] : mean_velocity = velocity_results [ 'p' ] [ 0 ] * 1e3 dx = velocity_results [ 'dx' ] dt = velocity_results [ 'dt' ] * 1e-3 t , dxdt = self . dxdt ( Lx = L ) dxdt = np . interp ( self . time , t , dxdt ) * 1e3 dxdt = np . ma . masked_invalid ( dxdt ) t , dxdt_filtered = self . dxdt ( filter_order = filter_order , Lx = L ) dxdt_filtered = np . interp ( self . time , t , dxdt_filtered ) * 1e3 dxdt_filtered = np . ma . masked_invalid ( dxdt_filtered ) peak_velocity = np . max ( dxdt_filtered ) index = pd . Index ( self . time * 1e-3 , name = 'step_time' ) df = pd . DataFrame ( { 'target_voltage' : self . voltage , 'voltage' : self . V_actuation ( ) , 'force' : self . force ( Ly = 1.0 ) * 1e6 , 'Z_device_filtered' : self . Z_device ( filter_order = filter_order ) , 'capacitance_filtered' : self . capacitance ( filter_order = filter_order ) , 'x_position_filtered' : self . x_position ( filter_order = filter_order ) , 'dxdt_filtered' : dxdt_filtered , 'Z_device' : self . Z_device ( ) , 'capacitance' : self . capacitance ( ) , 'x_position' : self . x_position ( ) , 'dxdt' : dxdt , } , index = index ) df [ 'frequency' ] = self . frequency df [ 'area' ] = self . 
area df [ 'dx' ] = dx df [ 'dt' ] = dt df [ 'mean_velocity' ] = mean_velocity df [ 'peak_velocity' ] = peak_velocity df [ 'window_size' ] = window_size df [ 'filter_order' ] = filter_order return df [ [ u'frequency' , u'target_voltage' , u'voltage' , u'force' , u'area' , u'Z_device_filtered' , u'capacitance_filtered' , u'x_position_filtered' , u'dxdt_filtered' , u'Z_device' , u'capacitance' , u'x_position' , u'dxdt' , u'dx' , u'dt' , u'mean_velocity' , u'peak_velocity' , u'window_size' , u'filter_order' ] ] | Convert data to a pandas . DataFrame . |
54,854 | def set_series_capacitance ( self , channel , value , resistor_index = None ) : if resistor_index is None : resistor_index = self . series_resistor_index ( channel ) try : if channel == 0 : self . calibration . C_hv [ resistor_index ] = value else : self . calibration . C_fb [ resistor_index ] = value except : pass return self . _set_series_capacitance ( channel , value ) | Set the current series capacitance value for the specified channel . |
54,855 | def set_series_resistance ( self , channel , value , resistor_index = None ) : if resistor_index is None : resistor_index = self . series_resistor_index ( channel ) try : if channel == 0 : self . calibration . R_hv [ resistor_index ] = value else : self . calibration . R_fb [ resistor_index ] = value except : pass return self . _set_series_resistance ( channel , value ) | Set the current series resistance value for the specified channel . |
54,856 | def persistent_write ( self , address , byte , refresh_config = False ) : self . _persistent_write ( address , byte ) if refresh_config : self . load_config ( False ) | Write a single byte to an address in persistent memory . |
54,857 | def persistent_read_multibyte ( self , address , count = None , dtype = np . uint8 ) : nbytes = np . dtype ( dtype ) . itemsize if count is not None : nbytes *= count data_bytes = np . array ( [ self . persistent_read ( address + i ) for i in xrange ( nbytes ) ] , dtype = np . uint8 ) result = data_bytes . view ( dtype ) if count is None : return result [ 0 ] return result | Read a chunk of data from persistent memory . |
54,858 | def persistent_write_multibyte ( self , address , data , refresh_config = False ) : for i , byte in enumerate ( data . view ( np . uint8 ) ) : self . persistent_write ( address + i , int ( byte ) ) if refresh_config : self . load_config ( False ) | Write multiple bytes to an address in persistent memory . |
54,859 | def _get_files_modified ( ) : cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD" _ , files_modified , _ = run ( cmd ) extensions = [ re . escape ( ext ) for ext in list ( SUPPORTED_FILES ) + [ ".rst" ] ] test = "(?:{0})$" . format ( "|" . join ( extensions ) ) return list ( filter ( lambda f : re . search ( test , f ) , files_modified ) ) | Get the list of modified files that are Python or Jinja2 . |
54,860 | def _get_git_author ( ) : _ , stdout , _ = run ( "git var GIT_AUTHOR_IDENT" ) git_author = stdout [ 0 ] return git_author [ : git_author . find ( ">" ) + 1 ] | Return the git author from the git variables . |
54,861 | def _get_component ( filename , default = "global" ) : if hasattr ( filename , "decode" ) : filename = filename . decode ( ) parts = filename . split ( os . path . sep ) if len ( parts ) >= 3 : if parts [ 1 ] in "modules legacy ext" . split ( ) : return parts [ 2 ] if len ( parts ) >= 2 : if parts [ 1 ] in "base celery utils" . split ( ) : return parts [ 1 ] if len ( parts ) >= 1 : if parts [ 0 ] in "grunt docs" . split ( ) : return parts [ 0 ] return default | Get component name from filename . |
54,862 | def _prepare_commit_msg ( tmp_file , author , files_modified = None , template = None ) : files_modified = files_modified or [ ] template = template or "{component}:\n\nSigned-off-by: {author}\n{extra}" if hasattr ( template , "decode" ) : template = template . decode ( ) with open ( tmp_file , "r" , "utf-8" ) as fh : contents = fh . readlines ( ) msg = filter ( lambda x : not ( x . startswith ( "#" ) or x . isspace ( ) ) , contents ) if len ( list ( msg ) ) : return component = "unknown" components = _get_components ( files_modified ) if len ( components ) == 1 : component = components [ 0 ] elif len ( components ) > 1 : component = "/" . join ( components ) contents . append ( "# WARNING: Multiple components detected - consider splitting " "commit.\r\n" ) with open ( tmp_file , "w" , "utf-8" ) as fh : fh . write ( template . format ( component = component , author = author , extra = "" . join ( contents ) ) ) | Prepare the commit message in tmp_file . |
def _check_message(message, options):
    """Validate *message*; print errors to stderr and return success flag."""
    options = options or dict()
    # Global options and the repository-local configuration both override
    # whatever was passed in.
    options.update(get_options())
    options.update(_read_local_kwalitee_configuration())
    errors = check_message(message, **options)
    if not errors:
        return True
    for error in errors:
        print(error, file=sys.stderr)
    return False
def _read_local_kwalitee_configuration(directory="."):
    """Load the repository's ``.kwalitee.yml`` configuration, if present.

    :param directory: directory in which to look for the file
    :return: parsed configuration dict; empty when the file is missing
        or empty
    """
    filepath = os.path.abspath(os.path.join(directory, '.kwalitee.yml'))
    data = {}
    if os.path.exists(filepath):
        with open(filepath, 'r') as file_read:
            # safe_load avoids the arbitrary-object construction that
            # yaml.load performs without an explicit Loader; an empty file
            # parses to None, which would break callers doing .update().
            data = yaml.safe_load(file_read.read()) or {}
    return data
def _pre_commit(files, options):
    """Run the check on files of the added version.

    Each staged blob is written into a temporary tree that mirrors its
    path, so the checks run against the *staged* content rather than the
    working copy.

    :param files: iterable of (path, staged content bytes) pairs
    :param options: keyword options forwarded to check_file
    :return: list of "path: message" error strings
    """
    errors = []
    tmpdir = mkdtemp()
    files_to_check = []
    try:
        for (file_, content) in files:
            dirname, filename = os.path.split(os.path.abspath(file_))
            # Re-root the file's absolute directory under tmpdir.
            prefix = os.path.commonprefix([dirname, tmpdir])
            dirname = os.path.relpath(dirname, start=prefix)
            dirname = os.path.join(tmpdir, dirname)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            filename = os.path.join(dirname, filename)
            with open(filename, "wb") as fh:
                fh.write(content)
            files_to_check.append((file_, filename))
        for (file_, filename) in files_to_check:
            # Prefix each reported message with the original path.
            errors += list(map(lambda x: "{0}: {1}".format(file_, x),
                               check_file(filename, **options) or []))
    finally:
        # Always clean up the temporary tree, even when a check raises.
        shutil.rmtree(tmpdir, ignore_errors=True)
    return errors
def run(command, raw_output=False):
    """Execute *command* and return ``(returncode, stdout, stderr)``.

    With ``raw_output`` the streams are returned as raw bytes; otherwise
    they are decoded as UTF-8 and split into right-stripped lines.
    """
    process = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    stdout, stderr = process.communicate()
    if raw_output:
        return (process.returncode, stdout, stderr)

    def lines(stream):
        return [line.rstrip() for line in stream.decode("utf-8").splitlines()]

    return (process.returncode, lines(stdout), lines(stderr))
def mpl_weight2qt(weight):
    """Convert a weight from matplotlib definition to a Qt weight.

    :param weight: a matplotlib font weight (a name known to
        ``weights_mpl2qt``, or a numeric value)
    :return: a Qt font weight
    """
    try:
        # Named weights map directly via the module-level table.
        weight = weights_mpl2qt[weight]
    except KeyError:
        try:
            # Numeric matplotlib weights are scaled down by 10 to the Qt
            # range before matching.
            weight = float(weight) / 10
        except (ValueError, TypeError):
            # Not a name and not a number: fall back to Normal.
            weight = QtGui.QFont.Normal
        else:
            try:
                # Pick the closest known Qt weight that is >= the request.
                weight = min(filter(lambda w: w >= weight, weights_qt2mpl),
                             key=lambda w: abs(w - weight))
            except ValueError:
                # No known Qt weight is that heavy; fall back to Normal.
                weight = QtGui.QFont.Normal
    return weight
def choose_font(self, font=None):
    """Choose a font for the label through a dialog.

    :param font: pre-selected QFont; when None a QFontDialog is opened,
        seeded with the current font if one is already set
    """
    fmt_widget = self.parent()
    if font is None:
        if self.current_font:
            font, ok = QFontDialog.getFont(
                self.current_font, fmt_widget,
                'Select %s font' % self.fmto_name,
                QFontDialog.DontUseNativeDialog)
        else:
            font, ok = QFontDialog.getFont(fmt_widget)
        if not ok:
            # Dialog cancelled: keep the existing font untouched.
            return
    self.current_font = font
    # Merge the chosen font into the artist properties and hand them to
    # the parent formatoption widget.
    properties = self.load_properties()
    properties.update(self.qfont_to_artist_props(font))
    fmt_widget.set_obj(properties)
    self.refresh()
def refresh(self):
    """Synchronise the style widgets with the current font."""
    font = self.current_font
    # Signals are blocked while updating so these programmatic changes do
    # not re-trigger the widgets' slots.
    updates = [
        (self.btn_bold, self.btn_bold.setChecked, font.weight() > 50),
        (self.btn_italic, self.btn_italic.setChecked, font.italic()),
        (self.spin_box, self.spin_box.setValue, font.pointSize()),
    ]
    for widget, setter, value in updates:
        widget.blockSignals(True)
        setter(value)
        widget.blockSignals(False)
def _prettify_response(self, response):
    """Prettify the HTML response.

    Re-indents ``text/html`` payloads with BeautifulSoup so the served
    markup is human-readable; other content types pass through unchanged.

    :param response: a Werkzeug/Flask response object
    :return: the (possibly rewritten) response
    """
    if response.content_type == 'text/html; charset=utf-8':
        ugly = response.get_data(as_text=True)
        soup = BeautifulSoup(ugly, 'html.parser')
        pretty = soup.prettify(formatter='html')
        # direct_passthrough must be disabled before set_data may replace
        # the body.
        response.direct_passthrough = False
        response.set_data(pretty)
    return response
async def _call(self, params):
    """Call the SABnzbd API.

    Merges *params* over the default query parameters, performs the GET
    request within the configured timeout and returns the decoded JSON
    payload.

    :param params: query parameters for this API call
    :raises SabnzbdApiException: when the session is already closed, the
        request fails, times out, or the API reports ``status: false``
    """
    if self._session.closed:
        raise SabnzbdApiException('Session already closed')
    p = {**self._default_params, **params}
    try:
        async with timeout(self._timeout, loop=self._session.loop):
            async with self._session.get(self._api_url, params=p) as resp:
                data = await resp.json()
                # SABnzbd signals failures with an explicit status=false.
                if data.get('status', True) is False:
                    self._handle_error(data, params)
                else:
                    return data
    except aiohttp.ClientError:
        raise SabnzbdApiException('Unable to communicate with Sabnzbd API')
    except asyncio.TimeoutError:
        raise SabnzbdApiException('SABnzbd API request timed out')
async def refresh_data(self):
    """Refresh the cached SABnzbd queue data."""
    queue = await self.get_queue()
    history = await self.get_history()
    # History keys ending in "size" hold human-readable sizes; normalise
    # them to GB before merging with the queue snapshot.
    totals = {
        key: self._convert_size(history.get(key))
        for key in history
        if key[-4:] == 'size'
    }
    self.queue = {**totals, **queue}
54,873 | def _convert_size ( self , size_str ) : suffix = size_str [ - 1 ] if suffix == 'K' : multiplier = 1.0 / ( 1024.0 * 1024.0 ) elif suffix == 'M' : multiplier = 1.0 / 1024.0 elif suffix == 'T' : multiplier = 1024.0 else : multiplier = 1 try : val = float ( size_str . split ( ' ' ) [ 0 ] ) return val * multiplier except ValueError : return 0.0 | Convert units to GB |
def _handle_error(self, data, params):
    """Raise a SabnzbdApiException describing an API error response."""
    message = data.get('error', 'API call failed')
    raise SabnzbdApiException(message, mode=params.get('mode'))
def __generate_key(self, config):
    """Generate the ssh key, and return the ssh config location.

    Creates the key directory and runs ssh-keygen when the key does not
    exist yet, then records the resolved paths back into *config*.

    :param config: feature configuration carrying keyname/type/create
    """
    cwd = config.get('ssh_path', self._install_directory())
    if config.is_affirmative('create', default="yes"):
        if not os.path.exists(cwd):
            os.makedirs(cwd)
        if not os.path.exists(os.path.join(cwd, config.get('keyname'))):
            # NOTE(review): the trailing "-N " is meant to request an empty
            # passphrase -- verify lib.call preserves the empty argument.
            command = "ssh-keygen -t %(type)s -f %(keyname)s -N " % config.to_dict()
            lib.call(command, cwd=cwd, output_log_level=logging.DEBUG)
    if not config.has('ssh_path'):
        config.set('ssh_path', cwd)
    config.set('ssh_key_path',
               os.path.join(config.get('ssh_path'), config.get('keyname')))
def __install_ssh_config(self, config):
    """Install the ssh configuration.

    Injects the generated Host block into the user ssh config unless the
    feature is configured to rely on the global ssh setup.
    """
    if not config.is_affirmative('use_global_ssh', default="no"):
        ssh_config_injection = self._build_ssh_config(config)
        # NOTE(review): every branch below performs the same injection, so
        # the exists()/in_noninjected_file()/override checks are dead code
        # as written -- confirm the intended guard (e.g. skipping the
        # injection when the Host entry already exists and override is off).
        if not os.path.exists(ssh_config_path):
            if self.injections.in_noninjected_file(
                    ssh_config_path, "Host %s" % config.get('host')):
                if config.is_affirmative('override'):
                    self.injections.inject(ssh_config_path,
                                           ssh_config_injection)
                else:
                    self.injections.inject(ssh_config_path,
                                           ssh_config_injection)
            else:
                self.injections.inject(ssh_config_path, ssh_config_injection)
        else:
            self.injections.inject(ssh_config_path, ssh_config_injection)
        self.injections.commit()
def _build_ssh_config(self, config):
    """Build the ssh Host block to inject for this feature."""
    values = {
        'host': config.get('host'),
        'hostname': config.get('hostname'),
        'ssh_key_path': config.get('ssh_key_path'),
        'user': config.get('user'),
    }
    injection = ssh_config_template % values
    # Port is optional; append it only when configured.
    if config.has('port'):
        injection += " Port {0}\n".format(config.get('port'))
    return injection
def extract_followups(task):
    """Retrieve callbacks and errbacks from provided task instance, and
    disable the task's own callbacks."""
    links = {'link': task.request.callbacks,
             'link_error': task.request.errbacks}
    # Clear the callbacks so the task body does not fire them itself.
    task.request.callbacks = None
    return links
def gen_procfile(ctx, wsgi, dev):
    """Generates Procfiles which can be used with honcho or foreman.

    Writes a production Procfile (gunicorn) and, with *dev*, a
    Procfile.dev (frasco serve) listing the app's auxiliary processes.

    :param ctx: click context, used to invoke gen_apppy when needed
    :param wsgi: entry-point filename; auto-detected when None
    :param dev: also generate Procfile.dev when true
    """
    if wsgi is None:
        if os.path.exists("wsgi.py"):
            wsgi = "wsgi.py"
        elif os.path.exists("app.py"):
            wsgi = "app.py"
        else:
            # No entry point found: generate app.py first.
            wsgi = "app.py"
            ctx.invoke(gen_apppy)

    def write_procfile(filename, server_process, debug):
        # One "name: command" line per process, web server first.
        processes = [server_process] + current_app.processes
        procfile = []
        # NOTE(review): iteritems() is Python 2 only; switch to items()
        # if this code base moves to Python 3.
        for name, cmd in procfile_processes(processes, debug).iteritems():
            procfile.append("%s: %s" % (name, cmd))
        with open(filename, "w") as f:
            f.write("\n".join(procfile))

    write_procfile("Procfile", ("web", ["gunicorn", wsgi]), False)
    if dev:
        write_procfile("Procfile.dev", ("web", ["frasco", "serve"]), True)
def g(self):
    "Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
    hour = self.data.hour
    if hour == 0:
        # Midnight is written as 12 in the 12-hour clock.
        return 12
    return hour - 12 if hour > 12 else hour
def I(self):
    "'1' if Daylight Savings Time, '0' otherwise."
    dst_active = self.timezone and self.timezone.dst(self.data)
    return u'1' if dst_active else u'0'
def S(self):
    "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
    day = self.data.day
    # The teens are always 'th' (11th, 12th, 13th).
    if day in (11, 12, 13):
        return u'th'
    return {1: u'st', 2: u'nd', 3: u'rd'}.get(day % 10, u'th')
def t(self):
    "Number of days in the given month; i.e. '28' to '31'"
    _, days = calendar.monthrange(self.data.year, self.data.month)
    return u'%02d' % days
def W(self):
    "ISO-8601 week number of year, weeks starting on Monday"
    week_number = None
    # Weekdays here run 1 (Monday) .. 7 (Sunday).
    jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
    weekday = self.data.weekday() + 1
    day_of_year = self.z()
    if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
        # The date is in the partial week before the year's first Thursday,
        # so it belongs to the last ISO week of the previous year.
        if jan1_weekday == 5 or (jan1_weekday == 6 and
                                 calendar.isleap(self.data.year - 1)):
            week_number = 53
        else:
            week_number = 52
    else:
        if calendar.isleap(self.data.year):
            i = 366
        else:
            i = 365
        if (i - day_of_year) < (4 - weekday):
            # The date falls in the trailing days that already belong to
            # week 1 of the following year.
            week_number = 1
        else:
            # Regular case: count whole weeks since the start of week 1.
            j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
            week_number = j // 7
            if jan1_weekday > 4:
                week_number -= 1
    return week_number
def z(self):
    "Day of the year; i.e. '0' to '365'"
    month = self.data.month
    # year_days holds the cumulative day count before each month; leap
    # years shift everything after February by one day.
    offset = 1 if (self.L() and month > 2) else 0
    return self.year_days[month] + self.data.day + offset
def print_metric(name, count, elapsed):
    """A metric function that prints to standard output.

    :param name: metric name
    :param count: number of items measured
    :param elapsed: elapsed time in seconds
    """
    _do_print(name, count, elapsed, file=sys.stdout)
def stderr_metric(name, count, elapsed):
    """A metric function that prints to standard error.

    :param name: metric name
    :param count: number of items measured
    :param elapsed: elapsed time in seconds
    """
    _do_print(name, count, elapsed, file=sys.stderr)
def make_multi_metric(*metrics):
    """Make a new metric function that fans out to the supplied metrics."""
    def multi_metric(name, count, elapsed):
        """Dispatch one measurement to every wrapped metric, in order."""
        for metric in metrics:
            metric(name, count, elapsed)
    return multi_metric
54,889 | def _is_orphan ( scc , graph ) : return all ( p in scc for v in scc for p in graph . parents ( v ) ) | Return False iff the given scc is reachable from elsewhere . |
def key_cycles():
    """Collect cyclic garbage, and return the strongly connected
    components that were keeping the garbage alive."""
    graph = garbage()
    return [scc
            for scc in graph.strongly_connected_components()
            if _is_orphan(scc, graph)]
54,891 | def _run_command ( self , command , ** kwargs ) : try : return { 'output' : subprocess . check_output ( command , ** kwargs ) } except Exception as e : return { 'error' : str ( e ) } | Wrapper to pass command to plowshare . |
54,892 | def _filter_sources ( self , sources ) : filtered , hosts = [ ] , [ ] for source in sources : if 'error' in source : continue filtered . append ( source ) hosts . append ( source [ 'host_name' ] ) return sorted ( filtered , key = lambda s : self . _hosts_by_success ( hosts ) . index ( s [ 'host_name' ] ) ) | Remove sources with errors and return ordered by host success . |
def upload(self, filename, number_of_hosts):
    """Upload the given file to the specified number of randomly
    chosen hosts."""
    chosen_hosts = self.random_hosts(number_of_hosts)
    return self.multiupload(filename, chosen_hosts)
def download(self, sources, output_directory, filename):
    """Download a file from one of the provided sources.

    Sources are filtered and ordered by host reliability, then tried in
    parallel; the first worker to succeed wins and the others back off.

    :param sources: candidate source dicts (url / host_name)
    :param output_directory: directory to store the file in
    :param filename: final name for the downloaded file
    :return: the successful result dict, ``{}`` when every attempt fails,
        or ``{'error': ...}`` when no valid source exists
    """
    valid_sources = self._filter_sources(sources)
    if not valid_sources:
        return {'error': 'no valid sources'}
    # Manager-backed list is safely shared with the pool workers.
    manager = Manager()
    successful_downloads = manager.list([])

    def f(source):
        # Skip work as soon as any sibling worker has succeeded.
        if not successful_downloads:
            result = self.download_from_host(source, output_directory, filename)
            if 'error' in result:
                # Track per-host failures so the host ranks lower next time.
                self._host_errors[source['host_name']] += 1
            else:
                successful_downloads.append(result)

    # dummy.Pool is thread-based: one worker per candidate source.
    multiprocessing.dummy.Pool(len(valid_sources)).map(f, valid_sources)
    return successful_downloads[0] if successful_downloads else {}
def download_from_host(self, source, output_directory, filename):
    """Download a file from a given host.

    Runs plowdown, then renames the temporary download to *filename*.

    :param source: dict with the 'url' and 'host_name' to fetch from
    :param output_directory: directory to store the file in
    :param filename: final file name
    :return: result dict carrying host_name and filename, or an 'error'
        entry on failure
    """
    result = self._run_command(
        ["plowdown", source["url"], "-o", output_directory, "--temp-rename"],
        stderr=open("/dev/null", "w"))
    result['host_name'] = source['host_name']
    if 'error' in result:
        return result
    # plowdown prints the temporary file path; move it to the final name.
    temporary_filename = self.parse_output(result['host_name'],
                                           result['output'])
    result['filename'] = os.path.join(output_directory, filename)
    result.pop('output')
    os.rename(temporary_filename, result['filename'])
    return result
def multiupload(self, filename, hosts):
    """Upload file to multiple hosts simultaneously.

    Keeps launching uploads until the fraction of successful hosts
    reaches settings.MIN_FILE_REDUNDANCY, trying the most reliable
    hosts first.

    :param filename: path of the file to upload
    :param hosts: candidate host names
    :return: list of successful upload result dicts
    """
    # Manager-backed list is safely shared with the pool workers.
    manager = Manager()
    successful_uploads = manager.list([])

    def f(host):
        # Stop starting new uploads once the redundancy target is met.
        if len(successful_uploads) / float(len(hosts)) < settings.MIN_FILE_REDUNDANCY:
            result = self.upload_to_host(filename, host)
            if 'error' in result:
                # Record the failure so the host ranks lower next time.
                self._host_errors[host] += 1
            else:
                successful_uploads.append(result)

    # Thread pool, most reliable hosts first.
    multiprocessing.dummy.Pool(len(hosts)).map(f, self._hosts_by_success(hosts))
    return list(successful_uploads)
def upload_to_host(self, filename, hostname):
    """Upload a file to the given host.

    :return: dict with host_name plus either the public url or an error
    """
    result = self._run_command(["plowup", hostname, filename],
                               stderr=open("/dev/null", "w"))
    result['host_name'] = hostname
    if 'error' in result:
        return result
    # plowup's stdout ends with the public URL of the upload.
    result['url'] = self.parse_output(hostname, result.pop('output'))
    return result
def parse_output(self, hostname, output):
    """Parse plowup's output: the URL is the last whitespace-separated
    token."""
    text = output.decode('utf-8') if isinstance(output, bytes) else output
    return text.split()[-1]
def _generate_queues(queues, exchange, platform_queue):
    """Queues known by this worker: the default celery queue, the
    catch-all platform queue, and one queue per configured name."""
    known = {
        Queue('celery', exchange, routing_key='celery'),
        Queue(platform_queue, exchange, routing_key='#'),
    }
    known.update(Queue(name, exchange, routing_key=name) for name in queues)
    return known
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.