idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
14,700 | def get_queryset ( self ) : kwargs = { } if self . start_at : kwargs . update ( { '%s__gte' % self . date_field : self . start_at } ) return super ( DateRangeMixin , self ) . get_queryset ( ) . filter ( ** kwargs ) | Implements date range filtering on created_at |
14,701 | def create ( self , fname , lname , group , type , group_api ) : self . __username ( fname , lname ) self . client . add ( self . __distinguished_name ( type , fname = fname , lname = lname ) , API . __object_class ( ) , self . __ldap_attr ( fname , lname , type , group , group_api ) ) | Create an LDAP User . |
14,702 | def show ( self , username ) : filter = [ '(objectclass=posixAccount)' , "(uid={})" . format ( username ) ] return self . client . search ( filter ) | Return a specific user s info in LDIF format . |
14,703 | def find ( self , username ) : filter = [ '(uid={})' . format ( username ) ] results = self . client . search ( filter ) if len ( results ) < 1 : raise ldap_tools . exceptions . NoUserFound ( 'User ({}) not found' . format ( username ) ) return elif len ( results ) > 1 : raise ldap_tools . exceptions . TooManyResults ( 'Multiple users found. Please narrow your search.' ) return else : return results | Find user with given username . |
14,704 | def __username ( self , fname , lname ) : self . username = '.' . join ( [ i . lower ( ) for i in [ fname , lname ] ] ) | Convert first name + last name into first . last style username . |
14,705 | def __distinguished_name ( self , type , fname = None , lname = None , username = None ) : if username is None : uid = "uid={}" . format ( self . username ) else : uid = "uid={}" . format ( username ) dn_list = [ uid , "ou={}" . format ( self . __organizational_unit ( type ) ) , self . client . basedn , ] return ',' . join ( dn_list ) | Assemble the DN of the user . |
14,706 | def __ldap_attr ( self , fname , lname , type , group , group_api ) : return { 'uid' : str ( self . username ) . encode ( ) , 'cn' : ' ' . join ( [ fname , lname ] ) . encode ( ) , 'sn' : str ( lname ) . encode ( ) , 'givenname' : str ( fname ) . encode ( ) , 'homedirectory' : os . path . join ( os . path . sep , 'home' , self . username ) . encode ( ) , 'loginshell' : os . path . join ( os . path . sep , 'bin' , 'bash' ) . encode ( ) , 'mail' : '@' . join ( [ self . username , self . client . mail_domain ] ) . encode ( ) , 'uidnumber' : self . __uidnumber ( type ) , 'gidnumber' : API . __gidnumber ( group , group_api ) , 'userpassword' : str ( '{SSHA}' + API . __create_password ( ) . decode ( ) ) . encode ( ) , } | User LDAP attributes . |
14,707 | def __create_password ( ) : salt = b64encode ( API . __generate_string ( 32 ) ) password = b64encode ( API . __generate_string ( 64 ) ) return b64encode ( sha1 ( password + salt ) . digest ( ) ) | Create a password for the user . |
14,708 | def __generate_string ( length ) : return '' . join ( SystemRandom ( ) . choice ( string . ascii_letters + string . digits ) for x in range ( length ) ) . encode ( ) | Generate a string for password creation . |
14,709 | def create ( config , name , group , type ) : if type not in ( 'user' , 'service' ) : raise click . BadOptionUsage ( "--type must be 'user' or 'service'" ) client = Client ( ) client . prepare_connection ( ) user_api = API ( client ) group_api = GroupApi ( client ) user_api . create ( name [ 0 ] , name [ 1 ] , group , type , group_api ) | Create an LDAP user . |
14,710 | def index ( config ) : client = Client ( ) client . prepare_connection ( ) user_api = API ( client ) CLI . show_user ( user_api . index ( ) ) | Display user info in LDIF format . |
14,711 | def show ( config , username ) : client = Client ( ) client . prepare_connection ( ) user_api = API ( client ) CLI . show_user ( user_api . show ( username ) ) | Display a specific user . |
14,712 | async def set_discovery_enabled ( self ) : endpoint = '/setup/bluetooth/discovery' data = { "enable_discovery" : True } url = API . format ( ip = self . _ipaddress , endpoint = endpoint ) try : async with async_timeout . timeout ( 5 , loop = self . _loop ) : response = await self . _session . post ( url , headers = HEADERS , data = json . dumps ( data ) ) _LOGGER . debug ( response . status ) except ( asyncio . TimeoutError , aiohttp . ClientError , socket . gaierror ) as error : _LOGGER . error ( 'Error connecting to %s - %s' , self . _ipaddress , error ) | Enable bluetooth discoverablility . |
14,713 | async def scan_for_devices_multi_run ( self , runs = 2 ) : run = 1 master = { } while run < runs + 1 : await self . scan_for_devices ( ) await self . get_scan_result ( ) if master is None : for device in self . _devices : mac = device [ 'mac_address' ] master [ mac ] = { } master [ mac ] [ 'rssi' ] = device [ 'rssi' ] master [ mac ] [ 'device_class' ] = device [ 'device_class' ] master [ mac ] [ 'name' ] = device [ 'name' ] master [ mac ] [ 'device_type' ] = device [ 'device_type' ] master [ mac ] [ 'count' ] = 1 else : for device in self . _devices : mac = device [ 'mac_address' ] if master . get ( mac , False ) : master [ mac ] [ 'rssi' ] = device [ 'rssi' ] master [ mac ] [ 'count' ] = str ( 1 + 1 ) else : master [ mac ] = { } master [ mac ] [ 'rssi' ] = device [ 'rssi' ] master [ mac ] [ 'device_class' ] = device [ 'device_class' ] master [ mac ] [ 'name' ] = device [ 'name' ] master [ mac ] [ 'device_type' ] = device [ 'device_type' ] master [ mac ] [ 'count' ] = 1 run = run + 1 result = [ ] for device in master : if int ( master [ device ] [ 'count' ] ) > 1 : result . append ( master [ device ] ) self . _devices = result | Scan for devices multiple times . |
14,714 | def contract_from_file ( fname ) : f = open ( fname ) j = f . read ( ) f . close ( ) return Contract ( json . loads ( j ) ) | Loads a Barrister IDL JSON from the given file and returns a Contract class |
14,715 | def get_prop ( self , key , default_val = None ) : if self . props . has_key ( key ) : return self . props [ key ] else : return default_val | Returns a property set on the context . |
14,716 | def set_error ( self , code , msg , data = None ) : self . error = err_response ( self . request [ "id" ] , code , msg , data ) | Set an error on this request which will prevent request execution . Should only be called from pre hook methods . If called from a post hook this operation will be ignored . |
14,717 | def add_handler ( self , iface_name , handler ) : if self . contract . has_interface ( iface_name ) : self . handlers [ iface_name ] = handler else : raise RpcException ( ERR_INVALID_REQ , "Unknown interface: '%s'" , iface_name ) | Associates the given handler with the interface name . If the interface does not exist in the Contract an RpcException is raised . |
14,718 | def set_filters ( self , filters ) : if filters == None or isinstance ( filters , ( tuple , list ) ) : self . filters = filters else : self . filters = [ filters ] | Sets the filters for the server . |
14,719 | def call ( self , req , props = None ) : resp = None if self . log . isEnabledFor ( logging . DEBUG ) : self . log . debug ( "Request: %s" % str ( req ) ) if isinstance ( req , list ) : if len ( req ) < 1 : resp = err_response ( None , ERR_INVALID_REQ , "Invalid Request. Empty batch." ) else : resp = [ ] for r in req : resp . append ( self . _call_and_format ( r , props ) ) else : resp = self . _call_and_format ( req , props ) if self . log . isEnabledFor ( logging . DEBUG ) : self . log . debug ( "Response: %s" % str ( resp ) ) return resp | Executes a Barrister request and returns a response . If the request is a list then the response will also be a list . If the request is an empty list a RpcException is raised . |
14,720 | def _call ( self , context ) : req = context . request if not req . has_key ( "method" ) : raise RpcException ( ERR_INVALID_REQ , "Invalid Request. No 'method'." ) method = req [ "method" ] if method == "barrister-idl" : return self . contract . idl_parsed iface_name , func_name = unpack_method ( method ) if self . handlers . has_key ( iface_name ) : iface_impl = self . handlers [ iface_name ] func = getattr ( iface_impl , func_name ) if func : if req . has_key ( "params" ) : params = req [ "params" ] else : params = [ ] if self . validate_req : self . contract . validate_request ( iface_name , func_name , params ) if hasattr ( iface_impl , "barrister_pre" ) : pre_hook = getattr ( iface_impl , "barrister_pre" ) pre_hook ( context , params ) if params : result = func ( * params ) else : result = func ( ) if self . validate_resp : self . contract . validate_response ( iface_name , func_name , result ) return result else : msg = "Method '%s' not found" % ( method ) raise RpcException ( ERR_METHOD_NOT_FOUND , msg ) else : msg = "No implementation of '%s' found" % ( iface_name ) raise RpcException ( ERR_METHOD_NOT_FOUND , msg ) | Executes a single request against a handler . If the req . method == barrister - idl the Contract IDL JSON structure is returned . Otherwise the method is resolved to a handler based on the interface name and the appropriate function is called on the handler . |
14,721 | def request ( self , req ) : data = json . dumps ( req ) req = urllib2 . Request ( self . url , data , self . headers ) f = self . opener . open ( req ) resp = f . read ( ) f . close ( ) return json . loads ( resp ) | Makes a request against the server and returns the deserialized result . |
14,722 | def call ( self , iface_name , func_name , params ) : req = self . to_request ( iface_name , func_name , params ) if self . log . isEnabledFor ( logging . DEBUG ) : self . log . debug ( "Request: %s" % str ( req ) ) resp = self . transport . request ( req ) if self . log . isEnabledFor ( logging . DEBUG ) : self . log . debug ( "Response: %s" % str ( resp ) ) return self . to_result ( iface_name , func_name , resp ) | Makes a single RPC request and returns the result . |
14,723 | def to_request ( self , iface_name , func_name , params ) : if self . validate_req : self . contract . validate_request ( iface_name , func_name , params ) method = "%s.%s" % ( iface_name , func_name ) reqid = self . id_gen ( ) return { "jsonrpc" : "2.0" , "id" : reqid , "method" : method , "params" : params } | Converts the arguments to a JSON - RPC request dict . The id field is populated using the id_gen function passed to the Client constructor . |
14,724 | def to_result ( self , iface_name , func_name , resp ) : if resp . has_key ( "error" ) : e = resp [ "error" ] data = None if e . has_key ( "data" ) : data = e [ "data" ] raise RpcException ( e [ "code" ] , e [ "message" ] , data ) result = resp [ "result" ] if self . validate_resp : self . contract . validate_response ( iface_name , func_name , result ) return result | Takes a JSON - RPC response and checks for an error slot . If it exists a RpcException is raised . If no error slot exists the result slot is returned . |
14,725 | def validate_request ( self , iface_name , func_name , params ) : self . interface ( iface_name ) . function ( func_name ) . validate_params ( params ) | Validates that the given params match the expected length and types for this interface and function . |
14,726 | def validate_response ( self , iface_name , func_name , resp ) : self . interface ( iface_name ) . function ( func_name ) . validate_response ( resp ) | Validates that the response matches the return type for the function |
14,727 | def get ( self , name ) : if self . structs . has_key ( name ) : return self . structs [ name ] elif self . enums . has_key ( name ) : return self . enums [ name ] elif self . interfaces . has_key ( name ) : return self . interfaces [ name ] else : raise RpcException ( ERR_INVALID_PARAMS , "Unknown entity: '%s'" % name ) | Returns the struct enum or interface with the given name or raises RpcException if no elements match that name . |
14,728 | def struct ( self , struct_name ) : if self . structs . has_key ( struct_name ) : return self . structs [ struct_name ] else : raise RpcException ( ERR_INVALID_PARAMS , "Unknown struct: '%s'" , struct_name ) | Returns the struct with the given name or raises RpcException if no struct matches |
14,729 | def interface ( self , iface_name ) : if self . has_interface ( iface_name ) : return self . interfaces [ iface_name ] else : raise RpcException ( ERR_INVALID_PARAMS , "Unknown interface: '%s'" , iface_name ) | Returns the interface with the given name or raises RpcException if no interface matches |
14,730 | def validate ( self , expected_type , is_array , val ) : if val == None : if expected_type . optional : return True , None else : return False , "Value cannot be null" elif is_array : if not isinstance ( val , list ) : return self . _type_err ( val , "list" ) else : for v in val : ok , msg = self . validate ( expected_type , False , v ) if not ok : return ok , msg elif expected_type . type == "int" : if not isinstance ( val , ( long , int ) ) : return self . _type_err ( val , "int" ) elif expected_type . type == "float" : if not isinstance ( val , ( float , int , long ) ) : return self . _type_err ( val , "float" ) elif expected_type . type == "bool" : if not isinstance ( val , bool ) : return self . _type_err ( val , "bool" ) elif expected_type . type == "string" : if not isinstance ( val , ( str , unicode ) ) : return self . _type_err ( val , "string" ) else : return self . get ( expected_type . type ) . validate ( val ) return True , None | Validates that the expected type matches the value |
14,731 | def function ( self , func_name ) : if self . functions . has_key ( func_name ) : return self . functions [ func_name ] else : raise RpcException ( ERR_METHOD_NOT_FOUND , "%s: Unknown function: '%s'" , self . name , func_name ) | Returns the Function instance associated with the given func_name or raises a RpcException if no function matches . |
14,732 | def validate ( self , val ) : if val in self . values : return True , None else : return False , "'%s' is not in enum: %s" % ( val , str ( self . values ) ) | Validates that the val is in the list of values for this Enum . |
14,733 | def field ( self , name ) : if self . fields . has_key ( name ) : return self . fields [ name ] elif self . extends : if not self . parent : self . parent = self . contract . struct ( self . extends ) return self . parent . field ( name ) else : return None | Returns the field on this struct with the given name . Will try to find this name on all ancestors if this struct extends another . |
14,734 | def validate ( self , val ) : if type ( val ) is not dict : return False , "%s is not a dict" % ( str ( val ) ) for k , v in val . items ( ) : field = self . field ( k ) if field : ok , msg = self . contract . validate ( field , field . is_array , v ) if not ok : return False , "field '%s': %s" % ( field . name , msg ) else : return False , "field '%s' not found in struct %s" % ( k , self . name ) all_fields = self . get_all_fields ( [ ] ) for field in all_fields : if not val . has_key ( field . name ) and not field . optional : return False , "field '%s' missing from: %s" % ( field . name , str ( val ) ) return True , None | Validates that the val matches the expected fields for this struct . val must be a dict and must contain only fields represented by this struct and its ancestors . |
14,735 | def get_all_fields ( self , arr ) : for k , v in self . fields . items ( ) : arr . append ( v ) if self . extends : parent = self . contract . get ( self . extends ) if parent : return parent . get_all_fields ( arr ) return arr | Returns a list containing this struct s fields and all the fields of its ancestors . Used during validation . |
14,736 | def validate_params ( self , params ) : plen = 0 if params != None : plen = len ( params ) if len ( self . params ) != plen : vals = ( self . full_name , len ( self . params ) , plen ) msg = "Function '%s' expects %d param(s). %d given." % vals raise RpcException ( ERR_INVALID_PARAMS , msg ) if params != None : i = 0 for p in self . params : self . _validate_param ( p , params [ i ] ) i += 1 | Validates params against expected types for this function . Raises RpcException if the params are invalid . |
14,737 | def validate_response ( self , resp ) : ok , msg = self . contract . validate ( self . returns , self . returns . is_array , resp ) if not ok : vals = ( self . full_name , str ( resp ) , msg ) msg = "Function '%s' invalid response: '%s'. %s" % vals raise RpcException ( ERR_INVALID_RESP , msg ) | Validates resp against expected return type for this function . Raises RpcException if the response is invalid . |
14,738 | def submit ( xml_root , submit_config , session , dry_run = None , ** kwargs ) : properties . xunit_fill_testrun_id ( xml_root , kwargs . get ( "testrun_id" ) ) if dry_run is not None : properties . set_dry_run ( xml_root , dry_run ) xml_input = utils . etree_to_string ( xml_root ) logger . info ( "Submitting data to %s" , submit_config . submit_target ) files = { "file" : ( "results.xml" , xml_input ) } try : response = session . post ( submit_config . submit_target , files = files ) except Exception as err : logger . error ( err ) response = None return SubmitResponse ( response ) | Submits data to the Polarion Importer . |
14,739 | def submit_and_verify ( xml_str = None , xml_file = None , xml_root = None , config = None , session = None , dry_run = None , ** kwargs ) : try : config = config or configuration . get_config ( ) xml_root = _get_xml_root ( xml_root , xml_str , xml_file ) submit_config = SubmitConfig ( xml_root , config , ** kwargs ) session = session or utils . get_session ( submit_config . credentials , config ) submit_response = submit ( xml_root , submit_config , session , dry_run = dry_run , ** kwargs ) except Dump2PolarionException as err : logger . error ( err ) return None valid_response = submit_response . validate_response ( ) if not valid_response or kwargs . get ( "no_verify" ) : return submit_response . response response = verify_submit ( session , submit_config . queue_url , submit_config . log_url , submit_response . job_ids , timeout = kwargs . get ( "verify_timeout" ) , log_file = kwargs . get ( "log_file" ) , ) return response | Submits data to the Polarion Importer and checks that it was imported . |
14,740 | def get_job_ids ( self ) : if not self . parsed_response : return None try : job_ids = self . parsed_response [ "files" ] [ "results.xml" ] [ "job-ids" ] except KeyError : return None if not job_ids or job_ids == [ 0 ] : return None return job_ids | Returns job IDs of the import . |
14,741 | def validate_response ( self ) : if self . response is None : logger . error ( "Failed to submit" ) return False if not self . response : logger . error ( "HTTP status %d: failed to submit to %s" , self . response . status_code , self . response . url , ) return False if not self . parsed_response : logger . error ( "Submit to %s failed, invalid response received" , self . response . url ) return False error_message = self . get_error_message ( ) if error_message : logger . error ( "Submit to %s failed with error" , self . response . url ) logger . debug ( "Error message: %s" , error_message ) return False if not self . job_ids : logger . error ( "Submit to %s failed to get job id" , self . response . url ) return False logger . info ( "Results received by the Importer (HTTP status %d)" , self . response . status_code ) logger . info ( "Job IDs: %s" , self . job_ids ) return True | Checks that the response is valid and import succeeded . |
14,742 | def get_targets ( self ) : if self . xml_root . tag == "testcases" : self . submit_target = self . config . get ( "testcase_taget" ) self . queue_url = self . config . get ( "testcase_queue" ) self . log_url = self . config . get ( "testcase_log" ) elif self . xml_root . tag == "testsuites" : self . submit_target = self . config . get ( "xunit_target" ) self . queue_url = self . config . get ( "xunit_queue" ) self . log_url = self . config . get ( "xunit_log" ) elif self . xml_root . tag == "requirements" : self . submit_target = self . config . get ( "requirement_target" ) self . queue_url = self . config . get ( "requirement_queue" ) self . log_url = self . config . get ( "requirement_log" ) else : raise Dump2PolarionException ( "Failed to submit to Polarion - submit target not found" ) | Sets targets . |
14,743 | def get_credentials ( self , ** kwargs ) : login = ( kwargs . get ( "user" ) or os . environ . get ( "POLARION_USERNAME" ) or self . config . get ( "username" ) ) pwd = ( kwargs . get ( "password" ) or os . environ . get ( "POLARION_PASSWORD" ) or self . config . get ( "password" ) ) if not all ( [ login , pwd ] ) : raise Dump2PolarionException ( "Failed to submit to Polarion - missing credentials" ) self . credentials = ( login , pwd ) | Sets credentails . |
14,744 | def e_164 ( msisdn : str ) -> str : number = phonenumbers . parse ( "+{}" . format ( msisdn . lstrip ( "+" ) ) , None ) return phonenumbers . format_number ( number , phonenumbers . PhoneNumberFormat . E164 ) | Returns the msisdn in E . 164 international format . |
14,745 | def loadFile ( self , filePath ) : self . _filePath = filePath if self . _proc . state ( ) != QProcess . Running : self . _kill ( ) self . _run ( self . _filePath ) else : self . _execute ( "pausing_keep_force pt_step 1" ) self . _execute ( "get_property pause" ) self . _execute ( "loadfile \"%s\"" % self . _filePath ) self . _data . reset ( ) self . videoDataChanged . emit ( False ) self . _changePlayingState ( True ) | Loads a file |
14,746 | def play ( self ) : if self . _proc . state ( ) == QProcess . Running : if self . isPlaying is False : self . _execute ( "pause" ) self . _changePlayingState ( True ) elif self . _filePath is not None : self . _kill ( ) self . _run ( self . _filePath ) self . _changePlayingState ( True ) | Starts a playback |
14,747 | def fourier ( x , N ) : term = 0. for n in range ( 1 , N , 2 ) : term += ( 1. / n ) * math . sin ( n * math . pi * x / L ) return ( 4. / ( math . pi ) ) * term | Fourier approximation with N terms |
14,748 | def parse_enum ( enum ) : docs = enum [ 'comment' ] code = '<span class="k">enum</span> <span class="gs">%s</span> {\n' % enum [ 'name' ] for v in enum [ "values" ] : if v [ 'comment' ] : for line in v [ 'comment' ] . split ( "\n" ) : code += ' <span class="c1">// %s</span>\n' % line code += ' <span class="nv">%s</span>\n' % v [ 'value' ] code += "}" return to_section ( docs , code ) | Returns a docco section for the given enum . |
14,749 | def parse_struct ( s ) : docs = s [ 'comment' ] code = '<span class="k">struct</span> <span class="gs">%s</span>' % s [ 'name' ] if s [ 'extends' ] : code += ' extends <span class="gs">%s</span>' % s [ 'extends' ] code += ' {\n' namelen = 0 typelen = 0 for v in s [ "fields" ] : tlen = len ( format_type ( v , includeOptional = False ) ) if len ( v [ 'name' ] ) > namelen : namelen = len ( v [ 'name' ] ) if tlen > typelen : typelen = tlen namelen += 1 typelen += 1 formatstr = ' <span class="nv">%s</span><span class="kt">%s %s</span>\n' i = 0 for v in s [ "fields" ] : if v . has_key ( 'comment' ) and v [ 'comment' ] : if i > 0 : code += "\n" for line in v [ 'comment' ] . split ( "\n" ) : code += ' <span class="c1">// %s</span>\n' % line opt = "" if v . has_key ( 'optional' ) and v [ 'optional' ] == True : opt = " [optional]" code += formatstr % ( string . ljust ( v [ 'name' ] , namelen ) , string . ljust ( format_type ( v , includeOptional = False ) , typelen ) , opt ) i += 1 code += "}" return to_section ( docs , code ) | Returns a docco section for the given struct . |
14,750 | def parse_interface ( iface ) : sections = [ ] docs = iface [ 'comment' ] code = '<span class="k">interface</span> <span class="gs">%s</span> {\n' % iface [ 'name' ] for v in iface [ "functions" ] : func_code = ' <span class="nf">%s</span>(' % v [ 'name' ] i = 0 for p in v [ "params" ] : if i == 0 : i = 1 else : func_code += ", " func_code += '<span class="na">%s</span> <span class="kt">%s</span>' % ( p [ 'name' ] , format_type ( p ) ) func_code += ') <span class="kt">%s</span>\n' % format_type ( v [ 'returns' ] ) if v . has_key ( 'comment' ) and v [ 'comment' ] : if code : sections . append ( to_section ( docs , code ) ) docs = v [ 'comment' ] code = func_code else : code += func_code code += "}" sections . append ( to_section ( docs , code ) ) return sections | Returns a docco section for the given interface . |
14,751 | def to_sections ( idl_parsed ) : sections = [ ] for entity in idl_parsed : if entity [ "type" ] == "comment" : sections . append ( to_section ( entity [ "value" ] , "" ) ) elif entity [ "type" ] == "enum" : sections . append ( parse_enum ( entity ) ) elif entity [ "type" ] == "struct" : sections . append ( parse_struct ( entity ) ) elif entity [ "type" ] == "interface" : sections . extend ( parse_interface ( entity ) ) return sections | Iterates through elements in idl_parsed list and returns a list of section dicts . Currently elements of type comment enum struct and interface are processed . |
14,752 | def sub_location ( self , nbr ) : assert nbr > - 1 , "Sub location number must be greater or equal to 0!" assert nbr < self . nbr_of_sub_locations ( ) - 1 , "Sub location number must be lower than %d!" % self . nbr_of_sub_locations ( ) - 1 return self . _locations_list [ nbr ] | Return a given sub location 0 - based . |
14,753 | def get_locations_list ( self , lower_bound = 0 , upper_bound = None ) : real_upper_bound = upper_bound if upper_bound is None : real_upper_bound = self . nbr_of_sub_locations ( ) try : return self . _locations_list [ lower_bound : real_upper_bound ] except : return list ( ) | Return the internal location list . |
14,754 | def get_args ( args , kwargs , arg_names ) : n_args = len ( arg_names ) if len ( args ) + len ( kwargs ) > n_args : raise MoultScannerError ( 'Too many arguments supplied. Expected: {}' . format ( n_args ) ) out_args = { } for i , a in enumerate ( args ) : out_args [ arg_names [ i ] ] = a for a in arg_names : if a not in out_args : out_args [ a ] = None out_args . update ( kwargs ) return out_args | Get arguments as a dict . |
14,755 | def ast_scan_file ( filename , re_fallback = True ) : try : with io . open ( filename , 'rb' ) as fp : try : root = ast . parse ( fp . read ( ) , filename = filename ) except ( SyntaxError , IndentationError ) : if re_fallback : log . debug ( 'Falling back to regex scanner' ) return _ast_scan_file_re ( filename ) else : log . error ( 'Could not parse file: %s' , filename ) log . info ( 'Exception:' , exc_info = True ) return None , None log . debug ( 'Starting AST Scan: %s' , filename ) ast_visitor . reset ( filename ) ast_visitor . visit ( root ) log . debug ( 'Project path: %s' , ast_visitor . import_root ) return ast_visitor . scope , ast_visitor . imports except IOError : log . warn ( 'Could not open file: %s' , filename ) return None , None | Scans a file for imports using AST . |
14,756 | def dump ( d , fmt = 'json' , stream = None ) : if fmt == 'json' : return _dump_json ( d , stream = stream ) elif fmt == 'yaml' : return yaml . dump ( d , stream ) elif fmt == 'lha' : s = _dump_lha ( d ) if stream is None : return s else : return stream . write ( s ) | Serialize structured data into a stream in JSON YAML or LHA format . If stream is None return the produced string instead . |
14,757 | def inline_for_model ( model , variants = [ ] , inline_args = { } ) : if not isinstance ( model , ModelBase ) : raise ValueError ( "inline_for_model requires it's argument to be a Django Model" ) d = dict ( model = model ) if variants : d [ 'variants' ] = variants if inline_args : d [ 'args' ] = inline_args class_name = "%sInline" % model . _meta . module_name . capitalize ( ) return type ( class_name , ( ModelInline , ) , d ) | A shortcut function to produce ModelInlines for django models |
14,758 | def initialize_mpi ( mpi = False ) : if mpi : import mpi4py . MPI comm = mpi4py . MPI . COMM_WORLD rank = comm . Get_rank ( ) size = comm . Get_size ( ) else : comm = None rank = 0 size = 1 return { "comm" : comm , "rank" : rank , "size" : size , "mode" : mpi } | initialize mpi settings |
14,759 | def create_ports ( port , mpi , rank ) : if port == "random" or port is None : ports = { } else : port = int ( port ) ports = { "REQ" : port + 0 , "PUSH" : port + 1 , "SUB" : port + 2 } if mpi == 'all' : for port in ports : ports [ port ] += ( rank * 3 ) return ports | create a list of ports for the current rank |
14,760 | def import_from_string ( full_class_name ) : s = full_class_name . split ( '.' ) class_name = s [ - 1 ] module_name = full_class_name [ : - len ( class_name ) - 1 ] module = importlib . import_module ( module_name ) klass = getattr ( module , class_name ) return klass | return a class based on it s full class name |
14,761 | def create_bmi_model ( self , engine , bmi_class = None , wrapper_kwargs = None ) : if wrapper_kwargs is None : wrapper_kwargs = { } if bmi_class is None : wrapper_class = bmi . wrapper . BMIWrapper else : wrapper_class = self . import_from_string ( bmi_class ) try : model = wrapper_class ( engine , ** wrapper_kwargs ) except TypeError as e : logger . warn ( 'Model wrapper %s does not accept engine as a first argument' , wrapper_class ) model = wrapper_class ( ** wrapper_kwargs ) return model | initialize a bmi mode using an optional class |
14,762 | def register ( self ) : result = requests . post ( urljoin ( self . tracker , 'models' ) , data = json . dumps ( self . metadata ) ) logger . debug ( "registered at server %s: %s" , self . tracker , result ) self . metadata [ "tracker" ] = result . json ( ) | register model at tracking server |
14,763 | def unregister ( self ) : uuid = self . metadata [ "tracker" ] [ "uuid" ] result = requests . delete ( urljoin ( self . tracker , 'models' + "/" + uuid ) ) logger . debug ( "unregistered at server %s with %s: %s" , self . tracker , uuid , result ) | unregister model at tracking server |
14,764 | def create_sockets ( self ) : ports = self . ports context = zmq . Context ( ) poller = zmq . Poller ( ) rep = context . socket ( zmq . REP ) if "REQ" in ports : rep . bind ( "tcp://*:{port}" . format ( port = ports [ "REQ" ] ) ) else : ports [ "REQ" ] = rep . bind_to_random_port ( "tcp://*" ) pull = context . socket ( zmq . PULL ) if "PUSH" in ports : pull . bind ( "tcp://*:{port}" . format ( port = ports [ "PUSH" ] ) ) else : ports [ "PUSH" ] = pull . bind_to_random_port ( "tcp://*" ) pub = context . socket ( zmq . PUB ) if "SUB" in ports : pub . bind ( "tcp://*:{port}" . format ( port = ports [ "SUB" ] ) ) else : ports [ "SUB" ] = pub . bind_to_random_port ( "tcp://*" ) poller . register ( rep , zmq . POLLIN ) poller . register ( pull , zmq . POLLIN ) sockets = dict ( poller = poller , rep = rep , pull = pull , pub = pub ) return sockets | create zmq sockets |
14,765 | def run ( self ) : model = self . model configfile = self . configfile interval = self . interval sockets = self . sockets model . initialize ( configfile ) if model . state == 'pause' : logger . info ( "model initialized and started in pause mode, waiting for requests" ) else : logger . info ( "model started and initialized, running" ) if self . tracker : self . register ( ) atexit . register ( self . unregister ) self . process_incoming ( ) counter = itertools . count ( ) logger . info ( "Entering timeloop..." ) for i in counter : while model . state == "pause" : self . process_incoming ( ) else : self . process_incoming ( ) if model . state == "quit" : break dt = model . get_time_step ( ) or - 1 model . update ( dt ) if i % interval : continue for key in self . output_vars : value = model . get_var ( key ) metadata = { 'name' : key , 'iteration' : i } logger . debug ( "sending {}" . format ( metadata ) ) if 'pub' in sockets : send_array ( sockets [ 'pub' ] , value , metadata = metadata ) logger . info ( "Finalizing..." ) model . finalize ( ) | run the model |
14,766 | def calc_basics ( width = - 1 , length = - 1 , height = 2.4 , prevailing_wind = 2.8 ) : if width == - 1 : width = int ( input ( 'enter building width : ' ) ) if length == - 1 : length = int ( input ( 'enter building length : ' ) ) res = { } res [ 'area' ] = width * length res [ 'perim' ] = 2 * width + 2 * length res [ 'roof_cladding' ] = res [ 'area' ] res [ 'wall_cladding' ] = res [ 'perim' ] * height pprint ( res ) return res | calculate various aspects of the structure |
14,767 | def bld_rafter_deflection ( length = - 9 , force = - 9 , E_mod_elasticity = - 9 , I_moment_of_intertia = - 9 ) : if length == - 9 : length = float ( input ( 'enter rafter length : ' ) ) if force == - 9 : force = float ( input ( 'enter Force or weight applied to roof : ' ) ) if E_mod_elasticity == - 9 : E_mod_elasticity = float ( input ( 'enter modulus of elasticity x10**5 (Steel beam example=2.1) : ' ) ) if I_moment_of_intertia == - 9 : I_moment_of_intertia = float ( input ( 'enter members "moment of intertia x10**6" (for Steel beam 410UB53.7=188 ) :' ) ) res = { } res [ 'max deflection - centre load' ] = ( 1 * force * ( length ** 3 ) ) / ( 48 * ( E_mod_elasticity * 10 ** 5 ) * ( I_moment_of_intertia * 10 ** 6 ) ) res [ 'max deflection - distrib load' ] = ( 5 * force * ( length ** 4 ) ) / ( 384 * ( E_mod_elasticity * 10 ** 5 ) * ( I_moment_of_intertia * 10 ** 6 ) ) pprint ( res ) return res | calculate rafter deflections - see test_calc_building_design . py for Sample values for equations below from Structures II course |
14,768 | def get_parties ( self , obj ) : return PartySerializer ( Party . objects . all ( ) , many = True ) . data | All parties . |
14,769 | def get_elections ( self , obj ) : election_day = ElectionDay . objects . get ( date = self . context [ "election_date" ] ) kwargs = { "race__office__body" : obj , "election_day" : election_day } if self . context . get ( "division" ) and obj . slug == "senate" : kwargs [ "division" ] = self . context [ "division" ] elif self . context . get ( "division" ) and obj . slug == "house" : kwargs [ "division__parent" ] = self . context [ "division" ] if obj . slug == "house" and not self . context . get ( "division" ) : kwargs [ "race__special" ] = False elections = Election . objects . filter ( ** kwargs ) return ElectionSerializer ( elections , many = True ) . data | All elections held on an election day . |
14,770 | def parse_channels ( self ) : channels = [ ] for channel in self . _project_dict [ "channels" ] : channels . append ( Channel ( channel , self . _is_sixteen_bit , self . _ignore_list ) ) return channels | Creates an array of Channel objects from the project |
14,771 | def update ( self ) : for channel in self . channels : channel . update ( ) for i in range ( len ( self . _project_dict [ "channels" ] ) ) : channel_dict = self . _project_dict [ "channels" ] [ i ] for channel in self . channels : if channel . name == channel_dict [ "common.ALLTYPES_NAME" ] : self . _project_dict [ "channels" ] [ i ] = channel . as_dict ( ) | Updates the dictionary of the project |
14,772 | def cmd ( send , * _ ) : url = get ( 'http://distrowatch.com/random.php' ) . url match = re . search ( '=(.*)' , url ) if match : send ( match . group ( 1 ) ) else : send ( "no distro found" ) | Gets a random distro . |
14,773 | def cmd ( send , * _ ) : try : output = subprocess . check_output ( [ 'ddate' ] , universal_newlines = True ) except subprocess . CalledProcessError : output = 'Today is the day you install ddate!' for line in output . splitlines ( ) : send ( line ) | Returns the Discordian date . |
14,774 | def cmd ( send , msg , args ) : parser = arguments . ArgParser ( args [ 'config' ] ) parser . add_argument ( '--chan' , '--channel' , action = arguments . ChanParser ) try : cmdargs , extra = parser . parse_known_args ( msg ) except arguments . ArgumentException as e : send ( str ( e ) ) return target = cmdargs . channels [ 0 ] if hasattr ( cmdargs , 'channels' ) else args [ 'target' ] mode = " " . join ( extra ) if not mode : send ( 'Please specify a mode.' ) elif target == 'private' : send ( "Modes don't work in a PM!" ) else : with args [ 'handler' ] . data_lock : if target not in args [ 'handler' ] . channels : send ( "Bot not in channel %s" % target ) elif args [ 'botnick' ] not in args [ 'handler' ] . opers [ target ] : send ( "Bot must be opped in channel %s" % target ) else : args [ 'handler' ] . connection . mode ( target , mode ) if args [ 'target' ] != args [ 'config' ] [ 'core' ] [ 'ctrlchan' ] : send ( "Mode \"%s\" on %s by %s" % ( mode , target , args [ 'nick' ] ) , target = args [ 'config' ] [ 'core' ] [ 'ctrlchan' ] ) | Sets a mode . |
14,775 | def as_json ( context ) : info = { 'info' : cgi . escape ( pprint . pformat ( context . context ) ) , } return Response ( content_type = 'application/json' , body = json . dumps ( info ) ) | Return an object s representation as JSON |
14,776 | def as_tree ( context ) : tree = _build_tree ( context , 2 , 1 ) if type ( tree ) == dict : tree = [ tree ] return Response ( content_type = 'application/json' , body = json . dumps ( tree ) ) | Return info about an object s members as JSON |
14,777 | def main ( ) : character1 = Character ( 'Albogh' , str = 4 , int = 7 , sta = 50 ) character2 = Character ( 'Zoltor' , str = 6 , int = 6 , sta = 70 ) print ( 'PLAYER1 [start]:' , character1 ) print ( 'PLAYER2 [start]:' , character2 ) b = Battle ( character1 , character2 ) print ( b ) print ( 'PLAYER1 [end]:' , character1 ) print ( 'PLAYER2 [end]:' , character2 ) | Prototype to see how an RPG simulation might be used in the AIKIF framework . The idea is to build a simple character and run a simulation to see how it succeeds in a random world against another players character character stats world locations |
14,778 | def fight ( self , moves = 10 ) : for i in range ( 1 , moves ) : result , dmg = self . calc_move ( self . c1 , self . c2 ) print ( self . c1 . name + ' ' + result + ' for ' + str ( dmg ) ) self . c1 . sta = self . c1 . sta - dmg if self . is_character_dead ( self . c1 ) : print ( self . c1 . name + ' has died' ) return result , dmg = self . calc_move ( self . c2 , self . c1 ) print ( self . c2 . name + ' ' + result + ' for ' + str ( dmg ) ) self . c2 . sta = self . c2 . sta - dmg if self . is_character_dead ( self . c2 ) : print ( self . c2 . name + ' has died' ) return | runs a series of fights |
14,779 | def duration ( self ) : if self . completion_ts : end = self . completed else : end = datetime . utcnow ( ) return end - self . started | Return a timedelta for this build . |
14,780 | def estimate_completion ( self ) : if self . state != build_states . BUILDING : defer . returnValue ( self . completed ) avg_delta = yield self . connection . getAverageBuildDuration ( self . name ) est_completion = self . started + avg_delta defer . returnValue ( est_completion ) | Estimate completion time for a build . |
14,781 | def target ( self ) : task = yield self . task ( ) if not task : yield defer . succeed ( None ) defer . returnValue ( None ) defer . returnValue ( task . target ) | Find the target name for this build . |
14,782 | def task ( self ) : if not self . task_id : return defer . succeed ( None ) return self . connection . getTaskInfo ( self . task_id ) | Find the task for this build . |
14,783 | def task_id ( self ) : if self [ 'task_id' ] : return self [ 'task_id' ] if self . extra and 'container_koji_task_id' in self . extra : return self . extra [ 'container_koji_task_id' ] | Hack to return a task ID for a build including container CG builds . |
14,784 | def get_images ( self , obj ) : return { str ( i . tag ) : i . image . url for i in obj . images . all ( ) } | Object of images serialized by tag name . |
14,785 | def get_override_winner ( self , obj ) : if obj . election . division . level . name == DivisionLevel . DISTRICT : division = obj . election . division . parent else : division = obj . election . division vote = obj . votes . filter ( division = division ) . first ( ) return vote . winning if vote else False | Winner marked in backend . |
14,786 | def get_override_votes ( self , obj ) : if hasattr ( obj , "meta" ) : if obj . meta . override_ap_votes : all_votes = None for ce in obj . candidate_elections . all ( ) : if all_votes : all_votes = all_votes | ce . votes . all ( ) else : all_votes = ce . votes . all ( ) return VotesSerializer ( all_votes , many = True ) . data return False | Votes entered into backend . Only used if override_ap_votes = True . |
14,787 | def save ( self , * args , ** kwargs ) : if self . publication : publication = self . publication if not self . title : self . title = publication . title if not self . subtitle : first_author = publication . first_author if first_author == publication . last_author : authors = first_author else : authors = '{} et al.' . format ( first_author ) self . subtitle = '{}, {} ({})' . format ( authors , publication . journal , publication . year ) if not self . description : self . description = publication . abstract if self . publication . year and not self . pk : delta = timezone . now ( ) - self . publish_datetime if self . publish_datetime <= timezone . now ( ) and delta . days == 0 : self . publish_datetime = datetime . datetime ( year = int ( self . publication . year ) , month = int ( self . publication . month or 1 ) , day = int ( self . publication . day or 1 ) , ) super ( ) . save ( * args , ** kwargs ) | Before saving if slide is for a publication use publication info for slide s title subtitle description . |
14,788 | def execute ( self , context = None , stdout = None , stderr = None ) : total_benchmark = Benchmark ( ) self . context = context or Context ( ) if self . _is_collection ( ) : self . stdout = sys . stdout self . stderr = sys . stderr else : self . stdout = stdout or StringIO ( ) self . stderr = stderr or StringIO ( ) self . _record_start_example ( self . formatter ) try : with total_benchmark , Replace ( sys , 'stdout' , self . stdout ) , Replace ( sys , 'stderr' , self . stderr ) : self . _setup ( ) self . _execute ( ) self . _teardown ( ) except Exception as e : self . example . error = e self . example . traceback = sys . exc_info ( ) [ 2 ] finally : self . example . real_time = total_benchmark . total_time self . _record_end_example ( self . formatter ) self . context = None self . example . stdout = self . stdout self . example . stderr = self . stderr return self . example . error is None | Does all the work of running an example . |
14,789 | def run ( self , context = None , stdout = None , stderr = None ) : "Like execute, but records a skip if the should_skip method returns True." if self . should_skip ( ) : self . _record_skipped_example ( self . formatter ) self . num_skipped += 1 else : self . execute ( context , stdout , stderr ) return self . num_successes , self . num_failures , self . num_skipped | Like execute but records a skip if the should_skip method returns True . |
14,790 | def _setup ( self ) : "Resets the state and prepares for running the example." self . example . error = None self . example . traceback = '' c = Context ( parent = self . context ) self . context = c if self . is_root_runner : run . before_all . execute ( self . context ) self . example . before ( self . context ) | Resets the state and prepares for running the example . |
14,791 | def _execute_example_group ( self ) : "Handles the execution of Example Group" for example in self . example : runner = self . __class__ ( example , self . formatter ) runner . is_root_runner = False successes , failures , skipped = runner . run ( self . context ) self . num_successes += successes self . num_failures += failures self . num_skipped += skipped | Handles the execution of Example Group |
14,792 | def _execute_example ( self ) : "Handles the execution of the Example" test_benchmark = Benchmark ( ) try : with Registry ( ) , test_benchmark : if accepts_arg ( self . example . testfn ) : self . example . testfn ( self . context ) else : self . context . inject_into_self ( self . example . testfn ) self . example . testfn ( ) self . num_successes += 1 except KeyboardInterrupt : raise except : raise self . num_failures += 1 finally : self . example . user_time = test_benchmark . total_time | Handles the execution of the Example |
14,793 | def _teardown ( self ) : "Handles the restoration of any potential global state set." self . example . after ( self . context ) if self . is_root_runner : run . after_all . execute ( self . context ) self . has_ran = True | Handles the restoration of any potential global state set . |
14,794 | def pypy_json_encode ( value , pretty = False ) : global _dealing_with_problem if pretty : return pretty_json ( value ) try : _buffer = UnicodeBuilder ( 2048 ) _value2json ( value , _buffer ) output = _buffer . build ( ) return output except Exception as e : from mo_logs import Log if _dealing_with_problem : Log . error ( "Serialization of JSON problems" , e ) else : Log . warning ( "Serialization of JSON problems" , e ) _dealing_with_problem = True try : return pretty_json ( value ) except Exception as f : Log . error ( "problem serializing object" , f ) finally : _dealing_with_problem = False | pypy DOES NOT OPTIMIZE GENERATOR CODE WELL |
14,795 | def problem_serializing ( value , e = None ) : from mo_logs import Log try : typename = type ( value ) . __name__ except Exception : typename = "<error getting name>" try : rep = text_type ( repr ( value ) ) except Exception as _ : rep = None if rep == None : Log . error ( "Problem turning value of type {{type}} to json" , type = typename , cause = e ) else : Log . error ( "Problem turning value ({{value}}) of type {{type}} to json" , value = rep , type = typename , cause = e ) | THROW ERROR ABOUT SERIALIZING |
14,796 | def unicode_key ( key ) : if not isinstance ( key , ( text_type , binary_type ) ) : from mo_logs import Log Log . error ( "{{key|quote}} is not a valid key" , key = key ) return quote ( text_type ( key ) ) | CONVERT PROPERTY VALUE TO QUOTED NAME OF SAME |
14,797 | def cmd ( send , _ , args ) : curr = datetime . now ( ) uptime = args [ 'handler' ] . uptime load_avg = ', ' . join ( [ str ( x ) for x in os . getloadavg ( ) ] ) starttime = curr - uptime [ 'start' ] reloaded = curr - uptime [ 'reloaded' ] send ( "Time since start: %s, load average: %s" % ( starttime , load_avg ) ) send ( "Time since reload: %s" % reloaded ) | Shows the bot s uptime . |
14,798 | def pcwd ( func ) : @ wraps ( func ) def inner ( * args , ** kw ) : with PreserveWorkingDirectory ( ) : return func ( * args , ** kw ) return inner | A decorator to provide the functionality of the PreserveWorkingDirectory context manager for functions and methods . |
14,799 | def cmd ( send , msg , _ ) : msg = msg . encode ( 'utf-8' ) send ( hashlib . sha512 ( msg ) . hexdigest ( ) ) | SHA512 hashes something . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.