idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
54,800
async def multi_set ( self , pairs , ttl = SENTINEL , dumps_fn = None , namespace = None , _conn = None ) : start = time . monotonic ( ) dumps = dumps_fn or self . _serializer . dumps tmp_pairs = [ ] for key , value in pairs : tmp_pairs . append ( ( self . build_key ( key , namespace = namespace ) , dumps ( value ) ) ) await self . _multi_set ( tmp_pairs , ttl = self . _get_ttl ( ttl ) , _conn = _conn ) logger . debug ( "MULTI_SET %s %d (%.4f)s" , [ key for key , value in tmp_pairs ] , len ( pairs ) , time . monotonic ( ) - start , ) return True
Stores multiple values in the given keys .
54,801
async def delete ( self , key , namespace = None , _conn = None ) : start = time . monotonic ( ) ns_key = self . build_key ( key , namespace = namespace ) ret = await self . _delete ( ns_key , _conn = _conn ) logger . debug ( "DELETE %s %d (%.4f)s" , ns_key , ret , time . monotonic ( ) - start ) return ret
Deletes the given key .
54,802
async def exists ( self , key , namespace = None , _conn = None ) : start = time . monotonic ( ) ns_key = self . build_key ( key , namespace = namespace ) ret = await self . _exists ( ns_key , _conn = _conn ) logger . debug ( "EXISTS %s %d (%.4f)s" , ns_key , ret , time . monotonic ( ) - start ) return ret
Check key exists in the cache .
54,803
async def expire ( self , key , ttl , namespace = None , _conn = None ) : start = time . monotonic ( ) ns_key = self . build_key ( key , namespace = namespace ) ret = await self . _expire ( ns_key , ttl , _conn = _conn ) logger . debug ( "EXPIRE %s %d (%.4f)s" , ns_key , ret , time . monotonic ( ) - start ) return ret
Set the ttl to the given key . By setting it to 0 it will disable it
54,804
async def clear ( self , namespace = None , _conn = None ) : start = time . monotonic ( ) ret = await self . _clear ( namespace , _conn = _conn ) logger . debug ( "CLEAR %s %d (%.4f)s" , namespace , ret , time . monotonic ( ) - start ) return ret
Clears the cache in the cache namespace . If an alternative namespace is given it will clear those ones instead .
54,805
async def raw ( self , command , * args , _conn = None , ** kwargs ) : start = time . monotonic ( ) ret = await self . _raw ( command , * args , encoding = self . serializer . encoding , _conn = _conn , ** kwargs ) logger . debug ( "%s (%.4f)s" , command , time . monotonic ( ) - start ) return ret
Send the raw command to the underlying client . Note that by using this CMD you will lose compatibility with other backends .
54,806
async def close ( self , * args , _conn = None , ** kwargs ) : start = time . monotonic ( ) ret = await self . _close ( * args , _conn = _conn , ** kwargs ) logger . debug ( "CLOSE (%.4f)s" , time . monotonic ( ) - start ) return ret
Perform any resource clean up necessary to exit the program safely . After closing cmd execution is still possible but you will have to close again before exiting .
54,807
def get ( self , alias : str ) : try : return self . _caches [ alias ] except KeyError : pass config = self . get_alias_config ( alias ) cache = _create_cache ( ** deepcopy ( config ) ) self . _caches [ alias ] = cache return cache
Retrieve cache identified by alias . Will return always the same instance
54,808
def create ( self , alias = None , cache = None , ** kwargs ) : if alias : config = self . get_alias_config ( alias ) elif cache : warnings . warn ( "Creating a cache with an explicit config is deprecated, use 'aiocache.Cache'" , DeprecationWarning , ) config = { "cache" : cache } else : raise TypeError ( "create call needs to receive an alias or a cache" ) cache = _create_cache ( ** { ** config , ** kwargs } ) return cache
Create a new cache . Either alias or cache params are required . You can use kwargs to pass extra parameters to configure the cache .
54,809
async def async_poller ( client , initial_response , deserialization_callback , polling_method ) : try : client = client if isinstance ( client , ServiceClientAsync ) else client . _client except AttributeError : raise ValueError ( "Poller client parameter must be a low-level msrest Service Client or a SDK client." ) response = initial_response . response if isinstance ( initial_response , ClientRawResponse ) else initial_response if isinstance ( deserialization_callback , type ) and issubclass ( deserialization_callback , Model ) : deserialization_callback = deserialization_callback . deserialize polling_method . initialize ( client , response , deserialization_callback ) await polling_method . run ( ) return polling_method . resource ( )
Async Poller for long running operations .
54,810
def send ( self , request , ** kwargs ) : session = request . context . session old_max_redirects = None if 'max_redirects' in kwargs : warnings . warn ( "max_redirects in operation kwargs is deprecated, use config.redirect_policy instead" , DeprecationWarning ) old_max_redirects = session . max_redirects session . max_redirects = int ( kwargs [ 'max_redirects' ] ) old_trust_env = None if 'use_env_proxies' in kwargs : warnings . warn ( "use_env_proxies in operation kwargs is deprecated, use config.proxies instead" , DeprecationWarning ) old_trust_env = session . trust_env session . trust_env = bool ( kwargs [ 'use_env_proxies' ] ) old_retries = { } if 'retries' in kwargs : warnings . warn ( "retries in operation kwargs is deprecated, use config.retry_policy instead" , DeprecationWarning ) max_retries = kwargs [ 'retries' ] for protocol in self . _protocols : old_retries [ protocol ] = session . adapters [ protocol ] . max_retries session . adapters [ protocol ] . max_retries = max_retries try : return self . next . send ( request , ** kwargs ) finally : if old_max_redirects : session . max_redirects = old_max_redirects if old_trust_env : session . trust_env = old_trust_env if old_retries : for protocol in self . _protocols : session . adapters [ protocol ] . max_retries = old_retries [ protocol ]
Patch the current session with Request level operation config .
54,811
def _request ( self , method , url , params , headers , content , form_content ) : request = ClientRequest ( method , self . format_url ( url ) ) if params : request . format_parameters ( params ) if headers : request . headers . update ( headers ) if "Accept" not in request . headers : _LOGGER . debug ( "Accept header absent and forced to application/json" ) request . headers [ 'Accept' ] = 'application/json' if content is not None : request . add_content ( content ) if form_content : request . add_formdata ( form_content ) return request
Create ClientRequest object .
54,812
def format_url ( self , url , ** kwargs ) : url = url . format ( ** kwargs ) parsed = urlparse ( url ) if not parsed . scheme or not parsed . netloc : url = url . lstrip ( '/' ) base = self . config . base_url . format ( ** kwargs ) . rstrip ( '/' ) url = urljoin ( base + '/' , url ) return url
Format request URL with the client base URL unless the supplied URL is already absolute .
54,813
def get ( self , url , params = None , headers = None , content = None , form_content = None ) : request = self . _request ( 'GET' , url , params , headers , content , form_content ) request . method = 'GET' return request
Create a GET request object .
54,814
def put ( self , url , params = None , headers = None , content = None , form_content = None ) : request = self . _request ( 'PUT' , url , params , headers , content , form_content ) return request
Create a PUT request object .
54,815
def send_formdata ( self , request , headers = None , content = None , ** config ) : request . headers = headers request . add_formdata ( content ) return self . send ( request , ** config )
Send data as a multipart form - data request . We only deal with file - like objects or strings at this point . The requests is not yet streamed .
54,816
def add_header ( self , header , value ) : warnings . warn ( "Private attribute _client.add_header is deprecated. Use config.headers instead." , DeprecationWarning ) self . config . headers [ header ] = value
Add a persistent header - this header will be applied to all requests sent during the current client session .
54,817
def signed_session ( self , session = None ) : session = super ( ApiKeyCredentials , self ) . signed_session ( session ) session . headers . update ( self . in_headers ) try : session . params . update ( self . in_query ) except AttributeError : raise ValueError ( "session.params must be a dict to be used in ApiKeyCredentials" ) return session
Create requests session with ApiKey .
54,818
def deserialize_from_text ( cls , data , content_type = None ) : if hasattr ( data , 'read' ) : data = cast ( IO , data ) . read ( ) if isinstance ( data , bytes ) : data_as_str = data . decode ( encoding = 'utf-8-sig' ) else : data_as_str = cast ( str , data ) data_as_str = data_as_str . lstrip ( _BOM ) if content_type is None : return data if content_type in cls . JSON_MIMETYPES : try : return json . loads ( data_as_str ) except ValueError as err : raise DeserializationError ( "JSON is invalid: {}" . format ( err ) , err ) elif "xml" in ( content_type or [ ] ) : try : return ET . fromstring ( data_as_str ) except ET . ParseError : def _json_attemp ( data ) : try : return True , json . loads ( data ) except ValueError : return False , None success , json_result = _json_attemp ( data ) if success : return json_result _LOGGER . critical ( "Wasn't XML not JSON, failing" ) raise_with_traceback ( DeserializationError , "XML is invalid" ) raise DeserializationError ( "Cannot deserialize content-type: {}" . format ( content_type ) )
Decode data according to content - type .
54,819
def deserialize_from_http_generics ( cls , body_bytes , headers ) : content_type = None if 'content-type' in headers : content_type = headers [ 'content-type' ] . split ( ";" ) [ 0 ] . strip ( ) . lower ( ) else : content_type = "application/json" if body_bytes : return cls . deserialize_from_text ( body_bytes , content_type ) return None
Deserialize from HTTP response .
54,820
def on_response ( self , request , response , ** kwargs ) : if kwargs . get ( "stream" , True ) : return http_response = response . http_response response . context [ self . CONTEXT_NAME ] = self . deserialize_from_http_generics ( http_response . text ( ) , http_response . headers )
Extract data from the body of a REST response object .
54,821
def add_headers ( self , header_dict ) : if not self . response : return for name , data_type in header_dict . items ( ) : value = self . response . headers . get ( name ) value = self . _deserialize ( data_type , value ) self . headers [ name ] = value
Deserialize a specific header .
54,822
def log_request ( _ , request , * _args , ** _kwargs ) : if not _LOGGER . isEnabledFor ( logging . DEBUG ) : return try : _LOGGER . debug ( "Request URL: %r" , request . url ) _LOGGER . debug ( "Request method: %r" , request . method ) _LOGGER . debug ( "Request headers:" ) for header , value in request . headers . items ( ) : if header . lower ( ) == 'authorization' : value = '*****' _LOGGER . debug ( " %r: %r" , header , value ) _LOGGER . debug ( "Request body:" ) if isinstance ( request . body , types . GeneratorType ) : _LOGGER . debug ( "File upload" ) else : _LOGGER . debug ( str ( request . body ) ) except Exception as err : _LOGGER . debug ( "Failed to log request: %r" , err )
Log a client request .
54,823
def log_response ( _ , _request , response , * _args , ** kwargs ) : if not _LOGGER . isEnabledFor ( logging . DEBUG ) : return None try : _LOGGER . debug ( "Response status: %r" , response . status_code ) _LOGGER . debug ( "Response headers:" ) for res_header , value in response . headers . items ( ) : _LOGGER . debug ( " %r: %r" , res_header , value ) _LOGGER . debug ( "Response content:" ) pattern = re . compile ( r'attachment; ?filename=["\w.]+' , re . IGNORECASE ) header = response . headers . get ( 'content-disposition' ) if header and pattern . match ( header ) : filename = header . partition ( '=' ) [ 2 ] _LOGGER . debug ( "File attachments: %s" , filename ) elif response . headers . get ( "content-type" , "" ) . endswith ( "octet-stream" ) : _LOGGER . debug ( "Body contains binary data." ) elif response . headers . get ( "content-type" , "" ) . startswith ( "image" ) : _LOGGER . debug ( "Body contains image data." ) else : if kwargs . get ( 'stream' , False ) : _LOGGER . debug ( "Body is streamable" ) else : _LOGGER . debug ( response . text ( ) ) return response except Exception as err : _LOGGER . debug ( "Failed to log response: %s" , repr ( err ) ) return response
Log a server response .
54,824
def _clear_config ( self ) : for section in self . _config . sections ( ) : self . _config . remove_section ( section )
Clearout config object in memory .
54,825
def format_parameters ( self , params ) : query = urlparse ( self . url ) . query if query : self . url = self . url . partition ( '?' ) [ 0 ] existing_params = { p [ 0 ] : p [ - 1 ] for p in [ p . partition ( '=' ) for p in query . split ( '&' ) ] } params . update ( existing_params ) query_params = [ "{}={}" . format ( k , v ) for k , v in params . items ( ) ] query = '?' + '&' . join ( query_params ) self . url = self . url + query
Format parameters into a valid query string . It s assumed all parameters have already been quoted as valid URL strings .
54,826
def _format_data ( data ) : if hasattr ( data , 'read' ) : data = cast ( IO , data ) data_name = None try : if data . name [ 0 ] != '<' and data . name [ - 1 ] != '>' : data_name = os . path . basename ( data . name ) except ( AttributeError , TypeError ) : pass return ( data_name , data , "application/octet-stream" ) return ( None , cast ( str , data ) )
Format field data according to whether it is a stream or a string for a form - data request .
54,827
def add_formdata ( self , content = None ) : if content is None : content = { } content_type = self . headers . pop ( 'Content-Type' , None ) if self . headers else None if content_type and content_type . lower ( ) == 'application/x-www-form-urlencoded' : self . data = { f : d for f , d in content . items ( ) if d is not None } else : self . files = { f : self . _format_data ( d ) for f , d in content . items ( ) if d is not None }
Add data as a multipart form - data request to the request .
54,828
def raise_with_traceback ( exception , message = "" , * args , ** kwargs ) : exc_type , exc_value , exc_traceback = sys . exc_info ( ) exc_msg = "{}, {}: {}" . format ( message , exc_type . __name__ , exc_value ) error = exception ( exc_msg , * args , ** kwargs ) try : raise error . with_traceback ( exc_traceback ) except AttributeError : error . __traceback__ = exc_traceback raise error
Raise exception with a specified traceback .
54,829
def _patch_redirect ( session ) : def enforce_http_spec ( resp , request ) : if resp . status_code in ( 301 , 302 ) and request . method not in [ 'GET' , 'HEAD' ] : return False return True redirect_logic = session . resolve_redirects def wrapped_redirect ( resp , req , ** kwargs ) : attempt = enforce_http_spec ( resp , req ) return redirect_logic ( resp , req , ** kwargs ) if attempt else [ ] wrapped_redirect . is_msrest_patched = True session . resolve_redirects = wrapped_redirect
Whether redirect policy should be applied based on status code .
54,830
def _init_session ( self , session ) : _patch_redirect ( session ) max_retries = self . config . retry_policy ( ) for protocol in self . _protocols : session . adapters [ protocol ] . max_retries = max_retries
Init session level configuration of requests .
54,831
def _configure_send ( self , request , ** kwargs ) : requests_kwargs = { } session = kwargs . pop ( 'session' , self . session ) if session is not self . session : self . _init_session ( session ) session . max_redirects = int ( self . config . redirect_policy ( ) ) session . trust_env = bool ( self . config . proxies . use_env_settings ) requests_kwargs . update ( self . config . connection ( ) ) requests_kwargs [ 'allow_redirects' ] = bool ( self . config . redirect_policy ) requests_kwargs [ 'headers' ] = self . config . headers . copy ( ) proxies = self . config . proxies ( ) if proxies : requests_kwargs [ 'proxies' ] = proxies for key in kwargs : if key in self . _REQUESTS_KWARGS : requests_kwargs [ key ] = kwargs [ key ] def make_user_hook_cb ( user_hook , session ) : def user_hook_cb ( r , * args , ** kwargs ) : kwargs . setdefault ( "msrest" , { } ) [ 'session' ] = session return user_hook ( r , * args , ** kwargs ) return user_hook_cb hooks = [ ] for user_hook in self . config . hooks : hooks . append ( make_user_hook_cb ( user_hook , self . session ) ) if hooks : requests_kwargs [ 'hooks' ] = { 'response' : hooks } output_kwargs = self . config . session_configuration_callback ( session , self . config , kwargs , ** requests_kwargs ) if output_kwargs is not None : requests_kwargs = output_kwargs if session is not self . session : requests_kwargs [ 'session' ] = session requests_kwargs [ 'stream' ] = kwargs . get ( 'stream' , True ) if request . files : requests_kwargs [ 'files' ] = request . files elif request . data : requests_kwargs [ 'data' ] = request . data requests_kwargs [ 'headers' ] . update ( request . headers ) return requests_kwargs
Configure the kwargs to use with requests .
54,832
def full_restapi_key_transformer ( key , attr_desc , value ) : keys = _FLATTEN . split ( attr_desc [ 'key' ] ) return ( [ _decode_attribute_map_key ( k ) for k in keys ] , value )
A key transformer that returns the full RestAPI key path .
54,833
def last_restapi_key_transformer ( key , attr_desc , value ) : key , value = full_restapi_key_transformer ( key , attr_desc , value ) return ( key [ - 1 ] , value )
A key transformer that returns the last RestAPI key .
54,834
def _create_xml_node ( tag , prefix = None , ns = None ) : if prefix and ns : ET . register_namespace ( prefix , ns ) if ns : return ET . Element ( "{" + ns + "}" + tag ) else : return ET . Element ( tag )
Create a XML node .
54,835
def _create_xml_node ( cls ) : try : xml_map = cls . _xml_map except AttributeError : raise ValueError ( "This model has no XML definition" ) return _create_xml_node ( xml_map . get ( 'name' , cls . __name__ ) , xml_map . get ( "prefix" , None ) , xml_map . get ( "ns" , None ) )
Create XML node from _xml_map .
54,836
def validate ( self ) : validation_result = [ ] for attr_name , value in [ ( attr , getattr ( self , attr ) ) for attr in self . _attribute_map ] : attr_desc = self . _attribute_map [ attr_name ] if attr_name == "additional_properties" and attr_desc [ "key" ] == '' : continue attr_type = attr_desc [ 'type' ] try : debug_name = "{}.{}" . format ( self . __class__ . __name__ , attr_name ) Serializer . validate ( value , debug_name , ** self . _validation . get ( attr_name , { } ) ) except ValidationError as validation_error : validation_result . append ( validation_error ) validation_result += _recursive_validate ( attr_name , attr_type , value ) return validation_result
Validate this model recursively and return a list of ValidationError .
54,837
def serialize ( self , keep_readonly = False ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , keep_readonly = keep_readonly )
Return the JSON that would be sent to azure from this model .
54,838
def as_dict ( self , keep_readonly = True , key_transformer = attribute_transformer ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , key_transformer = key_transformer , keep_readonly = keep_readonly )
Return a dict that can be JSONify using json . dump .
54,839
def deserialize ( cls , data , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) return deserializer ( cls . __name__ , data , content_type = content_type )
Parse a str using the RestAPI syntax and return a model .
54,840
def from_dict ( cls , data , key_extractors = None , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) deserializer . key_extractors = [ rest_key_case_insensitive_extractor , attribute_key_case_insensitive_extractor , last_rest_key_case_insensitive_extractor ] if key_extractors is None else key_extractors return deserializer ( cls . __name__ , data , content_type = content_type )
Parse a dict using given key extractor return a model .
54,841
def _classify ( cls , response , objects ) : for subtype_key in cls . __dict__ . get ( '_subtype_map' , { } ) . keys ( ) : subtype_value = None rest_api_response_key = cls . _get_rest_key_parts ( subtype_key ) [ - 1 ] subtype_value = response . pop ( rest_api_response_key , None ) or response . pop ( subtype_key , None ) if subtype_value : if cls . __name__ == subtype_value : return cls flatten_mapping_type = cls . _flatten_subtype ( subtype_key , objects ) try : return objects [ flatten_mapping_type [ subtype_value ] ] except KeyError : _LOGGER . warning ( "Subtype value %s has no mapping, use base class %s." , subtype_value , cls . __name__ , ) break else : _LOGGER . warning ( "Discriminator %s is absent or null, use base class %s." , subtype_key , cls . __name__ ) break return cls
Check the class _subtype_map for any child classes . We want to ignore any inherited _subtype_maps . Remove the polymorphic key from the initial data .
54,842
def body ( self , data , data_type , ** kwargs ) : if data is None : raise ValidationError ( "required" , "body" , True ) internal_data_type = data_type . strip ( '[]{}' ) internal_data_type = self . dependencies . get ( internal_data_type , None ) if internal_data_type and not isinstance ( internal_data_type , Enum ) : try : deserializer = Deserializer ( self . dependencies ) deserializer . additional_properties_detection = False if issubclass ( internal_data_type , Model ) and internal_data_type . is_xml_model ( ) : deserializer . key_extractors = [ attribute_key_case_insensitive_extractor , ] else : deserializer . key_extractors = [ rest_key_case_insensitive_extractor , attribute_key_case_insensitive_extractor , last_rest_key_case_insensitive_extractor ] data = deserializer . _deserialize ( data_type , data ) except DeserializationError as err : raise_with_traceback ( SerializationError , "Unable to build a model: " + str ( err ) , err ) if self . client_side_validation : errors = _recursive_validate ( data_type , data_type , data ) if errors : raise errors [ 0 ] return self . _serialize ( data , data_type , ** kwargs )
Serialize data intended for a request body .
54,843
def url ( self , name , data , data_type , ** kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , ** kwargs ) try : output = self . serialize_data ( data , data_type , ** kwargs ) if data_type == 'bool' : output = json . dumps ( output ) if kwargs . get ( 'skip_quote' ) is True : output = str ( output ) else : output = quote ( str ( output ) , safe = '' ) except SerializationError : raise TypeError ( "{} must be type {}." . format ( name , data_type ) ) else : return output
Serialize data intended for a URL path .
54,844
def header ( self , name , data , data_type , ** kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , ** kwargs ) try : if data_type in [ '[str]' ] : data = [ "" if d is None else d for d in data ] output = self . serialize_data ( data , data_type , ** kwargs ) if data_type == 'bool' : output = json . dumps ( output ) except SerializationError : raise TypeError ( "{} must be type {}." . format ( name , data_type ) ) else : return str ( output )
Serialize data intended for a request header .
54,845
def validate ( cls , data , name , ** kwargs ) : required = kwargs . get ( 'required' , False ) if required and data is None : raise ValidationError ( "required" , name , True ) elif data is None : return elif kwargs . get ( 'readonly' ) : return try : for key , value in kwargs . items ( ) : validator = cls . validation . get ( key , lambda x , y : False ) if validator ( data , value ) : raise ValidationError ( key , name , value ) except TypeError : raise ValidationError ( "unknown" , name , "unknown" ) else : return data
Validate that a piece of data meets certain conditions
54,846
def serialize_data ( self , data , data_type , ** kwargs ) : if data is None : raise ValueError ( "No value for given attribute" ) try : if data_type in self . basic_types . values ( ) : return self . serialize_basic ( data , data_type , ** kwargs ) elif data_type in self . serialize_type : return self . serialize_type [ data_type ] ( data , ** kwargs ) enum_type = self . dependencies . get ( data_type , data . __class__ ) if issubclass ( enum_type , Enum ) : return Serializer . serialize_enum ( data , enum_obj = enum_type ) iter_type = data_type [ 0 ] + data_type [ - 1 ] if iter_type in self . serialize_type : return self . serialize_type [ iter_type ] ( data , data_type [ 1 : - 1 ] , ** kwargs ) except ( ValueError , TypeError ) as err : msg = "Unable to serialize value: {!r} as type: {!r}." raise_with_traceback ( SerializationError , msg . format ( data , data_type ) , err ) else : return self . _serialize ( data , ** kwargs )
Serialize generic data according to supplied data type .
54,847
def serialize_basic ( self , data , data_type , ** kwargs ) : custom_serializer = self . _get_custom_serializers ( data_type , ** kwargs ) if custom_serializer : return custom_serializer ( data ) if data_type == 'str' : return self . serialize_unicode ( data ) return eval ( data_type ) ( data )
Serialize basic builting data type . Serializes objects to str int float or bool .
54,848
def serialize_unicode ( self , data ) : try : return data . value except AttributeError : pass try : if isinstance ( data , unicode ) : return data . encode ( encoding = 'utf-8' ) except NameError : return str ( data ) else : return str ( data )
Special handling for serializing unicode strings in Py2 . Encode to UTF - 8 if unicode otherwise handle as a str .
54,849
def serialize_iter ( self , data , iter_type , div = None , ** kwargs ) : if isinstance ( data , str ) : raise SerializationError ( "Refuse str type as a valid iter type." ) serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = [ ] for d in data : try : serialized . append ( self . serialize_data ( d , iter_type , ** kwargs ) ) except ValueError : serialized . append ( None ) if div : serialized = [ '' if s is None else str ( s ) for s in serialized ] serialized = div . join ( serialized ) if 'xml' in serialization_ctxt : xml_desc = serialization_ctxt [ 'xml' ] xml_name = xml_desc [ 'name' ] is_wrapped = "wrapped" in xml_desc and xml_desc [ "wrapped" ] node_name = xml_desc . get ( "itemsName" , xml_name ) if is_wrapped : final_result = _create_xml_node ( xml_name , xml_desc . get ( 'prefix' , None ) , xml_desc . get ( 'ns' , None ) ) else : final_result = [ ] for el in serialized : if isinstance ( el , ET . Element ) : el_node = el else : el_node = _create_xml_node ( node_name , xml_desc . get ( 'prefix' , None ) , xml_desc . get ( 'ns' , None ) ) if el is not None : el_node . text = str ( el ) final_result . append ( el_node ) return final_result return serialized
Serialize iterable .
54,850
def serialize_dict ( self , attr , dict_type , ** kwargs ) : serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = { } for key , value in attr . items ( ) : try : serialized [ self . serialize_unicode ( key ) ] = self . serialize_data ( value , dict_type , ** kwargs ) except ValueError : serialized [ self . serialize_unicode ( key ) ] = None if 'xml' in serialization_ctxt : xml_desc = serialization_ctxt [ 'xml' ] xml_name = xml_desc [ 'name' ] final_result = _create_xml_node ( xml_name , xml_desc . get ( 'prefix' , None ) , xml_desc . get ( 'ns' , None ) ) for key , value in serialized . items ( ) : ET . SubElement ( final_result , key ) . text = value return final_result return serialized
Serialize a dictionary of objects .
54,851
def serialize_base64 ( attr , ** kwargs ) : encoded = b64encode ( attr ) . decode ( 'ascii' ) return encoded . strip ( '=' ) . replace ( '+' , '-' ) . replace ( '/' , '_' )
Serialize str into base - 64 string .
54,852
def serialize_date ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_date ( attr ) t = "{:04}-{:02}-{:02}" . format ( attr . year , attr . month , attr . day ) return t
Serialize Date object into ISO - 8601 formatted string .
54,853
def serialize_duration ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_duration ( attr ) return isodate . duration_isoformat ( attr )
Serialize TimeDelta object into ISO - 8601 formatted string .
54,854
def serialize_rfc ( attr , ** kwargs ) : try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) except AttributeError : raise TypeError ( "RFC1123 object must be valid Datetime object." ) return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT" . format ( Serializer . days [ utc . tm_wday ] , utc . tm_mday , Serializer . months [ utc . tm_mon ] , utc . tm_year , utc . tm_hour , utc . tm_min , utc . tm_sec )
Serialize Datetime object into RFC - 1123 formatted string .
54,855
def serialize_iso ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_datetime ( attr ) try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) if utc . tm_year > 9999 or utc . tm_year < 1 : raise OverflowError ( "Hit max or min date" ) microseconds = str ( attr . microsecond ) . rjust ( 6 , '0' ) . rstrip ( '0' ) . ljust ( 3 , '0' ) if microseconds : microseconds = '.' + microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}" . format ( utc . tm_year , utc . tm_mon , utc . tm_mday , utc . tm_hour , utc . tm_min , utc . tm_sec ) return date + microseconds + 'Z' except ( ValueError , OverflowError ) as err : msg = "Unable to serialize datetime object." raise_with_traceback ( SerializationError , msg , err ) except AttributeError as err : msg = "ISO-8601 object must be valid Datetime object." raise_with_traceback ( TypeError , msg , err )
Serialize Datetime object into ISO - 8601 formatted string .
54,856
def _deserialize(self, target_obj, data):
    """Call the deserializer on a model.

    :param target_obj: Target data type (name or class) to deserialize to.
    :param object data: Object to deserialize.
    :raises DeserializationError: if deserialization fails.
    :return: Deserialized object.
    """
    # If the payload is already a model instance, only re-deserialize the
    # attributes whose declared type is a known dependency (recursive pass).
    if hasattr(data, "_attribute_map"):
        constants = [name for name, config in getattr(data, '_validation', {}).items()
                     if config.get('constant')]
        try:
            for attr, mapconfig in data._attribute_map.items():
                if attr in constants:
                    continue
                value = getattr(data, attr)
                if value is None:
                    continue
                local_type = mapconfig['type']
                # Strip list/dict markers to get the inner model name.
                internal_data_type = local_type.strip('[]{}')
                if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
                    continue
                setattr(data, attr, self._deserialize(local_type, value))
            return data
        except AttributeError:
            # NOTE(review): a bare return yields None here; falling through
            # to the classification below may have been intended -- confirm.
            return

    response, class_name = self._classify_target(target_obj, data)

    # A string target means a basic/registered type name.
    if isinstance(response, basestring):
        return self.deserialize_data(data, response)
    elif isinstance(response, type) and issubclass(response, Enum):
        return self.deserialize_enum(data, response)

    if data is None:
        return data
    try:
        attributes = response._attribute_map
        d_attrs = {}
        for attr, attr_desc in attributes.items():
            # 'additional_properties' with an empty key is handled after
            # the declared attributes, via _build_additional_properties.
            if attr == "additional_properties" and attr_desc["key"] == '':
                continue
            raw_value = None
            # Copy the description: 'internalType' is injected per call.
            attr_desc = attr_desc.copy()
            internal_data_type = attr_desc["type"].strip('[]{}')
            if internal_data_type in self.dependencies:
                attr_desc["internalType"] = self.dependencies[internal_data_type]

            # Ask every configured extractor; two different non-None answers
            # for the same attribute is an error.
            for key_extractor in self.key_extractors:
                found_value = key_extractor(attr, attr_desc, data)
                if found_value is not None:
                    if raw_value is not None and raw_value != found_value:
                        raise KeyError('Use twice the key: "{}"'.format(attr))
                    raw_value = found_value

            value = self.deserialize_data(raw_value, attr_desc['type'])
            d_attrs[attr] = value
    except (AttributeError, TypeError, KeyError) as err:
        msg = "Unable to deserialize to object: " + class_name
        raise_with_traceback(DeserializationError, msg, err)
    else:
        additional_properties = self._build_additional_properties(attributes, data)
        return self._instantiate_model(response, d_attrs, additional_properties)
Call the deserializer on a model .
54,857
def _classify_target ( self , target , data ) : if target is None : return None , None if isinstance ( target , basestring ) : try : target = self . dependencies [ target ] except KeyError : return target , target try : target = target . _classify ( data , self . dependencies ) except AttributeError : pass return target , target . __class__ . __name__
Check to see whether the deserialization target object can be classified into a subclass . Once classification has been determined , the refined target and its class name are returned .
54,858
def _unpack_content(raw_data, content_type=None):
    """Extract the correct structure for deserialization.

    :param raw_data: Pipeline/transport response, raw text/bytes, or a
        stream; anything else is returned untouched.
    :param str content_type: How to parse when raw_data is text or bytes.
    :raises ValueError: if a pipeline response lacks the RawDeserializer
        context entry.
    :return: Unpacked content.
    """
    from .pipeline.universal import RawDeserializer
    # Pipeline response: the deserialized body lives in the context.
    context = getattr(raw_data, "context", {})
    if context:
        if RawDeserializer.CONTEXT_NAME in context:
            return context[RawDeserializer.CONTEXT_NAME]
        raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
    # Transport-level response: body/text() are callables.
    if hasattr(raw_data, "body"):
        return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
    # presumably a 'requests' response (_content_consumed is its marker;
    # text is a property there) -- confirm against callers.
    if hasattr(raw_data, '_content_consumed'):
        return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
    # Raw text, bytes, or a readable stream.
    if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'):
        return RawDeserializer.deserialize_from_text(raw_data, content_type)
    return raw_data
Extract the correct structure for deserialization .
54,859
def _instantiate_model(self, response, attrs, additional_properties=None):
    """Instantiate a response model passing in deserialized args.

    :param response: Model class (callable) to construct, or an already
        built instance whose attributes are populated in place.
    :param dict attrs: Deserialized attribute values keyed by name.
    :param additional_properties: Optional dict of undeclared properties.
    :raises DeserializationError: if the model cannot be built/populated.
    """
    if callable(response):
        subtype = getattr(response, '_subtype_map', {})
        try:
            # Read-only and constant attributes are not constructor kwargs.
            readonly = [k for k, v in response._validation.items() if v.get('readonly')]
            const = [k for k, v in response._validation.items() if v.get('constant')]
            kwargs = {k: v for k, v in attrs.items()
                      if k not in subtype and k not in readonly + const}
            response_obj = response(**kwargs)
            # Read-only values must be set after construction.
            for attr in readonly:
                setattr(response_obj, attr, attrs.get(attr))
            if additional_properties:
                response_obj.additional_properties = additional_properties
            return response_obj
        except TypeError as err:
            # NOTE(review): if the TypeError fires before 'kwargs' is bound
            # (e.g. inside the list comps), this raises NameError instead --
            # confirm whether that path is reachable.
            msg = "Unable to deserialize {} into model {}. ".format(kwargs, response)
            raise DeserializationError(msg + str(err))
    else:
        # 'response' is already an instance: copy the attributes onto it.
        try:
            for attr, value in attrs.items():
                setattr(response, attr, value)
            return response
        except Exception as exp:
            msg = "Unable to populate response model. "
            msg += "Type: {}, Error: {}".format(type(response), exp)
            raise DeserializationError(msg)
Instantiate a response model passing in deserialized args .
54,860
def deserialize_data(self, data, data_type):
    """Process data for deserialization according to data type.

    :param object data: Raw value to deserialize.
    :param str data_type: Basic type name, registered type, '[type]' list,
        '{type}' dict, or a known model/enum name.
    :raises DeserializationError: if deserialization fails.
    :return: Deserialized object.
    """
    if data is None:
        return data

    try:
        if not data_type:
            return data
        if data_type in self.basic_types.values():
            return self.deserialize_basic(data, data_type)
        if data_type in self.deserialize_type:
            # Already the expected Python type: nothing to do.
            if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
                return data

            # An empty XML text node means None for text-parsed types.
            is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
            if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
                return None
            data_val = self.deserialize_type[data_type](data)
            return data_val

        # '[elementtype]' or '{elementtype}': dispatch on the bracket pair.
        iter_type = data_type[0] + data_type[-1]
        if iter_type in self.deserialize_type:
            return self.deserialize_type[iter_type](data, data_type[1:-1])

        obj_type = self.dependencies[data_type]
        if issubclass(obj_type, Enum):
            if isinstance(data, ET.Element):
                data = data.text
            return self.deserialize_enum(data, obj_type)
    except (ValueError, TypeError, AttributeError) as err:
        msg = "Unable to deserialize response data."
        msg += " Data: {}, {}".format(data, data_type)
        raise_with_traceback(DeserializationError, msg, err)
    else:
        # Not a basic/registered/iterable/enum type: deserialize as model.
        return self._deserialize(obj_type, data)
Process data for deserialization according to data type .
54,861
def deserialize_iter(self, attr, iter_type):
    """Deserialize an iterable.

    :param attr: List/set (or XML element whose children form the list).
    :param str iter_type: Type of each element.
    :raises DeserializationError: if attr is not an accepted iterable.
    :rtype: list
    """
    if attr is None:
        return None
    if isinstance(attr, ET.Element):
        # An XML list is the sequence of children of the wrapping element.
        attr = list(attr)
    if not isinstance(attr, (list, set)):
        raise DeserializationError(
            "Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))
        )
    deserialized = []
    for element in attr:
        deserialized.append(self.deserialize_data(element, iter_type))
    return deserialized
Deserialize an iterable .
54,862
def deserialize_dict(self, attr, dict_type):
    """Deserialize a dictionary.

    :param attr: Dict to deserialize; also accepts a list of
        {'key': ..., 'value': ...} records, or an XML element.
    :param str dict_type: Type of the dictionary values.
    :rtype: dict
    """
    if isinstance(attr, list):
        # List of key/value records.
        return {entry['key']: self.deserialize_data(entry['value'], dict_type)
                for entry in attr}

    if isinstance(attr, ET.Element):
        # XML dicts are {tag: text} of the children.
        attr = {child.tag: child.text for child in attr}

    result = {}
    for key, raw in attr.items():
        result[key] = self.deserialize_data(raw, dict_type)
    return result
Deserialize a dictionary .
54,863
def deserialize_object(self, attr, **kwargs):
    """Deserialize a generic object: dicts and lists are walked
    recursively, basic types are normalized, anything else raises.

    :param attr: Value to deserialize.
    :raises TypeError: for non-basic, non-container types.
    :return: Deserialized object.
    """
    if attr is None:
        return None
    if isinstance(attr, ET.Element):
        # Best effort: hand the XML element back untouched.
        return attr
    if isinstance(attr, basestring):
        return self.deserialize_basic(attr, 'str')

    obj_type = type(attr)
    if obj_type in self.basic_types:
        return self.deserialize_basic(attr, self.basic_types[obj_type])
    if obj_type is _long_type:
        return self.deserialize_long(attr)

    if obj_type == dict:
        result = {}
        for key, value in attr.items():
            try:
                result[key] = self.deserialize_object(value, **kwargs)
            except ValueError:
                # Unconvertible values become None rather than failing.
                result[key] = None
        return result

    if obj_type == list:
        result = []
        for element in attr:
            try:
                result.append(self.deserialize_object(element, **kwargs))
            except ValueError:
                # Unconvertible elements are silently dropped.
                pass
        return result

    error = "Cannot deserialize generic object with type: "
    raise TypeError(error + str(obj_type))
Deserialize a generic object . This will be handled as a dictionary .
54,864
def deserialize_basic(self, attr, data_type):
    """Deserialize basic builtin data type from string.

    Will attempt to convert to str, int, float or bool. Also accepts
    1, 0, 'true' and 'false' as valid bool values.

    :param attr: Value to deserialize (string or XML element).
    :param str data_type: Target basic type name.
    :raises TypeError: if the value (or the type name) is invalid.
    :return: Deserialized basic type, or None for falsy non-str input.
    """
    # If XML, the interesting part is the element text.
    if isinstance(attr, ET.Element):
        attr = attr.text
    if not attr:
        if data_type == "str":
            # None or '' deserializes to the empty string for 'str'.
            return ''
        # None, 0, '' all deserialize to None for the other basic types.
        return None

    if data_type == 'bool':
        if attr in [True, False, 1, 0]:
            return bool(attr)
        elif isinstance(attr, basestring):
            if attr.lower() in ['true', '1']:
                return True
            elif attr.lower() in ['false', '0']:
                return False
        raise TypeError("Invalid boolean value: {}".format(attr))

    if data_type == 'str':
        return self.deserialize_unicode(attr)

    # Previously this was eval(data_type)(attr), which would execute an
    # arbitrary type name from the wire format; restrict conversion to
    # the known basic constructors instead. (Unknown names now raise
    # TypeError -- caught by deserialize_data -- instead of NameError.)
    if data_type == 'int':
        return int(attr)
    if data_type == 'float':
        return float(attr)
    if data_type == 'long':
        return _long_type(attr)
    raise TypeError("Unknown basic data type: {}".format(data_type))
Deserialize basic builtin data type from string . Will attempt to convert to str int float and bool . This function will also accept 1 0 true and false as valid bool values .
54,865
def deserialize_unicode(data):
    """Preserve unicode objects in Python 2, otherwise return data
    as a string.

    :param data: Value to deserialize.
    :rtype: str or unicode
    """
    # Enum members pass through untouched.
    if isinstance(data, Enum):
        return data

    # On Python 2 a 'unicode' instance must be preserved verbatim;
    # referencing the name raises NameError on Python 3.
    try:
        if isinstance(data, unicode):
            return data
    except NameError:
        return str(data)
    else:
        return str(data)
Preserve unicode objects in Python 2 otherwise return data as a string .
54,866
def deserialize_enum(data, enum_obj):
    """Deserialize string into enum object.

    An unmatched string is returned as-is (with a warning) rather
    than raising; an integer is treated as an index into the members.

    :param data: String / int / enum member to deserialize.
    :param enum_obj: Enum class to deserialize into.
    :raises DeserializationError: if an integer index is out of range.
    :return: An enum member, or the raw string when unmatched.
    """
    if isinstance(data, enum_obj):
        return data
    if isinstance(data, Enum):
        # Member of another enum: compare by value.
        data = data.value
    if isinstance(data, int):
        # An int is interpreted as an index into the member list.
        try:
            return list(enum_obj.__members__.values())[data]
        except IndexError:
            error = "{!r} is not a valid index for enum {!r}"
            raise DeserializationError(error.format(data, enum_obj))
    try:
        return enum_obj(str(data))
    except ValueError:
        # Fall back to a case-insensitive match on the values.
        lowered = str(data).lower()
        for candidate in enum_obj:
            if candidate.value.lower() == lowered:
                return candidate
        _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
        return Deserializer.deserialize_unicode(data)
Deserialize string into enum object .
54,867
def deserialize_bytearray(attr):
    """Deserialize a base64 encoded string into a bytearray.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: bytearray
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    raw = b64decode(attr)
    return bytearray(raw)
Deserialize string into bytearray .
54,868
def deserialize_base64(attr):
    """Deserialize a base64-url encoded string into bytes.

    Restores any stripped '=' padding and maps the url-safe alphabet
    ('-', '_') back to the standard one before decoding.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: bytes
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    padded = attr + '=' * (3 - (len(attr) + 3) % 4)
    standard = padded.replace('-', '+').replace('_', '/')
    return b64decode(standard)
Deserialize base64 encoded string into string .
54,869
def deserialize_decimal(attr):
    """Deserialize string into Decimal object.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: decimal.Decimal
    :raises DeserializationError: if the string is not a valid decimal.
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    try:
        return decimal.Decimal(attr)
    except decimal.DecimalException as err:
        msg = "Invalid decimal {}".format(attr)
        raise_with_traceback(DeserializationError, msg, err)
Deserialize string into Decimal object .
54,870
def deserialize_duration(attr):
    """Deserialize ISO-8601 formatted string into TimeDelta object.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: TimeDelta
    :raises DeserializationError: if the string format is invalid.
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    try:
        parsed = isodate.parse_duration(attr)
    except (ValueError, OverflowError, AttributeError) as err:
        msg = "Cannot deserialize duration object."
        raise_with_traceback(DeserializationError, msg, err)
    else:
        return parsed
Deserialize ISO - 8601 formatted string into TimeDelta object .
54,871
def deserialize_date(attr):
    """Deserialize ISO-8601 formatted string into Date object.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: Date
    :raises DeserializationError: if the string contains letters.
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    # Reject anything containing a letter up front: the wire format must
    # be digits and dashes only.
    if re.search(r"[^\W\d_]", attr, re.I | re.U):
        raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
    # Passing None defaults presumably forces fully specified dates
    # instead of letting isodate fill in month/day -- confirm intent.
    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
Deserialize ISO - 8601 formatted string into Date object .
54,872
def deserialize_rfc(attr):
    """Deserialize RFC-1123 formatted string into Datetime object.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: Datetime
    :raises DeserializationError: if the string format is invalid.
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    try:
        parsed = datetime.datetime.strptime(attr, "%a, %d %b %Y %H:%M:%S %Z")
        if not parsed.tzinfo:
            # strptime usually leaves the result naive; RFC-1123 times
            # are GMT, so pin UTC explicitly.
            parsed = parsed.replace(tzinfo=TZ_UTC)
    except ValueError as err:
        msg = "Cannot deserialize to rfc datetime object."
        raise_with_traceback(DeserializationError, msg, err)
    else:
        return parsed
Deserialize RFC - 1123 formatted string into Datetime object .
54,873
def deserialize_iso(attr):
    """Deserialize ISO-8601 formatted string into Datetime object.

    :param attr: Response string (or XML element) to deserialize.
    :rtype: Datetime
    :raises DeserializationError: if the string format is invalid or the
        date is out of the representable range.
    """
    if isinstance(attr, ET.Element):
        attr = attr.text
    try:
        attr = attr.upper()
        match = Deserializer.valid_date.match(attr)
        if not match:
            raise ValueError("Invalid datetime string: " + attr)

        # Truncate the fractional-second part to six digits -- anything
        # more precise than a microsecond cannot be represented.
        check_decimal = attr.split('.')
        if len(check_decimal) > 1:
            decimal_str = ""
            for digit in check_decimal[1]:
                if digit.isdigit():
                    decimal_str += digit
                else:
                    break
            if len(decimal_str) > 6:
                attr = attr.replace(decimal_str, decimal_str[0:6])

        date_obj = isodate.parse_datetime(attr)
        test_utc = date_obj.utctimetuple()
        if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
            raise OverflowError("Hit max or min date")
    except (ValueError, OverflowError, AttributeError) as err:
        msg = "Cannot deserialize datetime object."
        raise_with_traceback(DeserializationError, msg, err)
    else:
        return date_obj
Deserialize ISO - 8601 formatted string into Datetime object .
54,874
def raw(self):
    """Get current page as ClientRawResponse.

    :rtype: ClientRawResponse
    """
    wrapped = ClientRawResponse(self.current_page, self._response)
    if self._raw_headers:
        wrapped.add_headers(self._raw_headers)
    return wrapped
Get current page as ClientRawResponse .
54,875
def advance_page(self):
    """Force moving the cursor to the next azure call.

    :raises StopIteration: if there is no further page.
    :return: The current page.
    """
    if self.next_link is None:
        raise StopIteration("End of paging")
    # Reset the in-page cursor before loading the next response.
    self._current_page_iter_index = 0
    self._response = self._get_next(self.next_link)
    # NOTE(review): '_derserializer' looks misspelled, but it must match
    # the attribute name assigned elsewhere -- confirm before renaming.
    self._derserializer(self, self._response)
    return self.current_page
Force moving the cursor to the next azure call .
54,876
def _ensureAtomicity(fn):
    """Ensure atomicity of passed elements on the whole worker pool.

    Decorator: wraps a constant-publishing function so the update is
    re-sent until every key is visible on this worker, and duplicate
    constant names are rejected.
    """
    @ensureScoopStartedProperly
    def wrapper(*args, **kwargs):
        """Send the constants and wait until they are propagated."""
        from . import _control

        # Flush inbound messages so 'elements' is as fresh as possible.
        _control.execQueue.socket.pumpInfoSocket()

        # A constant may only ever be defined once, pool-wide.
        for key, value in kwargs.items():
            if key in itertools.chain(*(elem.keys() for elem in elements.values())):
                raise TypeError("This constant already exists: {0}.".format(key))

        # Retry the send until the keys show up for this worker.
        while all(key in elements.get(scoop.worker, []) for key in kwargs.keys()) is not True:
            scoop.logger.debug("Sending global variables {0}...".format(list(kwargs.keys())))
            fn(*args, **kwargs)
            _control.execQueue.socket.pumpInfoSocket()
            time.sleep(0.1)

        # Re-check for duplicates created concurrently by other workers.
        elementNames = list(itertools.chain(*(elem.keys() for elem in elements.values())))
        if len(elementNames) != len(set(elementNames)):
            raise TypeError("This constant already exists: {0}.".format(key))

    return wrapper
Ensure atomicity of passed elements on the whole worker pool
54,877
def getConst(name, timeout=0.1):
    """Get a shared constant.

    :param name: Name of the shared constant to retrieve.
    :param timeout: Maximum time, in seconds, to wait for propagation.
    :return: The shared value, or None if the timeout is reached.
    """
    from . import _control
    import time

    timeStamp = time.time()
    while True:
        # Pull any pending constant broadcasts.
        _control.execQueue.socket.pumpInfoSocket()

        # Merge every sender's constants into one lookup dict.
        constants = dict(reduce(lambda x, y: x + list(y.items()), elements.values(), []))

        timeoutHappened = time.time() - timeStamp > timeout
        if constants.get(name) is not None or timeoutHappened:
            return constants.get(name)
        time.sleep(0.01)
Get a shared constant .
54,878
def launchBootstraps():
    """Launch the bootstrap instances in separate subprocesses."""
    global processes
    worker_amount, verbosity, args = getArgs()
    was_origin = False

    if verbosity >= 1:
        sys.stderr.write("Launching {0} worker(s) using {1}.\n".format(
            worker_amount,
            os.environ['SHELL'] if 'SHELL' in os.environ else "an unknown shell",
        ))
        sys.stderr.flush()

    processes = []
    for _ in range(worker_amount):
        command = [sys.executable, "-m", BOOTSTRAP_MODULE] + args
        if verbosity >= 3:
            sys.stderr.write("Executing '{0}'...\n".format(command))
            sys.stderr.flush()
        processes.append(Popen(command))

    # Removing '--origin' (after launching) only serves to detect whether
    # this launcher hosts the origin worker.
    try:
        args.remove("--origin")
    except ValueError:
        pass
    else:
        was_origin = True

    if was_origin:
        # The origin decides the pool's lifetime: wait on it only.
        try:
            processes[0].wait()
        except KeyboardInterrupt:
            pass
    else:
        for p in processes:
            p.wait()
Launch the bootstrap instances in separate subprocesses
54,879
def resolve(self, s):
    """Resolve strings to objects using standard import and attribute
    syntax.

    The longest importable prefix is imported and the remaining dotted
    names are walked with getattr, importing submodules on demand.

    :param str s: Dotted path to resolve.
    :raises ValueError: if the path cannot be resolved; the original
        ImportError is attached as __cause__.
    """
    fragments = s.split('.')
    used = fragments.pop(0)
    try:
        found = self.importer(used)
        for frag in fragments:
            used += '.' + frag
            try:
                found = getattr(found, frag)
            except AttributeError:
                # Not an attribute yet: may be a submodule that has not
                # been imported; import it, then retry the lookup.
                self.importer(used)
                found = getattr(found, frag)
        return found
    except ImportError:
        e, tb = sys.exc_info()[1:]
        v = ValueError('Cannot resolve %r: %s' % (s, e))
        v.__cause__, v.__traceback__ = e, tb
        raise v
Resolve strings to objects using standard import and attribute syntax .
54,880
def as_tuple(self, value):
    """Utility function which converts lists to tuples.

    Non-list values pass through untouched.
    """
    return tuple(value) if isinstance(value, list) else value
Utility function which converts lists to tuples .
54,881
def configure_formatter(self, config):
    """Configure a formatter from a dictionary.

    A '()' key delegates to a custom factory; a factory that rejects the
    'format' kwarg is retried with 'fmt' (the older logging.Formatter
    parameter name). Otherwise a plain logging.Formatter is built from
    'format' and 'datefmt'.

    :param dict config: Formatter configuration.
    :rtype: logging.Formatter
    """
    if '()' in config:
        factory = config['()']  # for use in exception handler
        try:
            result = self.configure_custom(config)
        except TypeError as te:  # fixed: was Python-2-only 'except TypeError, te'
            if "'format'" not in str(te):
                raise
            # The factory uses the old 'fmt' parameter name; retry.
            config['fmt'] = config.pop('format')
            config['()'] = factory
            result = self.configure_custom(config)
    else:
        fmt = config.get('format', None)
        dfmt = config.get('datefmt', None)
        result = logging.Formatter(fmt, dfmt)
    return result
Configure a formatter from a dictionary .
54,882
def configure_filter(self, config):
    """Configure a filter from a dictionary.

    A '()' key delegates to a custom factory; otherwise a plain
    logging.Filter is built from the optional 'name'.
    """
    if '()' in config:
        return self.configure_custom(config)
    filter_name = config.get('name', '')
    return logging.Filter(filter_name)
Configure a filter from a dictionary .
54,883
def configure_logger(self, name, config, incremental=False):
    """Configure a non-root logger from a dictionary."""
    logger = logging.getLogger(name)
    self.common_logger_config(logger, config, incremental)
    # 'propagate' is the only logger-specific setting handled here.
    flag = config.get('propagate')
    if flag is not None:
        logger.propagate = flag
Configure a non - root logger from a dictionary .
54,884
def configure_root(self, config, incremental=False):
    """Configure a root logger from a dictionary."""
    self.common_logger_config(logging.getLogger(), config, incremental)
Configure a root logger from a dictionary .
54,885
def sliceImage(image, divWidth, divHeight):
    """Divide the received image into multiple tiles.

    :param image: PIL image (anything exposing .size and .crop).
    :param divWidth: Number of tile columns.
    :param divHeight: Number of tile rows.
    :return: List of cropped tiles, in row-major order.
    """
    w, h = image.size
    # Fixed: '/' yields a float on Python 3, which range() rejects;
    # '//' behaves identically to the old py2 int division.
    tile_w = w // divWidth
    tile_h = h // divHeight
    tiles = []
    for y in range(0, h - 1, tile_h):
        my = min(y + tile_h, h)
        for x in range(0, w - 1, tile_w):
            mx = min(x + tile_w, w)
            tiles.append(image.crop((x, y, mx, my)))
    return tiles
Divide the received image into multiple tiles
54,886
def resizeTile(index, size):
    """Apply Antialiasing resizing to tile.

    :param int index: Index into the module-level 'tiles' list.
    :param size: Target (width, height).
    :return: An sImage wrapper of the resized tile.
    """
    resized = tiles[index].resize(size, Image.ANTIALIAS)
    # tostring() is the PIL-era API (renamed tobytes() in Pillow).
    return sImage(resized.tostring(), resized.size, resized.mode)
Apply Antialiasing resizing to tile
54,887
def initLogging(verbosity=0, name="SCOOP"):
    """Creates a logger.

    :param int verbosity: Level selector, -2 (CRITICAL) to 4; 0 = WARNING.
    :param str name: Base name used for the logger/handler/formatter keys.
    :return: The configured logging.Logger instance.
    """
    global loggingConfig
    verbose_levels = {
        -2: "CRITICAL",
        -1: "ERROR",
        0: "WARNING",
        1: "INFO",
        2: "DEBUG",
        3: "DEBUG",
        # NOTE(review): "NOSET" looks like a typo for "NOTSET" -- a
        # verbosity of 4 would produce an invalid level; confirm.
        4: "NOSET",
    }
    log_handlers = {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "{name}Formatter".format(name=name),
            "stream": "ext://sys.stderr",
        },
    }
    # Accumulate into the module-level config so successive calls add
    # loggers instead of replacing previously configured ones.
    loggingConfig.update({
        "{name}Logger".format(name=name): {
            "handlers": ["console"],
            "level": verbose_levels[verbosity],
        },
    })
    dict_log_config = {
        "version": 1,
        "handlers": log_handlers,
        "loggers": loggingConfig,
        "formatters": {
            "{name}Formatter".format(name=name): {
                "format": "[%(asctime)-15s] %(module)-9s "
                          "%(levelname)-7s %(message)s",
            },
        },
    }
    dictConfig(dict_log_config)
    return logging.getLogger("{name}Logger".format(name=name))
Creates a logger .
54,888
def externalHostname(hosts):
    """Ensure external hostname is routable.

    :param hosts: List of (hostname, workers) tuples; the first entry
        holds the broker.
    :return: A routable hostname for the first host.
    :raises Exception: if the broker host cannot be resolved.
    """
    hostname = hosts[0][0]
    # A local-only name is useless to remote workers: substitute the
    # machine's qualified name when other hosts are involved.
    if hostname in localHostnames and len(hosts) > 1:
        hostname = socket.getfqdn().split(".")[0]
    try:
        socket.getaddrinfo(hostname, None)
    except socket.gaierror:
        raise Exception("\nThe first host (containing a broker) is not"
                        " routable.\nMake sure the address is correct.")
    return hostname
Ensure external hostname is routable .
54,889
def getHosts(filename=None, hostlist=None):
    """Return a list of hosts depending on the environment.

    Explicit file/list arguments win over scheduler auto-detection.
    """
    if filename:
        return getHostsFromFile(filename)
    if hostlist:
        return getHostsFromList(hostlist)
    if getEnv() == "SLURM":
        return getHostsFromSLURM()
    if getEnv() == "PBS":
        return getHostsFromPBS()
    if getEnv() == "SGE":
        return getHostsFromSGE()
    return getDefaultHosts()
Return a list of hosts depending on the environment
54,890
def getHostsFromFile(filename):
    """Parse a file to return a list of hosts.

    Each line holds a hostname optionally followed by a worker count
    (0 when absent); SLURM bracket expressions are expanded.

    :param str filename: Path of the hosts file.
    :return: List of (hostname, workers) tuples.
    """
    hostname_re = re.compile(r"^[^ /\t=\n]+")  # valid hostname prefix
    worker_re = re.compile(r"\d+")             # worker count
    hosts = []
    with open(filename) as hostfile:
        for line in hostfile:
            # Bracketed ranges (e.g. node[01-04]) are SLURM notation.
            if re.search(r'[\[\]]', line):
                hosts += parseSLURM(line.strip())
                continue
            host = hostname_re.search(line.strip())
            if not host:
                continue
            count_match = worker_re.search(line[host.end():])
            count = count_match.group() if count_match else 0
            hosts.append((host.group(), int(count)))
    return hosts
Parse a file to return a list of hosts .
54,891
def getHostsFromList(hostlist):
    """Return the hosts from the command line.

    Repeated host names are grouped into (hostname, count) tuples;
    SLURM bracket notation is expanded.

    :param hostlist: List of host names (possibly repeated).
    :return: List of (hostname, workers) tuples.
    """
    # Bracketed entries mean SLURM range syntax for the whole list.
    if any(re.search(r'[\[\]]', entry) for entry in hostlist):
        return parseSLURM(str(hostlist))

    grouped = groupTogether(hostlist)
    return [(host, len(list(members))) for host, members in groupby(grouped)]
Return the hosts from the command line
54,892
def parseSLURM(string):
    """Return a host list from a SLURM string.

    Delegates expansion of bracket ranges to 'scontrol show hostnames'.

    :param str string: SLURM-style node list (e.g. "node[01-04]").
    :return: List of (hostname, 1) tuples.
    """
    import subprocess, os
    output = subprocess.check_output(["scontrol", "show", "hostnames", string])
    if sys.version_info.major > 2:
        output = output.decode()
    names = filter(None, output.split(os.linesep))
    return [(name, 1) for name in names]
Return a host list from a SLURM string
54,893
def getHostsFromPBS():
    """Return a host list in a PBS environment.

    Reads the machine file pointed to by $PBS_NODEFILE, where each
    worker slot is one (repeated) hostname entry.
    """
    with open(os.environ["PBS_NODEFILE"], 'r') as machinefile:
        grouped = groupTogether(machinefile.read().split())
    return [(host, len(list(members))) for host, members in groupby(grouped)]
Return a host list in a PBS environment
54,894
def getHostsFromSGE():
    """Return a host list in a SGE environment.

    Reads $PE_HOSTFILE where each line is "<hostname> <slots> ...".
    """
    with open(os.environ["PE_HOSTFILE"], 'r') as hostfile:
        result = []
        for line in hostfile:
            fields = line.split()
            result.append((fields[0], int(fields[1])))
        return result
Return a host list in a SGE environment
54,895
def getWorkerQte(hosts):
    """Return the number of workers to launch depending on the environment.

    Scheduler-provided counts (SLURM, then PBS, then SGE) take
    precedence over the per-host totals.
    """
    for variable in ("SLURM_NTASKS", "PBS_NP", "NSLOTS"):
        if variable in os.environ:
            return int(os.environ[variable])
    return sum(workers for _, workers in hosts)
Return the number of workers to launch depending on the environment
54,896
def functionFactory(in_code, name, defaults, globals_, imports):
    """Create a function at runtime from marshalled bytecode.

    :param bytes in_code: marshal.dumps() of the function's code object.
    :param str name: Name to give the generated function.
    :param defaults: Tuple of default argument values.
    :param bytes globals_: pickle.dumps() of globals to merge in.
    :param dict imports: Mapping of local name -> module name to import
        into the function's globals.
    :return: The reconstituted function.
    """
    def generatedFunction():
        pass
    generatedFunction.__code__ = marshal.loads(in_code)
    generatedFunction.__name__ = name
    # Fixed: was '__defaults' (single trailing underscore pair missing),
    # which only created an unused attribute and dropped the defaults.
    generatedFunction.__defaults__ = defaults
    generatedFunction.__globals__.update(pickle.loads(globals_))
    for key, value in imports.items():
        imported_module = __import__(value)
        scoop.logger.debug("Dynamically loaded module {0}".format(value))
        generatedFunction.__globals__.update({key: imported_module})
    return generatedFunction
Creates a function at runtime using binary compiled inCode
54,897
def makeLambdaPicklable(lambda_function):
    """Take input lambda function l and makes it picklable.

    Attaches a __reduce_ex__ that marshals the lambda's bytecode so it
    can be rebuilt via unpickleLambda on the receiving side.

    :param lambda_function: Any object; only true lambdas are modified.
    :return: The same object, possibly augmented.
    """
    # type(lambda: None) is the plain function type; combined with the
    # '<lambda>' name check this identifies actual lambdas only.
    if isinstance(lambda_function, type(lambda: None)) and lambda_function.__name__ == '<lambda>':
        def __reduce_ex__(proto):
            # NOTE(review): only the code object is serialized -- closures
            # and globals are lost; confirm acceptable for callers.
            return unpickleLambda, (marshal.dumps(lambda_function.__code__), )
        lambda_function.__reduce_ex__ = __reduce_ex__
    return lambda_function
Take input lambda function l and makes it picklable .
54,898
def addConnector(self, wire1, wire2):
    """Add a connector between wire1 and wire2 in the network.

    Connectors live in levels (lists of (low, high) pairs); a new level
    is opened whenever the connector would overlap one already present
    in the last level. Self-loops are ignored.
    """
    if wire1 == wire2:
        return
    # Normalize so the pair is always (low, high).
    low, high = (wire2, wire1) if wire1 > wire2 else (wire1, wire2)

    if not self:
        # First connector: open the first level.
        self.append([(low, high)])
        return

    last_level = self[-1]
    for existing_low, existing_high in last_level:
        if existing_high >= low and existing_low <= high:
            # Overlaps the current level: start a new one.
            self.append([(low, high)])
            return
    last_level.append((low, high))
Add a connector between wire1 and wire2 in the network .
54,899
def sort(self, values):
    """Sort the values in-place based on the connectors in the network.

    Applies each level's compare-exchange operations in order.
    """
    for level in self:
        for low, high in level:
            if values[low] > values[high]:
                values[low], values[high] = values[high], values[low]
Sort the values in - place based on the connectors in the network .