idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
def serialize(self):
    """Convert the post to a dict compatible with ``json.dumps``."""
    data = super(Note, self).serialize()
    obj = {
        "objectType": self.object_type,
        "content": self.content,
    }
    if self.display_name:
        obj["displayName"] = self.display_name
    data.update({"verb": "post", "object": obj})
    return data
def context(self):
    """Build the OAuth client registration/update body as a JSON string."""
    # A client with no key yet must associate; otherwise it updates.
    reg_type = "client_update" if self.key is not None else "client_associate"
    payload = {"type": reg_type, "application_type": self.type}
    if self.key:
        payload["client_id"] = self.key
        payload["client_secret"] = self.secret
    if self.name:
        payload["application_name"] = self.name
    if self.logo:
        payload["logo_url"] = self.logo
    if self.contacts:
        payload["contacts"] = " ".join(self.contacts)
    if self.redirect:
        payload["redirect_uri"] = " ".join(self.redirect)
    return json.dumps(payload)
def request(self, server=None):
    """POST the client registration context to the server's endpoint.

    Returns the server's JSON response as a dict.

    Raises:
        ClientException: when the body is not JSON, or contains "error".
    """
    request = {
        "headers": {"Content-Type": "application/json"},
        "timeout": self._pump.timeout,
        "data": self.context,
    }
    url = "{proto}://{server}/{endpoint}".format(
        proto=self._pump.protocol,
        server=server or self.server,
        endpoint=self.ENDPOINT,
    )
    response = self._pump._requester(requests.post, url, **request)
    try:
        server_data = response.json()
    except ValueError:
        # Body wasn't JSON at all; surface the raw content.
        raise ClientException(response.content)
    if "error" in server_data:
        raise ClientException(server_data["error"], self.context)
    # Bug fix: corrected "recieved" -> "received" in the debug message.
    _log.debug(
        "Client registration received: %(id)s %(secret)s %(expire)s",
        {
            "id": server_data["client_id"],
            "secret": server_data["client_secret"],
            "expire": server_data["expires_at"],
        })
    return server_data
def register(self, server=None):
    """Register the client with the Pump API, storing the id and secret."""
    if self.key or self.secret:
        # Already registered; refresh the existing registration instead.
        return self.update()
    server_data = self.request(server)
    self.key = server_data["client_id"]
    self.secret = server_data["client_secret"]
    # NOTE(review): attribute name looks misspelled ("expiry"?) but is kept
    # because external code may read it -- confirm before renaming.
    self.expirey = server_data["expires_at"]
def update(self):
    """Push the client's current details to the Pump server.

    Raises:
        ClientException: when the key or secret is missing.
    """
    # The secret check wins when both are missing (matches prior behaviour,
    # where the later assignment overwrote the earlier message).
    if self.secret is None:
        raise ClientException("To update a client you need to provide the secret")
    if self.key is None:
        raise ClientException("To update a client you need to provide a key")
    self.request()
    return True
def compile_extensions(macros, compat=False):
    """Compiler subroutine testing whether required functions are available.

    The rrdtool headers disclose no version info, so availability is probed
    by compiling and linking a small C program against librrd.

    Args:
        macros: iterable of (name, value) macro pairs passed to the compiler.
        compat: when True, skip the probe and return the Extension list as-is.

    Returns:
        The list of Extension objects, or None if the probe path falls through.
    """
    import distutils.sysconfig
    import distutils.ccompiler
    import tempfile
    import shutil
    from textwrap import dedent
    libraries = ['rrd']
    include_dirs = [package_dir, '/usr/local/include']
    library_dirs = ['/usr/local/lib']
    compiler_args = dict(
        libraries=libraries,
        include_dirs=include_dirs,
        library_dirs=library_dirs,
        define_macros=macros)
    exts = [Extension('rrdtool', sources=['rrdtoolmodule.c'], **compiler_args)]
    if compat:
        return exts
    # NOTE(review): dedent() is called with no argument, which raises
    # TypeError -- the C probe source string appears to have been lost
    # from this copy; restore it from upstream before relying on this path.
    c_code = dedent()
    tmp_dir = tempfile.mkdtemp(prefix='tmp_python_rrdtool')
    bin_file_name = os.path.join(tmp_dir, 'test_rrdtool')
    file_name = bin_file_name + '.c'
    with open(file_name, 'w') as fp:
        fp.write(c_code)
    compiler = distutils.ccompiler.new_compiler()
    assert isinstance(compiler, distutils.ccompiler.CCompiler)
    for s in include_dirs:
        compiler.add_include_dir(s)
    for s in library_dirs:
        compiler.add_library_dir(s)
    for s in libraries:
        compiler.add_library(s)
    for s in macros:
        compiler.define_macro(*s)
    distutils.sysconfig.customize_compiler(compiler)
    try:
        compiler.link_executable(
            compiler.compile([file_name]),
            bin_file_name,
            libraries=libraries)
    except CompileError:
        sys.exit('Error: Unable to compile the binary module. Do you have the rrdtool header and libraries installed?')
        # NOTE(review): unreachable after sys.exit().
        ret = None
    except LinkError as exc:
        shutil.rmtree(tmp_dir)
        raise
    else:
        return exts
    shutil.rmtree(tmp_dir)
    return ret
def add(self, obj):
    """Post an "add" activity placing ``obj`` into this collection."""
    activity = {
        "verb": "add",
        "object": {"objectType": obj.object_type, "id": obj.id},
        "target": {"objectType": self.object_type, "id": self.id},
    }
    self._post_activity(activity)
    # Invalidate the cached member list so it is refetched on next access.
    self._members = None
def remove(self, obj):
    """Post a "remove" activity taking ``obj`` out of this collection."""
    activity = {
        "verb": "remove",
        "object": {"objectType": obj.object_type, "id": obj.id},
        "target": {"objectType": self.object_type, "id": self.id},
    }
    self._post_activity(activity)
    # Invalidate the cached member list so it is refetched on next access.
    self._members = None
def _post_activity(self, activity, unserialize=True):
    """POST ``activity`` to the authenticated user's feed.

    Returns False when the server gave no data, True otherwise; raises
    PumpException on a server-reported error.
    """
    feed_url = "{proto}://{server}/api/user/{username}/feed".format(
        proto=self._pump.protocol,
        server=self._pump.client.server,
        username=self._pump.client.nickname)
    data = self._pump.request(feed_url, method="POST", data=activity)
    if not data:
        return False
    if "error" in data:
        raise PumpException(data["error"])
    if unserialize:
        if "target" in data:
            self.unserialize(data["target"])
        else:
            obj = data["object"]
            # Fill in fields the server only supplies at the activity level.
            if "author" not in obj:
                obj["author"] = data["actor"]
            for key in ["to", "cc", "bto", "bcc"]:
                if key not in obj and key in data:
                    obj[key] = data[key]
            self.unserialize(obj)
    return True
def _add_links(self, links, key="href", proxy_key="proxyURL", endpoints=None):
    """Parse a block of links and register each via ``self._add_link``.

    Handles both an explicit ``links["links"]`` mapping and top-level
    endpoint entries; for the latter it prefers a pump_io proxy URL,
    then a plain "url", then the field named by ``key``.

    Returns ``self.links`` (the accumulated link mapping).
    """
    if endpoints is None:
        endpoints = ["likes", "replies", "shares", "self", "followers",
                     "following", "lists", "favorites", "members"]
    if links.get("links"):
        for endpoint in links['links']:
            # Entries may be a dict ({"href": ...}) or a bare URL string.
            if isinstance(links['links'][endpoint], dict):
                self._add_link(endpoint, links['links'][endpoint]["href"])
            else:
                self._add_link(endpoint, links["links"][endpoint])
    for endpoint in endpoints:
        if links.get(endpoint, None) is None:
            continue
        if "pump_io" in links[endpoint]:
            # Prefer the proxied URL when the server provides one.
            self._add_link(endpoint, links[endpoint]["pump_io"][proxy_key])
        elif "url" in links[endpoint]:
            self._add_link(endpoint, links[endpoint]["url"])
        else:
            self._add_link(endpoint, links[endpoint][key])
    return self.links
def _set_people(self, people):
    """Normalise the recipient(s) of the object into a list."""
    if hasattr(people, "object_type"):
        # A single addressable object was given; wrap it.
        people = [people]
    elif hasattr(people, "__iter__"):
        # Any other iterable is materialised into a list.
        people = list(people)
    return people
def from_file(self, filename):
    """Upload a file from a path on the local system.

    Uploads the raw bytes, posts the resulting object, then (when content,
    display name or license are set) posts a follow-up "update" activity.

    Returns self.
    """
    # Bug fix: the fallback MIME type was "application/octal-stream",
    # which is not a registered media type; the correct generic binary
    # type is "application/octet-stream".
    mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
    headers = {
        "Content-Type": mimetype,
        "Content-Length": str(os.path.getsize(filename)),
    }
    file_data = self._pump.request(
        "/api/user/{0}/uploads".format(self._pump.client.nickname),
        method="POST",
        data=open(filename, "rb").read(),
        headers=headers,
    )
    data = {
        "verb": "post",
        "object": file_data,
    }
    data.update(self.serialize())
    if not self.content and not self.display_name and not self.license:
        self._post_activity(data)
    else:
        # Post without unserializing; the update below carries the metadata.
        self._post_activity(data, unserialize=False)
        if self.content:
            file_data['content'] = self.content
        if self.display_name:
            file_data['displayName'] = self.display_name
        if self.license:
            file_data['license'] = self.license
        data = {
            "verb": "update",
            "object": file_data,
        }
        self._post_activity(data)
    return self
def unserialize(self, data):
    """Populate this Activity object from its JSON (dict) representation."""
    obj = data["object"]
    # Copy activity-level attribution/addressing down onto the object.
    if "author" not in obj:
        obj["author"] = data["actor"]
    for key in ["to", "cc", "bto", "bcc"]:
        if key not in obj and key in data:
            obj[key] = data[key]
    Mapper(pypump=self._pump).parse_map(self, data=data)
    self._add_links(data)
    return self
def create_store(self):
    """Instantiate the credential store for this PyPump instance.

    Raises:
        NotImplementedError: when no store_class has been configured.
    """
    if self.store_class is None:
        raise NotImplementedError("You need to specify PyPump.store_class or override PyPump.create_store method.")
    return self.store_class.load(self.client.webfinger, self)
def _build_url(self, endpoint):
    """Return a fully qualified URL for ``endpoint``."""
    server = None
    if "://" in endpoint:
        # Absolute URL: split off the host and keep the path.
        server, endpoint = self._deconstruct_url(endpoint)
    return "{proto}://{server}/{endpoint}".format(
        proto=self.protocol,
        server=server if server is not None else self.client.server,
        endpoint=endpoint.lstrip("/"),
    )
def _deconstruct_url(self, url):
    """Split a URL into its (server, endpoint) parts."""
    remainder = url.split("://", 1)[-1]
    host, path = remainder.split("/", 1)
    return (host, path)
def _add_client(self, url, key=None, secret=None):
    """Ensure a Client for the URL's server exists in the server cache.

    When key/secret are given they are used directly; otherwise a new
    client is registered with the remote server.
    """
    if "://" in url:
        server, _ = self._deconstruct_url(url)
    else:
        server = url
    if server in self._server_cache:
        return
    if key and secret:
        client = Client(
            webfinger=self.client.webfinger,
            key=key,
            secret=secret,
            type=self.client.type,
            name=self.client.name,
        )
        client.set_pump(self)
    else:
        client = Client(
            webfinger=self.client.webfinger,
            name=self.client.name,
            type=self.client.type,
        )
        client.set_pump(self)
        client.register(server)
    self._server_cache[server] = client
def request(self, endpoint, method="GET", data="", raw=False, params=None,
            retries=None, client=None, headers=None, timeout=None, **kwargs):
    """Make an OAuth-signed request to ``endpoint``; return parsed JSON.

    Retries up to ``retries`` times; raises PyPumpException on a 400 or
    a final non-OK response. ``client=False`` sends unauthenticated.
    """
    retries = self.retries if retries is None else retries
    timeout = self.timeout if timeout is None else timeout
    if client is None:
        client = self.setup_oauth_client(endpoint)
        c = client.client
        fnc = OAuth1Session(c.client_key,
                            client_secret=c.client_secret,
                            resource_owner_key=c.resource_owner_key,
                            resource_owner_secret=c.resource_owner_secret)
    elif client is False:
        fnc = requests
    # NOTE(review): if a truthy `client` object is passed, `fnc` is never
    # bound and the dispatch below raises UnboundLocalError -- confirm
    # callers only ever pass None or False.
    params = {} if params is None else params
    if data and isinstance(data, dict):
        data = json.dumps(data)
    if not raw:
        url = self._build_url(endpoint)
    else:
        url = endpoint
    headers = headers or {"Content-Type": "application/json"}
    request = {
        "headers": headers,
        "params": params,
        "timeout": timeout,
    }
    request.update(kwargs)
    if method == "POST":
        fnc = fnc.post
        request.update({"data": data})
    elif method == "PUT":
        fnc = fnc.put
        request.update({"data": data})
    elif method == "GET":
        fnc = fnc.get
    elif method == "DELETE":
        fnc = fnc.delete
    for attempt in range(1 + retries):
        response = self._requester(fnc=fnc, endpoint=endpoint, raw=raw, **request)
        if response.status_code == 200:
            return response.json()
        if response.status_code == 400:
            # Extract the server's error message, falling back to a
            # generic one when the body is empty or not JSON.
            try:
                try:
                    data = response.json()
                    error = data["error"]
                # NOTE(review): a missing "error" key raises KeyError here,
                # which is NOT caught by the IndexError handler -- confirm.
                except ValueError:
                    error = response.content
                if not error:
                    raise IndexError
            except IndexError:
                error = "400 - Bad request."
            raise PyPumpException(error)
        if response.ok:
            return response
    error = "Request Failed to {url} (response: {data} | status: {status})"
    error = error.format(url=url, data=response.content, status=response.status_code)
    raise PyPumpException(error)
def oauth_request(self):
    """Run the OAuth request-token dance and hand off user verification."""
    self._server_tokens = self.request_token()
    self.store["oauth-request-token"] = self._server_tokens["token"]
    self.store["oauth-request-secret"] = self._server_tokens["token_secret"]
    verifier = self.verifier_callback(self.construct_oauth_url())
    if verifier is not None:
        self.verifier(verifier)
def construct_oauth_url(self):
    """Build the URL the user must visit to authorise the request token."""
    root = "{0}://{1}/".format(self.protocol, self.client.server)
    # Follow a single redirect manually so the token is appended to the
    # canonical server URL.
    response = self._requester(requests.head, root, allow_redirects=False)
    base = response.headers['location'] if response.is_redirect else response.url
    path = "oauth/authorize?oauth_token={token}".format(
        token=self.store["oauth-request-token"])
    return "{server}{path}".format(server=base, path=path)
def setup_oauth_client(self, url=None):
    """Create the OAuth1 signer for requests to the Pump server."""
    if url and "://" in url:
        server, _ = self._deconstruct_url(url)
    else:
        server = self.client.server
    if server not in self._server_cache:
        self._add_client(server)
    if server != self.client.server:
        # Foreign server: two-legged auth with its cached client only.
        return OAuth1(
            client_key=self._server_cache[server].key,
            client_secret=self._server_cache[server].secret,
        )
    self.oauth = OAuth1(
        client_key=self.store["client-key"],
        client_secret=self.store["client-secret"],
        resource_owner_key=self.store["oauth-access-token"],
        resource_owner_secret=self.store["oauth-access-secret"],
    )
    return self.oauth
def request_token(self):
    """Fetch an OAuth request token from the server."""
    client_cfg = self._server_cache[self.client.server]
    oauth = OAuth1(
        client_key=client_cfg.key,
        client_secret=client_cfg.secret,
        callback_uri=self.callback,
    )
    response = self._requester(requests.post, "oauth/request_token", auth=oauth)
    parsed = parse.parse_qs(response.text)
    return {
        'token': parsed[self.PARAM_TOKEN][0],
        'token_secret': parsed[self.PARAM_TOKEN_SECRET][0],
    }
def request_access(self, verifier):
    """Exchange the verified request token for an OAuth access token."""
    client_cfg = self._server_cache[self.client.server]
    oauth = OAuth1(
        client_key=client_cfg.key,
        client_secret=client_cfg.secret,
        resource_owner_key=self.store["oauth-request-token"],
        resource_owner_secret=self.store["oauth-request-secret"],
        verifier=verifier,
    )
    response = self._requester(requests.post, "oauth/access_token", auth=oauth)
    parsed = parse.parse_qs(response.text)
    self.store["oauth-access-token"] = parsed[self.PARAM_TOKEN][0]
    self.store["oauth-access-secret"] = parsed[self.PARAM_TOKEN_SECRET][0]
    # The request tokens are single-use; discard them.
    self._server_tokens = {}
def logged_in(self):
    """Return True when we hold working OAuth credentials."""
    if "oauth-access-token" not in self.store:
        return False
    response = self.request("/api/whoami", allow_redirects=False)
    if response.status_code != 302:
        return False
    return response.headers["location"] == self.me.links["self"]
def cudnnCreate():
    """Initialise cuDNN and return an opaque context handle."""
    handle = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnCreate(ctypes.byref(handle)))
    return handle.value
def cudnnDestroy(handle):
    """Release the resources held by a cuDNN context."""
    cudnnCheckStatus(_libcudnn.cudnnDestroy(ctypes.c_void_p(handle)))
def cudnnSetStream(handle, id):
    """Attach the CUDA stream ``id`` to the cuDNN context."""
    cudnnCheckStatus(_libcudnn.cudnnSetStream(handle, id))
def cudnnGetStream(handle):
    """Return the CUDA stream currently attached to the cuDNN context."""
    stream = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnGetStream(handle, ctypes.byref(stream)))
    return stream.value
def cudnnCreateTensorDescriptor():
    """Create a Tensor descriptor object and return its handle."""
    desc = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnCreateTensorDescriptor(ctypes.byref(desc)))
    return desc.value
def cudnnSetTensor4dDescriptor(tensorDesc, format, dataType, n, c, h, w):
    """Initialise a previously created 4D tensor descriptor."""
    status = _libcudnn.cudnnSetTensor4dDescriptor(
        tensorDesc, format, dataType, n, c, h, w)
    cudnnCheckStatus(status)
def cudnnSetTensor4dDescriptorEx(tensorDesc, dataType, n, c, h, w,
                                 nStride, cStride, hStride, wStride):
    """Initialise a 4D tensor descriptor with explicit strides."""
    status = _libcudnn.cudnnSetTensor4dDescriptorEx(
        tensorDesc, dataType, n, c, h, w, nStride, cStride, hStride, wStride)
    cudnnCheckStatus(status)
def cudnnGetTensor4dDescriptor(tensorDesc):
    """Query a 4D tensor descriptor.

    Returns (dataType, n, c, h, w, nStride, cStride, hStride, wStride).
    """
    # dataType, n, c, h, w, nStride, cStride, hStride, wStride -- in order.
    fields = [ctypes.c_int() for _ in range(9)]
    status = _libcudnn.cudnnGetTensor4dDescriptor(
        tensorDesc, *[ctypes.byref(f) for f in fields])
    cudnnCheckStatus(status)
    return tuple(f.value for f in fields)
def cudnnCreateFilterDescriptor():
    """Create a filter descriptor and return its handle."""
    desc = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnCreateFilterDescriptor(ctypes.byref(desc)))
    return desc.value
def cudnnSetFilter4dDescriptor(wDesc, dataType, format, k, c, h, w):
    """Initialise a 4D filter descriptor."""
    status = _libcudnn.cudnnSetFilter4dDescriptor(
        wDesc, dataType, format, k, c, h, w)
    cudnnCheckStatus(status)
def cudnnGetFilter4dDescriptor(wDesc):
    """Query a 4D filter descriptor.

    Returns (dataType, format, k, c, h, w).
    """
    # dataType, format, k, c, h, w -- in order.
    fields = [ctypes.c_int() for _ in range(6)]
    status = _libcudnn.cudnnGetFilter4dDescriptor(
        wDesc, *[ctypes.byref(f) for f in fields])
    cudnnCheckStatus(status)
    return tuple(f.value for f in fields)
def cudnnCreateConvolutionDescriptor():
    """Create a convolution descriptor and return its handle."""
    desc = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnCreateConvolutionDescriptor(ctypes.byref(desc)))
    return desc.value
def cudnnSetConvolution2dDescriptor(convDesc, pad_h, pad_w, u, v,
                                    dilation_h, dilation_w, mode, computeType):
    """Initialise a 2D convolution descriptor."""
    status = _libcudnn.cudnnSetConvolution2dDescriptor(
        convDesc, pad_h, pad_w, u, v, dilation_h, dilation_w, mode, computeType)
    cudnnCheckStatus(status)
def cudnnGetConvolution2dDescriptor(convDesc):
    """Query a 2D convolution descriptor.

    Returns (pad_h, pad_w, u, v, dilation_h, dilation_w, mode, computeType).
    """
    pad_h = ctypes.c_int()
    pad_w = ctypes.c_int()
    u = ctypes.c_int()
    v = ctypes.c_int()
    dilation_h = ctypes.c_int()
    dilation_w = ctypes.c_int()
    mode = ctypes.c_int()
    computeType = ctypes.c_int()
    status = _libcudnn.cudnnGetConvolution2dDescriptor(
        convDesc, ctypes.byref(pad_h), ctypes.byref(pad_w),
        ctypes.byref(u), ctypes.byref(v),
        ctypes.byref(dilation_h), ctypes.byref(dilation_w),
        ctypes.byref(mode), ctypes.byref(computeType))
    cudnnCheckStatus(status)
    # Bug fix: previously returned the undefined names `upscalex`/`upscaley`
    # (NameError at runtime); return the queried dilation values instead.
    return (pad_h.value, pad_w.value, u.value, v.value,
            dilation_h.value, dilation_w.value, mode.value, computeType.value)
def cudnnGetConvolution2dForwardOutputDim(convDesc, inputTensorDesc, wDesc):
    """Return (n, c, h, w) of the output tensor for a forward convolution."""
    dims = [ctypes.c_int() for _ in range(4)]
    status = _libcudnn.cudnnGetConvolution2dForwardOutputDim(
        convDesc, inputTensorDesc, wDesc, *[ctypes.byref(d) for d in dims])
    cudnnCheckStatus(status)
    n, c, h, w = (d.value for d in dims)
    return n, c, h, w
def cudnnGetConvolutionForwardAlgorithm(handle, srcDesc, wDesc, convDesc,
                                        destDesc, preference, memoryLimitInbytes):
    """Pick the best forward-convolution algorithm for the given preference."""
    algo = ctypes.c_int()
    status = _libcudnn.cudnnGetConvolutionForwardAlgorithm(
        handle, srcDesc, wDesc, convDesc, destDesc, preference,
        ctypes.c_size_t(memoryLimitInbytes), ctypes.byref(algo))
    cudnnCheckStatus(status)
    # NOTE(review): returns the ctypes.c_int wrapper rather than .value,
    # unlike most getters here -- confirm callers depend on that.
    return algo
def cudnnGetConvolutionForwardWorkspaceSize(handle, srcDesc, wDesc,
                                            convDesc, destDesc, algo):
    """Return the GPU workspace size needed by cudnnConvolutionForward."""
    sizeInBytes = ctypes.c_size_t()
    status = _libcudnn.cudnnGetConvolutionForwardWorkspaceSize(
        handle, srcDesc, wDesc, convDesc, destDesc, algo,
        ctypes.byref(sizeInBytes))
    cudnnCheckStatus(status)
    # Returned as the ctypes.c_size_t wrapper (matches prior behaviour).
    return sizeInBytes
def cudnnSoftmaxForward(handle, algorithm, mode, alpha, srcDesc, srcData,
                        beta, destDesc, destData):
    """Compute the softmax function on ``srcData`` into ``destData``."""
    # alpha/beta scaling factors must match the tensor's element type.
    dataType = cudnnGetTensor4dDescriptor(destDesc)[0]
    if dataType == cudnnDataType['CUDNN_DATA_DOUBLE']:
        scalar = ctypes.c_double
    else:
        scalar = ctypes.c_float
    alphaRef = ctypes.byref(scalar(alpha))
    betaRef = ctypes.byref(scalar(beta))
    status = _libcudnn.cudnnSoftmaxForward(
        handle, algorithm, mode, alphaRef, srcDesc, srcData,
        betaRef, destDesc, destData)
    cudnnCheckStatus(status)
def cudnnCreatePoolingDescriptor():
    """Create a pooling descriptor and return its handle."""
    desc = ctypes.c_void_p()
    cudnnCheckStatus(_libcudnn.cudnnCreatePoolingDescriptor(ctypes.byref(desc)))
    return desc.value
def cudnnSetPooling2dDescriptor(poolingDesc, mode, windowHeight, windowWidth,
                                verticalPadding, horizontalPadding,
                                verticalStride, horizontalStride):
    """Initialise a 2D pooling descriptor."""
    status = _libcudnn.cudnnSetPooling2dDescriptor(
        poolingDesc, mode, windowHeight, windowWidth,
        verticalPadding, horizontalPadding, verticalStride, horizontalStride)
    cudnnCheckStatus(status)
def cudnnGetPooling2dDescriptor(poolingDesc):
    """Query a previously created 2D pooling descriptor.

    Returns (mode, windowHeight, windowWidth, verticalStride,
    horizontalStride).

    NOTE(review): the padding values are queried from the library but not
    included in the return tuple -- confirm whether callers need them.
    """
    mode = ctypes.c_int()
    windowHeight = ctypes.c_int()
    windowWidth = ctypes.c_int()
    verticalPadding = ctypes.c_int()
    horizontalPadding = ctypes.c_int()
    verticalStride = ctypes.c_int()
    horizontalStride = ctypes.c_int()
    status = _libcudnn.cudnnGetPooling2dDescriptor(
        poolingDesc, ctypes.byref(mode), ctypes.byref(windowHeight),
        ctypes.byref(windowWidth), ctypes.byref(verticalPadding),
        ctypes.byref(horizontalPadding), ctypes.byref(verticalStride),
        ctypes.byref(horizontalStride))
    cudnnCheckStatus(status)
    return mode.value, windowHeight.value, windowWidth.value, verticalStride.value, horizontalStride.value
def cudnnActivationBackward(handle, mode, alpha, srcDesc, srcData,
                            srcDiffDesc, srcDiffData, destDesc, destData,
                            beta, destDiffDesc, destDiffData):
    """Compute the gradient of the activation function."""
    # alpha/beta scaling factors must match the tensor's element type.
    dataType = cudnnGetTensor4dDescriptor(destDesc)[0]
    if dataType == cudnnDataType['CUDNN_DATA_DOUBLE']:
        scalar = ctypes.c_double
    else:
        scalar = ctypes.c_float
    alphaRef = ctypes.byref(scalar(alpha))
    betaRef = ctypes.byref(scalar(beta))
    status = _libcudnn.cudnnActivationBackward(
        handle, mode, alphaRef, srcDesc, srcData,
        srcDiffDesc, srcDiffData, destDesc, destData,
        betaRef, destDiffDesc, destDiffData)
    cudnnCheckStatus(status)
def __prefix_key(self, key):
    """Prefix ``key`` with the store's prefix unless already present."""
    prefix = self.prefix
    if prefix is None or key.startswith(prefix + "-"):
        return key
    return "{0}-{1}".format(prefix, key)
def export(self):
    """Export the store's contents as a plain dictionary."""
    return {key: value for key, value in self.items()}
def save(self):
    """Atomically write the store to ``self.filename`` as JSON.

    Writes to a timestamped temp file with owner-only permissions, then
    renames it over the target so a crash cannot leave a half-written store.

    Raises:
        StoreException: if ``self.filename`` has not been set.
    """
    if self.filename is None:
        raise StoreException("Filename must be set to write store to disk")
    # Bug fix: the temp-file template contained a literal "(unknown)" where
    # the {filename} placeholder belonged, so every store wrote its temp
    # file to the same "(unknown).<date>.tmp" path in the CWD.
    tmp_name = "{filename}.{date}.tmp".format(
        filename=self.filename,
        date=datetime.datetime.utcnow().strftime('%Y-%m-%dT%H_%M_%S.%f'))
    mode = stat.S_IRUSR | stat.S_IWUSR  # 0o600: credentials are secrets
    fd = os.open(tmp_name, os.O_WRONLY | os.O_CREAT, mode)
    fout = os.fdopen(fd, "w")
    fout.write(json.dumps(self.export()))
    fout.close()
    if os.path.isfile(self.filename):
        os.remove(self.filename)
    os.rename(tmp_name, self.filename)
def get_filename(cls):
    """Return the store's on-disk path, creating its directory if needed."""
    config_home = os.path.expanduser(
        os.environ.get("XDG_CONFIG_HOME", "~/.config"))
    base_path = os.path.join(config_home, "PyPump")
    if not os.path.isdir(base_path):
        os.makedirs(base_path)
    return os.path.join(base_path, "credentials.json")
def load(cls, webfinger, pypump):
    """Construct a store, populating it from disk when the file exists."""
    filename = cls.get_filename()
    if os.path.isfile(filename):
        contents = json.loads(open(filename).read())
        store = cls(contents, filename=filename)
    else:
        store = cls(filename=filename)
    # Keys are namespaced per webfinger identity.
    store.prefix = webfinger
    return store
def pause(message='Press any key to continue . . . '):
    """Print ``message`` (unless None) and block until a key is pressed."""
    if message is not None:
        print(message, end='')
        sys.stdout.flush()
    getch()
    print()
def covalent_bonds(atoms, threshold=1.1):
    """Return CovalentBond objects for every bonded pair in ``atoms``."""
    bonds = []
    for a, b in atoms:
        # Sum of the two covalent radii, converted pm -> angstrom.
        bond_distance = (element_data[a.element.title()]['atomic radius'] +
                         element_data[b.element.title()]['atomic radius']) / 100
        separation = distance(a._vector, b._vector)
        if separation <= bond_distance * threshold:
            bonds.append(CovalentBond(a, b, separation))
    return bonds
def find_covalent_bonds(ampal, max_range=2.2, threshold=1.1, tag=True):
    """Find all covalent bonds in the AMPAL object, optionally tagging atoms."""
    sectors = gen_sectors(ampal.get_atoms(), max_range * 1.1)
    bonds = []
    for sector in sectors.values():
        pairs = itertools.combinations(sector, 2)
        bonds.extend(covalent_bonds(pairs, threshold=threshold))
    bond_set = list(set(bonds))
    if tag:
        # Record each bond on both participating atoms.
        for bond in bond_set:
            for this_atom, other_atom in ((bond.a, bond.b), (bond.b, bond.a)):
                this_atom.tags.setdefault('covalent_bonds', []).append(other_atom)
    return bond_set
def generate_covalent_bond_graph(covalent_bonds):
    """Build a networkx Graph whose edges are the given covalent bonds."""
    bond_graph = networkx.Graph()
    bond_graph.add_edges_from((inter.a, inter.b) for inter in covalent_bonds)
    return bond_graph
def generate_bond_subgraphs_from_break(bond_graph, atom1, atom2):
    """Temporarily break the atom1-atom2 bond and return the components."""
    bond_graph.remove_edge(atom1, atom2)
    try:
        components = list(
            networkx.connected_component_subgraphs(bond_graph, copy=False))
    finally:
        # Always restore the broken edge, even if decomposition fails.
        bond_graph.add_edge(atom1, atom2)
    return components
def cap(v, l):
    """Return str(v), truncated to its last ``l`` characters if longer."""
    text = str(v)
    if len(text) <= l:
        return text
    return text[-l:]
def find_atoms_within_distance(atoms, cutoff_distance, point):
    """Return the atoms lying within ``cutoff_distance`` of ``point``."""
    return [atom for atom in atoms
            if distance(atom, point) <= cutoff_distance]
def centre_of_atoms(atoms, mass_weighted=True):
    """Return the (optionally mass-weighted) centre of a list of atoms."""
    points = [atom._vector for atom in atoms]
    # An empty mass list signals an unweighted centre to centre_of_mass.
    masses = [atom.mass for atom in atoms] if mass_weighted else []
    return centre_of_mass(points=points, masses=masses)
def assign_force_field(self, ff, mol2=False):
    """Assign force-field parameter ids to the Atoms in the AMPAL object.

    Each heavy atom gets a ``_ff_id`` tuple keyed by residue code and atom
    label; lookups fall back from the specific residue to the wildcard
    'WLD' entry and, when ``mol2`` is set, to the 'MOL2' table. Atoms with
    no parameters are tagged None and a NotParameterisedWarning is issued.
    """
    if hasattr(self, 'ligands'):
        atoms = self.get_atoms(ligands=True, inc_alt_states=True)
    else:
        atoms = self.get_atoms(inc_alt_states=True)
    for atom in atoms:
        w_str = None
        a_ff_id = None
        if atom.element == 'H':
            # Hydrogens are not parameterised; skip them.
            continue
        elif atom.ampal_parent.mol_code in ff:
            if atom.res_label in ff[atom.ampal_parent.mol_code]:
                # Exact residue/atom match.
                a_ff_id = (atom.ampal_parent.mol_code, atom.res_label)
            elif atom.res_label in ff['WLD']:
                # Wildcard (backbone-style) fallback.
                a_ff_id = ('WLD', atom.res_label)
            else:
                w_str = ('{} atom is not parameterised in the selected '
                         'force field for {} residues, this will be '
                         'ignored.').format(atom.res_label, atom.ampal_parent.mol_code)
        elif atom.res_label in ff['WLD']:
            a_ff_id = ('WLD', atom.res_label)
        elif mol2 and (atom.ampal_parent.mol_code.capitalize() in ff['MOL2']):
            a_ff_id = ('MOL2', atom.res_label.capitalize())
        else:
            if not mol2:
                w_str = ('{} ({}) atom is not parameterised in the selected'
                         ' residue force field. Try activating the heavy '
                         ' atom force field (haff).').format(atom.element, atom.res_label)
            else:
                w_str = ('{} ({}) atom is not parameterised in the selected'
                         ' force field.').format(atom.element, atom.res_label)
        if w_str:
            warnings.warn(w_str, NotParameterisedWarning)
        atom._ff_id = a_ff_id
    # Mark the assignment done so update_ff can skip redundant passes.
    self.tags['assigned_ff'] = True
    return
def update_ff(self, ff, mol2=False, force_ff_assign=False):
    """Assign force-field parameters unless already assigned (or forced)."""
    needs_assignment = (
        force_ff_assign
        or 'assigned_ff' not in self.tags
        or not self.tags['assigned_ff'])
    if needs_assignment:
        self.assign_force_field(ff, mol2=mol2)
    return
def get_internal_energy(self, assign_ff=True, ff=None, mol2=False,
                        force_ff_assign=False):
    """Calculate the BUFF internal energy of the AMPAL object."""
    if not ff:
        ff = global_settings['buff']['force_field']
    if assign_ff:
        self.update_ff(ff, mol2=mol2, force_ff_assign=force_ff_assign)
    interactions = find_intra_ampal(self, ff.distance_cutoff)
    return score_interactions(interactions, ff)
def rotate(self, angle, axis, point=None, radians=False, inc_alt_states=True):
    """Rotate every atom in the AMPAL object about ``axis``."""
    rotation = Quaternion.angle_and_axis(angle=angle, axis=axis, radians=radians)
    for atom in self.get_atoms(inc_alt_states=inc_alt_states):
        atom._vector = rotation.rotate_vector(v=atom._vector, point=point)
    return
def translate(self, vector, inc_alt_states=True):
    """Shift every atom in the AMPAL object by ``vector``."""
    shift = numpy.array(vector)
    for atom in self.get_atoms(inc_alt_states=inc_alt_states):
        atom._vector += shift
    return
def rmsd(self, other, backbone=False):
    """Calculates the RMSD between two AMPAL objects.

    Parameters
    ----------
    other
        Another AMPAL object of the same type as `self`.
    backbone : bool
        Compare only backbone atoms when both objects expose a backbone.

    Returns
    -------
    float
        Root-mean-square deviation between the two coordinate sets.
    """
    assert type(self) == type(other)
    if backbone and hasattr(self, 'backbone'):
        atoms_a = self.backbone.get_atoms()
        atoms_b = other.backbone.get_atoms()
    else:
        atoms_a = self.get_atoms()
        atoms_b = other.get_atoms()
    coords_a = [a._vector for a in atoms_a]
    coords_b = [a._vector for a in atoms_b]
    # Delegates to the module-level rmsd helper.
    return rmsd(points1=coords_a, points2=coords_b)
def append(self, item):
    """Appends a Monomer to the Polymer.

    Raises
    ------
    TypeError
        If `item` is not a Monomer.
    """
    # Guard clause: reject anything that is not a Monomer.
    if not isinstance(item, Monomer):
        raise TypeError(
            'Only Monomer objects can be appended to an Polymer.')
    self._monomers.append(item)
    return
def extend(self, polymer):
    """Extends the Polymer with the contents of another Polymer.

    Raises
    ------
    TypeError
        If `polymer` is not a Polymer.
    """
    # Guard clause: only Polymer instances may be merged in.
    if not isinstance(polymer, Polymer):
        raise TypeError(
            'Only Polymer objects may be merged with a Polymer using unary operator "+".')
    self._monomers.extend(polymer)
    return
def get_monomers(self, ligands=True):
    """Retrieves all the Monomers from the AMPAL object.

    Parameters
    ----------
    ligands : bool
        Include ligand monomers when the object has any.

    Returns
    -------
    iterator
        Iterator over the Monomers.
    """
    if ligands and self.ligands:
        combined = self._monomers + self.ligands._monomers
        return iter(combined)
    return iter(self._monomers)
def get_atoms(self, ligands=True, inc_alt_states=False):
    """Flat list of all the Atoms in the Polymer.

    Parameters
    ----------
    ligands : bool
        Include atoms of ligand monomers when present.
    inc_alt_states : bool
        Include atoms from alternate conformation states.

    Returns
    -------
    iterator
        Chained iterator over every atom of every monomer.
    """
    if ligands and self.ligands:
        members = self._monomers + self.ligands._monomers
    else:
        members = self._monomers
    # Each monomer's atoms are materialised eagerly, then chained flat.
    return itertools.chain(
        *(list(m.get_atoms(inc_alt_states=inc_alt_states)) for m in members))
def relabel_monomers(self, labels=None):
    """Relabels the Monomers either numerically or using a list of labels.

    Parameters
    ----------
    labels : list, optional
        One label per monomer. When omitted (or empty), monomers are
        numbered '1', '2', ... in order.

    Raises
    ------
    ValueError
        If `labels` is supplied but its length differs from the number
        of monomers.
    """
    if not labels:
        # Default: sequential numeric labels starting from 1.
        for index, monomer in enumerate(self._monomers, start=1):
            monomer.id = str(index)
        return
    if len(self._monomers) != len(labels):
        error_string = ('Number of Monomers ({}) and number of labels '
                        '({}) must be equal.')
        raise ValueError(error_string.format(len(self._monomers), len(labels)))
    for monomer, label in zip(self._monomers, labels):
        monomer.id = str(label)
    return
def relabel_atoms(self, start=1):
    """Relabels all Atoms in numerical order.

    Parameters
    ----------
    start : int
        Number assigned to the first atom; subsequent atoms count up.
    """
    for new_id, atom in enumerate(self.get_atoms(), start=start):
        atom.id = new_id
    return
def make_pdb(self, alt_states=False, inc_ligands=True):
    """Generates a PDB string for the Polymer.

    Parameters
    ----------
    alt_states : bool
        Forwarded to write_pdb to include alternate states.
    inc_ligands : bool
        Include ligand monomers in the output.

    Returns
    -------
    str
        The PDB-format text.
    """
    # Any monomer without an id triggers a full renumbering first.
    if any(not x.id for x in self._monomers):
        self.relabel_monomers()
    if self.ligands and inc_ligands:
        monomers = self._monomers + self.ligands._monomers
    else:
        monomers = self._monomers
    return write_pdb(monomers, self.id, alt_states=alt_states)
def rotate(self, angle, axis, point=None, radians=False):
    """Rotates Atom by `angle` about `axis`.

    Parameters
    ----------
    angle : float
        Rotation angle (degrees unless `radians` is True).
    axis
        Axis of rotation as a 3-vector.
    point
        Optional point the rotation axis passes through.
    radians : bool
        Interpret `angle` as radians when True.
    """
    rotation = Quaternion.angle_and_axis(angle=angle, axis=axis, radians=radians)
    self._vector = rotation.rotate_vector(v=self._vector, point=point)
    return
def dict_from_mmcif(mmcif, path=True):
    """Parse mmcif file into a dictionary.

    Scalar entries map key -> string; `loop_` tables map each column
    key -> list of values.

    Parameters
    ----------
    mmcif : str
        Path to an mmCIF file, or the file content itself when
        `path` is False.
    path : bool
        Whether `mmcif` is a file path.

    Returns
    -------
    cif_data : dict
        Parsed key/value data.
    """
    if path:
        with open(mmcif, 'r') as foo:
            lines = foo.readlines()
    else:
        lines = mmcif.splitlines()
    # Normalise whitespace: collapse runs of spaces, strip line ends.
    lines = [' '.join(x.strip().split()) for x in lines]
    loop = False
    cif_data = {}
    for i, line in enumerate(lines):
        if not line:
            continue
        if line == '#':
            # '#' terminates a loop_ block.
            loop = False
            continue
        if not loop:
            if line[:5] == 'loop_':
                # Start collecting column keys for a table.
                loop = True
                key_list = []
                continue
            elif line[0] == '_':
                if len(line.split()) == 1:
                    # Key alone on its line: value spans following lines
                    # until the next '_'-prefixed key.
                    current_key = line
                    count = 1
                    while True:
                        try:
                            if lines[i + count][0] != '_':
                                count += 1
                            elif i + count > len(lines):
                                break
                            else:
                                if count > 1:
                                    try:
                                        cif_data[current_key] = ' '.join(lines[i + 1:i + count])
                                    except IndexError:
                                        cif_data[current_key] = None
                                else:
                                    cif_data[current_key] = None
                                break
                        except IndexError:
                            # Ran off the end of the file.
                            break
                    continue
                elif len(line.split()) > 1:
                    # Key and value on the same line.
                    line = line.split()
                    try:
                        cif_data[line[0]] = ' '.join(line[1:])
                    except IndexError:
                        cif_data[line[0]] = None
                    continue
                else:
                    continue
        else:
            if line[0] == '_':
                if len(line.split()) == 1:
                    # Column key inside a loop_ header.
                    key_list.append(line)
                    if line not in cif_data.keys():
                        cif_data[line] = []
            else:
                # Data row: split respecting quoted fields.
                if '\"' in line and line.count('\"') % 2 == 0:
                    line_parts = [x.strip() for x in line.split('\"') if x]
                    line = []
                    for part in line_parts:
                        # Even-indexed parts are outside quotes; split them.
                        if line_parts.index(part) % 2 == 0:
                            for x in part.split():
                                line.append(x)
                        else:
                            line.append(part)
                elif '\'' in line and line.count('\'') % 2 == 0:
                    line = [x.strip() for x in line.split('\'') if x]
                elif len(key_list) == len(line.split()):
                    line = line.split()
                # Only rows matching the column count are recorded.
                if len(key_list) == len(line):
                    for j, v in enumerate(line):
                        cif_data[key_list[j]].append(line[j])
                else:
                    continue
    return cif_data
def get_protein_dict(cif_data):
    """Parse cif_data dict for a subset of its data.

    Parameters
    ----------
    cif_data : dict
        mmCIF data as produced by dict_from_mmcif. Must contain the
        '_database_PDB_rev.date_original' and '_database_PDB_rev.date'
        keys (string or list of strings).

    Returns
    -------
    protein_dict : dict
        Summary metadata with counts as ints, resolution/R-factor as
        floats and dates as datetime objects; missing or placeholder
        values ('?', '.', 'None') become None.
    """
    # Output key -> mmCIF data name holding its value.
    mmcif_data_names = {
        'keywords': '_struct_keywords.text',
        'header': '_struct_keywords.pdbx_keywords',
        'space_group': '_symmetry.space_group_name_H-M',
        'experimental_method': '_exptl.method',
        'crystal_growth': '_exptl_crystal_grow.pdbx_details',
        'resolution': '_refine.ls_d_res_high',
        'r_value_obs': '_refine.ls_R_factor_obs',
        'atoms_protein': '_refine_hist.pdbx_number_atoms_protein',
        'atoms_solvent': '_refine_hist.number_atoms_solvent',
        'atoms_ligand': '_refine_hist.pdbx_number_atoms_ligand',
        'atoms_nucleic_acid': '_refine_hist.pdbx_number_atoms_nucleic_acid',
        'atoms_total': '_refine_hist.number_atoms_total',
        'title': '_struct.title',
        'pdb_descriptor': '_struct.pdbx_descriptor',
        'model_details': '_struct.pdbx_model_details',
        'casp_flag': '_struct.pdbx_CASP_flag',
        'model_type_details': '_struct.pdbx_model_type_details',
        'ncbi_taxonomy': '_entity_src_nat.pdbx_ncbi_taxonomy_id',
        'ncbi_taxonomy_gene': '_entity_src_gen.pdbx_gene_src_ncbi_taxonomy_id',
        'ncbi_taxonomy_host_org': '_entity_src_gen.pdbx_host_org_ncbi_taxonomy_id',
    }
    protein_dict = {}
    for column_name, cif_name in mmcif_data_names.items():
        try:
            data = cif_data[cif_name]
        except (KeyError, IndexError):
            # Entry absent from this mmCIF file.
            data = None
        protein_dict[column_name] = data
    # Revision dates may be a plain string or a list of strings.
    if isinstance(cif_data['_database_PDB_rev.date_original'], str):
        protein_dict['deposition_date'] = cif_data['_database_PDB_rev.date_original']
    else:
        protein_dict['deposition_date'] = cif_data['_database_PDB_rev.date_original'][0]
    if isinstance(cif_data['_database_PDB_rev.date'], str):
        protein_dict['release_date'] = cif_data['_database_PDB_rev.date']
        protein_dict['last_modified_date'] = cif_data['_database_PDB_rev.date']
    else:
        # First revision is the release, last one the latest modification.
        protein_dict['release_date'] = cif_data['_database_PDB_rev.date'][0]
        protein_dict['last_modified_date'] = cif_data['_database_PDB_rev.date'][-1]
    # Keep only the first crystal-growth entry, if any.
    crystal_growth = protein_dict['crystal_growth']
    if type(crystal_growth) == list and len(crystal_growth) >= 1:
        protein_dict['crystal_growth'] = crystal_growth[0]
    else:
        protein_dict['crystal_growth'] = None
    # Taxonomy ids arrive as lists of strings; keep the first as an int.
    taxonomy_keys = ['ncbi_taxonomy', 'ncbi_taxonomy_gene',
                     'ncbi_taxonomy_host_org']
    for taxonomy_key in taxonomy_keys:
        if protein_dict[taxonomy_key]:
            if type(protein_dict[taxonomy_key]) == list:
                try:
                    protein_dict[taxonomy_key] = int(protein_dict[taxonomy_key][0])
                # BUG FIX: was `except ValueError or IndexError`, which
                # evaluates to just ValueError, so IndexError escaped.
                except (ValueError, IndexError):
                    protein_dict[taxonomy_key] = None
    # Coerce remaining values to their natural Python types.
    ints = ['atoms_ligand', 'atoms_nucleic_acid', 'atoms_protein',
            'atoms_solvent', 'atoms_total']
    floats = ['r_value_obs', 'resolution']
    dates = ['deposition_date', 'release_date', 'last_modified_date']
    for k, v in protein_dict.items():
        if v:
            if v == '?' or v == 'None' or v == '.':
                # mmCIF placeholders for missing data.
                protein_dict[k] = None
            elif k in ints:
                protein_dict[k] = int(v)
            elif k in floats:
                protein_dict[k] = float(v)
            elif k in dates:
                protein_dict[k] = datetime.datetime.strptime(v, '%Y-%m-%d')
            elif type(v) == str:
                # Strip mmCIF artefacts and surrounding quotes.
                v = v.replace('loop_', '')
                v = v.replace(' # ', '')
                if v[0] == v[-1] == '\'':
                    protein_dict[k] = v[1:-1]
    return protein_dict
def parse_PISCES_output(pisces_output, path=False):
    """Takes the output list of a PISCES cull and returns in a usable dictionary.

    Parameters
    ----------
    pisces_output : str
        PISCES cull text, or a path to it when `path` is True.
    path : bool
        Whether `pisces_output` is a file path.

    Returns
    -------
    pisces_dict : dict
        Maps 4-letter lowercase PDB code to a dict of length, method,
        resolution, R-factor, R-free and a list of chain ids.
    """
    pisces_dict = {}
    if path:
        content = Path(pisces_output).read_text().splitlines()[1:]
    else:
        content = pisces_output.splitlines()[1:]
    # Header row has already been dropped above.
    for line in content:
        fields = line.split()
        pdb = fields[0][:4].lower()
        chain = fields[0][-1]
        if pdb in pisces_dict:
            pisces_dict[pdb]['chains'].append(chain)
        else:
            pisces_dict[pdb] = {'length': fields[1],
                                'method': fields[2],
                                'resolution': fields[3],
                                'R-factor': fields[4],
                                'R-free': fields[5],
                                'chains': [chain]}
    return pisces_dict
def download_decode(URL, encoding='utf-8', verbose=True):
    """Downloads data from URL and returns decoded contents.

    Parameters
    ----------
    URL : str
        Address to fetch.
    encoding : str
        Text encoding used to decode the response body.
    verbose : bool
        Print progress/error messages.

    Returns
    -------
    str or None
        Decoded response body, or None if the request failed.
    """
    if verbose:
        print("Downloading data from " + URL)
    request = Request(URL)
    try:
        with urlopen(request) as remote:
            return remote.read().decode(encoding)
    except URLError as e:
        # Distinguish unreachable server from an HTTP-level failure.
        if hasattr(e, 'reason'):
            print('Server could not be reached.')
            print('Reason: ', e.reason)
        elif hasattr(e, 'code'):
            print('The server couldn\'t fulfill the request.')
            print('Error code: ', e.code)
        return None
def olderado_best_model(pdb_id):
    """Checks the Olderado web server and returns the most representative
    conformation for PDB NMR structures.

    Parameters
    ----------
    pdb_id : str
        PDB code (only the first four characters are used).

    Returns
    -------
    int or None
        Best model number, or None when the page or model info is
        unavailable.

    Raises
    ------
    ValueError
        If the page contains a non-numeric best-model entry.
    """
    pdb_code = pdb_id[:4].lower()
    olderado_url = 'http://www.ebi.ac.uk/pdbe/nmr/olderado/searchEntry?pdbCode=' + pdb_code
    olderado_page = download_decode(olderado_url, verbose=False)
    if not olderado_page:
        return None
    parsed_page = BeautifulSoup(olderado_page, 'html.parser')
    try:
        # Second table cell holds the representative model.
        best_model = parsed_page.find_all('td')[1]
    except IndexError:
        print("No model info could be found for {0} - ensure that it's an NMR structure.".format(pdb_id))
        return None
    try:
        model_no = int(best_model.string)
    except ValueError as v:
        print("Did not find a number for best model.")
        raise v
    return model_no
def buff_eval(params):
    """Builds and evaluates BUFF energy of model in parallelization.

    Parameters
    ----------
    params : tuple
        (specification, sequence, parsed_ind) — the model class, the
        sequence(s) to pack, and the parameters to build with.

    Returns
    -------
    Total BUFF interaction energy of the built model.
    """
    specification, sequence, parsed_ind = params
    candidate = specification(*parsed_ind)
    candidate.build()
    candidate.pack_new_sequences(sequence)
    return candidate.buff_interaction_energy.total_energy
def buff_internal_eval(params):
    """Builds and evaluates BUFF internal energy of a model in parallelization.

    Parameters
    ----------
    params : tuple
        (specification, sequence, parsed_ind) — the model class, the
        sequence(s) to pack, and the parameters to build with.

    Returns
    -------
    Total BUFF internal energy of the built model.
    """
    specification, sequence, parsed_ind = params
    candidate = specification(*parsed_ind)
    candidate.build()
    candidate.pack_new_sequences(sequence)
    return candidate.buff_internal_energy.total_energy
def rmsd_eval(rmsd_params):
    """Builds a model and runs profit against a reference model.

    Parameters
    ----------
    rmsd_params : tuple
        (specification, sequence, parsed_ind, reference_pdb).

    Returns
    -------
    Backbone RMSD reported by ProFit.
    """
    specification, sequence, parsed_ind, reference_pdb = rmsd_params
    candidate = specification(*parsed_ind)
    candidate.pack_new_sequences(sequence)
    # ProFit returns C-alpha, backbone and all-atom RMSDs; keep backbone.
    ca_rmsd, bb_rmsd, aa_rmsd = run_profit(
        candidate.pdb, reference_pdb, path1=False, path2=False)
    return bb_rmsd
def comparator_eval(comparator_params):
    """Gets BUFF score for interaction between two AMPAL objects.

    Parameters
    ----------
    comparator_params : tuple
        (top1, top2, params1, params2, seq1, seq2, movements), where
        movements holds the x/y/z rotations and translations applied
        to the second object before docking.

    Returns
    -------
    Total BUFF interaction energy of the combined model.
    """
    top1, top2, params1, params2, seq1, seq2, movements = comparator_params
    x_rot, y_rot, z_rot, x_trans, y_trans, z_trans = movements
    obj1 = top1(*params1)
    obj2 = top2(*params2)
    # Orient the second object, then translate it into position.
    for angle, axis in ((x_rot, [1, 0, 0]),
                        (y_rot, [0, 1, 0]),
                        (z_rot, [0, 0, 1])):
        obj2.rotate(angle, axis)
    obj2.translate([x_trans, y_trans, z_trans])
    model = obj1 + obj2
    model.relabel_all()
    model.pack_new_sequences(seq1 + seq2)
    return model.buff_interaction_energy.total_energy
def parameters(self, sequence, value_means, value_ranges, arrangement):
    """Relates the individual to be evolved to the full parameter string.

    Parameters
    ----------
    sequence : str
        Sequence to use during evaluation.
    value_means : list
        Mean of each variable parameter.
    value_ranges : list
        Allowed range around each mean; all must be positive.
    arrangement : list
        Parameter layout, which must reference every 'var<i>' name.

    Raises
    ------
    ValueError
        If a range is non-positive or the argument lengths/names do
        not match up.
    """
    self._params['sequence'] = sequence
    self._params['value_means'] = value_means
    self._params['value_ranges'] = value_ranges
    self._params['arrangement'] = arrangement
    if any(x <= 0 for x in self._params['value_ranges']):
        raise ValueError("range values must be greater than zero")
    # One 'var<i>' placeholder per variable parameter.
    variable_parameters = [
        'var{}'.format(i) for i in range(len(self._params['value_means']))]
    self._params['variable_parameters'] = variable_parameters
    if len(set(arrangement).intersection(variable_parameters)) != len(
            self._params['value_means']):
        raise ValueError("argument mismatch!")
    if len(self._params['value_ranges']) != len(self._params['value_means']):
        raise ValueError("argument mismatch!")
def make_energy_funnel_data(self, cores=1):
    """Compares models created during the minimisation to the best model.

    Rebuilds every logged model and measures its RMSD to the overall
    best-scoring model, producing (rmsd, score, generation) tuples.

    Parameters
    ----------
    cores : int
        When > 1 (and not on Windows), rebuilds run in a process pool.

    Returns
    -------
    list
        Output of funnel_rebuild for every logged model except the best.

    Raises
    ------
    AttributeError
        If the optimiser has not been run (no parameter log).
    """
    if not self.parameter_log:
        raise AttributeError('No parameter log data to make funnel, have you ran the '
                             'optimiser?')
    model_cls = self._params['specification']
    gen_tagged = []
    # Tag every logged (parameters, score) pair with its generation.
    for gen, models in enumerate(self.parameter_log):
        for model in models:
            gen_tagged.append((model[0], model[1], gen))
    # Sort by score; the first entry is the best model overall.
    sorted_pps = sorted(gen_tagged, key=lambda x: x[1])
    top_result = sorted_pps[0]
    top_result_model = model_cls(*top_result[0])
    # multiprocessing is avoided on win32; fall back to serial map.
    if (cores == 1) or (sys.platform == 'win32'):
        energy_rmsd_gen = map(
            self.funnel_rebuild,
            [(x, top_result_model, self._params['specification'])
             for x in sorted_pps[1:]])
    else:
        with futures.ProcessPoolExecutor(
                max_workers=self._params['processors']) as executor:
            energy_rmsd_gen = executor.map(
                self.funnel_rebuild,
                [(x, top_result_model, self._params['specification'])
                 for x in sorted_pps[1:]])
    return list(energy_rmsd_gen)
def funnel_rebuild(psg_trm_spec):
    """Rebuilds a model and compares it to a reference model.

    Parameters
    ----------
    psg_trm_spec : tuple
        ((parameters, score, generation), reference_model, specification).

    Returns
    -------
    tuple
        (rmsd_to_reference, score, generation).
    """
    (params, score, gen), top_result_model, specification = psg_trm_spec
    rebuilt = specification(*params)
    rmsd = top_result_model.rmsd(rebuilt)
    return rmsd, score, gen
def update_pop(self):
    """Updates the population according to crossover and fitness criteria.

    A challenger is generated per individual; each challenger replaces
    its counterpart only when its fitness is strictly greater.
    """
    candidates = [self.crossover(ind) for ind in self.population]
    self._params['model_count'] += len(candidates)
    self.assign_fitnesses(candidates)
    for i, challenger in enumerate(candidates):
        if challenger.fitness > self.population[i].fitness:
            self.population[i] = challenger
def initialize_pop(self):
    """Generates initial population with random positions and speeds."""
    self.population = self.toolbox.swarm(n=self._params['popsize'])
    if self._params['neighbours']:
        # Ring topology: each particle sees `neighbours` particles on
        # either side (indices wrap around) plus itself.
        for i in range(len(self.population)):
            self.population[i].ident = i
            self.population[i].neighbours = list(set(
                [(i - x) % len(self.population)
                 for x in range(1, self._params['neighbours'] + 1)] +
                [i] +
                [(i + x) % len(self.population)
                 for x in range(1, self._params['neighbours'] + 1)]))
    else:
        # Fully connected topology: every particle sees all particles.
        for i in range(len(self.population)):
            self.population[i].ident = i
            self.population[i].neighbours = [
                x for x in range(len(self.population))]
    self.assign_fitnesses(self.population)
    # Seed each particle's personal best with its starting state.
    for part in self.population:
        part.best = creator.Particle(part)
        part.best.fitness.values = part.fitness.values
def initialize_pop(self):
    """Assigns initial fitnesses.

    Registers the individual/population factories with the DEAP
    toolbox, builds the starting population and scores it.
    """
    self.toolbox.register("individual", self.generate)
    self.toolbox.register("population", tools.initRepeat, list,
                          self.toolbox.individual)
    self.population = self.toolbox.population(n=self._params['popsize'])
    self.assign_fitnesses(self.population)
    # Every evaluated individual counts towards the model budget.
    self._params['model_count'] += len(self.population)
def randomise_proposed_value(self):
    """Creates a randomly the proposed value.

    Draws a new proposed value from the parameter's configured
    distribution, range or list.

    Raises
    ------
    TypeError
        If the parameter is static or of unknown type.
    """
    ptype = self.parameter_type
    if ptype is MMCParameterType.UNIFORM_DIST:
        a, b = self.static_dist_or_list
        self.proposed_value = random.uniform(a, b)
    elif ptype is MMCParameterType.NORMAL_DIST:
        mu, sigma = self.static_dist_or_list
        self.proposed_value = random.normalvariate(mu, sigma)
    elif ptype is MMCParameterType.DISCRETE_RANGE:
        min_v, max_v, step = self.static_dist_or_list
        self.proposed_value = random.choice(numpy.arange(min_v, max_v, step))
    elif ptype is MMCParameterType.LIST:
        self.proposed_value = random.choice(self.static_dist_or_list)
    elif ptype is MMCParameterType.STATIC_VALUE:
        raise TypeError('This value is static, it cannot be mutated.')
    else:
        raise TypeError(
            'Cannot randomise this parameter, unknown parameter type.')
    return
def accept_proposed_value(self):
    """Changes the current value to the proposed value.

    No-op when there is no pending proposal; otherwise the proposal is
    promoted and cleared.
    """
    if self.proposed_value is None:
        return
    self.current_value = self.proposed_value
    self.proposed_value = None
    return
def start_optimisation(self, rounds, temp=298.15):
    """Begin the optimisation run.

    Parameters
    ----------
    rounds : int
        Number of Monte Carlo rounds to run.
    temp : float
        Simulation temperature passed to the MMC loop.
    """
    self._generate_initial_model()
    self._mmc_loop(rounds, temp=temp)
    return
59,891 | def _generate_initial_model ( self ) : initial_parameters = [ p . current_value for p in self . current_parameters ] try : initial_model = self . specification ( * initial_parameters ) except TypeError : raise TypeError ( 'Failed to build initial model. Make sure that the input ' 'parameters match the number and order of arguements ' 'expected by the input specification.' ) initial_model . pack_new_sequences ( self . sequences ) self . current_energy = self . eval_function ( initial_model ) self . best_energy = copy . deepcopy ( self . current_energy ) self . best_parameters = copy . deepcopy ( self . current_parameters ) self . best_model = initial_model return | Creates the initial model for the optimistation . |
def _mmc_loop(self, rounds, temp=298.15, verbose=True):
    """The main MMC loop.

    Each round mutates one randomly chosen non-static parameter,
    rebuilds and scores the model, then accepts or rejects the move via
    the Metropolis criterion (check_move).

    Parameters
    ----------
    rounds : int
        Number of Monte Carlo rounds.
    temp : float
        Temperature used by check_move.
    verbose : bool
        Write per-round progress to stdout.
    """
    current_round = 0
    while current_round < rounds:
        # Static parameters are never candidates for mutation.
        modifiable = list(filter(
            lambda p: p.parameter_type is not MMCParameterType.STATIC_VALUE,
            self.current_parameters))
        chosen_parameter = random.choice(modifiable)
        # NOTE(review): both branches call the same method — the
        # UNIFORM_DIST special case is currently redundant.
        if chosen_parameter.parameter_type is MMCParameterType.UNIFORM_DIST:
            chosen_parameter.randomise_proposed_value()
        else:
            chosen_parameter.randomise_proposed_value()
        # Only the chosen parameter carries a proposal; the rest keep
        # their current values.
        proposed_parameters = [p.current_value if p.proposed_value is None
                               else p.proposed_value
                               for p in self.current_parameters]
        model = self.specification(*proposed_parameters)
        model.pack_new_sequences(self.sequences)
        proposed_energy = self.eval_function(model)
        if verbose:
            sys.stdout.write(
                '\rRound: {}, Current energy: {}, Proposed energy: {} '
                '(best {}), {}. '.format(
                    current_round, float_f(self.current_energy),
                    float_f(proposed_energy), float_f(self.best_energy),
                    "ACCEPTED" if self.check_move(
                        proposed_energy, self.current_energy, t=temp)
                    else "DECLINED"))
            sys.stdout.flush()
        # NOTE(review): check_move is called again here; if it is
        # stochastic the printed verdict may disagree with this one.
        if self.check_move(proposed_energy, self.current_energy, t=temp):
            for p in self.current_parameters:
                p.accept_proposed_value()
            self.current_energy = proposed_energy
            if self.current_energy < self.best_energy:
                self.best_energy = copy.deepcopy(self.current_energy)
                self.best_parameters = copy.deepcopy(self.current_parameters)
                self.best_model = model
        else:
            for p in self.current_parameters:
                p.reject_proposed_value()
        current_round += 1
    return
def _crossover(self, ind):
    """Used by the evolution process to generate a new individual.

    Differential-evolution style: a trial vector is built from three
    distinct donors (drawn from the neighbourhood when one is defined)
    and mixed into a clone of donor `a` per-gene with probability
    `cxpb`; one randomly chosen gene always crosses over.
    """
    if self.neighbours:
        a, b, c = random.sample(
            [self.population[i] for i in ind.neighbours], 3)
    else:
        a, b, c = random.sample(self.population, 3)
    y = self.toolbox.clone(a)
    y.ident = ind.ident
    y.neighbours = ind.neighbours
    # Invalidate fitness so the candidate gets re-evaluated.
    del y.fitness.values
    # This gene index always receives the differential update.
    ident = random.randrange(len(self.value_means))
    for i, value in enumerate(y):
        if i == ident or random.random() < self.cxpb:
            entry = a[i] + random.lognormvariate(-1.2, 0.5) * \
                self.diff_weight * (b[i] - c[i])
            tries = 0
            # Resample until the value is inside the normalised [-1, 1]
            # box; after 10000 tries fall back to donor a's value.
            while abs(entry) > 1.0:
                tries += 1
                entry = a[i] + random.lognormvariate(-1.2, 0.5) * \
                    self.diff_weight * (b[i] - c[i])
                if tries > 10000:
                    entry = a[i]
            y[i] = entry
    return y
def _generate(self):
    """Generates a particle using the creator function.

    Positions are drawn uniformly in [-1, 1] per dimension and speeds
    uniformly within the configured maximum speed.
    """
    dims = len(self.value_means)
    part = creator.Particle(
        [random.uniform(-1, 1) for _ in range(dims)])
    part.speed = [random.uniform(-self.max_speed, self.max_speed)
                  for _ in range(dims)]
    # Speed clamp bounds used later by the update step.
    part.smin = -self.max_speed
    part.smax = self.max_speed
    part.ident = None
    part.neighbours = None
    return part
def update_particle(self, part, chi=0.729843788, c=2.05):
    """Constriction factor update particle method.

    Updates a particle's speed and position from its personal best and
    the best particle in its neighbourhood, clamping speed to
    [smin, smax].

    Parameters
    ----------
    part
        Particle to update in place.
    chi : float
        Constriction factor.
    c : float
        Cognitive/social acceleration coefficient.
    """
    neighbour_pool = [self.population[i] for i in part.neighbours]
    best_neighbour = max(neighbour_pool, key=lambda x: x.best.fitness)
    # Random per-dimension acceleration weights.
    ce1 = (c * random.uniform(0, 1) for _ in range(len(part)))
    ce2 = (c * random.uniform(0, 1) for _ in range(len(part)))
    # Pull towards personal best and neighbourhood best respectively.
    ce1_p = map(operator.mul, ce1, map(operator.sub, part.best, part))
    ce2_g = map(operator.mul, ce2, map(operator.sub, best_neighbour.best, part))
    chi_list = [chi] * len(part)
    chi_list2 = [1 - chi] * len(part)
    # Velocity delta: chi*(ce1_p + ce2_g) - (1 - chi)*speed, element-wise.
    a = map(operator.sub,
            map(operator.mul, chi_list, map(operator.add, ce1_p, ce2_g)),
            map(operator.mul, chi_list2, part.speed))
    part.speed = list(map(operator.add, part.speed, a))
    # Clamp each speed component to the particle's limits.
    for i, speed in enumerate(part.speed):
        if speed < part.smin:
            part.speed[i] = part.smin
        elif speed > part.smax:
            part.speed[i] = part.smax
    # Move the particle in place.
    part[:] = list(map(operator.add, part, part.speed))
    return
def _make_individual(self, paramlist):
    """Makes an individual particle.

    Parameters
    ----------
    paramlist : list
        Parameter values the individual is built from.
    """
    individual = creator.Individual(paramlist)
    individual.ident = None
    return individual
def number_of_mmols(code):
    """Number of .mmol files associated with code in the PDBE.

    First consults the module-level `mmols_numbers` cache; otherwise
    probes PDBe assembly files, then RCSB biological-assembly files,
    then the plain RCSB .pdb file, counting consecutive hits.

    Parameters
    ----------
    code : str
        PDB code.

    Returns
    -------
    int
        Number of mmol files found.

    Raises
    ------
    ValueError
        If no mmol file can be accessed at all.
    """
    # Fast path: cached counts, when the module-level table is loaded.
    if mmols_numbers:
        if code in mmols_numbers.keys():
            mmol = mmols_numbers[code][0]
            return mmol
    counter = 1
    # Probe PDBe assemblies until the first 404.
    while True:
        pdbe_url = "http://www.ebi.ac.uk/pdbe/static/entry/download/{0}-assembly-{1}.cif.gz".format(code, counter)
        r = requests.get(pdbe_url)
        if r.status_code == 200:
            counter += 1
        else:
            break
    # Nothing at PDBe: try RCSB gzipped biological assemblies.
    if counter == 1:
        while True:
            pdb_url = "http://www.rcsb.org/pdb/files/{0}.pdb{1}.gz".format(code.upper(), counter)
            r = requests.get(pdb_url)
            # encoding None indicates a binary (gzip) payload, i.e. a
            # real assembly file rather than an HTML error page.
            if r.status_code == 200 and r.encoding is None:
                counter += 1
            else:
                break
    # Still nothing: fall back to the plain .pdb entry file.
    if counter == 1:
        pdb_url = "http://files.rcsb.org/download/{0}.pdb".format(code.upper())
        r = requests.get(pdb_url)
        if r.status_code == 200:
            counter += 1
    num_mmols = counter - 1
    if num_mmols == 0:
        raise ValueError('Could not access ANY .mmol files for {0}'.format(code))
    return num_mmols
def get_mmol(code, mmol_number=None, outfile=None):
    """Get mmol file from PDBe and return its content as a string.
    Write to file if outfile given.

    Parameters
    ----------
    code : str
        PDB code.
    mmol_number : int, optional
        Assembly number; defaults to the preferred assembly.
    outfile : str, optional
        Path to write the retrieved text to.

    Returns
    -------
    str or None
        The mmol file content, or None if it could not be retrieved.

    Raises
    ------
    ValueError
        If `mmol_number` exceeds the known number of assemblies.
    """
    if not mmol_number:
        try:
            mmol_number = preferred_mmol(code=code)
        except (ValueError, TypeError, IOError):
            print("No mmols for {0}".format(code))
            return None
    # Validate against the cached assembly counts when available.
    if mmols_numbers:
        if code in mmols_numbers.keys():
            num_mmols = mmols_numbers[code][0]
            if mmol_number > num_mmols:
                raise ValueError('There are only {0} mmols for code {1}. mmol_number {2} is too big'.format(
                    num_mmols, code, mmol_number))
    pdbe_url = "http://www.ebi.ac.uk/pdbe/entry-files/download/{0}_{1}.mmol".format(code, mmol_number)
    r = requests.get(pdbe_url)
    if r.status_code == 200:
        mmol_string = r.text
    else:
        # PDBe failed: fall back to the gzipped RCSB assembly file.
        pdb_url = "http://www.rcsb.org/pdb/files/{0}.pdb{1}.gz".format(code.upper(), mmol_number)
        r = requests.get(pdb_url)
        if r.status_code == 200:
            temp_gz = tempfile.NamedTemporaryFile()
            temp_gz.write(r.content)
            with gzip.open(temp_gz.name, 'rb') as foo:
                mmol_string = foo.read().decode()
        else:
            print("Could not download mmol file for {0}.\n Got requests status_code {1}".format(
                code, r.status_code))
            return None
    if outfile and mmol_string:
        with open(outfile, 'w') as foo:
            foo.write(mmol_string)
    return mmol_string
def get_mmcif(code, outfile=None):
    """Get mmcif file associated with code from PDBE.

    Parameters
    ----------
    code : str
        PDB code.
    outfile : str, optional
        Path to write the retrieved text to.

    Returns
    -------
    str or None
        The mmCIF file content, or None if the download failed.
    """
    pdbe_url = "http://www.ebi.ac.uk/pdbe/entry-files/download/{0}.cif".format(code)
    response = requests.get(pdbe_url)
    if response.status_code == 200:
        mmcif_string = response.text
    else:
        print("Could not download mmcif file for {0}".format(code))
        mmcif_string = None
    # Optionally persist the retrieved text.
    if outfile and mmcif_string:
        with open(outfile, 'w') as foo:
            foo.write(mmcif_string)
    return mmcif_string
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.