idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
18,100
def nodes(self):
    """Return (and cache) the NodeBalancerNodes belonging to this config.

    NodeBalancerNode is the one derived api object that requires two
    parent ids, so both the config id and the nodebalancer id are passed
    as the parent_id tuple.
    """
    if hasattr(self, '_nodes'):
        return self._nodes

    endpoint = "{}/{}".format(NodeBalancerConfig.api_endpoint,
                              NodeBalancerNode.derived_url_path)
    fetched = self._client._get_objects(
        endpoint,
        NodeBalancerNode,
        model=self,
        parent_id=(self.id, self.nodebalancer_id),
    )
    self._set('_nodes', fetched)
    return self._nodes
This is a special derived_class relationship because NodeBalancerNode is the only api object that requires two parent_ids
18,101
def attach(self, to_linode, config=None):
    """Attaches this Volume to the given Linode.

    Accepts either api objects or raw ids for both arguments.
    """
    linode_id = to_linode.id if issubclass(type(to_linode), Base) else to_linode
    if not config:
        config_id = None
    elif issubclass(type(config), Base):
        config_id = config.id
    else:
        config_id = config

    result = self._client.post(
        '{}/attach'.format(Volume.api_endpoint),
        model=self,
        data={"linode_id": linode_id, "config": config_id},
    )

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when attaching volume!',
                                      json=result)

    self._populate(result)
    return True
Attaches this Volume to the given Linode
18,102
def detach(self):
    """Detaches this Volume if it is attached."""
    self._client.post('{}/detach'.format(Volume.api_endpoint), model=self)
    return True
Detaches this Volume if it is attached
18,103
def resize(self, size):
    """Resizes this Volume to the given size (in GB).

    Bug fix: the original passed ``model=`` and ``data=`` to
    ``str.format`` instead of ``post`` (so no model interpolation and no
    request body were sent), and then populated from ``result.json``
    (a nonexistent attribute of the parsed dict) instead of ``result``.

    :param size: The new size for this Volume.
    :returns: True on success.
    """
    result = self._client.post(
        '{}/resize'.format(Volume.api_endpoint),
        model=self,
        data={"size": size},
    )
    self._populate(result)
    return True
Resizes this Volume
18,104
def clone(self, label):
    """Clones this volume to a new volume in the same region with the given label.

    :param label: The label for the new volume.
    :returns: The newly-created Volume.
    """
    result = self._client.post(
        '{}/clone'.format(Volume.api_endpoint),
        model=self,
        data={'label': label},
    )

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response cloning volume!')

    return Volume(self._client, result['id'], result)
Clones this volume to a new volume in the same region with the given label
18,105
def _get_raw_objects(self):
    """Fetch and cache the first page of raw objects for this tag.

    Side effect: creates the ``_raw_objects`` attribute on first call.
    """
    if hasattr(self, '_raw_objects'):
        return self._raw_objects

    self._raw_objects = self._client.get(type(self).api_endpoint, model=self)
    return self._raw_objects
Helper function to populate the first page of raw objects for this tag . This has the side effect of creating the _raw_objects attribute of this object .
18,106
def objects(self):
    """Returns a paginated list of objects with this Tag.

    The list may contain any taggable object type, so items are proxied
    through TaggedObjectProxy.
    """
    first_page = self._get_raw_objects()
    page_url = type(self).api_endpoint.format(**vars(self))
    return PaginatedList.make_paginated_list(
        first_page, self._client, TaggedObjectProxy, page_url=page_url)
Returns a list of objects with this Tag . This list may contain any taggable object type .
18,107
def make_instance(cls, id, client, parent_id=None, json=None):
    """Dynamically create an api object based on the type in the response json.

    Here ``id`` is a type slug used to look up the concrete class in
    CLASS_MAP; the real object id and body live under ``json['data']``.
    Returns None for unrecognized types.

    NOTE(review): the ``parent_id`` argument is accepted but hard-coded to
    None in the Base.make call — presumably intentional for these types,
    but worth confirming against callers.
    """
    target_cls = CLASS_MAP.get(id)
    if target_cls is None:
        return None

    payload = json['data']
    return Base.make(payload['id'], client, target_cls,
                     parent_id=None, json=payload)
Overrides Base s make_instance to allow dynamic creation of objects based on the defined type in the response json .
18,108
def resize(self, new_size):
    """Resizes this disk.

    The Linode Instance this disk belongs to must have sufficient space
    available to accommodate the new size, and must be offline.
    """
    self._client.post(
        '{}/resize'.format(Disk.api_endpoint),
        model=self,
        data={"size": new_size},
    )
    return True
Resizes this disk . The Linode Instance this disk belongs to must have sufficient space available to accommodate the new size and must be offline .
18,109
def _populate(self, json):
    """Populate this Config, mapping the raw devices dict to api objects.

    Each device slot becomes None, a Disk, or a Volume depending on the
    keys present in the response.
    """
    from .volume import Volume

    DerivedBase._populate(self, json)

    mapped = {}
    for slot, raw in json['devices'].items():
        if not raw:
            mapped[slot] = None
            continue
        if 'disk_id' in raw and raw['disk_id']:
            mapped[slot] = Disk.make_instance(
                raw['disk_id'], self._client, parent_id=self.linode_id)
        else:
            mapped[slot] = Volume.make_instance(
                raw['volume_id'], self._client, parent_id=self.linode_id)

    self._set('devices', MappedObject(**mapped))
Map devices more nicely while populating .
18,110
def ips(self):
    """Return (and cache) this Instance's IP addresses.

    The ips collection is not normalized like the others, so an ad-hoc
    MappedObject is built with ipv4 (public/private/shared) and ipv6
    (slaac/link_local/pools) groupings.
    """
    if not hasattr(self, '_ips'):
        result = self._client.get("{}/ips".format(Instance.api_endpoint), model=self)

        if "ipv4" not in result:
            raise UnexpectedResponseError('Unexpected response loading IPs', json=result)

        v4pub = [IPAddress(self._client, c['address'], c)
                 for c in result['ipv4']['public']]
        v4pri = [IPAddress(self._client, c['address'], c)
                 for c in result['ipv4']['private']]
        shared_ips = [IPAddress(self._client, c['address'], c)
                      for c in result['ipv4']['shared']]

        slaac_json = result['ipv6']['slaac']
        slaac = IPAddress(self._client, slaac_json['address'], slaac_json)

        ll_json = result['ipv6']['link_local']
        link_local = IPAddress(self._client, ll_json['address'], ll_json)

        pools = [IPv6Pool(self._client, p['range'])
                 for p in result['ipv6']['global']]

        ips = MappedObject(**{
            "ipv4": {
                "public": v4pub,
                "private": v4pri,
                "shared": shared_ips,
            },
            "ipv6": {
                "slaac": slaac,
                "link_local": link_local,
                "pools": pools,
            },
        })
        self._set('_ips', ips)

    return self._ips
The ips related collection is not normalized like the others so we have to make an ad - hoc object to return for its response
18,111
def available_backups(self):
    """Return (and cache) the backups available to be restored.

    Groups the response into automatic backups and the current /
    in-progress snapshot.
    """
    if not hasattr(self, '_avail_backups'):
        result = self._client.get("{}/backups".format(Instance.api_endpoint), model=self)

        if 'automatic' not in result:
            raise UnexpectedResponseError('Unexpected response loading available backups!',
                                          json=result)

        automatic = [Backup(self._client, a['id'], self.id, a)
                     for a in result['automatic']]

        current_json = result['snapshot']['current']
        snap = (Backup(self._client, current_json['id'], self.id, current_json)
                if current_json else None)

        progress_json = result['snapshot']['in_progress']
        psnap = (Backup(self._client, progress_json['id'], self.id, progress_json)
                 if progress_json else None)

        self._set('_avail_backups', MappedObject(**{
            "automatic": automatic,
            "snapshot": {
                "current": snap,
                "in_progress": psnap,
            }
        }))

    return self._avail_backups
The backups response contains what backups are available to be restored .
18,112
def invalidate(self):
    """Clear out cached properties before delegating to Base.invalidate."""
    for cached in ('_avail_backups', '_ips'):
        if hasattr(self, cached):
            delattr(self, cached)

    Base.invalidate(self)
Clear out cached properties
18,113
def config_create(self, kernel=None, label=None, devices=[], disks=[],
                  volumes=[], **kwargs):
    """Creates a Linode Config with the given attributes.

    Devices may be given either directly via ``devices`` or as separate
    ``disks``/``volumes`` lists (ids or api objects), but not both.
    Slots are named sda..sdh (or xvda.. for non-kvm hypervisors) in the
    order devices are given.

    :returns: The new Config.
    """
    from .volume import Volume

    prefix = 'sd' if self.hypervisor == 'kvm' else 'xvd'
    device_names = [prefix + string.ascii_lowercase[i] for i in range(0, 8)]
    device_map = {device_names[i]: None for i in range(0, len(device_names))}

    if devices and (disks or volumes):
        raise ValueError('You may not call config_create with "devices" and '
                         'either of "disks" or "volumes" specified!')

    if not devices:
        # normalize scalars to lists, then build the combined device list
        if not isinstance(disks, list):
            disks = [disks]
        if not isinstance(volumes, list):
            volumes = [volumes]

        devices = []
        for d in disks:
            if d is None:
                devices.append(None)
            elif isinstance(d, Disk):
                devices.append(d)
            else:
                devices.append(Disk(self._client, int(d), self.id))
        for v in volumes:
            if v is None:
                devices.append(None)
            elif isinstance(v, Volume):
                devices.append(v)
            else:
                devices.append(Volume(self._client, int(v)))

    if not devices:
        raise ValueError('Must include at least one disk or volume!')

    for i, d in enumerate(devices):
        if d is None:
            pass
        elif isinstance(d, Disk):
            device_map[device_names[i]] = {'disk_id': d.id}
        elif isinstance(d, Volume):
            device_map[device_names[i]] = {'volume_id': d.id}
        else:
            raise TypeError('Disk or Volume expected!')

    params = {
        'kernel': kernel.id if issubclass(type(kernel), Base) else kernel,
        'label': label if label else "{}_config_{}".format(self.label, len(self.configs)),
        'devices': device_map,
    }
    params.update(kwargs)

    result = self._client.post("{}/configs".format(Instance.api_endpoint),
                               model=self, data=params)
    self.invalidate()

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response creating config!', json=result)

    return Config(self._client, result['id'], self.id, result)
Creates a Linode Config with the given attributes .
18,114
def enable_backups(self):
    """Enable Backups for this Instance.

    When enabled, the Instance's data is automatically backed up so it
    can be restored at a later date.
    """
    self._client.post("{}/backups/enable".format(Instance.api_endpoint), model=self)
    self.invalidate()
    return True
Enable Backups for this Instance . When enabled we will automatically backup your Instance s data so that it can be restored at a later date . For more information on Instance s Backups service and pricing see our Backups Page _
18,115
def mutate(self):
    """Upgrades this Instance to the latest generation type."""
    self._client.post('{}/mutate'.format(Instance.api_endpoint), model=self)
    return True
Upgrades this Instance to the latest generation type
18,116
def initiate_migration(self):
    """Initiates a pending migration that is already scheduled for this Instance."""
    self._client.post('{}/migrate'.format(Instance.api_endpoint), model=self)
Initiates a pending migration that is already scheduled for this Linode Instance
18,117
def clone(self, to_linode=None, region=None, service=None, configs=[], disks=[],
          label=None, group=None, with_backups=None):
    """Clones this linode into a new linode, or into a new linode in the given region.

    Exactly one of ``to_linode`` and ``region`` may be given; a region
    also requires a ``service`` (plan type).
    """
    if to_linode and region:
        raise ValueError('You may only specify one of "to_linode" and "region"')

    if region and not service:
        raise ValueError('Specifying a region requires a "service" as well')

    if not isinstance(configs, list) and not isinstance(configs, PaginatedList):
        configs = [configs]
    if not isinstance(disks, list) and not isinstance(disks, PaginatedList):
        disks = [disks]

    def _to_id(obj):
        # accept either api objects or raw ids
        return obj.id if issubclass(type(obj), Base) else obj

    params = {
        "linode_id": _to_id(to_linode),
        "region": _to_id(region),
        "type": _to_id(service),
        "configs": [_to_id(c) for c in configs] or None,
        "disks": [_to_id(d) for d in disks] or None,
        "label": label,
        "group": group,
        "with_backups": with_backups,
    }

    result = self._client.post('{}/clone'.format(Instance.api_endpoint),
                               model=self, data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response cloning Instance!', json=result)

    return Instance(self._client, result['id'], result)
Clones this linode into a new linode or into a new linode in the given region
18,118
def stats(self):
    """Returns the JSON stats for this Instance."""
    return self._client.get('{}/stats'.format(Instance.api_endpoint), model=self)
Returns the JSON stats for this Instance
18,119
def stats_for(self, dt):
    """Returns stats for the month containing the given datetime.

    Bug fix: the original formatted only the date into the path, yielding
    e.g. ``2018/05/stats/`` with no instance endpoint and no ``model=``
    to interpolate it; the correct API path is
    ``{instance endpoint}/stats/{YYYY}/{MM}``.

    :param dt: A datetime whose year/month select the stats period.
    :raises TypeError: If dt is not a datetime object.
    """
    if not isinstance(dt, datetime):
        raise TypeError('stats_for requires a datetime object!')
    return self._client.get(
        '{}/stats/{}'.format(Instance.api_endpoint, dt.strftime('%Y/%m')),
        model=self,
    )
Returns stats for the month containing the given datetime
18,120
def _populate(self, json):
    """Override populate to map user_defined_fields to fancy values.

    UDFs with ``oneof``/``manyof`` become select-one/select-many fields
    with choice lists; image slugs become Image objects.
    """
    Base._populate(self, json)

    mapped_udfs = []
    for udf in self.user_defined_fields:
        field_type = UserDefinedFieldType.text
        choices = None
        if hasattr(udf, 'oneof'):
            field_type = UserDefinedFieldType.select_one
            choices = udf.oneof.split(',')
        elif hasattr(udf, 'manyof'):
            field_type = UserDefinedFieldType.select_many
            choices = udf.manyof.split(',')

        mapped_udfs.append(UserDefinedField(
            udf.name,
            getattr(udf, 'label', None),
            getattr(udf, 'example', None),
            field_type,
            choices=choices,
        ))

    self._set('user_defined_fields', mapped_udfs)
    self._set('images', [Image(self._client, d) for d in self.images])
Override the populate method to map user_defined_fields to fancy values
18,121
def _populate(self, json):
    """Populate from_date/to_date from the response.

    The response's ``from`` attribute is a reserved word in python, so it
    is mapped to ``from_date``; ``to_date`` is populated for symmetry.
    """
    super(InvoiceItem, self)._populate(json)

    self.from_date = datetime.strptime(json['from'], DATE_FORMAT)
    self.to_date = datetime.strptime(json['to'], DATE_FORMAT)
Allows population of from_date from the returned from attribute which is a reserved word in python . Also populates to_date to be complete .
18,122
def reset_secret(self):
    """Resets the client secret for this client and returns the new secret."""
    result = self._client.post(
        "{}/reset_secret".format(OAuthClient.api_endpoint), model=self)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when resetting secret!',
                                      json=result)

    self._populate(result)
    return self.secret
Resets the client secret for this client .
18,123
def thumbnail(self, dump_to=None):
    """Returns binary data representing this client's 128x128 thumbnail image.

    If ``dump_to`` is given, also writes the image to a file at that path.
    """
    headers = {"Authorization": "token {}".format(self._client.token)}

    response = requests.get(
        '{}/{}/thumbnail'.format(self._client.base_url,
                                 OAuthClient.api_endpoint.format(id=self.id)),
        headers=headers)

    if response.status_code != 200:
        raise ApiError('No thumbnail found for OAuthClient {}'.format(self.id))

    if dump_to:
        with open(dump_to, 'wb+') as f:
            f.write(response.content)

    return response.content
This returns binary data that represents a 128x128 image . If dump_to is given attempts to write the image to a file at the given location .
18,124
def set_thumbnail(self, thumbnail):
    """Sets the thumbnail for this OAuth Client.

    If ``thumbnail`` is bytes, it is uploaded as a png directly; otherwise
    it is treated as a path to the thumbnail file and read in as bytes
    before uploading.
    """
    headers = {
        "Authorization": "token {}".format(self._client.token),
        "Content-type": "image/png",
    }

    if not isinstance(thumbnail, bytes):
        with open(thumbnail, 'rb') as f:
            thumbnail = f.read()

    response = requests.put(
        '{}/{}/thumbnail'.format(self._client.base_url,
                                 OAuthClient.api_endpoint.format(id=self.id)),
        headers=headers,
        data=thumbnail)

    if response.status_code != 200:
        errors = []
        j = response.json()
        if 'errors' in j:
            errors = [e['reason'] for e in j['errors']]
        raise ApiError('{}: {}'.format(response.status_code, errors), json=j)

    return True
Sets the thumbnail for this OAuth Client . If thumbnail is bytes uploads it as a png . Otherwise assumes thumbnail is a path to the thumbnail and reads it in as bytes before uploading .
18,125
def grants(self):
    """Retrieves (and caches) the grants for this user.

    If the user is unrestricted this will result in an ApiError. Only
    fetches from the api once unless the object is invalidated.
    """
    from linode_api4.objects.account import UserGrants

    if not hasattr(self, '_grants'):
        resp = self._client.get(
            UserGrants.api_endpoint.format(username=self.username))
        self._set('_grants', UserGrants(self._client, self.username, resp))

    return self._grants
Retrieves the grants for this user . If the user is unrestricted this will result in an ApiError . This is smart and will only fetch from the api once unless the object is invalidated .
18,126
def save(self):
    """Send this object's mutable values to the server in a PUT request.

    :returns: True on success, False if the server reported an error.
    """
    resp = self._client.put(type(self).api_endpoint, model=self,
                            data=self._serialize())
    return 'error' not in resp
Send this object s mutable values to the server in a PUT request
18,127
def delete(self):
    """Sends a DELETE request for this object, invalidating it on success.

    :returns: True on success, False if the server reported an error.
    """
    resp = self._client.delete(type(self).api_endpoint, model=self)

    if 'error' in resp:
        return False

    self.invalidate()
    return True
Sends a DELETE request for this object
18,128
def invalidate(self):
    """Invalidate all non-identifier Properties this object has locally.

    The next access will re-fetch them from the server.
    """
    props = type(self).properties
    for key, prop in props.items():
        if not prop.identifier:
            self._set(key, None)

    self._set('_populated', False)
Invalidates all non - identifier Properties this object has locally causing the next access to re - fetch them from the server
18,129
def _serialize(self):
    """Build a dict of all mutable Properties of this object.

    Related api objects are serialized as their ids.
    """
    serialized = {}
    for name, prop in type(self).properties.items():
        if not prop.mutable:
            continue
        value = getattr(self, name)
        serialized[name] = value.id if isinstance(value, Base) else value

    return serialized
A helper method to build a dict of all mutable Properties of this object
18,130
def _api_get(self):
    """GET this object from the server and populate it with the response."""
    response_json = self._client.get(type(self).api_endpoint, model=self)
    self._populate(response_json)
A helper method to GET this object from the server
18,131
def _populate(self, json):
    """Assign values from a JSON dict to this object per its Properties.

    Relationship properties become (lists of) api objects, slug
    relationships are constructed from their slug value, plain dicts and
    lists become MappedObjects, and datetime properties are parsed when
    possible (left as-is when parsing fails).
    """
    if not json:
        return

    self._set('_raw_json', json)

    properties = type(self).properties
    for key in json:
        # only non-identifier properties are populated from responses
        if key not in properties or properties[key].identifier:
            continue

        prop = properties[key]
        value = json[key]

        if prop.relationship and value is not None:
            if isinstance(value, list):
                related = []
                for d in value:
                    if 'id' not in d:
                        continue
                    obj = prop.relationship.make_instance(
                        d['id'], getattr(self, '_client'))
                    if obj:
                        obj._populate(d)
                    related.append(obj)
                self._set(key, related)
            else:
                related_id = value['id'] if isinstance(value, dict) else value
                obj = prop.relationship.make_instance(
                    related_id, getattr(self, '_client'))
                if obj and isinstance(value, dict):
                    obj._populate(value)
                self._set(key, obj)
        elif prop.slug_relationship and value is not None:
            self._set(key, prop.slug_relationship(self._client, value))
        elif type(value) is dict:
            self._set(key, MappedObject(**value))
        elif type(value) is list:
            self._set(key, MappedObject(_list=value)._list)
        elif prop.is_datetime:
            try:
                parsed = time.strptime(value, DATE_FORMAT)
                self._set(key, datetime.fromtimestamp(time.mktime(parsed)))
            except:  # unparseable value - store it verbatim
                self._set(key, value)
        else:
            self._set(key, value)

    self._set('_populated', True)
    self._set('_last_updated', datetime.now())
A helper method that given a JSON object representing this object assigns values based on the properties dict and the attributes of its Properties .
18,132
def make(id, client, cls, parent_id=None, json=None):
    """Make an api object based on an id and class.

    DerivedBase subclasses take a parent_id; plain Base subclasses do not.
    """
    from .dbase import DerivedBase

    if issubclass(cls, DerivedBase):
        return cls(client, id, parent_id, json)
    return cls(client, id, json)
Makes an api object based on an id and class .
18,133
def make_instance(cls, id, client, parent_id=None, json=None):
    """Makes an instance of the class this is called on and returns it."""
    return Base.make(id, client, cls, parent_id=parent_id, json=json)
Makes an instance of the class this is called on and returns it .
18,134
def to(self, linode):
    """Build the dict for assigning this IP to the given Linode.

    Helper for ip-assign; not intended for use outside that context.
    """
    from .linode import Instance

    if not isinstance(linode, Instance):
        raise ValueError("IP Address can only be assigned to a Linode!")

    return {"address": self.address, "linode_id": linode.id}
This is a helper method for ip - assign and should not be used outside of that context . It s used to cleanly build an IP Assign request with pretty python syntax .
18,135
def token_create(self, label=None, expiry=None, scopes=None, **kwargs):
    """Creates and returns a new Personal Access Token.

    ``expiry`` may be a datetime (formatted for the api) or a string.
    """
    if label:
        kwargs['label'] = label
    if expiry:
        if isinstance(expiry, datetime):
            expiry = datetime.strftime(expiry, "%Y-%m-%dT%H:%M:%S")
        kwargs['expiry'] = expiry
    if scopes:
        kwargs['scopes'] = scopes

    result = self.client.post('/profile/tokens', data=kwargs)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Personal Access '
                                      'Token!', json=result)

    return PersonalAccessToken(self.client, result['id'], result)
Creates and returns a new Personal Access Token
18,136
def ssh_key_upload(self, key, label):
    """Uploads a new SSH Public Key to your profile.

    ``key`` may be the key text itself or a path to a public key file;
    the uploaded key can be used in later Linode deployments.
    """
    if not key.startswith(SSH_KEY_TYPES):
        # not raw key material - maybe it's a path to a key file on disk
        path = os.path.expanduser(key)
        if os.path.isfile(path):
            with open(path) as f:
                key = f.read().strip()
        if not key.startswith(SSH_KEY_TYPES):
            raise ValueError('Invalid SSH Public Key')

    result = self.client.post('/profile/sshkeys',
                              data={'ssh_key': key, 'label': label})

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when uploading SSH Key!',
                                      json=result)

    return SSHKey(self.client, result['id'], result)
Uploads a new SSH Public Key to your profile This key can be used in later Linode deployments .
18,137
def client_create(self, label=None):
    """Creates a new LongviewClient, optionally with a given label.

    Fix: corrected the "Longivew" typo in the error message.

    :param label: The label for the new client.
    :returns: The new LongviewClient.
    """
    result = self.client.post('/longview/clients', data={"label": label})

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Longview '
                                      'Client!', json=result)

    return LongviewClient(self.client, result['id'], result)
Creates a new LongviewClient optionally with a given label .
18,138
def events_mark_seen(self, event):
    """Marks ``event`` as the last event we have seen.

    If ``event`` is an int it is treated as an event id; otherwise its
    ``id`` attribute is used.
    """
    last_seen = event if isinstance(event, int) else event.id
    self.client.post('{}/seen'.format(Event.api_endpoint),
                     model=Event(self.client, last_seen))
Marks event as the last event we have seen . If event is an int it is treated as an event_id otherwise it should be an event object whose id will be used .
18,139
def settings(self):
    """Returns the account settings data for this account.

    This is not a listing endpoint; a single AccountSettings object is
    returned.
    """
    result = self.client.get('/account/settings')

    if 'managed' not in result:
        raise UnexpectedResponseError('Unexpected response when getting account settings!',
                                      json=result)

    return AccountSettings(self.client, result['managed'], result)
Returns the account settings data for this account. This is not a listing endpoint.
18,140
def oauth_client_create(self, name, redirect_uri, **kwargs):
    """Make a new OAuth Client and return it."""
    params = {
        "label": name,
        "redirect_uri": redirect_uri,
    }
    params.update(kwargs)

    result = self.client.post('/account/oauth-clients', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating OAuth Client!',
                                      json=result)

    return OAuthClient(self.client, result['id'], result)
Make a new OAuth Client and return it
18,141
def transfer(self):
    """Returns a MappedObject containing the account's transfer pool data."""
    result = self.client.get('/account/transfer')

    if 'used' not in result:
        raise UnexpectedResponseError('Unexpected response when getting Transfer Pool!')

    return MappedObject(**result)
Returns a MappedObject containing the account s transfer pool data
18,142
def ip_allocate(self, linode, public=True):
    """Allocates an IP to an Instance you own.

    Additional IPs must be requested by opening a support ticket first.
    """
    linode_id = linode.id if isinstance(linode, Base) else linode

    result = self.client.post('/networking/ipv4/', data={
        "linode_id": linode_id,
        "type": "ipv4",
        "public": public,
    })

    if 'address' not in result:
        raise UnexpectedResponseError('Unexpected response when adding IPv4 address!',
                                      json=result)

    return IPAddress(self.client, result['address'], result)
Allocates an IP to a Instance you own . Additional IPs must be requested by opening a support ticket first .
18,143
def load(self, target_type, target_id, target_parent_id=None):
    """Construct and immediately load an object.

    Circumvents the lazy-loading scheme by making an API request right
    away. Does not load related objects.
    """
    instance = target_type.make_instance(target_id, self,
                                         parent_id=target_parent_id)
    instance._api_get()
    return instance
Constructs and immediately loads the object circumventing the lazy - loading scheme by immediately making an API request . Does not load related objects .
18,144
def _api_call(self, endpoint, model=None, method=None, data=None, filters=None):
    """Makes a call to the linode api.

    ``data`` should only be given for POST/PUT and must be a dictionary;
    ``filters`` are sent as the X-Filter header. Raises ApiError on a
    4xx/5xx response; returns the parsed JSON body (or None for 204).
    """
    if not self.token:
        raise RuntimeError("You do not have an API token!")

    if not method:
        raise ValueError("Method is required for API calls!")

    if model:
        endpoint = endpoint.format(**vars(model))

    url = '{}{}'.format(self.base_url, endpoint)
    headers = {
        'Authorization': "Bearer {}".format(self.token),
        'Content-Type': 'application/json',
        'User-Agent': self._user_agent,
    }
    if filters:
        headers['X-Filter'] = json.dumps(filters)

    body = json.dumps(data) if data is not None else None

    resp = method(url, headers=headers, data=body)

    warning = resp.headers.get('Warning', None)
    if warning:
        logger.warning('Received warning from server: {}'.format(warning))

    if 399 < resp.status_code < 600:
        j = None
        error_msg = '{}: '.format(resp.status_code)
        try:
            j = resp.json()
            if 'errors' in j.keys():
                for e in j['errors']:
                    error_msg += '{}; '.format(e['reason']) if 'reason' in e.keys() else ''
        except:  # body may not be valid JSON; report status code alone
            pass
        raise ApiError(error_msg, status=resp.status_code, json=j)

    # 204 No Content has no body to parse
    return resp.json() if resp.status_code != 204 else None
Makes a call to the linode api . Data should only be given if the method is POST or PUT and should be a dictionary
18,145
def image_create(self, disk, label=None, description=None):
    """Creates a new Image from a disk you own.

    ``disk`` may be a Disk object or a raw disk id.
    """
    params = {
        "disk_id": disk.id if issubclass(type(disk), Base) else disk,
    }
    if label is not None:
        params["label"] = label
    if description is not None:
        params["description"] = description

    result = self.post('/images', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating an '
                                      'Image from disk {}'.format(disk))

    return Image(self, result['id'], result)
Creates a new Image from a disk you own .
18,146
def nodebalancer_create(self, region, **kwargs):
    """Creates a new NodeBalancer in the given Region.

    Fix: corrected the "Nodebalaner" typo in the error message.

    :param region: A Region object or region id.
    :returns: The new NodeBalancer.
    """
    params = {
        "region": region.id if isinstance(region, Base) else region,
    }
    params.update(kwargs)

    result = self.post('/nodebalancers', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating NodeBalancer!',
                                      json=result)

    return NodeBalancer(self, result['id'], result)
Creates a new NodeBalancer in the given Region .
18,147
def domain_create(self, domain, master=True, **kwargs):
    """Registers a new Domain on the acting user's account.

    Make sure to point your registrar to Linode's nameservers so that
    Linode's DNS manager will correctly serve your domain.
    """
    params = {
        'domain': domain,
        'type': 'master' if master else 'slave',
    }
    params.update(kwargs)

    result = self.post('/domains', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Domain!',
                                      json=result)

    return Domain(self, result['id'], result)
Registers a new Domain on the acting user s account . Make sure to point your registrar to Linode s nameservers so that Linode s DNS manager will correctly serve your domain .
18,148
def tag_create(self, label, instances=None, domains=None, nodebalancers=None,
               volumes=None, entities=None):
    """Creates a new Tag and optionally applies it to the given entities.

    Entities may be given either via the per-type keyword lists (ids or
    api objects) or via ``entities``, a heterogeneous list of api objects.

    Fix: ``entities`` previously defaulted to a shared mutable list
    (``[]``); it now defaults to None to avoid the mutable-default
    pitfall. Behavior for all callers is unchanged.

    :returns: The new Tag.
    """
    if entities is None:
        entities = []

    linode_ids, nodebalancer_ids, domain_ids, volume_ids = [], [], [], []

    # sort per-type inputs into their id lists, accepting ids or objects
    sorter = zip((linode_ids, nodebalancer_ids, domain_ids, volume_ids),
                 (instances, nodebalancers, domains, volumes))
    for id_list, input_list in sorter:
        if input_list is not None:
            for cur in input_list:
                id_list.append(cur if isinstance(cur, int) else cur.id)

    # sort the heterogeneous entities list by concrete type
    type_map = {
        Instance: linode_ids,
        NodeBalancer: nodebalancer_ids,
        Domain: domain_ids,
        Volume: volume_ids,
    }
    for e in entities:
        if type(e) in type_map:
            type_map[type(e)].append(e.id)
        else:
            raise ValueError('Unsupported entity type {}'.format(type(e)))

    params = {
        'label': label,
        'linodes': linode_ids or None,
        'nodebalancers': nodebalancer_ids or None,
        'domains': domain_ids or None,
        'volumes': volume_ids or None,
    }
    result = self.post('/tags', data=params)

    if 'label' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Tag!',
                                      json=result)

    return Tag(self, result['label'], result)
Creates a new Tag and optionally applies it to the given entities .
18,149
def volume_create(self, label, region=None, linode=None, size=20, **kwargs):
    """Creates a new Block Storage Volume.

    The volume is created either in the given Region or attached to the
    given Instance; at least one of the two is required.
    """
    if not (region or linode):
        raise ValueError('region or linode required!')

    params = {
        "label": label,
        "size": size,
        "region": region.id if issubclass(type(region), Base) else region,
        "linode_id": linode.id if issubclass(type(linode), Base) else linode,
    }
    params.update(kwargs)

    result = self.post('/volumes', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating volume!',
                                      json=result)

    return Volume(self, result['id'], result)
Creates a new Block Storage Volume either in the given Region or attached to the given Instance .
18,150
def expire_token(self, token):
    """Expire a token immediately via the authentication server.

    This is the responsible way to log a user out - simply discarding the
    session without expiring the token does not really log the user out.
    """
    r = requests.post(
        self._login_uri("/oauth/token/expire"),
        data={
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "token": token,
        })

    if r.status_code != 200:
        raise ApiError("Failed to expire token!", r)

    return True
Given a token makes a request to the authentication server to expire it immediately . This is considered a responsible way to log out a user . If you simply remove the session your application has for the user without expiring their token the user is not _really_ logged out .
18,151
def grants(self):
    """Returns grants for the current user, or None if there is no response."""
    from linode_api4.objects.account import UserGrants

    resp = self._client.get('/profile/grants')

    if resp is None:
        return None
    return UserGrants(self._client, self.username, resp)
Returns grants for the current user
18,152
def add_whitelist_entry(self, address, netmask, note=None):
    """Adds a new entry to this user's IP whitelist, if enabled."""
    result = self._client.post("{}/whitelist".format(Profile.api_endpoint), data={
        "address": address,
        "netmask": netmask,
        "note": note,
    })

    if 'id' not in result:
        raise UnexpectedResponseError("Unexpected response creating whitelist entry!")

    return WhitelistEntry(result['id'], self._client, json=result)
Adds a new entry to this user s IP whitelist if enabled
18,153
def confirm_login_allowed(self, user):
    """Controls whether the given User may log in.

    This is a policy setting, independent of end-user authentication:
    active users may log in, inactive users are rejected.
    """
    if user.is_active:
        return
    raise forms.ValidationError(
        self.error_messages['inactive'],
        code='inactive',
    )
Controls whether the given User may log in . This is a policy setting independent of end - user authentication . This default behavior is to allow login by active users and reject login by inactive users .
18,154
def broken_chains(samples, chains):
    """Find the broken chains.

    A chain is broken in a sample when only some of its variables equal 1
    (i.e. the chain's variables disagree). Returns a boolean array of
    shape (num_samples, num_chains).
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_samples, _ = samples.shape
    broken = np.zeros((num_samples, len(chains)), dtype=bool, order='F')

    for cidx, chain in enumerate(chains):
        chain = np.asarray(list(chain) if isinstance(chain, set) else chain)

        if chain.ndim > 1:
            raise ValueError("chains should be 1D array_like objects")

        # a single-variable chain can never be broken
        if len(chain) <= 1:
            continue

        eq_one = (samples[:, chain] == 1)
        # broken iff some-but-not-all variables equal 1 (any XOR all)
        broken[:, cidx] = eq_one.any(axis=1) & ~eq_one.all(axis=1)

    return broken
Find the broken chains .
18,155
def discard(samples, chains):
    """Discard broken chains.

    Returns the unembedded samples (one column per chain, using the
    chain's first variable as representative) restricted to the rows with
    no broken chains, along with the indices of those rows.
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_chains = len(chains)
    broken = broken_chains(samples, chains)
    unbroken_idxs, = np.where(~broken.any(axis=1))

    def _representative(chain):
        # first element of the chain stands in for the whole chain
        seq = tuple(chain) if isinstance(chain, set) else chain
        return np.asarray(seq)[0]

    chain_variables = np.fromiter((_representative(ch) for ch in chains),
                                  count=num_chains, dtype=int)

    return samples[np.ix_(unbroken_idxs, chain_variables)], unbroken_idxs
Discard broken chains .
18,156
def majority_vote(samples, chains):
    """Resolve each chain to the most common value among its variables.

    Args:
        samples (array_like): 2D array of samples.
        chains (iterable): chains, each a collection of column indices.

    Returns:
        tuple: (unembedded samples, all row indices — no rows are
        discarded by this method).
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_samples, _ = samples.shape
    num_chains = len(chains)

    unembedded = np.empty((num_samples, num_chains), dtype='int8', order='F')

    if samples.all():
        # no zeros anywhere: treat the samples as spin-valued (-1/+1)
        for cidx, chain in enumerate(chains):
            unembedded[:, cidx] = 2 * (samples[:, chain].sum(axis=1) >= 0) - 1
    else:
        # binary (0/1) samples: majority means at least half the chain is 1
        for cidx, chain in enumerate(chains):
            half = len(chain) / 2
            unembedded[:, cidx] = (samples[:, chain].sum(axis=1) >= half)

    return unembedded, np.arange(num_samples)
Use the most common element in broken chains .
18,157
def weighted_random(samples, chains):
    """Resolve each chain by picking one of its variables uniformly at random.

    Since every variable in a chain is equally likely to be chosen, a
    chain's resolved value is a weighted random choice over its values.

    Args:
        samples (array_like): 2D array of samples.
        chains (iterable): chains, each a collection of column indices.

    Returns:
        tuple: (unembedded samples, all row indices — no rows discarded).
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    # one randomly chosen representative column per chain
    representatives = [np.random.choice(chain) for chain in chains]

    num_samples, _ = samples.shape
    return samples[:, representatives], np.arange(num_samples)
Determine the sample values of chains by weighed random choice .
18,158
def validate_anneal_schedule(self, anneal_schedule):
    """Raise an exception if the specified anneal schedule is invalid for this sampler.

    Args:
        anneal_schedule (list): list of 2-tuples ``(t, s)`` where ``t`` is
            time and ``s`` is the anneal fraction.

    Raises:
        RuntimeError: if the sampler does not accept ``anneal_schedule``
            or lacks the required properties.
        TypeError: if the schedule is not a list.
        ValueError: if any schedule constraint is violated.
    """
    if 'anneal_schedule' not in self.parameters:
        raise RuntimeError("anneal_schedule is not an accepted parameter for this sampler")

    properties = self.properties

    # sampler limits that the schedule must respect
    try:
        min_anneal_time, max_anneal_time = properties['annealing_time_range']
        max_anneal_schedule_points = properties['max_anneal_schedule_points']
    except KeyError:
        raise RuntimeError("annealing_time_range and max_anneal_schedule_points are not properties of this solver")

    if not isinstance(anneal_schedule, list):
        raise TypeError("anneal_schedule should be a list")
    elif len(anneal_schedule) < 2 or len(anneal_schedule) > max_anneal_schedule_points:
        msg = ("anneal_schedule must contain between 2 and {} points (contains {})"
               ).format(max_anneal_schedule_points, len(anneal_schedule))
        raise ValueError(msg)

    try:
        t_list, s_list = zip(*anneal_schedule)
    except ValueError:
        raise ValueError("anneal_schedule should be a list of 2-tuples")

    # times must be strictly increasing
    if not all(tail_t < lead_t for tail_t, lead_t in zip(t_list, t_list[1:])):
        raise ValueError("Time t must increase for all points in the schedule")

    if t_list[-1] > max_anneal_time:
        raise ValueError("schedule cannot be longer than the maximum anneal time of {}".format(max_anneal_time))

    # the schedule must end fully annealed (s == 1); it must start at
    # s == 0 (forward anneal, monotonically increasing s) or s == 1
    # (reverse anneal, no monotonicity constraint)
    start_s, end_s = s_list[0], s_list[-1]
    if end_s != 1:
        raise ValueError("In the final point, anneal fraction s must equal 1.")
    if start_s == 1:
        pass
    elif start_s == 0:
        if not all(tail_s <= lead_s for tail_s, lead_s in zip(s_list, s_list[1:])):
            raise ValueError("For forward anneals, anneal fraction s must monotonically increase")
    else:
        msg = ("In the first point, anneal fraction s must equal 0 for forward annealing or "
               "1 for reverse annealing")
        raise ValueError(msg)

    # the steepest allowed ramp is set by the minimum anneal time
    max_slope = 1.0 / min_anneal_time
    for (t0, s0), (t1, s1) in zip(anneal_schedule, anneal_schedule[1:]):
        if abs((s0 - s1) / (t0 - t1)) > max_slope:
            raise ValueError("the maximum slope cannot exceed {}".format(max_slope))
Raise an exception if the specified schedule is invalid for the sampler .
18,159
def target_to_source(target_adjacency, embedding):
    """Derive the source-graph adjacency from an embedding and target adjacency.

    Uses the built-in ``dict.items()`` rather than six's ``iteritems`` so the
    function has no external-compat dependency.

    Args:
        target_adjacency (dict): adjacency of the target graph, mapping
            each target node to a collection of its neighbors.
        embedding (dict): mapping from source nodes to chains (iterables
            of target nodes).

    Returns:
        dict: adjacency of the source graph, mapping each source node to
        the set of source nodes adjacent to it.

    Raises:
        ValueError: if a target node is used by more than one chain.
    """
    source_adjacency = {v: set() for v in embedding}

    # invert the embedding: target node -> source node
    reverse_embedding = {}
    for v, chain in embedding.items():
        for u in chain:
            if u in reverse_embedding:
                raise ValueError("target node {} assigned to more than one source node".format(u))
            reverse_embedding[u] = v

    # two source nodes are adjacent iff any of their chains' qubits are
    for v, n in reverse_embedding.items():
        for u in target_adjacency[v]:
            if u not in reverse_embedding:
                continue  # target node not used by the embedding
            m = reverse_embedding[u]
            if m == n:
                continue  # same chain; not a source edge
            source_adjacency[n].add(m)
            source_adjacency[m].add(n)

    return source_adjacency
Derive the source adjacency from an embedding and target adjacency .
18,160
def chain_to_quadratic(chain, target_adjacency, chain_strength):
    """Determine the quadratic biases that couple the given chain together.

    Performs a breadth-first traversal of the chain within the target
    graph, both collecting couplers and verifying connectivity.

    Args:
        chain (iterable/set): the target nodes forming the chain.
        target_adjacency (dict): adjacency of the target graph.
        chain_strength (number): coupling magnitude; each coupler gets
            bias ``-chain_strength``.

    Returns:
        dict: mapping (node, node) couplers to their bias.

    Raises:
        ValueError: if the chain is empty or not connected in the target.
    """
    quadratic = {}  # couplers internal to the chain

    seen = set()
    try:
        frontier = {next(iter(chain))}  # BFS from an arbitrary chain node
    except StopIteration:
        raise ValueError("chain must have at least one variable")

    while frontier:
        current, frontier = frontier, set()
        for v in current:
            if v in seen:
                continue
            seen.add(v)
            for u in target_adjacency[v]:
                if u not in chain:
                    continue
                frontier.add(u)
                # the reversed-key check avoids storing both orientations
                if u != v and (u, v) not in quadratic:
                    quadratic[(v, u)] = -chain_strength

    if len(chain) != len(seen):
        raise ValueError('{} is not a connected chain'.format(chain))

    return quadratic
Determine the quadratic biases that induce the given chain .
18,161
def chain_break_frequency(samples_like, embedding):
    """Determine the frequency of chain breaks in the given samples.

    Args:
        samples_like: a dimod SampleSet, or anything accepted by
            dimod.as_samples.
        embedding (dict): mapping from source variables to chains.

    Returns:
        dict: each source variable mapped to the (occurrence-weighted)
        fraction of samples in which its chain is broken.
    """
    if isinstance(samples_like, dimod.SampleSet):
        var_labels = samples_like.variables
        sample_array = samples_like.record.sample
        weights = samples_like.record.num_occurrences
    else:
        sample_array, var_labels = dimod.as_samples(samples_like)
        weights = np.ones(sample_array.shape[0])

    # when the labels are not already 0..n-1, relabel the chains to
    # column indices
    if not all(v == idx for idx, v in enumerate(var_labels)):
        to_idx = {v: idx for idx, v in enumerate(var_labels)}
        embedding = {v: {to_idx[u] for u in chain}
                     for v, chain in embedding.items()}

    if not embedding:
        return {}

    variables, chains = zip(*embedding.items())
    is_broken = broken_chains(sample_array, chains)

    return {v: float(np.average(is_broken[:, cidx], weights=weights))
            for cidx, v in enumerate(variables)}
Determine the frequency of chain breaks in the given samples .
18,162
def edgelist_to_adjacency(edgelist):
    """Convert an iterable of edges into an adjacency dict.

    Args:
        edgelist (iterable): iterable of (u, v) edges.

    Returns:
        dict: mapping each node to the set of its neighbors.
    """
    adjacency = {}
    for u, v in edgelist:
        adjacency.setdefault(u, set()).add(v)
        adjacency.setdefault(v, set()).add(u)
    return adjacency
Converts an iterator of edges to an adjacency dict .
18,163
def sample(self, bqm, **kwargs):
    """Sample from the binary quadratic model on every tile.

    The bqm is embedded once per tile, the child sampler is invoked a
    single time on the combined embedded problem, and each tile's copy
    is unembedded and the results concatenated.
    """
    __, __, target_adjacency = self.child.structure

    # embed a copy of the problem into every tile
    combined_bqm = dimod.BinaryQuadraticModel.empty(bqm.vartype)
    for embedding in self.embeddings:
        combined_bqm.update(dwave.embedding.embed_bqm(bqm, embedding, target_adjacency))

    tiled_response = self.child.sample(combined_bqm, **kwargs)

    # unembed each tile's copy of the problem
    responses = []
    for embedding in self.embeddings:
        restricted = {v: chain for v, chain in embedding.items() if v in bqm.variables}
        responses.append(dwave.embedding.unembed_sampleset(tiled_response, restricted, bqm))

    return dimod.concatenate(responses)
Sample from the specified binary quadratic model .
18,164
def cache_connect(database=None):
    """Return a connection to the sqlite cache database.

    Args:
        database (str, optional): path to the database file; defaults to
            the standard cache file location. The schema is created only
            when the file does not already exist.

    Returns:
        sqlite3.Connection: connection with foreign keys enabled and
        ``sqlite3.Row`` as the row factory.
    """
    if database is None:
        database = cache_file()

    is_new = not os.path.isfile(database)
    conn = sqlite3.connect(database)
    if is_new:
        # first use of this file: lay down the schema
        conn.executescript(schema)

    with conn as cur:
        cur.execute("PRAGMA foreign_keys = ON;")

    conn.row_factory = sqlite3.Row

    return conn
Returns a connection object to a sqlite database .
18,165
def insert_chain(cur, chain, encoded_data=None):
    """Insert a chain into the cache (no-op if already present).

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        chain (iterable): the chain's nodes.
        encoded_data (dict, optional): scratch dict; precomputed fields
            are reused and new ones added as a side effect so callers
            can share the encoding.
    """
    if encoded_data is None:
        encoded_data = {}

    if 'nodes' not in encoded_data:
        # canonical form: sorted nodes, compact json
        encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':'))
    if 'chain_length' not in encoded_data:
        encoded_data['chain_length'] = len(chain)

    statement = "INSERT OR IGNORE INTO chain(chain_length, nodes) VALUES (:chain_length, :nodes);"
    cur.execute(statement, encoded_data)
Insert a chain into the cache .
18,166
def iter_chain(cur):
    """Iterate over all chains stored in the cache.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.

    Yields:
        list: the nodes of each stored chain.
    """
    for row in cur.execute("SELECT nodes FROM chain"):
        yield json.loads(row[0])
Iterate over all of the chains in the database .
18,167
def insert_system(cur, system_name, encoded_data=None):
    """Insert a system name into the cache (no-op if already present).

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        system_name (str): the system's name.
        encoded_data (dict, optional): scratch dict shared with other
            insert helpers; updated as a side effect.
    """
    if encoded_data is None:
        encoded_data = {}
    encoded_data.setdefault('system_name', system_name)

    cur.execute("INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);",
                encoded_data)
Insert a system name into the cache .
18,168
def insert_flux_bias(cur, chain, system, flux_bias, chain_strength, encoded_data=None):
    """Insert a flux bias offset into the cache.

    Ensures the chain and system rows exist, then records the encoded
    flux bias, chain strength and insertion time.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        chain (iterable): the chain's nodes.
        system: name of the system the bias applies to.
        flux_bias (number): the flux bias offset.
        chain_strength (number): the chain strength the bias was found for.
        encoded_data (dict, optional): scratch dict shared across helpers.
    """
    if encoded_data is None:
        encoded_data = {}

    # make sure the referenced chain and system rows exist
    insert_chain(cur, chain, encoded_data)
    insert_system(cur, system, encoded_data)

    if 'flux_bias' not in encoded_data:
        encoded_data['flux_bias'] = _encode_real(flux_bias)
    if 'chain_strength' not in encoded_data:
        encoded_data['chain_strength'] = _encode_real(chain_strength)
    if 'insert_time' not in encoded_data:
        encoded_data['insert_time'] = datetime.datetime.now()

    # NOTE(review): the INSERT SQL statement string appears to have been
    # lost — `insert` is referenced on its own right-hand side before any
    # assignment, so this raises UnboundLocalError as written. Restore
    # the statement before use.
    insert = cur.execute(insert, encoded_data)
Insert a flux bias offset into the cache .
18,169
def get_flux_biases_from_cache(cur, chains, system_name, chain_strength, max_age=3600):
    """Determine the flux biases for the given chains, system and chain
    strength from cached values no older than ``max_age`` seconds.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        chains (iterable): chains to look up.
        system_name (str): name of the system the biases apply to.
        chain_strength (number): chain strength the biases were found for.
        max_age (int, optional): maximum entry age, in seconds.

    Returns:
        dict: mapping each chain variable to its flux bias (zero-valued
        biases are omitted).

    Raises:
        MissingFluxBias: if any chain lacks a suitable cached entry.
    """
    # NOTE(review): the SELECT SQL statement string appears to have been
    # lost — the chained assignment below binds `select` to the parameter
    # dict, so `cur.execute(select, ...)` cannot work as written. Restore
    # the SELECT statement before use.
    select = encoded_data = {'chain_strength': _encode_real(chain_strength),
                             'system_name': system_name,
                             'time_limit': datetime.datetime.now() + datetime.timedelta(seconds=-max_age)}

    flux_biases = {}
    for chain in chains:
        encoded_data['chain_length'] = len(chain)
        encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':'))

        row = cur.execute(select, encoded_data).fetchone()
        if row is None:
            raise MissingFluxBias
        flux_bias = _decode_real(*row)

        # zero offsets need not be applied
        if flux_bias == 0:
            continue

        flux_biases.update({v: flux_bias for v in chain})

    return flux_biases
Determine the flux biases for all of the given chains, system, and chain strength.
18,170
def insert_graph(cur, nodelist, edgelist, encoded_data=None):
    """Insert a graph into the cache.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        nodelist (list): the graph's nodes.
        edgelist (list): the graph's edges.
        encoded_data (dict, optional): scratch dict; precomputed fields
            are reused and new ones added as a side effect.
    """
    if encoded_data is None:
        encoded_data = {}

    if 'num_nodes' not in encoded_data:
        encoded_data['num_nodes'] = len(nodelist)
    if 'num_edges' not in encoded_data:
        encoded_data['num_edges'] = len(edgelist)
    if 'edges' not in encoded_data:
        encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':'))

    # NOTE(review): the INSERT SQL statement string appears to have been
    # lost — `insert` is referenced on its own right-hand side before any
    # assignment, so this raises UnboundLocalError as written. Restore
    # the statement before use.
    insert = cur.execute(insert, encoded_data)
Insert a graph into the cache .
18,171
def select_embedding_from_tag(cur, embedding_tag, target_nodelist, target_edgelist):
    """Select an embedding matching the given tag and target graph.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        embedding_tag (str): tag the embedding was stored under.
        target_nodelist (list): the target graph's nodes.
        target_edgelist (list): the target graph's edges.

    Returns:
        dict: mapping from source variables to chains.
    """
    encoded_data = {'num_nodes': len(target_nodelist),
                    'num_edges': len(target_edgelist),
                    'edges': json.dumps(target_edgelist, separators=(',', ':')),
                    'tag': embedding_tag}

    # NOTE(review): the SELECT SQL statement string appears to have been
    # lost — the chained assignment below references `select` before it
    # is bound, so this raises UnboundLocalError as written. Restore the
    # SELECT statement before use.
    select = embedding = {v: json.loads(chain)
                          for v, chain in cur.execute(select, encoded_data)}

    return embedding
Select an embedding from the given tag and target graph .
18,172
def select_embedding_from_source(cur, source_nodelist, source_edgelist, target_nodelist, target_edgelist):
    """Select an embedding matching the given source and target graphs.

    Args:
        cur (sqlite3.Cursor): cursor into the cache database.
        source_nodelist (list): the source graph's nodes.
        source_edgelist (list): the source graph's edges.
        target_nodelist (list): the target graph's nodes.
        target_edgelist (list): the target graph's edges.

    Returns:
        dict: mapping from source variables to chains.
    """
    encoded_data = {'target_num_nodes': len(target_nodelist),
                    'target_num_edges': len(target_edgelist),
                    'target_edges': json.dumps(target_edgelist, separators=(',', ':')),
                    'source_num_nodes': len(source_nodelist),
                    'source_num_edges': len(source_edgelist),
                    'source_edges': json.dumps(source_edgelist, separators=(',', ':'))}

    # NOTE(review): the SELECT SQL statement string appears to have been
    # lost — the chained assignment below references `select` before it
    # is bound, so this raises UnboundLocalError as written. Restore the
    # SELECT statement before use.
    select = embedding = {v: json.loads(chain)
                          for v, chain in cur.execute(select, encoded_data)}

    return embedding
Select an embedding from the source graph and target graph .
18,173
def draw_chimera_bqm(bqm, width=None, height=None):
    """Draw a Chimera-graph representation of a binary quadratic model.

    The full lattice is drawn in black underneath, with the bqm's nodes
    and couplers drawn on top with their biases.

    Args:
        bqm: binary quadratic model whose variables are Chimera linear
            indices (tile size 4).
        width (int, optional): number of tile columns. When both width
            and height are omitted, the smallest square lattice holding
            every variable is inferred.
        height (int, optional): number of tile rows.

    Raises:
        Exception: if only one dimension is supplied, or if the bqm's
            nodes/edges do not fit the Chimera lattice.
    """
    linear = bqm.linear.keys()
    quadratic = bqm.quadratic.keys()

    if width is None and height is None:
        # infer the smallest square lattice that can hold every variable
        # (8 qubits per tile)
        graph_size = ceil(sqrt((max(linear) + 1) / 8.0))
        width = graph_size
        height = graph_size
    if not width or not height:
        raise Exception("Both dimensions must be defined, not just one.")

    # G0 is the full background lattice; G is pruned down to the bqm
    G0 = chimera_graph(height, width, 4)
    G = chimera_graph(height, width, 4)

    # every bqm node and edge must exist in the lattice. (The original
    # code also collected non-chimera nodes/edges with explicit loops,
    # but those results were immediately overwritten by the set
    # differences below — the dead loops have been removed.)
    linear_set = set(linear)
    g_node_set = set(G.nodes)
    quadratic_set = set(map(frozenset, quadratic))
    g_edge_set = set(map(frozenset, G.edges))

    non_chimera_nodes = linear_set - g_node_set
    non_chimera_edges = quadratic_set - g_edge_set
    if non_chimera_nodes or non_chimera_edges:
        raise Exception("Input graph is not a chimera graph: Nodes: %s Edges: %s"
                        % (non_chimera_nodes, non_chimera_edges))

    # prune the lattice elements the bqm does not use
    for edge in g_edge_set - quadratic_set:
        G.remove_edge(*edge)
    for node in g_node_set - linear_set:
        G.remove_node(node)

    node_size = 100
    draw_chimera(G0, node_size=node_size * 0.5, node_color='black', edge_color='black')
    draw_chimera(G, node_size=node_size, linear_biases=bqm.linear,
                 quadratic_biases=bqm.quadratic, width=3)
    return
Draws a Chimera Graph representation of a Binary Quadratic Model .
18,174
def embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=1.0, smear_vartype=None):
    """Embed a binary quadratic model onto a target graph.

    Args:
        source_bqm: binary quadratic model to embed.
        embedding (dict): mapping from source variables to chains of
            target variables.
        target_adjacency (dict): adjacency of the target graph.
        chain_strength (number, optional): magnitude of the couplings
            binding each chain together.
        smear_vartype (optional): vartype in which linear biases are
            spread over chains; the returned bqm keeps the source vartype.

    Returns:
        A binary quadratic model over the target graph's variables.

    Raises:
        MissingChainError: if a source variable has no chain.
        InvalidNodeError: if a chain uses a node absent from the target.
        MissingEdgeError: if two chains that must interact share no coupler.
    """
    # to smear in a different vartype, convert, embed, convert back
    if smear_vartype is dimod.SPIN and source_bqm.vartype is dimod.BINARY:
        return embed_bqm(source_bqm.spin, embedding, target_adjacency,
                         chain_strength=chain_strength, smear_vartype=None).binary
    elif smear_vartype is dimod.BINARY and source_bqm.vartype is dimod.SPIN:
        return embed_bqm(source_bqm.binary, embedding, target_adjacency,
                         chain_strength=chain_strength, smear_vartype=None).spin

    target_bqm = source_bqm.empty(source_bqm.vartype)
    target_bqm.add_offset(source_bqm.offset)

    # linear biases: spread each source bias equally over its chain
    for v, bias in iteritems(source_bqm.linear):
        if v in embedding:
            chain = embedding[v]
        else:
            raise MissingChainError(v)

        if any(u not in target_adjacency for u in chain):
            # BUGFIX: report the offending target node itself; the old
            # code passed the boolean result of the membership test
            raise InvalidNodeError(v, next(u for u in chain if u not in target_adjacency))

        b = bias / len(chain)
        target_bqm.add_variables_from({u: b for u in chain})

    # quadratic biases: spread each source bias evenly over the couplers
    # available between the two chains
    for (u, v), bias in iteritems(source_bqm.quadratic):
        available_interactions = {(s, t) for s in embedding[u] for t in embedding[v]
                                  if s in target_adjacency[t]}

        if not available_interactions:
            raise MissingEdgeError(u, v)

        b = bias / len(available_interactions)
        target_bqm.add_interactions_from((u, v, b) for u, v in available_interactions)

    # chain couplings, plus an offset so an unbroken chain contributes
    # zero energy
    for chain in itervalues(embedding):
        if len(chain) == 1:
            # singleton chains need no couplers, but the variable must exist
            v, = chain
            target_bqm.add_variable(v, 0.0)
            continue

        quadratic_chain_biases = chain_to_quadratic(chain, target_adjacency, chain_strength)
        target_bqm.add_interactions_from(quadratic_chain_biases, vartype=dimod.SPIN)

        energy_diff = -sum(itervalues(quadratic_chain_biases))
        target_bqm.add_offset(energy_diff)

    return target_bqm
Embed a binary quadratic model onto a target graph .
18,175
def embed_ising(source_h, source_J, embedding, target_adjacency, chain_strength=1.0):
    """Embed an Ising problem onto a target graph.

    Thin wrapper around embed_bqm: converts (h, J) to a bqm, embeds it,
    and converts the result back to Ising form (the offset is discarded).

    Returns:
        tuple: (target linear biases, target quadratic biases).
    """
    bqm = dimod.BinaryQuadraticModel.from_ising(source_h, source_J)
    embedded = embed_bqm(bqm, embedding, target_adjacency, chain_strength=chain_strength)
    target_h, target_J, __ = embedded.to_ising()
    return target_h, target_J
Embed an Ising problem onto a target graph .
18,176
def embed_qubo(source_Q, embedding, target_adjacency, chain_strength=1.0):
    """Embed a QUBO onto a target graph.

    Thin wrapper around embed_bqm: converts Q to a bqm, embeds it, and
    converts the result back to QUBO form (the offset is discarded).

    Returns:
        dict: the target QUBO.
    """
    bqm = dimod.BinaryQuadraticModel.from_qubo(source_Q)
    embedded = embed_bqm(bqm, embedding, target_adjacency, chain_strength=chain_strength)
    target_Q, __ = embedded.to_qubo()
    return target_Q
Embed a QUBO onto a target graph .
18,177
def unembed_sampleset(target_sampleset, embedding, source_bqm,
                      chain_break_method=None, chain_break_fraction=False):
    """Unembed a sample set back onto the source binary quadratic model.

    Args:
        target_sampleset (dimod.SampleSet): samples over the target graph.
        embedding (dict): mapping from source variables to chains.
        source_bqm (dimod.BinaryQuadraticModel): the original problem.
        chain_break_method (callable, optional): how broken chains are
            resolved; defaults to majority_vote.
        chain_break_fraction (bool, optional): if True, a
            'chain_break_fraction' vector is added to the result.

    Returns:
        dimod.SampleSet: samples over the source variables.

    Raises:
        ValueError: if the bqm contains a variable absent from the embedding.
    """
    if chain_break_method is None:
        chain_break_method = majority_vote

    variables = list(source_bqm)
    try:
        chains = [embedding[v] for v in variables]
    except KeyError:
        raise ValueError("given bqm does not match the embedding")

    # chains expressed as column indices into the record array
    chain_idxs = [[target_sampleset.variables.index[v] for v in chain] for chain in chains]

    record = target_sampleset.record

    unembedded, idxs = chain_break_method(record.sample, chain_idxs)

    try:
        energies = source_bqm.energies((unembedded, variables))
    except ValueError:
        # the chain-break method discarded every sample; return an empty
        # sample set with the right fields.
        # BUGFIX: np.float was removed from numpy (1.24+); the builtin
        # float is what the alias meant.
        datatypes = [('sample', np.dtype(np.int8), (len(variables),)),
                     ('energy', float)]
        datatypes.extend((name, record[name].dtype, record[name].shape[1:])
                         for name in record.dtype.names
                         if name not in {'sample', 'energy'})
        if chain_break_fraction:
            datatypes.append(('chain_break_fraction', np.float64))
        data = np.rec.array(np.empty(0, dtype=datatypes))
        return dimod.SampleSet(data, variables, target_sampleset.info.copy(),
                               target_sampleset.vartype)

    # carry over every auxiliary vector for the retained rows
    reserved = {'sample', 'energy'}
    vectors = {name: record[name][idxs]
               for name in record.dtype.names if name not in reserved}

    if chain_break_fraction:
        vectors['chain_break_fraction'] = broken_chains(record.sample, chain_idxs).mean(axis=1)[idxs]

    return dimod.SampleSet.from_samples((unembedded, variables),
                                        target_sampleset.vartype,
                                        energy=energies,
                                        info=target_sampleset.info.copy(),
                                        **vectors)
Unembed the samples set .
18,178
def sample(self, bqm, chain_strength=1.0, chain_break_fraction=True, **parameters):
    """Sample the binary quadratic model, finding an embedding on first use.

    The embedding is computed lazily with minorminer the first time this
    method is called, then fixed for all subsequent calls.
    """
    if self.embedding is None:
        child = self.child
        __, target_edgelist, target_adjacency = child.structure
        # include (v, v) self-edges so isolated variables get chains too
        source_edgelist = list(bqm.quadratic) + [(v, v) for v in bqm.linear]
        found = minorminer.find_embedding(source_edgelist, target_edgelist)
        super(LazyFixedEmbeddingComposite, self)._set_graph_related_init(embedding=found)

    return super(LazyFixedEmbeddingComposite, self).sample(
        bqm,
        chain_strength=chain_strength,
        chain_break_fraction=chain_break_fraction,
        **parameters)
Sample the binary quadratic model .
18,179
def _accumulate_random ( count , found , oldthing , newthing ) : if randint ( 1 , count + found ) <= found : return count + found , newthing else : return count + found , oldthing
This performs on - line random selection .
18,180
def _bulk_to_linear ( M , N , L , qubits ) : "Converts a list of chimera coordinates to linear indices." return [ 2 * L * N * x + 2 * L * y + L * u + k for x , y , u , k in qubits ]
Converts a list of chimera coordinates to linear indices .
18,181
def _to_linear ( M , N , L , q ) : "Converts a qubit in chimera coordinates to its linear index." ( x , y , u , k ) = q return 2 * L * N * x + 2 * L * y + L * u + k
Converts a qubit in chimera coordinates to its linear index .
18,182
def _bulk_to_chimera ( M , N , L , qubits ) : "Converts a list of linear indices to chimera coordinates." return [ ( q // N // L // 2 , ( q // L // 2 ) % N , ( q // L ) % 2 , q % L ) for q in qubits ]
Converts a list of linear indices to chimera coordinates .
18,183
def _to_chimera ( M , N , L , q ) : "Converts a qubit's linear index to chimera coordinates." return ( q // N // L // 2 , ( q // L // 2 ) % N , ( q // L ) % 2 , q % L )
Converts a qubit s linear index to chimera coordinates .
18,184
def _compute_vline_scores(self):
    """Does the hard work to prepare ``_vline_score``.

    For every column x, scans the vertical qubits (u=1) downward and, for
    each k-index, tracks the start of the current contiguous run of
    working, coupled qubits. ``_vline_score[x, y0, y1]`` counts how many
    of the L wires span the whole segment y0..y1 in column x.
    """
    M, N, L = self.M, self.N, self.L
    vline_score = {}
    for x in range(M):
        # laststart[k]: row where wire k's current unbroken run began
        # (None when the wire is currently broken/missing)
        laststart = [0 if (x, 0, 1, k) in self else None for k in range(L)]
        for y in range(N):
            block = [0] * (y + 1)
            for k in range(L):
                if (x, y, 1, k) not in self:
                    # qubit missing: the run is broken
                    laststart[k] = None
                elif laststart[k] is None:
                    # run restarts at this row
                    laststart[k] = y
                    block[y] += 1
                elif y and (x, y, 1, k) not in self[x, y - 1, 1, k]:
                    # coupler to the previous row is missing: run restarts
                    # NOTE(review): unlike the branch above, this one does
                    # not credit block[y]; confirm the asymmetry is intended.
                    laststart[k] = y
                else:
                    # run continues: credit every segment it spans
                    for y1 in range(laststart[k], y + 1):
                        block[y1] += 1
            for y1 in range(y + 1):
                vline_score[x, y1, y] = block[y1]
    self._vline_score = vline_score
Does the hard work to prepare vline_score .
18,185
def _compute_hline_scores(self):
    """Does the hard work to prepare ``_hline_score``.

    Mirror of ``_compute_vline_scores`` for horizontal qubits (u=0): for
    every row y, scans rightward tracking each k-wire's current unbroken
    run. ``_hline_score[y, x0, x1]`` counts how many of the L wires span
    the whole segment x0..x1 in row y.
    """
    M, N, L = self.M, self.N, self.L
    hline_score = {}
    for y in range(N):
        # laststart[k]: column where wire k's current unbroken run began
        # (None when the wire is currently broken/missing)
        laststart = [0 if (0, y, 0, k) in self else None for k in range(L)]
        for x in range(M):
            block = [0] * (x + 1)
            for k in range(L):
                if (x, y, 0, k) not in self:
                    # qubit missing: the run is broken
                    laststart[k] = None
                elif laststart[k] is None:
                    # run restarts at this column
                    laststart[k] = x
                    block[x] += 1
                elif x and (x, y, 0, k) not in self[x - 1, y, 0, k]:
                    # coupler to the previous column is missing: run restarts
                    # NOTE(review): unlike the branch above, this one does
                    # not credit block[x]; confirm the asymmetry is intended.
                    laststart[k] = x
                else:
                    # run continues: credit every segment it spans
                    for x1 in range(laststart[k], x + 1):
                        block[x1] += 1
            for x1 in range(x + 1):
                hline_score[y, x1, x] = block[x1]
    self._hline_score = hline_score
Does the hard work to prepare hline_score .
18,186
def biclique(self, xmin, xmax, ymin, ymax):
    """Compute a maximal complete bipartite graph in the given rectangle.

    One side is built from horizontal-line chains (one bundle per row),
    the other from vertical-line chains (one bundle per column).

    Returns:
        tuple: (horizontal-chain side, vertical-chain side).
    """
    h_side = []
    for y in range(ymin, ymax + 1):
        h_side.extend(self.maximum_hline_bundle(y, xmin, xmax))

    v_side = []
    for x in range(xmin, xmax + 1):
        v_side.extend(self.maximum_vline_bundle(x, ymin, ymax))

    return h_side, v_side
Compute a maximum - sized complete bipartite graph contained in the rectangle defined by xmin xmax ymin ymax where each chain of qubits is either a vertical line or a horizontal line .
18,187
def _contains_line ( self , line ) : return all ( v in self for v in line ) and all ( u in self [ v ] for u , v in zip ( line , line [ 1 : : ] ) )
Test if a chain of qubits is completely contained in self . In particular test if all qubits are present and the couplers connecting those qubits are also connected .
18,188
def maximum_ell_bundle(self, ell):
    """Return a maximum ell bundle for the given ell.

    ``ell`` is ``(x0, x1, y0, y1)``: the horizontal arm runs along row
    y0 from x0 to x1, and the vertical arm along column x0 from y0 to
    y1. Each chain joins one vertical line to one horizontal line.
    """
    x0, x1, y0, y1 = ell
    hlines = self.maximum_hline_bundle(y0, x0, x1)
    vlines = self.maximum_vline_bundle(x0, y0, y1)
    if self.random_bundles:
        # randomize which h-line pairs with which v-line
        shuffle(hlines)
        shuffle(vlines)
    return [v + h for h, v in zip(hlines, vlines)]
Return a maximum ell bundle in the rectangle bounded by the given ell's coordinates.
18,189
def nativeCliqueEmbed(self, width):
    """Compute a maximum-sized native clique embedding in an induced
    subgraph of chimera, with all chain lengths width + 1.

    Scans every rectangle of the appropriate perimeter, scoring the best
    clique attainable with that rectangle via ``maxCliqueWithRectangle``
    (memoized in ``maxCWR``). Ties are broken uniformly at random with
    ``_accumulate_random``, and the winning chain of rectangles is then
    expanded into actual chains with ``maximum_ell_bundle``.

    Returns:
        tuple: (best score, list of chains forming the clique).
    """
    maxCWR = {}
    M, N = self.M, self.N
    maxscore = None
    count = 0
    key = None
    # enumerate rectangle shapes (w, h) consistent with the target width
    for w in range(width + 2):
        h = width - w - 2
        for ymin in range(N - h):
            ymax = ymin + h
            for xmin in range(M - w):
                xmax = xmin + w
                R = (xmin, xmax, ymin, ymax)
                score, best = self.maxCliqueWithRectangle(R, maxCWR)
                maxCWR[R] = best
                if maxscore is None or (score is not None and maxscore < score):
                    # new best score: reset the tie-breaking reservoir
                    maxscore = score
                    key = None
                    count = 0
                if maxscore == score:
                    # reservoir-sample among equally-scored rectangles
                    count, key = _accumulate_random(count, best[3], key, R)
    # walk the chain of rectangles recorded in maxCWR, expanding each
    # ell into its bundle of chains
    clique = []
    while key in maxCWR:
        score, ell, key, num = maxCWR[key]
        if ell is not None:
            meb = self.maximum_ell_bundle(ell)
            clique.extend(meb)
    return maxscore, clique
Compute a maximum - sized native clique embedding in an induced subgraph of chimera with all chainlengths width + 1 .
18,190
def _compute_all_deletions ( self ) : minimum_evil = [ ] for disabled_qubits in map ( set , product ( * self . _evil ) ) : newmin = [ ] for s in minimum_evil : if s < disabled_qubits : break elif disabled_qubits < s : continue newmin . append ( s ) else : minimum_evil = newmin + [ disabled_qubits ] return minimum_evil
Returns all minimal edge covers of the set of evil edges .
18,191
def _compute_deletions(self):
    """Precompute subprocessors for all minimal deletion sets, if cheap.

    When the number of candidate deletion sets (bounded above by
    ``2**len(self._evil)``) is within ``self._proc_limit``, build a
    subprocessor for every minimal deletion set; otherwise mark the
    precomputed processors as unavailable.

    The original body also unpacked ``M, N, L, edgelist`` from self —
    those locals were never used and have been removed.
    """
    if 2 ** len(self._evil) <= self._proc_limit:
        deletions = self._compute_all_deletions()
        self._processors = [self._subprocessor(d) for d in deletions]
    else:
        self._processors = None
If there are fewer than self . _proc_limit possible deletion sets compute all subprocessors obtained by deleting a minimal subset of qubits .
18,192
def _random_subprocessor(self):
    """Create a random minimal subprocessor.

    Greedily deletes one random endpoint of each evil edge not already
    covered. The result is minimal (restoring any deleted qubit would
    reintroduce a bad coupler) but not necessarily minimum-size.
    """
    deletion = set()
    for edge in self._evil:
        # skip edges already covered by an earlier deletion
        if edge[0] not in deletion and edge[1] not in deletion:
            deletion.add(choice(edge))
    return self._subprocessor(deletion)
Creates a random subprocessor where there is a coupler between every pair of working qubits on opposite sides of the same cell . This is guaranteed to be minimal in that adding a qubit back in will reintroduce a bad coupler but not to have minimum size .
18,193
def _objective_bestscore ( self , old , new ) : ( oldscore , oldthing ) = old ( newscore , newthing ) = new if oldscore is None : return True if newscore is None : return False return oldscore < newscore
An objective function that returns True if new has a better score than old and False otherwise .
18,194
def nativeCliqueEmbed(self, width):
    """Compute a maximum-sized native clique embedding with chain size
    width + 1, choosing uniformly among the largest cliques when possible.

    Delegates to each subprocessor, keeps the best-scoring result, and
    translates it back to the caller's coordinate system.
    """
    def run(proc):
        return proc.nativeCliqueEmbed(width)

    best = self._map_to_processors(run, self._objective_bestscore)
    return self._translate(best)
Compute a maximum - sized native clique embedding in an induced subgraph of chimera with chainsize width + 1 . If possible returns a uniform choice among all largest cliques .
18,195
def _translate ( self , embedding ) : "Translates an embedding back to linear coordinates if necessary." if embedding is None : return None if not self . _linear : return embedding return [ _bulk_to_linear ( self . M , self . N , self . L , chain ) for chain in embedding ]
Translates an embedding back to linear coordinates if necessary .
18,196
def _validate_chain_strength ( sampler , chain_strength ) : properties = sampler . properties if 'extended_j_range' in properties : max_chain_strength = - min ( properties [ 'extended_j_range' ] ) elif 'j_range' in properties : max_chain_strength = - min ( properties [ 'j_range' ] ) else : raise ValueError ( "input sampler should have 'j_range' and/or 'extended_j_range' property." ) if chain_strength is None : chain_strength = max_chain_strength elif chain_strength > max_chain_strength : raise ValueError ( "Provided chain strength exceedds the allowed range." ) return chain_strength
Validate the provided chain strength checking J - ranges of the sampler s children .
18,197
def sample(self, bqm, apply_flux_bias_offsets=True, **kwargs):
    """Sample from the given binary quadratic model.

    When ``apply_flux_bias_offsets`` is True and this composite holds
    precomputed flux biases, they are forwarded to the child sampler.
    """
    if apply_flux_bias_offsets and self.flux_biases is not None:
        kwargs[FLUX_BIAS_KWARG] = self.flux_biases
    return self.child.sample(bqm, **kwargs)
Sample from the given Ising model .
18,198
def get_flux_biases(sampler, embedding, chain_strength, num_reads=1000, max_age=3600):
    """Get the flux bias offsets for the given sampler and embedding.

    Tries the local cache first; on a cache miss, computes fresh offsets
    with the optional dwave-drivers package and stores them back.

    Args:
        sampler (dimod.Sampler): the D-Wave sampler to calibrate against.
        embedding (dict): mapping from source variables to chains.
        chain_strength (number): chain strength the biases apply to.
        num_reads (int, optional): reads used by the driver calculation.
        max_age (int, optional): maximum age of cached entries, seconds.

    Returns:
        dict: mapping chain variables to flux bias offsets.

    Raises:
        TypeError: if ``sampler`` is not a dimod sampler.
        RuntimeError: if a fresh calculation is needed but dwave-drivers
            is not installed.
    """
    if not isinstance(sampler, dimod.Sampler):
        raise TypeError("input sampler should be DWaveSampler")

    # cache entries are keyed by system; fall back to the class name
    system_name = sampler.properties.get('chip_id', str(sampler.__class__))

    # fast path: everything needed is already cached and fresh enough
    try:
        with cache_connect() as cur:
            fbo = get_flux_biases_from_cache(cur, embedding.values(), system_name,
                                             chain_strength=chain_strength, max_age=max_age)
        return fbo
    except MissingFluxBias:
        pass

    # dwave-drivers is an optional dependency, imported lazily
    try:
        import dwave.drivers as drivers
    except ImportError:
        msg = ("dwave-drivers not found, cannot calculate flux biases. dwave-drivers can be "
               "installed with "
               "'pip install dwave-drivers --extra-index-url https://pypi.dwavesys.com/simple'. "
               "See documentation for dwave-drivers license.")
        raise RuntimeError(msg)

    fbo = drivers.oneshot_flux_bias(sampler, embedding.values(),
                                    num_reads=num_reads, chain_strength=chain_strength)

    # store the freshly computed offsets for future calls
    with cache_connect() as cur:
        for chain in embedding.values():
            v = next(iter(chain))
            flux_bias = fbo.get(v, 0.0)
            insert_flux_bias(cur, chain, system_name, flux_bias, chain_strength)

    return fbo
Get the flux bias offsets for sampler and embedding .
18,199
def find_clique_embedding(k, m, n=None, t=None, target_edges=None):
    """Find an embedding for a clique in a Chimera graph.

    Args:
        k: the clique specification; only its node list (second element)
            is used here.
        m, n (int): Chimera lattice dimensions.
        t (int): Chimera tile size.
        target_edges (iterable, optional): edges of the target graph.

    Returns:
        dict: mapping from clique nodes to chains.

    Raises:
        ValueError: if no embedding can be found.
    """
    import random

    _, nodes = k
    m, n, t, target_edges = _chimera_input(m, n, t, target_edges)

    if len(nodes) == 1:
        # a single node embeds onto any qubit with a working coupler
        qubits = set().union(*target_edges)
        qubit = random.choice(tuple(qubits))
        embedding = [[qubit]]
    elif len(nodes) == 2:
        # two nodes embed onto the endpoints of a random edge.
        # BUGFIX: `edges` was only assigned when target_edges was NOT a
        # list, leaving it unbound (NameError) when a list was passed.
        if not isinstance(target_edges, list):
            edges = list(target_edges)
        else:
            edges = target_edges
        edge = edges[random.randrange(len(edges))]
        embedding = [[edge[0]], [edge[1]]]
    else:
        embedding = processor(target_edges, M=m, N=n, L=t).tightestNativeClique(len(nodes))

    if not embedding:
        raise ValueError("cannot find a K{} embedding for given Chimera lattice".format(k))

    return dict(zip(nodes, embedding))
Find an embedding for a clique in a Chimera graph .