partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
valid
Droplet.get_actions
Returns a list of Action objects This actions can be used to check the droplet's status
digitalocean/Droplet.py
def get_actions(self): """ Returns a list of Action objects This actions can be used to check the droplet's status """ answer = self.get_data("droplets/%s/actions/" % self.id, type=GET) actions = [] for action_dict in answer['actions']: action = Action(**action_dict) action.token = self.token action.droplet_id = self.id action.load() actions.append(action) return actions
def get_actions(self): """ Returns a list of Action objects This actions can be used to check the droplet's status """ answer = self.get_data("droplets/%s/actions/" % self.id, type=GET) actions = [] for action_dict in answer['actions']: action = Action(**action_dict) action.token = self.token action.droplet_id = self.id action.load() actions.append(action) return actions
[ "Returns", "a", "list", "of", "Action", "objects", "This", "actions", "can", "be", "used", "to", "check", "the", "droplet", "s", "status" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Droplet.py#L579-L593
[ "def", "get_actions", "(", "self", ")", ":", "answer", "=", "self", ".", "get_data", "(", "\"droplets/%s/actions/\"", "%", "self", ".", "id", ",", "type", "=", "GET", ")", "actions", "=", "[", "]", "for", "action_dict", "in", "answer", "[", "'actions'", "]", ":", "action", "=", "Action", "(", "*", "*", "action_dict", ")", "action", ".", "token", "=", "self", ".", "token", "action", ".", "droplet_id", "=", "self", ".", "id", "action", ".", "load", "(", ")", "actions", ".", "append", "(", "action", ")", "return", "actions" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Droplet.get_action
Returns a specific Action by its ID. Args: action_id (int): id of action
digitalocean/Droplet.py
def get_action(self, action_id): """Returns a specific Action by its ID. Args: action_id (int): id of action """ return Action.get_object( api_token=self.token, action_id=action_id )
def get_action(self, action_id): """Returns a specific Action by its ID. Args: action_id (int): id of action """ return Action.get_object( api_token=self.token, action_id=action_id )
[ "Returns", "a", "specific", "Action", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Droplet.py#L595-L604
[ "def", "get_action", "(", "self", ",", "action_id", ")", ":", "return", "Action", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "action_id", "=", "action_id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Droplet.get_snapshots
This method will return the snapshots/images connected to that specific droplet.
digitalocean/Droplet.py
def get_snapshots(self): """ This method will return the snapshots/images connected to that specific droplet. """ snapshots = list() for id in self.snapshot_ids: snapshot = Image() snapshot.id = id snapshot.token = self.token snapshots.append(snapshot) return snapshots
def get_snapshots(self): """ This method will return the snapshots/images connected to that specific droplet. """ snapshots = list() for id in self.snapshot_ids: snapshot = Image() snapshot.id = id snapshot.token = self.token snapshots.append(snapshot) return snapshots
[ "This", "method", "will", "return", "the", "snapshots", "/", "images", "connected", "to", "that", "specific", "droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Droplet.py#L606-L617
[ "def", "get_snapshots", "(", "self", ")", ":", "snapshots", "=", "list", "(", ")", "for", "id", "in", "self", ".", "snapshot_ids", ":", "snapshot", "=", "Image", "(", ")", "snapshot", ".", "id", "=", "id", "snapshot", ".", "token", "=", "self", ".", "token", "snapshots", ".", "append", "(", "snapshot", ")", "return", "snapshots" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Droplet.get_kernel_available
Get a list of kernels available
digitalocean/Droplet.py
def get_kernel_available(self): """ Get a list of kernels available """ kernels = list() data = self.get_data("droplets/%s/kernels/" % self.id) while True: for jsond in data[u'kernels']: kernel = Kernel(**jsond) kernel.token = self.token kernels.append(kernel) try: url = data[u'links'][u'pages'].get(u'next') if not url: break data = self.get_data(url) except KeyError: # No links. break return kernels
def get_kernel_available(self): """ Get a list of kernels available """ kernels = list() data = self.get_data("droplets/%s/kernels/" % self.id) while True: for jsond in data[u'kernels']: kernel = Kernel(**jsond) kernel.token = self.token kernels.append(kernel) try: url = data[u'links'][u'pages'].get(u'next') if not url: break data = self.get_data(url) except KeyError: # No links. break return kernels
[ "Get", "a", "list", "of", "kernels", "available" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Droplet.py#L619-L639
[ "def", "get_kernel_available", "(", "self", ")", ":", "kernels", "=", "list", "(", ")", "data", "=", "self", ".", "get_data", "(", "\"droplets/%s/kernels/\"", "%", "self", ".", "id", ")", "while", "True", ":", "for", "jsond", "in", "data", "[", "u'kernels'", "]", ":", "kernel", "=", "Kernel", "(", "*", "*", "jsond", ")", "kernel", ".", "token", "=", "self", ".", "token", "kernels", ".", "append", "(", "kernel", ")", "try", ":", "url", "=", "data", "[", "u'links'", "]", "[", "u'pages'", "]", ".", "get", "(", "u'next'", ")", "if", "not", "url", ":", "break", "data", "=", "self", ".", "get_data", "(", "url", ")", "except", "KeyError", ":", "# No links.", "break", "return", "kernels" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Domain.get_object
Class method that will return a Domain object by ID.
digitalocean/Domain.py
def get_object(cls, api_token, domain_name): """ Class method that will return a Domain object by ID. """ domain = cls(token=api_token, name=domain_name) domain.load() return domain
def get_object(cls, api_token, domain_name): """ Class method that will return a Domain object by ID. """ domain = cls(token=api_token, name=domain_name) domain.load() return domain
[ "Class", "method", "that", "will", "return", "a", "Domain", "object", "by", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Domain.py#L16-L22
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "domain_name", ")", ":", "domain", "=", "cls", "(", "token", "=", "api_token", ",", "name", "=", "domain_name", ")", "domain", ".", "load", "(", ")", "return", "domain" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Domain.create_new_domain_record
Create new domain record. https://developers.digitalocean.com/#create-a-new-domain-record Args: type: The record type (A, MX, CNAME, etc). name: The host name, alias, or service being defined by the record data: Variable data depending on record type. Optional Args: priority: The priority of the host port: The port that the service is accessible on weight: The weight of records with the same priority
digitalocean/Domain.py
def create_new_domain_record(self, *args, **kwargs): """ Create new domain record. https://developers.digitalocean.com/#create-a-new-domain-record Args: type: The record type (A, MX, CNAME, etc). name: The host name, alias, or service being defined by the record data: Variable data depending on record type. Optional Args: priority: The priority of the host port: The port that the service is accessible on weight: The weight of records with the same priority """ data = { "type": kwargs.get("type", None), "name": kwargs.get("name", None), "data": kwargs.get("data", None) } #  Optional Args if kwargs.get("priority", None): data['priority'] = kwargs.get("priority", None) if kwargs.get("port", None): data['port'] = kwargs.get("port", None) if kwargs.get("weight", None): data['weight'] = kwargs.get("weight", None) return self.get_data( "domains/%s/records" % self.name, type=POST, params=data )
def create_new_domain_record(self, *args, **kwargs): """ Create new domain record. https://developers.digitalocean.com/#create-a-new-domain-record Args: type: The record type (A, MX, CNAME, etc). name: The host name, alias, or service being defined by the record data: Variable data depending on record type. Optional Args: priority: The priority of the host port: The port that the service is accessible on weight: The weight of records with the same priority """ data = { "type": kwargs.get("type", None), "name": kwargs.get("name", None), "data": kwargs.get("data", None) } #  Optional Args if kwargs.get("priority", None): data['priority'] = kwargs.get("priority", None) if kwargs.get("port", None): data['port'] = kwargs.get("port", None) if kwargs.get("weight", None): data['weight'] = kwargs.get("weight", None) return self.get_data( "domains/%s/records" % self.name, type=POST, params=data )
[ "Create", "new", "domain", "record", ".", "https", ":", "//", "developers", ".", "digitalocean", ".", "com", "/", "#create", "-", "a", "-", "new", "-", "domain", "-", "record" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Domain.py#L39-L74
[ "def", "create_new_domain_record", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"type\"", ":", "kwargs", ".", "get", "(", "\"type\"", ",", "None", ")", ",", "\"name\"", ":", "kwargs", ".", "get", "(", "\"name\"", ",", "None", ")", ",", "\"data\"", ":", "kwargs", ".", "get", "(", "\"data\"", ",", "None", ")", "}", "#  Optional Args", "if", "kwargs", ".", "get", "(", "\"priority\"", ",", "None", ")", ":", "data", "[", "'priority'", "]", "=", "kwargs", ".", "get", "(", "\"priority\"", ",", "None", ")", "if", "kwargs", ".", "get", "(", "\"port\"", ",", "None", ")", ":", "data", "[", "'port'", "]", "=", "kwargs", ".", "get", "(", "\"port\"", ",", "None", ")", "if", "kwargs", ".", "get", "(", "\"weight\"", ",", "None", ")", ":", "data", "[", "'weight'", "]", "=", "kwargs", ".", "get", "(", "\"weight\"", ",", "None", ")", "return", "self", ".", "get_data", "(", "\"domains/%s/records\"", "%", "self", ".", "name", ",", "type", "=", "POST", ",", "params", "=", "data", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Domain.create
Create new doamin
digitalocean/Domain.py
def create(self): """ Create new doamin """ # URL https://api.digitalocean.com/v2/domains data = { "name": self.name, "ip_address": self.ip_address, } domain = self.get_data("domains", type=POST, params=data) return domain
def create(self): """ Create new doamin """ # URL https://api.digitalocean.com/v2/domains data = { "name": self.name, "ip_address": self.ip_address, } domain = self.get_data("domains", type=POST, params=data) return domain
[ "Create", "new", "doamin" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Domain.py#L76-L87
[ "def", "create", "(", "self", ")", ":", "# URL https://api.digitalocean.com/v2/domains", "data", "=", "{", "\"name\"", ":", "self", ".", "name", ",", "\"ip_address\"", ":", "self", ".", "ip_address", ",", "}", "domain", "=", "self", ".", "get_data", "(", "\"domains\"", ",", "type", "=", "POST", ",", "params", "=", "data", ")", "return", "domain" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Domain.get_records
Returns a list of Record objects
digitalocean/Domain.py
def get_records(self, params=None): """ Returns a list of Record objects """ if params is None: params = {} # URL https://api.digitalocean.com/v2/domains/[NAME]/records/ records = [] data = self.get_data("domains/%s/records/" % self.name, type=GET, params=params) for record_data in data['domain_records']: record = Record(domain_name=self.name, **record_data) record.token = self.token records.append(record) return records
def get_records(self, params=None): """ Returns a list of Record objects """ if params is None: params = {} # URL https://api.digitalocean.com/v2/domains/[NAME]/records/ records = [] data = self.get_data("domains/%s/records/" % self.name, type=GET, params=params) for record_data in data['domain_records']: record = Record(domain_name=self.name, **record_data) record.token = self.token records.append(record) return records
[ "Returns", "a", "list", "of", "Record", "objects" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Domain.py#L89-L106
[ "def", "get_records", "(", "self", ",", "params", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "{", "}", "# URL https://api.digitalocean.com/v2/domains/[NAME]/records/", "records", "=", "[", "]", "data", "=", "self", ".", "get_data", "(", "\"domains/%s/records/\"", "%", "self", ".", "name", ",", "type", "=", "GET", ",", "params", "=", "params", ")", "for", "record_data", "in", "data", "[", "'domain_records'", "]", ":", "record", "=", "Record", "(", "domain_name", "=", "self", ".", "name", ",", "*", "*", "record_data", ")", "record", ".", "token", "=", "self", ".", "token", "records", ".", "append", "(", "record", ")", "return", "records" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Account.get_object
Class method that will return an Account object.
digitalocean/Account.py
def get_object(cls, api_token): """ Class method that will return an Account object. """ acct = cls(token=api_token) acct.load() return acct
def get_object(cls, api_token): """ Class method that will return an Account object. """ acct = cls(token=api_token) acct.load() return acct
[ "Class", "method", "that", "will", "return", "an", "Account", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Account.py#L18-L24
[ "def", "get_object", "(", "cls", ",", "api_token", ")", ":", "acct", "=", "cls", "(", "token", "=", "api_token", ")", "acct", ".", "load", "(", ")", "return", "acct" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
FloatingIP.get_object
Class method that will return a FloatingIP object by its IP. Args: api_token: str - token ip: str - floating ip address
digitalocean/FloatingIP.py
def get_object(cls, api_token, ip): """ Class method that will return a FloatingIP object by its IP. Args: api_token: str - token ip: str - floating ip address """ floating_ip = cls(token=api_token, ip=ip) floating_ip.load() return floating_ip
def get_object(cls, api_token, ip): """ Class method that will return a FloatingIP object by its IP. Args: api_token: str - token ip: str - floating ip address """ floating_ip = cls(token=api_token, ip=ip) floating_ip.load() return floating_ip
[ "Class", "method", "that", "will", "return", "a", "FloatingIP", "object", "by", "its", "IP", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/FloatingIP.py#L14-L24
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "ip", ")", ":", "floating_ip", "=", "cls", "(", "token", "=", "api_token", ",", "ip", "=", "ip", ")", "floating_ip", ".", "load", "(", ")", "return", "floating_ip" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
FloatingIP.load
Load the FloatingIP object from DigitalOcean. Requires self.ip to be set.
digitalocean/FloatingIP.py
def load(self): """ Load the FloatingIP object from DigitalOcean. Requires self.ip to be set. """ data = self.get_data('floating_ips/%s' % self.ip, type=GET) floating_ip = data['floating_ip'] # Setting the attribute values for attr in floating_ip.keys(): setattr(self, attr, floating_ip[attr]) return self
def load(self): """ Load the FloatingIP object from DigitalOcean. Requires self.ip to be set. """ data = self.get_data('floating_ips/%s' % self.ip, type=GET) floating_ip = data['floating_ip'] # Setting the attribute values for attr in floating_ip.keys(): setattr(self, attr, floating_ip[attr]) return self
[ "Load", "the", "FloatingIP", "object", "from", "DigitalOcean", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/FloatingIP.py#L26-L39
[ "def", "load", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "'floating_ips/%s'", "%", "self", ".", "ip", ",", "type", "=", "GET", ")", "floating_ip", "=", "data", "[", "'floating_ip'", "]", "# Setting the attribute values", "for", "attr", "in", "floating_ip", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "floating_ip", "[", "attr", "]", ")", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
FloatingIP.create
Creates a FloatingIP and assigns it to a Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: droplet_id: int - droplet id
digitalocean/FloatingIP.py
def create(self, *args, **kwargs): """ Creates a FloatingIP and assigns it to a Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: droplet_id: int - droplet id """ data = self.get_data('floating_ips/', type=POST, params={'droplet_id': self.droplet_id}) if data: self.ip = data['floating_ip']['ip'] self.region = data['floating_ip']['region'] return self
def create(self, *args, **kwargs): """ Creates a FloatingIP and assigns it to a Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: droplet_id: int - droplet id """ data = self.get_data('floating_ips/', type=POST, params={'droplet_id': self.droplet_id}) if data: self.ip = data['floating_ip']['ip'] self.region = data['floating_ip']['region'] return self
[ "Creates", "a", "FloatingIP", "and", "assigns", "it", "to", "a", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/FloatingIP.py#L41-L59
[ "def", "create", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "get_data", "(", "'floating_ips/'", ",", "type", "=", "POST", ",", "params", "=", "{", "'droplet_id'", ":", "self", ".", "droplet_id", "}", ")", "if", "data", ":", "self", ".", "ip", "=", "data", "[", "'floating_ip'", "]", "[", "'ip'", "]", "self", ".", "region", "=", "data", "[", "'floating_ip'", "]", "[", "'region'", "]", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
FloatingIP.reserve
Creates a FloatingIP in a region without assigning it to a specific Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: region_slug: str - region's slug (e.g. 'nyc3')
digitalocean/FloatingIP.py
def reserve(self, *args, **kwargs): """ Creates a FloatingIP in a region without assigning it to a specific Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: region_slug: str - region's slug (e.g. 'nyc3') """ data = self.get_data('floating_ips/', type=POST, params={'region': self.region_slug}) if data: self.ip = data['floating_ip']['ip'] self.region = data['floating_ip']['region'] return self
def reserve(self, *args, **kwargs): """ Creates a FloatingIP in a region without assigning it to a specific Droplet. Note: Every argument and parameter given to this method will be assigned to the object. Args: region_slug: str - region's slug (e.g. 'nyc3') """ data = self.get_data('floating_ips/', type=POST, params={'region': self.region_slug}) if data: self.ip = data['floating_ip']['ip'] self.region = data['floating_ip']['region'] return self
[ "Creates", "a", "FloatingIP", "in", "a", "region", "without", "assigning", "it", "to", "a", "specific", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/FloatingIP.py#L61-L80
[ "def", "reserve", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "get_data", "(", "'floating_ips/'", ",", "type", "=", "POST", ",", "params", "=", "{", "'region'", ":", "self", ".", "region_slug", "}", ")", "if", "data", ":", "self", ".", "ip", "=", "data", "[", "'floating_ip'", "]", "[", "'ip'", "]", "self", ".", "region", "=", "data", "[", "'floating_ip'", "]", "[", "'region'", "]", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
FloatingIP.assign
Assign a FloatingIP to a Droplet. Args: droplet_id: int - droplet id
digitalocean/FloatingIP.py
def assign(self, droplet_id): """ Assign a FloatingIP to a Droplet. Args: droplet_id: int - droplet id """ return self.get_data( "floating_ips/%s/actions/" % self.ip, type=POST, params={"type": "assign", "droplet_id": droplet_id} )
def assign(self, droplet_id): """ Assign a FloatingIP to a Droplet. Args: droplet_id: int - droplet id """ return self.get_data( "floating_ips/%s/actions/" % self.ip, type=POST, params={"type": "assign", "droplet_id": droplet_id} )
[ "Assign", "a", "FloatingIP", "to", "a", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/FloatingIP.py#L88-L99
[ "def", "assign", "(", "self", ",", "droplet_id", ")", ":", "return", "self", ".", "get_data", "(", "\"floating_ips/%s/actions/\"", "%", "self", ".", "ip", ",", "type", "=", "POST", ",", "params", "=", "{", "\"type\"", ":", "\"assign\"", ",", "\"droplet_id\"", ":", "droplet_id", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Firewall.get_object
Class method that will return a Firewall object by ID.
digitalocean/Firewall.py
def get_object(cls, api_token, firewall_id): """ Class method that will return a Firewall object by ID. """ firewall = cls(token=api_token, id=firewall_id) firewall.load() return firewall
def get_object(cls, api_token, firewall_id): """ Class method that will return a Firewall object by ID. """ firewall = cls(token=api_token, id=firewall_id) firewall.load() return firewall
[ "Class", "method", "that", "will", "return", "a", "Firewall", "object", "by", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Firewall.py#L148-L154
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "firewall_id", ")", ":", "firewall", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "firewall_id", ")", "firewall", ".", "load", "(", ")", "return", "firewall" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Firewall.add_tags
Add tags to this Firewall.
digitalocean/Firewall.py
def add_tags(self, tags): """ Add tags to this Firewall. """ return self.get_data( "firewalls/%s/tags" % self.id, type=POST, params={"tags": tags} )
def add_tags(self, tags): """ Add tags to this Firewall. """ return self.get_data( "firewalls/%s/tags" % self.id, type=POST, params={"tags": tags} )
[ "Add", "tags", "to", "this", "Firewall", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Firewall.py#L218-L226
[ "def", "add_tags", "(", "self", ",", "tags", ")", ":", "return", "self", ".", "get_data", "(", "\"firewalls/%s/tags\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"tags\"", ":", "tags", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Firewall.remove_tags
Remove tags from this Firewall.
digitalocean/Firewall.py
def remove_tags(self, tags): """ Remove tags from this Firewall. """ return self.get_data( "firewalls/%s/tags" % self.id, type=DELETE, params={"tags": tags} )
def remove_tags(self, tags): """ Remove tags from this Firewall. """ return self.get_data( "firewalls/%s/tags" % self.id, type=DELETE, params={"tags": tags} )
[ "Remove", "tags", "from", "this", "Firewall", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Firewall.py#L228-L236
[ "def", "remove_tags", "(", "self", ",", "tags", ")", ":", "return", "self", ".", "get_data", "(", "\"firewalls/%s/tags\"", "%", "self", ".", "id", ",", "type", "=", "DELETE", ",", "params", "=", "{", "\"tags\"", ":", "tags", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
SSHKey.get_object
Class method that will return a SSHKey object by ID.
digitalocean/SSHKey.py
def get_object(cls, api_token, ssh_key_id): """ Class method that will return a SSHKey object by ID. """ ssh_key = cls(token=api_token, id=ssh_key_id) ssh_key.load() return ssh_key
def get_object(cls, api_token, ssh_key_id): """ Class method that will return a SSHKey object by ID. """ ssh_key = cls(token=api_token, id=ssh_key_id) ssh_key.load() return ssh_key
[ "Class", "method", "that", "will", "return", "a", "SSHKey", "object", "by", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/SSHKey.py#L15-L21
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "ssh_key_id", ")", ":", "ssh_key", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "ssh_key_id", ")", "ssh_key", ".", "load", "(", ")", "return", "ssh_key" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
SSHKey.load
Load the SSHKey object from DigitalOcean. Requires either self.id or self.fingerprint to be set.
digitalocean/SSHKey.py
def load(self): """ Load the SSHKey object from DigitalOcean. Requires either self.id or self.fingerprint to be set. """ identifier = None if self.id: identifier = self.id elif self.fingerprint is not None: identifier = self.fingerprint data = self.get_data("account/keys/%s" % identifier, type=GET) ssh_key = data['ssh_key'] # Setting the attribute values for attr in ssh_key.keys(): setattr(self, attr, ssh_key[attr]) self.id = ssh_key['id']
def load(self): """ Load the SSHKey object from DigitalOcean. Requires either self.id or self.fingerprint to be set. """ identifier = None if self.id: identifier = self.id elif self.fingerprint is not None: identifier = self.fingerprint data = self.get_data("account/keys/%s" % identifier, type=GET) ssh_key = data['ssh_key'] # Setting the attribute values for attr in ssh_key.keys(): setattr(self, attr, ssh_key[attr]) self.id = ssh_key['id']
[ "Load", "the", "SSHKey", "object", "from", "DigitalOcean", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/SSHKey.py#L23-L42
[ "def", "load", "(", "self", ")", ":", "identifier", "=", "None", "if", "self", ".", "id", ":", "identifier", "=", "self", ".", "id", "elif", "self", ".", "fingerprint", "is", "not", "None", ":", "identifier", "=", "self", ".", "fingerprint", "data", "=", "self", ".", "get_data", "(", "\"account/keys/%s\"", "%", "identifier", ",", "type", "=", "GET", ")", "ssh_key", "=", "data", "[", "'ssh_key'", "]", "# Setting the attribute values", "for", "attr", "in", "ssh_key", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "ssh_key", "[", "attr", "]", ")", "self", ".", "id", "=", "ssh_key", "[", "'id'", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
SSHKey.load_by_pub_key
This method will load a SSHKey object from DigitalOcean from a public_key. This method will avoid problems like uploading the same public_key twice.
digitalocean/SSHKey.py
def load_by_pub_key(self, public_key): """ This method will load a SSHKey object from DigitalOcean from a public_key. This method will avoid problems like uploading the same public_key twice. """ data = self.get_data("account/keys/") for jsoned in data['ssh_keys']: if jsoned.get('public_key', "") == public_key: self.id = jsoned['id'] self.load() return self return None
def load_by_pub_key(self, public_key): """ This method will load a SSHKey object from DigitalOcean from a public_key. This method will avoid problems like uploading the same public_key twice. """ data = self.get_data("account/keys/") for jsoned in data['ssh_keys']: if jsoned.get('public_key', "") == public_key: self.id = jsoned['id'] self.load() return self return None
[ "This", "method", "will", "load", "a", "SSHKey", "object", "from", "DigitalOcean", "from", "a", "public_key", ".", "This", "method", "will", "avoid", "problems", "like", "uploading", "the", "same", "public_key", "twice", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/SSHKey.py#L44-L57
[ "def", "load_by_pub_key", "(", "self", ",", "public_key", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"account/keys/\"", ")", "for", "jsoned", "in", "data", "[", "'ssh_keys'", "]", ":", "if", "jsoned", ".", "get", "(", "'public_key'", ",", "\"\"", ")", "==", "public_key", ":", "self", ".", "id", "=", "jsoned", "[", "'id'", "]", "self", ".", "load", "(", ")", "return", "self", "return", "None" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
SSHKey.create
Create the SSH Key
digitalocean/SSHKey.py
def create(self): """ Create the SSH Key """ input_params = { "name": self.name, "public_key": self.public_key, } data = self.get_data("account/keys/", type=POST, params=input_params) if data: self.id = data['ssh_key']['id']
def create(self): """ Create the SSH Key """ input_params = { "name": self.name, "public_key": self.public_key, } data = self.get_data("account/keys/", type=POST, params=input_params) if data: self.id = data['ssh_key']['id']
[ "Create", "the", "SSH", "Key" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/SSHKey.py#L59-L71
[ "def", "create", "(", "self", ")", ":", "input_params", "=", "{", "\"name\"", ":", "self", ".", "name", ",", "\"public_key\"", ":", "self", ".", "public_key", ",", "}", "data", "=", "self", ".", "get_data", "(", "\"account/keys/\"", ",", "type", "=", "POST", ",", "params", "=", "input_params", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'ssh_key'", "]", "[", "'id'", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
SSHKey.edit
Edit the SSH Key
digitalocean/SSHKey.py
def edit(self): """ Edit the SSH Key """ input_params = { "name": self.name, "public_key": self.public_key, } data = self.get_data( "account/keys/%s" % self.id, type=PUT, params=input_params ) if data: self.id = data['ssh_key']['id']
def edit(self): """ Edit the SSH Key """ input_params = { "name": self.name, "public_key": self.public_key, } data = self.get_data( "account/keys/%s" % self.id, type=PUT, params=input_params ) if data: self.id = data['ssh_key']['id']
[ "Edit", "the", "SSH", "Key" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/SSHKey.py#L73-L89
[ "def", "edit", "(", "self", ")", ":", "input_params", "=", "{", "\"name\"", ":", "self", ".", "name", ",", "\"public_key\"", ":", "self", ".", "public_key", ",", "}", "data", "=", "self", ".", "get_data", "(", "\"account/keys/%s\"", "%", "self", ".", "id", ",", "type", "=", "PUT", ",", "params", "=", "input_params", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'ssh_key'", "]", "[", "'id'", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_regions
This function returns a list of Region object.
digitalocean/Manager.py
def get_all_regions(self): """ This function returns a list of Region object. """ data = self.get_data("regions/") regions = list() for jsoned in data['regions']: region = Region(**jsoned) region.token = self.token regions.append(region) return regions
def get_all_regions(self): """ This function returns a list of Region object. """ data = self.get_data("regions/") regions = list() for jsoned in data['regions']: region = Region(**jsoned) region.token = self.token regions.append(region) return regions
[ "This", "function", "returns", "a", "list", "of", "Region", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L36-L46
[ "def", "get_all_regions", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"regions/\"", ")", "regions", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'regions'", "]", ":", "region", "=", "Region", "(", "*", "*", "jsoned", ")", "region", ".", "token", "=", "self", ".", "token", "regions", ".", "append", "(", "region", ")", "return", "regions" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_droplets
This function returns a list of Droplet object.
digitalocean/Manager.py
def get_all_droplets(self, tag_name=None): """ This function returns a list of Droplet object. """ params = dict() if tag_name: params["tag_name"] = tag_name data = self.get_data("droplets/", params=params) droplets = list() for jsoned in data['droplets']: droplet = Droplet(**jsoned) droplet.token = self.token for net in droplet.networks['v4']: if net['type'] == 'private': droplet.private_ip_address = net['ip_address'] if net['type'] == 'public': droplet.ip_address = net['ip_address'] if droplet.networks['v6']: droplet.ip_v6_address = droplet.networks['v6'][0]['ip_address'] if "backups" in droplet.features: droplet.backups = True else: droplet.backups = False if "ipv6" in droplet.features: droplet.ipv6 = True else: droplet.ipv6 = False if "private_networking" in droplet.features: droplet.private_networking = True else: droplet.private_networking = False droplets.append(droplet) return droplets
def get_all_droplets(self, tag_name=None): """ This function returns a list of Droplet object. """ params = dict() if tag_name: params["tag_name"] = tag_name data = self.get_data("droplets/", params=params) droplets = list() for jsoned in data['droplets']: droplet = Droplet(**jsoned) droplet.token = self.token for net in droplet.networks['v4']: if net['type'] == 'private': droplet.private_ip_address = net['ip_address'] if net['type'] == 'public': droplet.ip_address = net['ip_address'] if droplet.networks['v6']: droplet.ip_v6_address = droplet.networks['v6'][0]['ip_address'] if "backups" in droplet.features: droplet.backups = True else: droplet.backups = False if "ipv6" in droplet.features: droplet.ipv6 = True else: droplet.ipv6 = False if "private_networking" in droplet.features: droplet.private_networking = True else: droplet.private_networking = False droplets.append(droplet) return droplets
[ "This", "function", "returns", "a", "list", "of", "Droplet", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L48-L86
[ "def", "get_all_droplets", "(", "self", ",", "tag_name", "=", "None", ")", ":", "params", "=", "dict", "(", ")", "if", "tag_name", ":", "params", "[", "\"tag_name\"", "]", "=", "tag_name", "data", "=", "self", ".", "get_data", "(", "\"droplets/\"", ",", "params", "=", "params", ")", "droplets", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'droplets'", "]", ":", "droplet", "=", "Droplet", "(", "*", "*", "jsoned", ")", "droplet", ".", "token", "=", "self", ".", "token", "for", "net", "in", "droplet", ".", "networks", "[", "'v4'", "]", ":", "if", "net", "[", "'type'", "]", "==", "'private'", ":", "droplet", ".", "private_ip_address", "=", "net", "[", "'ip_address'", "]", "if", "net", "[", "'type'", "]", "==", "'public'", ":", "droplet", ".", "ip_address", "=", "net", "[", "'ip_address'", "]", "if", "droplet", ".", "networks", "[", "'v6'", "]", ":", "droplet", ".", "ip_v6_address", "=", "droplet", ".", "networks", "[", "'v6'", "]", "[", "0", "]", "[", "'ip_address'", "]", "if", "\"backups\"", "in", "droplet", ".", "features", ":", "droplet", ".", "backups", "=", "True", "else", ":", "droplet", ".", "backups", "=", "False", "if", "\"ipv6\"", "in", "droplet", ".", "features", ":", "droplet", ".", "ipv6", "=", "True", "else", ":", "droplet", ".", "ipv6", "=", "False", "if", "\"private_networking\"", "in", "droplet", ".", "features", ":", "droplet", ".", "private_networking", "=", "True", "else", ":", "droplet", ".", "private_networking", "=", "False", "droplets", ".", "append", "(", "droplet", ")", "return", "droplets" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_droplet
Return a Droplet by its ID.
digitalocean/Manager.py
def get_droplet(self, droplet_id): """ Return a Droplet by its ID. """ return Droplet.get_object(api_token=self.token, droplet_id=droplet_id)
def get_droplet(self, droplet_id): """ Return a Droplet by its ID. """ return Droplet.get_object(api_token=self.token, droplet_id=droplet_id)
[ "Return", "a", "Droplet", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L88-L92
[ "def", "get_droplet", "(", "self", ",", "droplet_id", ")", ":", "return", "Droplet", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "droplet_id", "=", "droplet_id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_sizes
This function returns a list of Size object.
digitalocean/Manager.py
def get_all_sizes(self): """ This function returns a list of Size object. """ data = self.get_data("sizes/") sizes = list() for jsoned in data['sizes']: size = Size(**jsoned) size.token = self.token sizes.append(size) return sizes
def get_all_sizes(self): """ This function returns a list of Size object. """ data = self.get_data("sizes/") sizes = list() for jsoned in data['sizes']: size = Size(**jsoned) size.token = self.token sizes.append(size) return sizes
[ "This", "function", "returns", "a", "list", "of", "Size", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L94-L104
[ "def", "get_all_sizes", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"sizes/\"", ")", "sizes", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'sizes'", "]", ":", "size", "=", "Size", "(", "*", "*", "jsoned", ")", "size", ".", "token", "=", "self", ".", "token", "sizes", ".", "append", "(", "size", ")", "return", "sizes" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_images
This function returns a list of Image object.
digitalocean/Manager.py
def get_images(self, private=False, type=None): """ This function returns a list of Image object. """ params = {} if private: params['private'] = 'true' if type: params['type'] = type data = self.get_data("images/", params=params) images = list() for jsoned in data['images']: image = Image(**jsoned) image.token = self.token images.append(image) return images
def get_images(self, private=False, type=None): """ This function returns a list of Image object. """ params = {} if private: params['private'] = 'true' if type: params['type'] = type data = self.get_data("images/", params=params) images = list() for jsoned in data['images']: image = Image(**jsoned) image.token = self.token images.append(image) return images
[ "This", "function", "returns", "a", "list", "of", "Image", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L106-L121
[ "def", "get_images", "(", "self", ",", "private", "=", "False", ",", "type", "=", "None", ")", ":", "params", "=", "{", "}", "if", "private", ":", "params", "[", "'private'", "]", "=", "'true'", "if", "type", ":", "params", "[", "'type'", "]", "=", "type", "data", "=", "self", ".", "get_data", "(", "\"images/\"", ",", "params", "=", "params", ")", "images", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'images'", "]", ":", "image", "=", "Image", "(", "*", "*", "jsoned", ")", "image", ".", "token", "=", "self", ".", "token", "images", ".", "append", "(", "image", ")", "return", "images" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_image
Return a Image by its ID/Slug.
digitalocean/Manager.py
def get_image(self, image_id_or_slug): """ Return a Image by its ID/Slug. """ return Image.get_object( api_token=self.token, image_id_or_slug=image_id_or_slug, )
def get_image(self, image_id_or_slug): """ Return a Image by its ID/Slug. """ return Image.get_object( api_token=self.token, image_id_or_slug=image_id_or_slug, )
[ "Return", "a", "Image", "by", "its", "ID", "/", "Slug", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L131-L138
[ "def", "get_image", "(", "self", ",", "image_id_or_slug", ")", ":", "return", "Image", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "image_id_or_slug", "=", "image_id_or_slug", ",", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_global_images
This function returns a list of Image objects representing public DigitalOcean images (e.g. base distribution images and 'One-Click' applications).
digitalocean/Manager.py
def get_global_images(self): """ This function returns a list of Image objects representing public DigitalOcean images (e.g. base distribution images and 'One-Click' applications). """ data = self.get_images() images = list() for i in data: if i.public: i.token = self.token images.append(i) return images
def get_global_images(self): """ This function returns a list of Image objects representing public DigitalOcean images (e.g. base distribution images and 'One-Click' applications). """ data = self.get_images() images = list() for i in data: if i.public: i.token = self.token images.append(i) return images
[ "This", "function", "returns", "a", "list", "of", "Image", "objects", "representing", "public", "DigitalOcean", "images", "(", "e", ".", "g", ".", "base", "distribution", "images", "and", "One", "-", "Click", "applications", ")", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L148-L160
[ "def", "get_global_images", "(", "self", ")", ":", "data", "=", "self", ".", "get_images", "(", ")", "images", "=", "list", "(", ")", "for", "i", "in", "data", ":", "if", "i", ".", "public", ":", "i", ".", "token", "=", "self", ".", "token", "images", ".", "append", "(", "i", ")", "return", "images" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_domains
This function returns a list of Domain object.
digitalocean/Manager.py
def get_all_domains(self): """ This function returns a list of Domain object. """ data = self.get_data("domains/") domains = list() for jsoned in data['domains']: domain = Domain(**jsoned) domain.token = self.token domains.append(domain) return domains
def get_all_domains(self): """ This function returns a list of Domain object. """ data = self.get_data("domains/") domains = list() for jsoned in data['domains']: domain = Domain(**jsoned) domain.token = self.token domains.append(domain) return domains
[ "This", "function", "returns", "a", "list", "of", "Domain", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L178-L188
[ "def", "get_all_domains", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"domains/\"", ")", "domains", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'domains'", "]", ":", "domain", "=", "Domain", "(", "*", "*", "jsoned", ")", "domain", ".", "token", "=", "self", ".", "token", "domains", ".", "append", "(", "domain", ")", "return", "domains" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_domain
Return a Domain by its domain_name
digitalocean/Manager.py
def get_domain(self, domain_name): """ Return a Domain by its domain_name """ return Domain.get_object(api_token=self.token, domain_name=domain_name)
def get_domain(self, domain_name): """ Return a Domain by its domain_name """ return Domain.get_object(api_token=self.token, domain_name=domain_name)
[ "Return", "a", "Domain", "by", "its", "domain_name" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L190-L194
[ "def", "get_domain", "(", "self", ",", "domain_name", ")", ":", "return", "Domain", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "domain_name", "=", "domain_name", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_sshkeys
This function returns a list of SSHKey object.
digitalocean/Manager.py
def get_all_sshkeys(self): """ This function returns a list of SSHKey object. """ data = self.get_data("account/keys/") ssh_keys = list() for jsoned in data['ssh_keys']: ssh_key = SSHKey(**jsoned) ssh_key.token = self.token ssh_keys.append(ssh_key) return ssh_keys
def get_all_sshkeys(self): """ This function returns a list of SSHKey object. """ data = self.get_data("account/keys/") ssh_keys = list() for jsoned in data['ssh_keys']: ssh_key = SSHKey(**jsoned) ssh_key.token = self.token ssh_keys.append(ssh_key) return ssh_keys
[ "This", "function", "returns", "a", "list", "of", "SSHKey", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L196-L206
[ "def", "get_all_sshkeys", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"account/keys/\"", ")", "ssh_keys", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'ssh_keys'", "]", ":", "ssh_key", "=", "SSHKey", "(", "*", "*", "jsoned", ")", "ssh_key", ".", "token", "=", "self", ".", "token", "ssh_keys", ".", "append", "(", "ssh_key", ")", "return", "ssh_keys" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_ssh_key
Return a SSHKey object by its ID.
digitalocean/Manager.py
def get_ssh_key(self, ssh_key_id): """ Return a SSHKey object by its ID. """ return SSHKey.get_object(api_token=self.token, ssh_key_id=ssh_key_id)
def get_ssh_key(self, ssh_key_id): """ Return a SSHKey object by its ID. """ return SSHKey.get_object(api_token=self.token, ssh_key_id=ssh_key_id)
[ "Return", "a", "SSHKey", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L208-L212
[ "def", "get_ssh_key", "(", "self", ",", "ssh_key_id", ")", ":", "return", "SSHKey", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "ssh_key_id", "=", "ssh_key_id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_tags
This method returns a list of all tags.
digitalocean/Manager.py
def get_all_tags(self): """ This method returns a list of all tags. """ data = self.get_data("tags") return [ Tag(token=self.token, **tag) for tag in data['tags'] ]
def get_all_tags(self): """ This method returns a list of all tags. """ data = self.get_data("tags") return [ Tag(token=self.token, **tag) for tag in data['tags'] ]
[ "This", "method", "returns", "a", "list", "of", "all", "tags", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L214-L221
[ "def", "get_all_tags", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"tags\"", ")", "return", "[", "Tag", "(", "token", "=", "self", ".", "token", ",", "*", "*", "tag", ")", "for", "tag", "in", "data", "[", "'tags'", "]", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_floating_ips
This function returns a list of FloatingIP objects.
digitalocean/Manager.py
def get_all_floating_ips(self): """ This function returns a list of FloatingIP objects. """ data = self.get_data("floating_ips") floating_ips = list() for jsoned in data['floating_ips']: floating_ip = FloatingIP(**jsoned) floating_ip.token = self.token floating_ips.append(floating_ip) return floating_ips
def get_all_floating_ips(self): """ This function returns a list of FloatingIP objects. """ data = self.get_data("floating_ips") floating_ips = list() for jsoned in data['floating_ips']: floating_ip = FloatingIP(**jsoned) floating_ip.token = self.token floating_ips.append(floating_ip) return floating_ips
[ "This", "function", "returns", "a", "list", "of", "FloatingIP", "objects", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L229-L239
[ "def", "get_all_floating_ips", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"floating_ips\"", ")", "floating_ips", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'floating_ips'", "]", ":", "floating_ip", "=", "FloatingIP", "(", "*", "*", "jsoned", ")", "floating_ip", ".", "token", "=", "self", ".", "token", "floating_ips", ".", "append", "(", "floating_ip", ")", "return", "floating_ips" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_floating_ip
Returns a of FloatingIP object by its IP address.
digitalocean/Manager.py
def get_floating_ip(self, ip): """ Returns a of FloatingIP object by its IP address. """ return FloatingIP.get_object(api_token=self.token, ip=ip)
def get_floating_ip(self, ip): """ Returns a of FloatingIP object by its IP address. """ return FloatingIP.get_object(api_token=self.token, ip=ip)
[ "Returns", "a", "of", "FloatingIP", "object", "by", "its", "IP", "address", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L241-L245
[ "def", "get_floating_ip", "(", "self", ",", "ip", ")", ":", "return", "FloatingIP", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "ip", "=", "ip", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_load_balancers
Returns a list of Load Balancer objects.
digitalocean/Manager.py
def get_all_load_balancers(self): """ Returns a list of Load Balancer objects. """ data = self.get_data("load_balancers") load_balancers = list() for jsoned in data['load_balancers']: load_balancer = LoadBalancer(**jsoned) load_balancer.token = self.token load_balancer.health_check = HealthCheck(**jsoned['health_check']) load_balancer.sticky_sessions = StickySesions(**jsoned['sticky_sessions']) forwarding_rules = list() for rule in jsoned['forwarding_rules']: forwarding_rules.append(ForwardingRule(**rule)) load_balancer.forwarding_rules = forwarding_rules load_balancers.append(load_balancer) return load_balancers
def get_all_load_balancers(self): """ Returns a list of Load Balancer objects. """ data = self.get_data("load_balancers") load_balancers = list() for jsoned in data['load_balancers']: load_balancer = LoadBalancer(**jsoned) load_balancer.token = self.token load_balancer.health_check = HealthCheck(**jsoned['health_check']) load_balancer.sticky_sessions = StickySesions(**jsoned['sticky_sessions']) forwarding_rules = list() for rule in jsoned['forwarding_rules']: forwarding_rules.append(ForwardingRule(**rule)) load_balancer.forwarding_rules = forwarding_rules load_balancers.append(load_balancer) return load_balancers
[ "Returns", "a", "list", "of", "Load", "Balancer", "objects", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L247-L264
[ "def", "get_all_load_balancers", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"load_balancers\"", ")", "load_balancers", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'load_balancers'", "]", ":", "load_balancer", "=", "LoadBalancer", "(", "*", "*", "jsoned", ")", "load_balancer", ".", "token", "=", "self", ".", "token", "load_balancer", ".", "health_check", "=", "HealthCheck", "(", "*", "*", "jsoned", "[", "'health_check'", "]", ")", "load_balancer", ".", "sticky_sessions", "=", "StickySesions", "(", "*", "*", "jsoned", "[", "'sticky_sessions'", "]", ")", "forwarding_rules", "=", "list", "(", ")", "for", "rule", "in", "jsoned", "[", "'forwarding_rules'", "]", ":", "forwarding_rules", ".", "append", "(", "ForwardingRule", "(", "*", "*", "rule", ")", ")", "load_balancer", ".", "forwarding_rules", "=", "forwarding_rules", "load_balancers", ".", "append", "(", "load_balancer", ")", "return", "load_balancers" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_load_balancer
Returns a Load Balancer object by its ID. Args: id (str): Load Balancer ID
digitalocean/Manager.py
def get_load_balancer(self, id): """ Returns a Load Balancer object by its ID. Args: id (str): Load Balancer ID """ return LoadBalancer.get_object(api_token=self.token, id=id)
def get_load_balancer(self, id): """ Returns a Load Balancer object by its ID. Args: id (str): Load Balancer ID """ return LoadBalancer.get_object(api_token=self.token, id=id)
[ "Returns", "a", "Load", "Balancer", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L266-L273
[ "def", "get_load_balancer", "(", "self", ",", "id", ")", ":", "return", "LoadBalancer", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "id", "=", "id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_certificate
Returns a Certificate object by its ID. Args: id (str): Certificate ID
digitalocean/Manager.py
def get_certificate(self, id): """ Returns a Certificate object by its ID. Args: id (str): Certificate ID """ return Certificate.get_object(api_token=self.token, cert_id=id)
def get_certificate(self, id): """ Returns a Certificate object by its ID. Args: id (str): Certificate ID """ return Certificate.get_object(api_token=self.token, cert_id=id)
[ "Returns", "a", "Certificate", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L275-L282
[ "def", "get_certificate", "(", "self", ",", "id", ")", ":", "return", "Certificate", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "cert_id", "=", "id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_certificates
This function returns a list of Certificate objects.
digitalocean/Manager.py
def get_all_certificates(self): """ This function returns a list of Certificate objects. """ data = self.get_data("certificates") certificates = list() for jsoned in data['certificates']: cert = Certificate(**jsoned) cert.token = self.token certificates.append(cert) return certificates
def get_all_certificates(self): """ This function returns a list of Certificate objects. """ data = self.get_data("certificates") certificates = list() for jsoned in data['certificates']: cert = Certificate(**jsoned) cert.token = self.token certificates.append(cert) return certificates
[ "This", "function", "returns", "a", "list", "of", "Certificate", "objects", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L284-L295
[ "def", "get_all_certificates", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"certificates\"", ")", "certificates", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'certificates'", "]", ":", "cert", "=", "Certificate", "(", "*", "*", "jsoned", ")", "cert", ".", "token", "=", "self", ".", "token", "certificates", ".", "append", "(", "cert", ")", "return", "certificates" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_snapshot
Return a Snapshot by its ID.
digitalocean/Manager.py
def get_snapshot(self, snapshot_id): """ Return a Snapshot by its ID. """ return Snapshot.get_object( api_token=self.token, snapshot_id=snapshot_id )
def get_snapshot(self, snapshot_id): """ Return a Snapshot by its ID. """ return Snapshot.get_object( api_token=self.token, snapshot_id=snapshot_id )
[ "Return", "a", "Snapshot", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L297-L303
[ "def", "get_snapshot", "(", "self", ",", "snapshot_id", ")", ":", "return", "Snapshot", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "snapshot_id", "=", "snapshot_id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_snapshots
This method returns a list of all Snapshots.
digitalocean/Manager.py
def get_all_snapshots(self): """ This method returns a list of all Snapshots. """ data = self.get_data("snapshots/") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
def get_all_snapshots(self): """ This method returns a list of all Snapshots. """ data = self.get_data("snapshots/") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
[ "This", "method", "returns", "a", "list", "of", "all", "Snapshots", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L305-L313
[ "def", "get_all_snapshots", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"snapshots/\"", ")", "return", "[", "Snapshot", "(", "token", "=", "self", ".", "token", ",", "*", "*", "snapshot", ")", "for", "snapshot", "in", "data", "[", "'snapshots'", "]", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_droplet_snapshots
This method returns a list of all Snapshots based on Droplets.
digitalocean/Manager.py
def get_droplet_snapshots(self): """ This method returns a list of all Snapshots based on Droplets. """ data = self.get_data("snapshots?resource_type=droplet") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
def get_droplet_snapshots(self): """ This method returns a list of all Snapshots based on Droplets. """ data = self.get_data("snapshots?resource_type=droplet") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
[ "This", "method", "returns", "a", "list", "of", "all", "Snapshots", "based", "on", "Droplets", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L315-L323
[ "def", "get_droplet_snapshots", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"snapshots?resource_type=droplet\"", ")", "return", "[", "Snapshot", "(", "token", "=", "self", ".", "token", ",", "*", "*", "snapshot", ")", "for", "snapshot", "in", "data", "[", "'snapshots'", "]", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_volume_snapshots
This method returns a list of all Snapshots based on volumes.
digitalocean/Manager.py
def get_volume_snapshots(self): """ This method returns a list of all Snapshots based on volumes. """ data = self.get_data("snapshots?resource_type=volume") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
def get_volume_snapshots(self): """ This method returns a list of all Snapshots based on volumes. """ data = self.get_data("snapshots?resource_type=volume") return [ Snapshot(token=self.token, **snapshot) for snapshot in data['snapshots'] ]
[ "This", "method", "returns", "a", "list", "of", "all", "Snapshots", "based", "on", "volumes", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L325-L333
[ "def", "get_volume_snapshots", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"snapshots?resource_type=volume\"", ")", "return", "[", "Snapshot", "(", "token", "=", "self", ".", "token", ",", "*", "*", "snapshot", ")", "for", "snapshot", "in", "data", "[", "'snapshots'", "]", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_volumes
This function returns a list of Volume objects.
digitalocean/Manager.py
def get_all_volumes(self, region=None): """ This function returns a list of Volume objects. """ if region: url = "volumes?region={}".format(region) else: url = "volumes" data = self.get_data(url) volumes = list() for jsoned in data['volumes']: volume = Volume(**jsoned) volume.token = self.token volumes.append(volume) return volumes
def get_all_volumes(self, region=None): """ This function returns a list of Volume objects. """ if region: url = "volumes?region={}".format(region) else: url = "volumes" data = self.get_data(url) volumes = list() for jsoned in data['volumes']: volume = Volume(**jsoned) volume.token = self.token volumes.append(volume) return volumes
[ "This", "function", "returns", "a", "list", "of", "Volume", "objects", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L335-L349
[ "def", "get_all_volumes", "(", "self", ",", "region", "=", "None", ")", ":", "if", "region", ":", "url", "=", "\"volumes?region={}\"", ".", "format", "(", "region", ")", "else", ":", "url", "=", "\"volumes\"", "data", "=", "self", ".", "get_data", "(", "url", ")", "volumes", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'volumes'", "]", ":", "volume", "=", "Volume", "(", "*", "*", "jsoned", ")", "volume", ".", "token", "=", "self", ".", "token", "volumes", ".", "append", "(", "volume", ")", "return", "volumes" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_volume
Returns a Volume object by its ID.
digitalocean/Manager.py
def get_volume(self, volume_id): """ Returns a Volume object by its ID. """ return Volume.get_object(api_token=self.token, volume_id=volume_id)
def get_volume(self, volume_id): """ Returns a Volume object by its ID. """ return Volume.get_object(api_token=self.token, volume_id=volume_id)
[ "Returns", "a", "Volume", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L351-L355
[ "def", "get_volume", "(", "self", ",", "volume_id", ")", ":", "return", "Volume", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "volume_id", "=", "volume_id", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_all_firewalls
This function returns a list of Firewall objects.
digitalocean/Manager.py
def get_all_firewalls(self): """ This function returns a list of Firewall objects. """ data = self.get_data("firewalls") firewalls = list() for jsoned in data['firewalls']: firewall = Firewall(**jsoned) firewall.token = self.token in_rules = list() for rule in jsoned['inbound_rules']: in_rules.append(InboundRule(**rule)) firewall.inbound_rules = in_rules out_rules = list() for rule in jsoned['outbound_rules']: out_rules.append(OutboundRule(**rule)) firewall.outbound_rules = out_rules firewalls.append(firewall) return firewalls
def get_all_firewalls(self): """ This function returns a list of Firewall objects. """ data = self.get_data("firewalls") firewalls = list() for jsoned in data['firewalls']: firewall = Firewall(**jsoned) firewall.token = self.token in_rules = list() for rule in jsoned['inbound_rules']: in_rules.append(InboundRule(**rule)) firewall.inbound_rules = in_rules out_rules = list() for rule in jsoned['outbound_rules']: out_rules.append(OutboundRule(**rule)) firewall.outbound_rules = out_rules firewalls.append(firewall) return firewalls
[ "This", "function", "returns", "a", "list", "of", "Firewall", "objects", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L357-L375
[ "def", "get_all_firewalls", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"firewalls\"", ")", "firewalls", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'firewalls'", "]", ":", "firewall", "=", "Firewall", "(", "*", "*", "jsoned", ")", "firewall", ".", "token", "=", "self", ".", "token", "in_rules", "=", "list", "(", ")", "for", "rule", "in", "jsoned", "[", "'inbound_rules'", "]", ":", "in_rules", ".", "append", "(", "InboundRule", "(", "*", "*", "rule", ")", ")", "firewall", ".", "inbound_rules", "=", "in_rules", "out_rules", "=", "list", "(", ")", "for", "rule", "in", "jsoned", "[", "'outbound_rules'", "]", ":", "out_rules", ".", "append", "(", "OutboundRule", "(", "*", "*", "rule", ")", ")", "firewall", ".", "outbound_rules", "=", "out_rules", "firewalls", ".", "append", "(", "firewall", ")", "return", "firewalls" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Manager.get_firewall
Return a Firewall by its ID.
digitalocean/Manager.py
def get_firewall(self, firewall_id): """ Return a Firewall by its ID. """ return Firewall.get_object( api_token=self.token, firewall_id=firewall_id, )
def get_firewall(self, firewall_id): """ Return a Firewall by its ID. """ return Firewall.get_object( api_token=self.token, firewall_id=firewall_id, )
[ "Return", "a", "Firewall", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Manager.py#L377-L384
[ "def", "get_firewall", "(", "self", ",", "firewall_id", ")", ":", "return", "Firewall", ".", "get_object", "(", "api_token", "=", "self", ".", "token", ",", "firewall_id", "=", "firewall_id", ",", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.get_object
Class method that will return a LoadBalancer object by its ID. Args: api_token (str): DigitalOcean API token id (str): Load Balancer ID
digitalocean/LoadBalancer.py
def get_object(cls, api_token, id): """ Class method that will return a LoadBalancer object by its ID. Args: api_token (str): DigitalOcean API token id (str): Load Balancer ID """ load_balancer = cls(token=api_token, id=id) load_balancer.load() return load_balancer
def get_object(cls, api_token, id): """ Class method that will return a LoadBalancer object by its ID. Args: api_token (str): DigitalOcean API token id (str): Load Balancer ID """ load_balancer = cls(token=api_token, id=id) load_balancer.load() return load_balancer
[ "Class", "method", "that", "will", "return", "a", "LoadBalancer", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L146-L156
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "id", ")", ":", "load_balancer", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "id", ")", "load_balancer", ".", "load", "(", ")", "return", "load_balancer" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.load
Loads updated attributues for a LoadBalancer object. Requires self.id to be set.
digitalocean/LoadBalancer.py
def load(self): """ Loads updated attributues for a LoadBalancer object. Requires self.id to be set. """ data = self.get_data('load_balancers/%s' % self.id, type=GET) load_balancer = data['load_balancer'] # Setting the attribute values for attr in load_balancer.keys(): if attr == 'health_check': health_check = HealthCheck(**load_balancer['health_check']) setattr(self, attr, health_check) elif attr == 'sticky_sessions': sticky_ses = StickySesions(**load_balancer['sticky_sessions']) setattr(self, attr, sticky_ses) elif attr == 'forwarding_rules': rules = list() for rule in load_balancer['forwarding_rules']: rules.append(ForwardingRule(**rule)) setattr(self, attr, rules) else: setattr(self, attr, load_balancer[attr]) return self
def load(self): """ Loads updated attributues for a LoadBalancer object. Requires self.id to be set. """ data = self.get_data('load_balancers/%s' % self.id, type=GET) load_balancer = data['load_balancer'] # Setting the attribute values for attr in load_balancer.keys(): if attr == 'health_check': health_check = HealthCheck(**load_balancer['health_check']) setattr(self, attr, health_check) elif attr == 'sticky_sessions': sticky_ses = StickySesions(**load_balancer['sticky_sessions']) setattr(self, attr, sticky_ses) elif attr == 'forwarding_rules': rules = list() for rule in load_balancer['forwarding_rules']: rules.append(ForwardingRule(**rule)) setattr(self, attr, rules) else: setattr(self, attr, load_balancer[attr]) return self
[ "Loads", "updated", "attributues", "for", "a", "LoadBalancer", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L158-L183
[ "def", "load", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "'load_balancers/%s'", "%", "self", ".", "id", ",", "type", "=", "GET", ")", "load_balancer", "=", "data", "[", "'load_balancer'", "]", "# Setting the attribute values", "for", "attr", "in", "load_balancer", ".", "keys", "(", ")", ":", "if", "attr", "==", "'health_check'", ":", "health_check", "=", "HealthCheck", "(", "*", "*", "load_balancer", "[", "'health_check'", "]", ")", "setattr", "(", "self", ",", "attr", ",", "health_check", ")", "elif", "attr", "==", "'sticky_sessions'", ":", "sticky_ses", "=", "StickySesions", "(", "*", "*", "load_balancer", "[", "'sticky_sessions'", "]", ")", "setattr", "(", "self", ",", "attr", ",", "sticky_ses", ")", "elif", "attr", "==", "'forwarding_rules'", ":", "rules", "=", "list", "(", ")", "for", "rule", "in", "load_balancer", "[", "'forwarding_rules'", "]", ":", "rules", ".", "append", "(", "ForwardingRule", "(", "*", "*", "rule", ")", ")", "setattr", "(", "self", ",", "attr", ",", "rules", ")", "else", ":", "setattr", "(", "self", ",", "attr", ",", "load_balancer", "[", "attr", "]", ")", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.create
Creates a new LoadBalancer. Note: Every argument and parameter given to this method will be assigned to the object. Args: name (str): The Load Balancer's name region (str): The slug identifier for a DigitalOcean region algorithm (str, optional): The load balancing algorithm to be used. Currently, it must be either "round_robin" or "least_connections" forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects health_check (obj, optional): A `HealthCheck` object sticky_sessions (obj, optional): A `StickySessions` object redirect_http_to_https (bool, optional): A boolean indicating whether HTTP requests to the Load Balancer should be redirected to HTTPS droplet_ids (obj:`list` of `int`): A list of IDs representing Droplets to be added to the Load Balancer (mutually exclusive with 'tag') tag (str): A string representing a DigitalOcean Droplet tag (mutually exclusive with 'droplet_ids')
digitalocean/LoadBalancer.py
def create(self, *args, **kwargs): """ Creates a new LoadBalancer. Note: Every argument and parameter given to this method will be assigned to the object. Args: name (str): The Load Balancer's name region (str): The slug identifier for a DigitalOcean region algorithm (str, optional): The load balancing algorithm to be used. Currently, it must be either "round_robin" or "least_connections" forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects health_check (obj, optional): A `HealthCheck` object sticky_sessions (obj, optional): A `StickySessions` object redirect_http_to_https (bool, optional): A boolean indicating whether HTTP requests to the Load Balancer should be redirected to HTTPS droplet_ids (obj:`list` of `int`): A list of IDs representing Droplets to be added to the Load Balancer (mutually exclusive with 'tag') tag (str): A string representing a DigitalOcean Droplet tag (mutually exclusive with 'droplet_ids') """ rules_dict = [rule.__dict__ for rule in self.forwarding_rules] params = {'name': self.name, 'region': self.region, 'forwarding_rules': rules_dict, 'redirect_http_to_https': self.redirect_http_to_https} if self.droplet_ids and self.tag: raise ValueError('droplet_ids and tag are mutually exclusive args') elif self.tag: params['tag'] = self.tag else: params['droplet_ids'] = self.droplet_ids if self.algorithm: params['algorithm'] = self.algorithm if self.health_check: params['health_check'] = self.health_check.__dict__ if self.sticky_sessions: params['sticky_sessions'] = self.sticky_sessions.__dict__ data = self.get_data('load_balancers/', type=POST, params=params) if data: self.id = data['load_balancer']['id'] self.ip = data['load_balancer']['ip'] self.algorithm = data['load_balancer']['algorithm'] self.health_check = HealthCheck( **data['load_balancer']['health_check']) self.sticky_sessions = StickySesions( **data['load_balancer']['sticky_sessions']) self.droplet_ids = data['load_balancer']['droplet_ids'] self.status = 
data['load_balancer']['status'] self.created_at = data['load_balancer']['created_at'] return self
def create(self, *args, **kwargs): """ Creates a new LoadBalancer. Note: Every argument and parameter given to this method will be assigned to the object. Args: name (str): The Load Balancer's name region (str): The slug identifier for a DigitalOcean region algorithm (str, optional): The load balancing algorithm to be used. Currently, it must be either "round_robin" or "least_connections" forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects health_check (obj, optional): A `HealthCheck` object sticky_sessions (obj, optional): A `StickySessions` object redirect_http_to_https (bool, optional): A boolean indicating whether HTTP requests to the Load Balancer should be redirected to HTTPS droplet_ids (obj:`list` of `int`): A list of IDs representing Droplets to be added to the Load Balancer (mutually exclusive with 'tag') tag (str): A string representing a DigitalOcean Droplet tag (mutually exclusive with 'droplet_ids') """ rules_dict = [rule.__dict__ for rule in self.forwarding_rules] params = {'name': self.name, 'region': self.region, 'forwarding_rules': rules_dict, 'redirect_http_to_https': self.redirect_http_to_https} if self.droplet_ids and self.tag: raise ValueError('droplet_ids and tag are mutually exclusive args') elif self.tag: params['tag'] = self.tag else: params['droplet_ids'] = self.droplet_ids if self.algorithm: params['algorithm'] = self.algorithm if self.health_check: params['health_check'] = self.health_check.__dict__ if self.sticky_sessions: params['sticky_sessions'] = self.sticky_sessions.__dict__ data = self.get_data('load_balancers/', type=POST, params=params) if data: self.id = data['load_balancer']['id'] self.ip = data['load_balancer']['ip'] self.algorithm = data['load_balancer']['algorithm'] self.health_check = HealthCheck( **data['load_balancer']['health_check']) self.sticky_sessions = StickySesions( **data['load_balancer']['sticky_sessions']) self.droplet_ids = data['load_balancer']['droplet_ids'] self.status = 
data['load_balancer']['status'] self.created_at = data['load_balancer']['created_at'] return self
[ "Creates", "a", "new", "LoadBalancer", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L185-L244
[ "def", "create", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "rules_dict", "=", "[", "rule", ".", "__dict__", "for", "rule", "in", "self", ".", "forwarding_rules", "]", "params", "=", "{", "'name'", ":", "self", ".", "name", ",", "'region'", ":", "self", ".", "region", ",", "'forwarding_rules'", ":", "rules_dict", ",", "'redirect_http_to_https'", ":", "self", ".", "redirect_http_to_https", "}", "if", "self", ".", "droplet_ids", "and", "self", ".", "tag", ":", "raise", "ValueError", "(", "'droplet_ids and tag are mutually exclusive args'", ")", "elif", "self", ".", "tag", ":", "params", "[", "'tag'", "]", "=", "self", ".", "tag", "else", ":", "params", "[", "'droplet_ids'", "]", "=", "self", ".", "droplet_ids", "if", "self", ".", "algorithm", ":", "params", "[", "'algorithm'", "]", "=", "self", ".", "algorithm", "if", "self", ".", "health_check", ":", "params", "[", "'health_check'", "]", "=", "self", ".", "health_check", ".", "__dict__", "if", "self", ".", "sticky_sessions", ":", "params", "[", "'sticky_sessions'", "]", "=", "self", ".", "sticky_sessions", ".", "__dict__", "data", "=", "self", ".", "get_data", "(", "'load_balancers/'", ",", "type", "=", "POST", ",", "params", "=", "params", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'load_balancer'", "]", "[", "'id'", "]", "self", ".", "ip", "=", "data", "[", "'load_balancer'", "]", "[", "'ip'", "]", "self", ".", "algorithm", "=", "data", "[", "'load_balancer'", "]", "[", "'algorithm'", "]", "self", ".", "health_check", "=", "HealthCheck", "(", "*", "*", "data", "[", "'load_balancer'", "]", "[", "'health_check'", "]", ")", "self", ".", "sticky_sessions", "=", "StickySesions", "(", "*", "*", "data", "[", "'load_balancer'", "]", "[", "'sticky_sessions'", "]", ")", "self", ".", "droplet_ids", "=", "data", "[", "'load_balancer'", "]", "[", "'droplet_ids'", "]", "self", ".", "status", "=", "data", "[", "'load_balancer'", "]", "[", "'status'", "]", "self", ".", "created_at", 
"=", "data", "[", "'load_balancer'", "]", "[", "'created_at'", "]", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.save
Save the LoadBalancer
digitalocean/LoadBalancer.py
def save(self): """ Save the LoadBalancer """ forwarding_rules = [rule.__dict__ for rule in self.forwarding_rules] data = { 'name': self.name, 'region': self.region['slug'], 'forwarding_rules': forwarding_rules, 'redirect_http_to_https': self.redirect_http_to_https } if self.tag: data['tag'] = self.tag else: data['droplet_ids'] = self.droplet_ids if self.algorithm: data["algorithm"] = self.algorithm if self.health_check: data['health_check'] = self.health_check.__dict__ if self.sticky_sessions: data['sticky_sessions'] = self.sticky_sessions.__dict__ return self.get_data("load_balancers/%s/" % self.id, type=PUT, params=data)
def save(self): """ Save the LoadBalancer """ forwarding_rules = [rule.__dict__ for rule in self.forwarding_rules] data = { 'name': self.name, 'region': self.region['slug'], 'forwarding_rules': forwarding_rules, 'redirect_http_to_https': self.redirect_http_to_https } if self.tag: data['tag'] = self.tag else: data['droplet_ids'] = self.droplet_ids if self.algorithm: data["algorithm"] = self.algorithm if self.health_check: data['health_check'] = self.health_check.__dict__ if self.sticky_sessions: data['sticky_sessions'] = self.sticky_sessions.__dict__ return self.get_data("load_balancers/%s/" % self.id, type=PUT, params=data)
[ "Save", "the", "LoadBalancer" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L246-L273
[ "def", "save", "(", "self", ")", ":", "forwarding_rules", "=", "[", "rule", ".", "__dict__", "for", "rule", "in", "self", ".", "forwarding_rules", "]", "data", "=", "{", "'name'", ":", "self", ".", "name", ",", "'region'", ":", "self", ".", "region", "[", "'slug'", "]", ",", "'forwarding_rules'", ":", "forwarding_rules", ",", "'redirect_http_to_https'", ":", "self", ".", "redirect_http_to_https", "}", "if", "self", ".", "tag", ":", "data", "[", "'tag'", "]", "=", "self", ".", "tag", "else", ":", "data", "[", "'droplet_ids'", "]", "=", "self", ".", "droplet_ids", "if", "self", ".", "algorithm", ":", "data", "[", "\"algorithm\"", "]", "=", "self", ".", "algorithm", "if", "self", ".", "health_check", ":", "data", "[", "'health_check'", "]", "=", "self", ".", "health_check", ".", "__dict__", "if", "self", ".", "sticky_sessions", ":", "data", "[", "'sticky_sessions'", "]", "=", "self", ".", "sticky_sessions", ".", "__dict__", "return", "self", ".", "get_data", "(", "\"load_balancers/%s/\"", "%", "self", ".", "id", ",", "type", "=", "PUT", ",", "params", "=", "data", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.add_droplets
Assign a LoadBalancer to a Droplet. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs
digitalocean/LoadBalancer.py
def add_droplets(self, droplet_ids): """ Assign a LoadBalancer to a Droplet. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs """ return self.get_data( "load_balancers/%s/droplets/" % self.id, type=POST, params={"droplet_ids": droplet_ids} )
def add_droplets(self, droplet_ids): """ Assign a LoadBalancer to a Droplet. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs """ return self.get_data( "load_balancers/%s/droplets/" % self.id, type=POST, params={"droplet_ids": droplet_ids} )
[ "Assign", "a", "LoadBalancer", "to", "a", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L281-L292
[ "def", "add_droplets", "(", "self", ",", "droplet_ids", ")", ":", "return", "self", ".", "get_data", "(", "\"load_balancers/%s/droplets/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"droplet_ids\"", ":", "droplet_ids", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.remove_droplets
Unassign a LoadBalancer. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs
digitalocean/LoadBalancer.py
def remove_droplets(self, droplet_ids): """ Unassign a LoadBalancer. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs """ return self.get_data( "load_balancers/%s/droplets/" % self.id, type=DELETE, params={"droplet_ids": droplet_ids} )
def remove_droplets(self, droplet_ids): """ Unassign a LoadBalancer. Args: droplet_ids (obj:`list` of `int`): A list of Droplet IDs """ return self.get_data( "load_balancers/%s/droplets/" % self.id, type=DELETE, params={"droplet_ids": droplet_ids} )
[ "Unassign", "a", "LoadBalancer", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L294-L305
[ "def", "remove_droplets", "(", "self", ",", "droplet_ids", ")", ":", "return", "self", ".", "get_data", "(", "\"load_balancers/%s/droplets/\"", "%", "self", ".", "id", ",", "type", "=", "DELETE", ",", "params", "=", "{", "\"droplet_ids\"", ":", "droplet_ids", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.add_forwarding_rules
Adds new forwarding rules to a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
digitalocean/LoadBalancer.py
def add_forwarding_rules(self, forwarding_rules): """ Adds new forwarding rules to a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects """ rules_dict = [rule.__dict__ for rule in forwarding_rules] return self.get_data( "load_balancers/%s/forwarding_rules/" % self.id, type=POST, params={"forwarding_rules": rules_dict} )
def add_forwarding_rules(self, forwarding_rules): """ Adds new forwarding rules to a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects """ rules_dict = [rule.__dict__ for rule in forwarding_rules] return self.get_data( "load_balancers/%s/forwarding_rules/" % self.id, type=POST, params={"forwarding_rules": rules_dict} )
[ "Adds", "new", "forwarding", "rules", "to", "a", "LoadBalancer", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L307-L320
[ "def", "add_forwarding_rules", "(", "self", ",", "forwarding_rules", ")", ":", "rules_dict", "=", "[", "rule", ".", "__dict__", "for", "rule", "in", "forwarding_rules", "]", "return", "self", ".", "get_data", "(", "\"load_balancers/%s/forwarding_rules/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"forwarding_rules\"", ":", "rules_dict", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
LoadBalancer.remove_forwarding_rules
Removes existing forwarding rules from a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
digitalocean/LoadBalancer.py
def remove_forwarding_rules(self, forwarding_rules): """ Removes existing forwarding rules from a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects """ rules_dict = [rule.__dict__ for rule in forwarding_rules] return self.get_data( "load_balancers/%s/forwarding_rules/" % self.id, type=DELETE, params={"forwarding_rules": rules_dict} )
def remove_forwarding_rules(self, forwarding_rules): """ Removes existing forwarding rules from a LoadBalancer. Args: forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects """ rules_dict = [rule.__dict__ for rule in forwarding_rules] return self.get_data( "load_balancers/%s/forwarding_rules/" % self.id, type=DELETE, params={"forwarding_rules": rules_dict} )
[ "Removes", "existing", "forwarding", "rules", "from", "a", "LoadBalancer", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/LoadBalancer.py#L322-L335
[ "def", "remove_forwarding_rules", "(", "self", ",", "forwarding_rules", ")", ":", "rules_dict", "=", "[", "rule", ".", "__dict__", "for", "rule", "in", "forwarding_rules", "]", "return", "self", ".", "get_data", "(", "\"load_balancers/%s/forwarding_rules/\"", "%", "self", ".", "id", ",", "type", "=", "DELETE", ",", "params", "=", "{", "\"forwarding_rules\"", ":", "rules_dict", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Metadata.get_data
Customized version of get_data to directly get the data without using the authentication method.
digitalocean/Metadata.py
def get_data(self, url, headers=dict(), params=dict(), render_json=True): """ Customized version of get_data to directly get the data without using the authentication method. """ url = urljoin(self.end_point, url) response = requests.get(url, headers=headers, params=params, timeout=self.get_timeout()) if render_json: return response.json() return response.content
def get_data(self, url, headers=dict(), params=dict(), render_json=True): """ Customized version of get_data to directly get the data without using the authentication method. """ url = urljoin(self.end_point, url) response = requests.get(url, headers=headers, params=params, timeout=self.get_timeout()) if render_json: return response.json() return response.content
[ "Customized", "version", "of", "get_data", "to", "directly", "get", "the", "data", "without", "using", "the", "authentication", "method", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Metadata.py#L23-L35
[ "def", "get_data", "(", "self", ",", "url", ",", "headers", "=", "dict", "(", ")", ",", "params", "=", "dict", "(", ")", ",", "render_json", "=", "True", ")", ":", "url", "=", "urljoin", "(", "self", ".", "end_point", ",", "url", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "timeout", "=", "self", ".", "get_timeout", "(", ")", ")", "if", "render_json", ":", "return", "response", ".", "json", "(", ")", "return", "response", ".", "content" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Record.get_object
Class method that will return a Record object by ID and the domain.
digitalocean/Record.py
def get_object(cls, api_token, domain, record_id): """ Class method that will return a Record object by ID and the domain. """ record = cls(token=api_token, domain=domain, id=record_id) record.load() return record
def get_object(cls, api_token, domain, record_id): """ Class method that will return a Record object by ID and the domain. """ record = cls(token=api_token, domain=domain, id=record_id) record.load() return record
[ "Class", "method", "that", "will", "return", "a", "Record", "object", "by", "ID", "and", "the", "domain", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Record.py#L39-L45
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "domain", ",", "record_id", ")", ":", "record", "=", "cls", "(", "token", "=", "api_token", ",", "domain", "=", "domain", ",", "id", "=", "record_id", ")", "record", ".", "load", "(", ")", "return", "record" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Record.create
Creates a new record for a domain. Args: type (str): The type of the DNS record (e.g. A, CNAME, TXT). name (str): The host name, alias, or service being defined by the record. data (int): Variable data depending on record type. priority (int): The priority for SRV and MX records. port (int): The port for SRV records. ttl (int): The time to live for the record, in seconds. weight (int): The weight for SRV records. flags (int): An unsigned integer between 0-255 used for CAA records. tags (string): The parameter tag for CAA records. Valid values are "issue", "wildissue", or "iodef"
digitalocean/Record.py
def create(self): """ Creates a new record for a domain. Args: type (str): The type of the DNS record (e.g. A, CNAME, TXT). name (str): The host name, alias, or service being defined by the record. data (int): Variable data depending on record type. priority (int): The priority for SRV and MX records. port (int): The port for SRV records. ttl (int): The time to live for the record, in seconds. weight (int): The weight for SRV records. flags (int): An unsigned integer between 0-255 used for CAA records. tags (string): The parameter tag for CAA records. Valid values are "issue", "wildissue", or "iodef" """ input_params = { "type": self.type, "data": self.data, "name": self.name, "priority": self.priority, "port": self.port, "ttl": self.ttl, "weight": self.weight, "flags": self.flags, "tags": self.tags } data = self.get_data( "domains/%s/records" % (self.domain), type=POST, params=input_params, ) if data: self.id = data['domain_record']['id']
def create(self): """ Creates a new record for a domain. Args: type (str): The type of the DNS record (e.g. A, CNAME, TXT). name (str): The host name, alias, or service being defined by the record. data (int): Variable data depending on record type. priority (int): The priority for SRV and MX records. port (int): The port for SRV records. ttl (int): The time to live for the record, in seconds. weight (int): The weight for SRV records. flags (int): An unsigned integer between 0-255 used for CAA records. tags (string): The parameter tag for CAA records. Valid values are "issue", "wildissue", or "iodef" """ input_params = { "type": self.type, "data": self.data, "name": self.name, "priority": self.priority, "port": self.port, "ttl": self.ttl, "weight": self.weight, "flags": self.flags, "tags": self.tags } data = self.get_data( "domains/%s/records" % (self.domain), type=POST, params=input_params, ) if data: self.id = data['domain_record']['id']
[ "Creates", "a", "new", "record", "for", "a", "domain", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Record.py#L47-L83
[ "def", "create", "(", "self", ")", ":", "input_params", "=", "{", "\"type\"", ":", "self", ".", "type", ",", "\"data\"", ":", "self", ".", "data", ",", "\"name\"", ":", "self", ".", "name", ",", "\"priority\"", ":", "self", ".", "priority", ",", "\"port\"", ":", "self", ".", "port", ",", "\"ttl\"", ":", "self", ".", "ttl", ",", "\"weight\"", ":", "self", ".", "weight", ",", "\"flags\"", ":", "self", ".", "flags", ",", "\"tags\"", ":", "self", ".", "tags", "}", "data", "=", "self", ".", "get_data", "(", "\"domains/%s/records\"", "%", "(", "self", ".", "domain", ")", ",", "type", "=", "POST", ",", "params", "=", "input_params", ",", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'domain_record'", "]", "[", "'id'", "]" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Record.destroy
Destroy the record
digitalocean/Record.py
def destroy(self): """ Destroy the record """ return self.get_data( "domains/%s/records/%s" % (self.domain, self.id), type=DELETE, )
def destroy(self): """ Destroy the record """ return self.get_data( "domains/%s/records/%s" % (self.domain, self.id), type=DELETE, )
[ "Destroy", "the", "record" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Record.py#L85-L92
[ "def", "destroy", "(", "self", ")", ":", "return", "self", ".", "get_data", "(", "\"domains/%s/records/%s\"", "%", "(", "self", ".", "domain", ",", "self", ".", "id", ")", ",", "type", "=", "DELETE", ",", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Record.save
Save existing record
digitalocean/Record.py
def save(self): """ Save existing record """ data = { "type": self.type, "data": self.data, "name": self.name, "priority": self.priority, "port": self.port, "ttl": self.ttl, "weight": self.weight, "flags": self.flags, "tags": self.tags } return self.get_data( "domains/%s/records/%s" % (self.domain, self.id), type=PUT, params=data )
def save(self): """ Save existing record """ data = { "type": self.type, "data": self.data, "name": self.name, "priority": self.priority, "port": self.port, "ttl": self.ttl, "weight": self.weight, "flags": self.flags, "tags": self.tags } return self.get_data( "domains/%s/records/%s" % (self.domain, self.id), type=PUT, params=data )
[ "Save", "existing", "record" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Record.py#L94-L113
[ "def", "save", "(", "self", ")", ":", "data", "=", "{", "\"type\"", ":", "self", ".", "type", ",", "\"data\"", ":", "self", ".", "data", ",", "\"name\"", ":", "self", ".", "name", ",", "\"priority\"", ":", "self", ".", "priority", ",", "\"port\"", ":", "self", ".", "port", ",", "\"ttl\"", ":", "self", ".", "ttl", ",", "\"weight\"", ":", "self", ".", "weight", ",", "\"flags\"", ":", "self", ".", "flags", ",", "\"tags\"", ":", "self", ".", "tags", "}", "return", "self", ".", "get_data", "(", "\"domains/%s/records/%s\"", "%", "(", "self", ".", "domain", ",", "self", ".", "id", ")", ",", "type", "=", "PUT", ",", "params", "=", "data", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
BaseAPI.__perform_request
This method will perform the real request, in this way we can customize only the "output" of the API call by using self.__call_api method. This method will return the request object.
digitalocean/baseapi.py
def __perform_request(self, url, type=GET, params=None): """ This method will perform the real request, in this way we can customize only the "output" of the API call by using self.__call_api method. This method will return the request object. """ if params is None: params = {} if not self.token: raise TokenError("No token provided. Please use a valid token") url = urlparse.urljoin(self.end_point, url) # lookup table to find out the appropriate requests method, # headers and payload type (json or query parameters) identity = lambda x: x json_dumps = lambda x: json.dumps(x) lookup = { GET: (self._session.get, {}, 'params', identity), POST: (self._session.post, {'Content-type': 'application/json'}, 'data', json_dumps), PUT: (self._session.put, {'Content-type': 'application/json'}, 'data', json_dumps), DELETE: (self._session.delete, {'content-type': 'application/json'}, 'data', json_dumps), } requests_method, headers, payload, transform = lookup[type] agent = "{0}/{1} {2}/{3}".format('python-digitalocean', __version__, requests.__name__, requests.__version__) headers.update({'Authorization': 'Bearer ' + self.token, 'User-Agent': agent}) kwargs = {'headers': headers, payload: transform(params)} timeout = self.get_timeout() if timeout: kwargs['timeout'] = timeout # remove token from log headers_str = str(headers).replace(self.token.strip(), 'TOKEN') self._log.debug('%s %s %s:%s %s %s' % (type, url, payload, params, headers_str, timeout)) return requests_method(url, **kwargs)
def __perform_request(self, url, type=GET, params=None): """ This method will perform the real request, in this way we can customize only the "output" of the API call by using self.__call_api method. This method will return the request object. """ if params is None: params = {} if not self.token: raise TokenError("No token provided. Please use a valid token") url = urlparse.urljoin(self.end_point, url) # lookup table to find out the appropriate requests method, # headers and payload type (json or query parameters) identity = lambda x: x json_dumps = lambda x: json.dumps(x) lookup = { GET: (self._session.get, {}, 'params', identity), POST: (self._session.post, {'Content-type': 'application/json'}, 'data', json_dumps), PUT: (self._session.put, {'Content-type': 'application/json'}, 'data', json_dumps), DELETE: (self._session.delete, {'content-type': 'application/json'}, 'data', json_dumps), } requests_method, headers, payload, transform = lookup[type] agent = "{0}/{1} {2}/{3}".format('python-digitalocean', __version__, requests.__name__, requests.__version__) headers.update({'Authorization': 'Bearer ' + self.token, 'User-Agent': agent}) kwargs = {'headers': headers, payload: transform(params)} timeout = self.get_timeout() if timeout: kwargs['timeout'] = timeout # remove token from log headers_str = str(headers).replace(self.token.strip(), 'TOKEN') self._log.debug('%s %s %s:%s %s %s' % (type, url, payload, params, headers_str, timeout)) return requests_method(url, **kwargs)
[ "This", "method", "will", "perform", "the", "real", "request", "in", "this", "way", "we", "can", "customize", "only", "the", "output", "of", "the", "API", "call", "by", "using", "self", ".", "__call_api", "method", ".", "This", "method", "will", "return", "the", "request", "object", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/baseapi.py#L68-L116
[ "def", "__perform_request", "(", "self", ",", "url", ",", "type", "=", "GET", ",", "params", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "{", "}", "if", "not", "self", ".", "token", ":", "raise", "TokenError", "(", "\"No token provided. Please use a valid token\"", ")", "url", "=", "urlparse", ".", "urljoin", "(", "self", ".", "end_point", ",", "url", ")", "# lookup table to find out the appropriate requests method,", "# headers and payload type (json or query parameters)", "identity", "=", "lambda", "x", ":", "x", "json_dumps", "=", "lambda", "x", ":", "json", ".", "dumps", "(", "x", ")", "lookup", "=", "{", "GET", ":", "(", "self", ".", "_session", ".", "get", ",", "{", "}", ",", "'params'", ",", "identity", ")", ",", "POST", ":", "(", "self", ".", "_session", ".", "post", ",", "{", "'Content-type'", ":", "'application/json'", "}", ",", "'data'", ",", "json_dumps", ")", ",", "PUT", ":", "(", "self", ".", "_session", ".", "put", ",", "{", "'Content-type'", ":", "'application/json'", "}", ",", "'data'", ",", "json_dumps", ")", ",", "DELETE", ":", "(", "self", ".", "_session", ".", "delete", ",", "{", "'content-type'", ":", "'application/json'", "}", ",", "'data'", ",", "json_dumps", ")", ",", "}", "requests_method", ",", "headers", ",", "payload", ",", "transform", "=", "lookup", "[", "type", "]", "agent", "=", "\"{0}/{1} {2}/{3}\"", ".", "format", "(", "'python-digitalocean'", ",", "__version__", ",", "requests", ".", "__name__", ",", "requests", ".", "__version__", ")", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'Bearer '", "+", "self", ".", "token", ",", "'User-Agent'", ":", "agent", "}", ")", "kwargs", "=", "{", "'headers'", ":", "headers", ",", "payload", ":", "transform", "(", "params", ")", "}", "timeout", "=", "self", ".", "get_timeout", "(", ")", "if", "timeout", ":", "kwargs", "[", "'timeout'", "]", "=", "timeout", "# remove token from log", "headers_str", "=", "str", "(", "headers", ")", ".", "replace", 
"(", "self", ".", "token", ".", "strip", "(", ")", ",", "'TOKEN'", ")", "self", ".", "_log", ".", "debug", "(", "'%s %s %s:%s %s %s'", "%", "(", "type", ",", "url", ",", "payload", ",", "params", ",", "headers_str", ",", "timeout", ")", ")", "return", "requests_method", "(", "url", ",", "*", "*", "kwargs", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
BaseAPI.__deal_with_pagination
Perform multiple calls in order to have a full list of elements when the API are "paginated". (content list is divided in more than one page)
digitalocean/baseapi.py
def __deal_with_pagination(self, url, method, params, data): """ Perform multiple calls in order to have a full list of elements when the API are "paginated". (content list is divided in more than one page) """ all_data = data while data.get("links", {}).get("pages", {}).get("next"): url, query = data["links"]["pages"]["next"].split("?", 1) # Merge the query parameters for key, value in urlparse.parse_qs(query).items(): params[key] = value data = self.__perform_request(url, method, params).json() # Merge the dictionaries for key, value in data.items(): if isinstance(value, list) and key in all_data: all_data[key] += value else: all_data[key] = value return all_data
def __deal_with_pagination(self, url, method, params, data): """ Perform multiple calls in order to have a full list of elements when the API are "paginated". (content list is divided in more than one page) """ all_data = data while data.get("links", {}).get("pages", {}).get("next"): url, query = data["links"]["pages"]["next"].split("?", 1) # Merge the query parameters for key, value in urlparse.parse_qs(query).items(): params[key] = value data = self.__perform_request(url, method, params).json() # Merge the dictionaries for key, value in data.items(): if isinstance(value, list) and key in all_data: all_data[key] += value else: all_data[key] = value return all_data
[ "Perform", "multiple", "calls", "in", "order", "to", "have", "a", "full", "list", "of", "elements", "when", "the", "API", "are", "paginated", ".", "(", "content", "list", "is", "divided", "in", "more", "than", "one", "page", ")" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/baseapi.py#L118-L141
[ "def", "__deal_with_pagination", "(", "self", ",", "url", ",", "method", ",", "params", ",", "data", ")", ":", "all_data", "=", "data", "while", "data", ".", "get", "(", "\"links\"", ",", "{", "}", ")", ".", "get", "(", "\"pages\"", ",", "{", "}", ")", ".", "get", "(", "\"next\"", ")", ":", "url", ",", "query", "=", "data", "[", "\"links\"", "]", "[", "\"pages\"", "]", "[", "\"next\"", "]", ".", "split", "(", "\"?\"", ",", "1", ")", "# Merge the query parameters", "for", "key", ",", "value", "in", "urlparse", ".", "parse_qs", "(", "query", ")", ".", "items", "(", ")", ":", "params", "[", "key", "]", "=", "value", "data", "=", "self", ".", "__perform_request", "(", "url", ",", "method", ",", "params", ")", ".", "json", "(", ")", "# Merge the dictionaries", "for", "key", ",", "value", "in", "data", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", "and", "key", "in", "all_data", ":", "all_data", "[", "key", "]", "+=", "value", "else", ":", "all_data", "[", "key", "]", "=", "value", "return", "all_data" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
BaseAPI.get_timeout
Checks if any timeout for the requests to DigitalOcean is required. To set a timeout, use the REQUEST_TIMEOUT_ENV_VAR environment variable.
digitalocean/baseapi.py
def get_timeout(self): """ Checks if any timeout for the requests to DigitalOcean is required. To set a timeout, use the REQUEST_TIMEOUT_ENV_VAR environment variable. """ timeout_str = os.environ.get(REQUEST_TIMEOUT_ENV_VAR) if timeout_str: try: return float(timeout_str) except: self._log.error('Failed parsing the request read timeout of ' '"%s". Please use a valid float number!' % timeout_str) return None
def get_timeout(self): """ Checks if any timeout for the requests to DigitalOcean is required. To set a timeout, use the REQUEST_TIMEOUT_ENV_VAR environment variable. """ timeout_str = os.environ.get(REQUEST_TIMEOUT_ENV_VAR) if timeout_str: try: return float(timeout_str) except: self._log.error('Failed parsing the request read timeout of ' '"%s". Please use a valid float number!' % timeout_str) return None
[ "Checks", "if", "any", "timeout", "for", "the", "requests", "to", "DigitalOcean", "is", "required", ".", "To", "set", "a", "timeout", "use", "the", "REQUEST_TIMEOUT_ENV_VAR", "environment", "variable", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/baseapi.py#L151-L165
[ "def", "get_timeout", "(", "self", ")", ":", "timeout_str", "=", "os", ".", "environ", ".", "get", "(", "REQUEST_TIMEOUT_ENV_VAR", ")", "if", "timeout_str", ":", "try", ":", "return", "float", "(", "timeout_str", ")", "except", ":", "self", ".", "_log", ".", "error", "(", "'Failed parsing the request read timeout of '", "'\"%s\". Please use a valid float number!'", "%", "timeout_str", ")", "return", "None" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
BaseAPI.get_data
This method is a basic implementation of __call_api that checks errors too. In case of success the method will return True or the content of the response to the request. Pagination is automatically detected and handled accordingly
digitalocean/baseapi.py
def get_data(self, url, type=GET, params=None): """ This method is a basic implementation of __call_api that checks errors too. In case of success the method will return True or the content of the response to the request. Pagination is automatically detected and handled accordingly """ if params is None: params = dict() # If per_page is not set, make sure it has a sane default if type is GET: params.setdefault("per_page", 200) req = self.__perform_request(url, type, params) if req.status_code == 204: return True if req.status_code == 404: raise NotFoundError() try: data = req.json() except ValueError as e: raise JSONReadError( 'Read failed from DigitalOcean: %s' % str(e) ) if not req.ok: msg = [data[m] for m in ("id", "message") if m in data][1] raise DataReadError(msg) # init request limits self.__init_ratelimit(req.headers) # If there are more elements available (total) than the elements per # page, try to deal with pagination. Note: Breaking the logic on # multiple pages, pages = data.get("links", {}).get("pages", {}) if pages.get("next") and "page" not in params: return self.__deal_with_pagination(url, type, params, data) else: return data
def get_data(self, url, type=GET, params=None): """ This method is a basic implementation of __call_api that checks errors too. In case of success the method will return True or the content of the response to the request. Pagination is automatically detected and handled accordingly """ if params is None: params = dict() # If per_page is not set, make sure it has a sane default if type is GET: params.setdefault("per_page", 200) req = self.__perform_request(url, type, params) if req.status_code == 204: return True if req.status_code == 404: raise NotFoundError() try: data = req.json() except ValueError as e: raise JSONReadError( 'Read failed from DigitalOcean: %s' % str(e) ) if not req.ok: msg = [data[m] for m in ("id", "message") if m in data][1] raise DataReadError(msg) # init request limits self.__init_ratelimit(req.headers) # If there are more elements available (total) than the elements per # page, try to deal with pagination. Note: Breaking the logic on # multiple pages, pages = data.get("links", {}).get("pages", {}) if pages.get("next") and "page" not in params: return self.__deal_with_pagination(url, type, params, data) else: return data
[ "This", "method", "is", "a", "basic", "implementation", "of", "__call_api", "that", "checks", "errors", "too", ".", "In", "case", "of", "success", "the", "method", "will", "return", "True", "or", "the", "content", "of", "the", "response", "to", "the", "request", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/baseapi.py#L167-L210
[ "def", "get_data", "(", "self", ",", "url", ",", "type", "=", "GET", ",", "params", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "dict", "(", ")", "# If per_page is not set, make sure it has a sane default", "if", "type", "is", "GET", ":", "params", ".", "setdefault", "(", "\"per_page\"", ",", "200", ")", "req", "=", "self", ".", "__perform_request", "(", "url", ",", "type", ",", "params", ")", "if", "req", ".", "status_code", "==", "204", ":", "return", "True", "if", "req", ".", "status_code", "==", "404", ":", "raise", "NotFoundError", "(", ")", "try", ":", "data", "=", "req", ".", "json", "(", ")", "except", "ValueError", "as", "e", ":", "raise", "JSONReadError", "(", "'Read failed from DigitalOcean: %s'", "%", "str", "(", "e", ")", ")", "if", "not", "req", ".", "ok", ":", "msg", "=", "[", "data", "[", "m", "]", "for", "m", "in", "(", "\"id\"", ",", "\"message\"", ")", "if", "m", "in", "data", "]", "[", "1", "]", "raise", "DataReadError", "(", "msg", ")", "# init request limits", "self", ".", "__init_ratelimit", "(", "req", ".", "headers", ")", "# If there are more elements available (total) than the elements per", "# page, try to deal with pagination. Note: Breaking the logic on", "# multiple pages,", "pages", "=", "data", ".", "get", "(", "\"links\"", ",", "{", "}", ")", ".", "get", "(", "\"pages\"", ",", "{", "}", ")", "if", "pages", ".", "get", "(", "\"next\"", ")", "and", "\"page\"", "not", "in", "params", ":", "return", "self", ".", "__deal_with_pagination", "(", "url", ",", "type", ",", "params", ",", "data", ")", "else", ":", "return", "data" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.get_object
Class method that will return an Volume object by ID.
digitalocean/Volume.py
def get_object(cls, api_token, volume_id): """ Class method that will return an Volume object by ID. """ volume = cls(token=api_token, id=volume_id) volume.load() return volume
def get_object(cls, api_token, volume_id): """ Class method that will return an Volume object by ID. """ volume = cls(token=api_token, id=volume_id) volume.load() return volume
[ "Class", "method", "that", "will", "return", "an", "Volume", "object", "by", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L21-L27
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "volume_id", ")", ":", "volume", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "volume_id", ")", "volume", ".", "load", "(", ")", "return", "volume" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.create_from_snapshot
Creates a Block Storage volume Note: Every argument and parameter given to this method will be assigned to the object. Args: name: string - a name for the volume snapshot_id: string - unique identifier for the volume snapshot size_gigabytes: int - size of the Block Storage volume in GiB filesystem_type: string, optional - name of the filesystem type the volume will be formated with ('ext4' or 'xfs') filesystem_label: string, optional - the label to be applied to the filesystem, only used in conjunction with filesystem_type Optional Args: description: string - text field to describe a volume
digitalocean/Volume.py
def create_from_snapshot(self, *args, **kwargs): """ Creates a Block Storage volume Note: Every argument and parameter given to this method will be assigned to the object. Args: name: string - a name for the volume snapshot_id: string - unique identifier for the volume snapshot size_gigabytes: int - size of the Block Storage volume in GiB filesystem_type: string, optional - name of the filesystem type the volume will be formated with ('ext4' or 'xfs') filesystem_label: string, optional - the label to be applied to the filesystem, only used in conjunction with filesystem_type Optional Args: description: string - text field to describe a volume """ data = self.get_data('volumes/', type=POST, params={'name': self.name, 'snapshot_id': self.snapshot_id, 'region': self.region, 'size_gigabytes': self.size_gigabytes, 'description': self.description, 'filesystem_type': self.filesystem_type, 'filesystem_label': self.filesystem_label }) if data: self.id = data['volume']['id'] self.created_at = data['volume']['created_at'] return self
def create_from_snapshot(self, *args, **kwargs): """ Creates a Block Storage volume Note: Every argument and parameter given to this method will be assigned to the object. Args: name: string - a name for the volume snapshot_id: string - unique identifier for the volume snapshot size_gigabytes: int - size of the Block Storage volume in GiB filesystem_type: string, optional - name of the filesystem type the volume will be formated with ('ext4' or 'xfs') filesystem_label: string, optional - the label to be applied to the filesystem, only used in conjunction with filesystem_type Optional Args: description: string - text field to describe a volume """ data = self.get_data('volumes/', type=POST, params={'name': self.name, 'snapshot_id': self.snapshot_id, 'region': self.region, 'size_gigabytes': self.size_gigabytes, 'description': self.description, 'filesystem_type': self.filesystem_type, 'filesystem_label': self.filesystem_label }) if data: self.id = data['volume']['id'] self.created_at = data['volume']['created_at'] return self
[ "Creates", "a", "Block", "Storage", "volume" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L74-L108
[ "def", "create_from_snapshot", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "get_data", "(", "'volumes/'", ",", "type", "=", "POST", ",", "params", "=", "{", "'name'", ":", "self", ".", "name", ",", "'snapshot_id'", ":", "self", ".", "snapshot_id", ",", "'region'", ":", "self", ".", "region", ",", "'size_gigabytes'", ":", "self", ".", "size_gigabytes", ",", "'description'", ":", "self", ".", "description", ",", "'filesystem_type'", ":", "self", ".", "filesystem_type", ",", "'filesystem_label'", ":", "self", ".", "filesystem_label", "}", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'volume'", "]", "[", "'id'", "]", "self", ".", "created_at", "=", "data", "[", "'volume'", "]", "[", "'created_at'", "]", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.attach
Attach a Volume to a Droplet. Args: droplet_id: int - droplet id region: string - slug identifier for the region
digitalocean/Volume.py
def attach(self, droplet_id, region): """ Attach a Volume to a Droplet. Args: droplet_id: int - droplet id region: string - slug identifier for the region """ return self.get_data( "volumes/%s/actions/" % self.id, type=POST, params={"type": "attach", "droplet_id": droplet_id, "region": region} )
def attach(self, droplet_id, region): """ Attach a Volume to a Droplet. Args: droplet_id: int - droplet id region: string - slug identifier for the region """ return self.get_data( "volumes/%s/actions/" % self.id, type=POST, params={"type": "attach", "droplet_id": droplet_id, "region": region} )
[ "Attach", "a", "Volume", "to", "a", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L116-L130
[ "def", "attach", "(", "self", ",", "droplet_id", ",", "region", ")", ":", "return", "self", ".", "get_data", "(", "\"volumes/%s/actions/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"type\"", ":", "\"attach\"", ",", "\"droplet_id\"", ":", "droplet_id", ",", "\"region\"", ":", "region", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.resize
Detach a Volume to a Droplet. Args: size_gigabytes: int - size of the Block Storage volume in GiB region: string - slug identifier for the region
digitalocean/Volume.py
def resize(self, size_gigabytes, region): """ Detach a Volume to a Droplet. Args: size_gigabytes: int - size of the Block Storage volume in GiB region: string - slug identifier for the region """ return self.get_data( "volumes/%s/actions/" % self.id, type=POST, params={"type": "resize", "size_gigabytes": size_gigabytes, "region": region} )
def resize(self, size_gigabytes, region): """ Detach a Volume to a Droplet. Args: size_gigabytes: int - size of the Block Storage volume in GiB region: string - slug identifier for the region """ return self.get_data( "volumes/%s/actions/" % self.id, type=POST, params={"type": "resize", "size_gigabytes": size_gigabytes, "region": region} )
[ "Detach", "a", "Volume", "to", "a", "Droplet", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L148-L162
[ "def", "resize", "(", "self", ",", "size_gigabytes", ",", "region", ")", ":", "return", "self", ".", "get_data", "(", "\"volumes/%s/actions/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"type\"", ":", "\"resize\"", ",", "\"size_gigabytes\"", ":", "size_gigabytes", ",", "\"region\"", ":", "region", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.snapshot
Create a snapshot of the volume. Args: name: string - a human-readable name for the snapshot
digitalocean/Volume.py
def snapshot(self, name): """ Create a snapshot of the volume. Args: name: string - a human-readable name for the snapshot """ return self.get_data( "volumes/%s/snapshots/" % self.id, type=POST, params={"name": name} )
def snapshot(self, name): """ Create a snapshot of the volume. Args: name: string - a human-readable name for the snapshot """ return self.get_data( "volumes/%s/snapshots/" % self.id, type=POST, params={"name": name} )
[ "Create", "a", "snapshot", "of", "the", "volume", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L164-L175
[ "def", "snapshot", "(", "self", ",", "name", ")", ":", "return", "self", ".", "get_data", "(", "\"volumes/%s/snapshots/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"name\"", ":", "name", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Volume.get_snapshots
Retrieve the list of snapshots that have been created from a volume. Args:
digitalocean/Volume.py
def get_snapshots(self): """ Retrieve the list of snapshots that have been created from a volume. Args: """ data = self.get_data("volumes/%s/snapshots/" % self.id) snapshots = list() for jsond in data[u'snapshots']: snapshot = Snapshot(**jsond) snapshot.token = self.token snapshots.append(snapshot) return snapshots
def get_snapshots(self): """ Retrieve the list of snapshots that have been created from a volume. Args: """ data = self.get_data("volumes/%s/snapshots/" % self.id) snapshots = list() for jsond in data[u'snapshots']: snapshot = Snapshot(**jsond) snapshot.token = self.token snapshots.append(snapshot) return snapshots
[ "Retrieve", "the", "list", "of", "snapshots", "that", "have", "been", "created", "from", "a", "volume", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Volume.py#L177-L190
[ "def", "get_snapshots", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"volumes/%s/snapshots/\"", "%", "self", ".", "id", ")", "snapshots", "=", "list", "(", ")", "for", "jsond", "in", "data", "[", "u'snapshots'", "]", ":", "snapshot", "=", "Snapshot", "(", "*", "*", "jsond", ")", "snapshot", ".", "token", "=", "self", ".", "token", "snapshots", ".", "append", "(", "snapshot", ")", "return", "snapshots" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Certificate.get_object
Class method that will return a Certificate object by its ID.
digitalocean/Certificate.py
def get_object(cls, api_token, cert_id): """ Class method that will return a Certificate object by its ID. """ certificate = cls(token=api_token, id=cert_id) certificate.load() return certificate
def get_object(cls, api_token, cert_id): """ Class method that will return a Certificate object by its ID. """ certificate = cls(token=api_token, id=cert_id) certificate.load() return certificate
[ "Class", "method", "that", "will", "return", "a", "Certificate", "object", "by", "its", "ID", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Certificate.py#L61-L67
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "cert_id", ")", ":", "certificate", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "cert_id", ")", "certificate", ".", "load", "(", ")", "return", "certificate" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Certificate.load
Load the Certificate object from DigitalOcean. Requires self.id to be set.
digitalocean/Certificate.py
def load(self): """ Load the Certificate object from DigitalOcean. Requires self.id to be set. """ data = self.get_data("certificates/%s" % self.id) certificate = data["certificate"] for attr in certificate.keys(): setattr(self, attr, certificate[attr]) return self
def load(self): """ Load the Certificate object from DigitalOcean. Requires self.id to be set. """ data = self.get_data("certificates/%s" % self.id) certificate = data["certificate"] for attr in certificate.keys(): setattr(self, attr, certificate[attr]) return self
[ "Load", "the", "Certificate", "object", "from", "DigitalOcean", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Certificate.py#L69-L81
[ "def", "load", "(", "self", ")", ":", "data", "=", "self", ".", "get_data", "(", "\"certificates/%s\"", "%", "self", ".", "id", ")", "certificate", "=", "data", "[", "\"certificate\"", "]", "for", "attr", "in", "certificate", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "certificate", "[", "attr", "]", ")", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Certificate.create
Create the Certificate
digitalocean/Certificate.py
def create(self): """ Create the Certificate """ params = { "name": self.name, "type": self.type, "dns_names": self.dns_names, "private_key": self.private_key, "leaf_certificate": self.leaf_certificate, "certificate_chain": self.certificate_chain } data = self.get_data("certificates/", type=POST, params=params) if data: self.id = data['certificate']['id'] self.not_after = data['certificate']['not_after'] self.sha1_fingerprint = data['certificate']['sha1_fingerprint'] self.created_at = data['certificate']['created_at'] self.type = data['certificate']['type'] self.dns_names = data['certificate']['dns_names'] self.state = data['certificate']['state'] return self
def create(self): """ Create the Certificate """ params = { "name": self.name, "type": self.type, "dns_names": self.dns_names, "private_key": self.private_key, "leaf_certificate": self.leaf_certificate, "certificate_chain": self.certificate_chain } data = self.get_data("certificates/", type=POST, params=params) if data: self.id = data['certificate']['id'] self.not_after = data['certificate']['not_after'] self.sha1_fingerprint = data['certificate']['sha1_fingerprint'] self.created_at = data['certificate']['created_at'] self.type = data['certificate']['type'] self.dns_names = data['certificate']['dns_names'] self.state = data['certificate']['state'] return self
[ "Create", "the", "Certificate" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Certificate.py#L83-L107
[ "def", "create", "(", "self", ")", ":", "params", "=", "{", "\"name\"", ":", "self", ".", "name", ",", "\"type\"", ":", "self", ".", "type", ",", "\"dns_names\"", ":", "self", ".", "dns_names", ",", "\"private_key\"", ":", "self", ".", "private_key", ",", "\"leaf_certificate\"", ":", "self", ".", "leaf_certificate", ",", "\"certificate_chain\"", ":", "self", ".", "certificate_chain", "}", "data", "=", "self", ".", "get_data", "(", "\"certificates/\"", ",", "type", "=", "POST", ",", "params", "=", "params", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'certificate'", "]", "[", "'id'", "]", "self", ".", "not_after", "=", "data", "[", "'certificate'", "]", "[", "'not_after'", "]", "self", ".", "sha1_fingerprint", "=", "data", "[", "'certificate'", "]", "[", "'sha1_fingerprint'", "]", "self", ".", "created_at", "=", "data", "[", "'certificate'", "]", "[", "'created_at'", "]", "self", ".", "type", "=", "data", "[", "'certificate'", "]", "[", "'type'", "]", "self", ".", "dns_names", "=", "data", "[", "'certificate'", "]", "[", "'dns_names'", "]", "self", ".", "state", "=", "data", "[", "'certificate'", "]", "[", "'state'", "]", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image.get_object
Class method that will return an Image object by ID or slug. This method is used to validate the type of the image. If it is a number, it will be considered as an Image ID, instead if it is a string, it will considered as slug.
digitalocean/Image.py
def get_object(cls, api_token, image_id_or_slug): """ Class method that will return an Image object by ID or slug. This method is used to validate the type of the image. If it is a number, it will be considered as an Image ID, instead if it is a string, it will considered as slug. """ if cls._is_string(image_id_or_slug): image = cls(token=api_token, slug=image_id_or_slug) image.load(use_slug=True) else: image = cls(token=api_token, id=image_id_or_slug) image.load() return image
def get_object(cls, api_token, image_id_or_slug): """ Class method that will return an Image object by ID or slug. This method is used to validate the type of the image. If it is a number, it will be considered as an Image ID, instead if it is a string, it will considered as slug. """ if cls._is_string(image_id_or_slug): image = cls(token=api_token, slug=image_id_or_slug) image.load(use_slug=True) else: image = cls(token=api_token, id=image_id_or_slug) image.load() return image
[ "Class", "method", "that", "will", "return", "an", "Image", "object", "by", "ID", "or", "slug", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L64-L78
[ "def", "get_object", "(", "cls", ",", "api_token", ",", "image_id_or_slug", ")", ":", "if", "cls", ".", "_is_string", "(", "image_id_or_slug", ")", ":", "image", "=", "cls", "(", "token", "=", "api_token", ",", "slug", "=", "image_id_or_slug", ")", "image", ".", "load", "(", "use_slug", "=", "True", ")", "else", ":", "image", "=", "cls", "(", "token", "=", "api_token", ",", "id", "=", "image_id_or_slug", ")", "image", ".", "load", "(", ")", "return", "image" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image._is_string
Checks if the value provided is a string (True) or not integer (False) or something else (None).
digitalocean/Image.py
def _is_string(value): """ Checks if the value provided is a string (True) or not integer (False) or something else (None). """ if type(value) in [type(u''), type('')]: return True elif type(value) in [int, type(2 ** 64)]: return False else: return None
def _is_string(value): """ Checks if the value provided is a string (True) or not integer (False) or something else (None). """ if type(value) in [type(u''), type('')]: return True elif type(value) in [int, type(2 ** 64)]: return False else: return None
[ "Checks", "if", "the", "value", "provided", "is", "a", "string", "(", "True", ")", "or", "not", "integer", "(", "False", ")", "or", "something", "else", "(", "None", ")", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L81-L91
[ "def", "_is_string", "(", "value", ")", ":", "if", "type", "(", "value", ")", "in", "[", "type", "(", "u''", ")", ",", "type", "(", "''", ")", "]", ":", "return", "True", "elif", "type", "(", "value", ")", "in", "[", "int", ",", "type", "(", "2", "**", "64", ")", "]", ":", "return", "False", "else", ":", "return", "None" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image.create
Creates a new custom DigitalOcean Image from the Linux virtual machine image located at the provided `url`.
digitalocean/Image.py
def create(self): """ Creates a new custom DigitalOcean Image from the Linux virtual machine image located at the provided `url`. """ params = {'name': self.name, 'region': self.region, 'url': self.url, 'distribution': self.distribution, 'description': self.description, 'tags': self.tags} data = self.get_data('images', type=POST, params=params) if data: for attr in data['image'].keys(): setattr(self, attr, data['image'][attr]) return self
def create(self): """ Creates a new custom DigitalOcean Image from the Linux virtual machine image located at the provided `url`. """ params = {'name': self.name, 'region': self.region, 'url': self.url, 'distribution': self.distribution, 'description': self.description, 'tags': self.tags} data = self.get_data('images', type=POST, params=params) if data: for attr in data['image'].keys(): setattr(self, attr, data['image'][attr]) return self
[ "Creates", "a", "new", "custom", "DigitalOcean", "Image", "from", "the", "Linux", "virtual", "machine", "image", "located", "at", "the", "provided", "url", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L93-L111
[ "def", "create", "(", "self", ")", ":", "params", "=", "{", "'name'", ":", "self", ".", "name", ",", "'region'", ":", "self", ".", "region", ",", "'url'", ":", "self", ".", "url", ",", "'distribution'", ":", "self", ".", "distribution", ",", "'description'", ":", "self", ".", "description", ",", "'tags'", ":", "self", ".", "tags", "}", "data", "=", "self", ".", "get_data", "(", "'images'", ",", "type", "=", "POST", ",", "params", "=", "params", ")", "if", "data", ":", "for", "attr", "in", "data", "[", "'image'", "]", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "data", "[", "'image'", "]", "[", "attr", "]", ")", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image.load
Load slug. Loads by id, or by slug if id is not present or use slug is True.
digitalocean/Image.py
def load(self, use_slug=False): """ Load slug. Loads by id, or by slug if id is not present or use slug is True. """ identifier = None if use_slug or not self.id: identifier = self.slug else: identifier = self.id if not identifier: raise NotFoundError("One of self.id or self.slug must be set.") data = self.get_data("images/%s" % identifier) image_dict = data['image'] # Setting the attribute values for attr in image_dict.keys(): setattr(self, attr, image_dict[attr]) return self
def load(self, use_slug=False): """ Load slug. Loads by id, or by slug if id is not present or use slug is True. """ identifier = None if use_slug or not self.id: identifier = self.slug else: identifier = self.id if not identifier: raise NotFoundError("One of self.id or self.slug must be set.") data = self.get_data("images/%s" % identifier) image_dict = data['image'] # Setting the attribute values for attr in image_dict.keys(): setattr(self, attr, image_dict[attr]) return self
[ "Load", "slug", "." ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L113-L133
[ "def", "load", "(", "self", ",", "use_slug", "=", "False", ")", ":", "identifier", "=", "None", "if", "use_slug", "or", "not", "self", ".", "id", ":", "identifier", "=", "self", ".", "slug", "else", ":", "identifier", "=", "self", ".", "id", "if", "not", "identifier", ":", "raise", "NotFoundError", "(", "\"One of self.id or self.slug must be set.\"", ")", "data", "=", "self", ".", "get_data", "(", "\"images/%s\"", "%", "identifier", ")", "image_dict", "=", "data", "[", "'image'", "]", "# Setting the attribute values", "for", "attr", "in", "image_dict", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "image_dict", "[", "attr", "]", ")", "return", "self" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image.transfer
Transfer the image
digitalocean/Image.py
def transfer(self, new_region_slug): """ Transfer the image """ return self.get_data( "images/%s/actions/" % self.id, type=POST, params={"type": "transfer", "region": new_region_slug} )
def transfer(self, new_region_slug): """ Transfer the image """ return self.get_data( "images/%s/actions/" % self.id, type=POST, params={"type": "transfer", "region": new_region_slug} )
[ "Transfer", "the", "image" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L141-L149
[ "def", "transfer", "(", "self", ",", "new_region_slug", ")", ":", "return", "self", ".", "get_data", "(", "\"images/%s/actions/\"", "%", "self", ".", "id", ",", "type", "=", "POST", ",", "params", "=", "{", "\"type\"", ":", "\"transfer\"", ",", "\"region\"", ":", "new_region_slug", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
Image.rename
Rename an image
digitalocean/Image.py
def rename(self, new_name): """ Rename an image """ return self.get_data( "images/%s" % self.id, type=PUT, params={"name": new_name} )
def rename(self, new_name): """ Rename an image """ return self.get_data( "images/%s" % self.id, type=PUT, params={"name": new_name} )
[ "Rename", "an", "image" ]
koalalorenzo/python-digitalocean
python
https://github.com/koalalorenzo/python-digitalocean/blob/d0221b57856fb1e131cafecf99d826f7b07a947c/digitalocean/Image.py#L151-L159
[ "def", "rename", "(", "self", ",", "new_name", ")", ":", "return", "self", ".", "get_data", "(", "\"images/%s\"", "%", "self", ".", "id", ",", "type", "=", "PUT", ",", "params", "=", "{", "\"name\"", ":", "new_name", "}", ")" ]
d0221b57856fb1e131cafecf99d826f7b07a947c
valid
convert_conv
Convert convolution layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/convolution_layers.py
def convert_conv(params, w_name, scope_name, inputs, layers, weights, names): """ Convert convolution layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting convolution ...') if names == 'short': tf_name = 'C' + random_string(7) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) bias_name = '{0}.bias'.format(w_name) weights_name = '{0}.weight'.format(w_name) input_name = inputs[0] if len(weights[weights_name].numpy().shape) == 5: # 3D conv W = weights[weights_name].numpy().transpose(2, 3, 4, 1, 0) height, width, channels, n_layers, n_filters = W.shape if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False if params['pads'][0] > 0 or params['pads'][1] > 0: padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding3D( padding=(params['pads'][0], params['pads'][1], params['pads'][2]), name=padding_name ) layers[padding_name] = padding_layer(layers[input_name]) input_name = padding_name if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv3D( filters=n_filters, kernel_size=(channels, height, width), strides=(params['strides'][0], params['strides'][1], params['strides'][2]), padding='valid', weights=weights, use_bias=has_bias, activation=None, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name]) elif len(weights[weights_name].numpy().shape) == 4: # 2D conv if params['pads'][0] > 0 or params['pads'][1] > 0: padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding2D( padding=(params['pads'][0], params['pads'][1]), name=padding_name ) layers[padding_name] = padding_layer(layers[input_name]) input_name = 
padding_name W = weights[weights_name].numpy().transpose(2, 3, 1, 0) height, width, channels_per_group, out_channels = W.shape n_groups = params['group'] in_channels = channels_per_group * n_groups if n_groups == in_channels and n_groups != 1: if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False W = W.transpose(0, 1, 3, 2) if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.DepthwiseConv2D( kernel_size=(height, width), strides=(params['strides'][0], params['strides'][1]), padding='valid', use_bias=has_bias, activation=None, depth_multiplier=1, weights = weights, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros' ) layers[scope_name] = conv(layers[input_name]) elif n_groups != 1: # Example from https://kratzert.github.io/2017/02/24/finetuning-alexnet-with-tensorflow.html # # Split input and weights and convolve them separately # input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x) # weight_groups = tf.split(axis=3, num_or_size_splits=groups, value=weights) # output_groups = [convolve(i, k) for i, k in zip(input_groups, weight_groups)] # # Concat the convolved output together again # conv = tf.concat(axis=3, values=output_groups) def target_layer(x, groups=params['group'], stride_y=params['strides'][0], stride_x=params['strides'][1]): x = tf.transpose(x, [0, 2, 3, 1]) def convolve_lambda(i, k): return tf.nn.conv2d(i, k, strides=[1, stride_y, stride_x, 1], padding='VALID') input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x) weight_groups = tf.split(axis=3, num_or_size_splits=groups, value=W.transpose(0, 1, 2, 3)) output_groups = [convolve_lambda(i, k) for i, k in zip(input_groups, weight_groups)] layer = tf.concat(axis=3, values=output_groups) layer = tf.transpose(layer, [0, 3, 1, 2]) return layer lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[input_name]) else: if 
bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv2D( filters=out_channels, kernel_size=(height, width), strides=(params['strides'][0], params['strides'][1]), padding='valid', weights=weights, use_bias=has_bias, activation=None, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name]) else: # 1D conv W = weights[weights_name].numpy().transpose(2, 1, 0) width, channels, n_filters = W.shape n_groups = params['group'] if n_groups > 1: raise AssertionError('Cannot convert conv1d with groups != 1') if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding1D( padding=params['pads'][0], name=padding_name ) layers[padding_name] = padding_layer(layers[inputs[0]]) input_name = padding_name if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv1D( filters=channels, kernel_size=width, strides=params['strides'], padding='valid', weights=weights, use_bias=has_bias, activation=None, data_format='channels_first', dilation_rate=params['dilations'], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name])
def convert_conv(params, w_name, scope_name, inputs, layers, weights, names): """ Convert convolution layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting convolution ...') if names == 'short': tf_name = 'C' + random_string(7) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) bias_name = '{0}.bias'.format(w_name) weights_name = '{0}.weight'.format(w_name) input_name = inputs[0] if len(weights[weights_name].numpy().shape) == 5: # 3D conv W = weights[weights_name].numpy().transpose(2, 3, 4, 1, 0) height, width, channels, n_layers, n_filters = W.shape if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False if params['pads'][0] > 0 or params['pads'][1] > 0: padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding3D( padding=(params['pads'][0], params['pads'][1], params['pads'][2]), name=padding_name ) layers[padding_name] = padding_layer(layers[input_name]) input_name = padding_name if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv3D( filters=n_filters, kernel_size=(channels, height, width), strides=(params['strides'][0], params['strides'][1], params['strides'][2]), padding='valid', weights=weights, use_bias=has_bias, activation=None, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name]) elif len(weights[weights_name].numpy().shape) == 4: # 2D conv if params['pads'][0] > 0 or params['pads'][1] > 0: padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding2D( padding=(params['pads'][0], params['pads'][1]), name=padding_name ) layers[padding_name] = padding_layer(layers[input_name]) input_name = 
padding_name W = weights[weights_name].numpy().transpose(2, 3, 1, 0) height, width, channels_per_group, out_channels = W.shape n_groups = params['group'] in_channels = channels_per_group * n_groups if n_groups == in_channels and n_groups != 1: if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False W = W.transpose(0, 1, 3, 2) if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.DepthwiseConv2D( kernel_size=(height, width), strides=(params['strides'][0], params['strides'][1]), padding='valid', use_bias=has_bias, activation=None, depth_multiplier=1, weights = weights, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros' ) layers[scope_name] = conv(layers[input_name]) elif n_groups != 1: # Example from https://kratzert.github.io/2017/02/24/finetuning-alexnet-with-tensorflow.html # # Split input and weights and convolve them separately # input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x) # weight_groups = tf.split(axis=3, num_or_size_splits=groups, value=weights) # output_groups = [convolve(i, k) for i, k in zip(input_groups, weight_groups)] # # Concat the convolved output together again # conv = tf.concat(axis=3, values=output_groups) def target_layer(x, groups=params['group'], stride_y=params['strides'][0], stride_x=params['strides'][1]): x = tf.transpose(x, [0, 2, 3, 1]) def convolve_lambda(i, k): return tf.nn.conv2d(i, k, strides=[1, stride_y, stride_x, 1], padding='VALID') input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x) weight_groups = tf.split(axis=3, num_or_size_splits=groups, value=W.transpose(0, 1, 2, 3)) output_groups = [convolve_lambda(i, k) for i, k in zip(input_groups, weight_groups)] layer = tf.concat(axis=3, values=output_groups) layer = tf.transpose(layer, [0, 3, 1, 2]) return layer lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[input_name]) else: if 
bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv2D( filters=out_channels, kernel_size=(height, width), strides=(params['strides'][0], params['strides'][1]), padding='valid', weights=weights, use_bias=has_bias, activation=None, dilation_rate=params['dilations'][0], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name]) else: # 1D conv W = weights[weights_name].numpy().transpose(2, 1, 0) width, channels, n_filters = W.shape n_groups = params['group'] if n_groups > 1: raise AssertionError('Cannot convert conv1d with groups != 1') if bias_name in weights: biases = weights[bias_name].numpy() has_bias = True else: biases = None has_bias = False padding_name = tf_name + '_pad' padding_layer = keras.layers.ZeroPadding1D( padding=params['pads'][0], name=padding_name ) layers[padding_name] = padding_layer(layers[inputs[0]]) input_name = padding_name if has_bias: weights = [W, biases] else: weights = [W] conv = keras.layers.Conv1D( filters=channels, kernel_size=width, strides=params['strides'], padding='valid', weights=weights, use_bias=has_bias, activation=None, data_format='channels_first', dilation_rate=params['dilations'], bias_initializer='zeros', kernel_initializer='zeros', name=tf_name ) layers[scope_name] = conv(layers[input_name])
[ "Convert", "convolution", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/convolution_layers.py#L9-L214
[ "def", "convert_conv", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting convolution ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'C'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "bias_name", "=", "'{0}.bias'", ".", "format", "(", "w_name", ")", "weights_name", "=", "'{0}.weight'", ".", "format", "(", "w_name", ")", "input_name", "=", "inputs", "[", "0", "]", "if", "len", "(", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "shape", ")", "==", "5", ":", "# 3D conv", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", "2", ",", "3", ",", "4", ",", "1", ",", "0", ")", "height", ",", "width", ",", "channels", ",", "n_layers", ",", "n_filters", "=", "W", ".", "shape", "if", "bias_name", "in", "weights", ":", "biases", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "has_bias", "=", "True", "else", ":", "biases", "=", "None", "has_bias", "=", "False", "if", "params", "[", "'pads'", "]", "[", "0", "]", ">", "0", "or", "params", "[", "'pads'", "]", "[", "1", "]", ">", "0", ":", "padding_name", "=", "tf_name", "+", "'_pad'", "padding_layer", "=", "keras", ".", "layers", ".", "ZeroPadding3D", "(", "padding", "=", "(", "params", "[", "'pads'", "]", "[", "0", "]", ",", "params", "[", "'pads'", "]", "[", "1", "]", ",", "params", "[", "'pads'", "]", "[", "2", "]", ")", ",", "name", "=", "padding_name", ")", "layers", "[", "padding_name", "]", "=", "padding_layer", "(", "layers", "[", "input_name", "]", ")", "input_name", "=", "padding_name", "if", "has_bias", ":", "weights", "=", "[", "W", ",", "biases", "]", "else", ":", "weights", "=", "[", "W", "]", "conv", "=", "keras", ".", "layers", ".", "Conv3D", "(", "filters", "=", "n_filters", 
",", "kernel_size", "=", "(", "channels", ",", "height", ",", "width", ")", ",", "strides", "=", "(", "params", "[", "'strides'", "]", "[", "0", "]", ",", "params", "[", "'strides'", "]", "[", "1", "]", ",", "params", "[", "'strides'", "]", "[", "2", "]", ")", ",", "padding", "=", "'valid'", ",", "weights", "=", "weights", ",", "use_bias", "=", "has_bias", ",", "activation", "=", "None", ",", "dilation_rate", "=", "params", "[", "'dilations'", "]", "[", "0", "]", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "conv", "(", "layers", "[", "input_name", "]", ")", "elif", "len", "(", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "shape", ")", "==", "4", ":", "# 2D conv", "if", "params", "[", "'pads'", "]", "[", "0", "]", ">", "0", "or", "params", "[", "'pads'", "]", "[", "1", "]", ">", "0", ":", "padding_name", "=", "tf_name", "+", "'_pad'", "padding_layer", "=", "keras", ".", "layers", ".", "ZeroPadding2D", "(", "padding", "=", "(", "params", "[", "'pads'", "]", "[", "0", "]", ",", "params", "[", "'pads'", "]", "[", "1", "]", ")", ",", "name", "=", "padding_name", ")", "layers", "[", "padding_name", "]", "=", "padding_layer", "(", "layers", "[", "input_name", "]", ")", "input_name", "=", "padding_name", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", "2", ",", "3", ",", "1", ",", "0", ")", "height", ",", "width", ",", "channels_per_group", ",", "out_channels", "=", "W", ".", "shape", "n_groups", "=", "params", "[", "'group'", "]", "in_channels", "=", "channels_per_group", "*", "n_groups", "if", "n_groups", "==", "in_channels", "and", "n_groups", "!=", "1", ":", "if", "bias_name", "in", "weights", ":", "biases", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "has_bias", "=", "True", "else", ":", "biases", "=", "None", "has_bias", "=", "False", "W", "=", "W", ".", "transpose", "(", 
"0", ",", "1", ",", "3", ",", "2", ")", "if", "has_bias", ":", "weights", "=", "[", "W", ",", "biases", "]", "else", ":", "weights", "=", "[", "W", "]", "conv", "=", "keras", ".", "layers", ".", "DepthwiseConv2D", "(", "kernel_size", "=", "(", "height", ",", "width", ")", ",", "strides", "=", "(", "params", "[", "'strides'", "]", "[", "0", "]", ",", "params", "[", "'strides'", "]", "[", "1", "]", ")", ",", "padding", "=", "'valid'", ",", "use_bias", "=", "has_bias", ",", "activation", "=", "None", ",", "depth_multiplier", "=", "1", ",", "weights", "=", "weights", ",", "dilation_rate", "=", "params", "[", "'dilations'", "]", "[", "0", "]", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ")", "layers", "[", "scope_name", "]", "=", "conv", "(", "layers", "[", "input_name", "]", ")", "elif", "n_groups", "!=", "1", ":", "# Example from https://kratzert.github.io/2017/02/24/finetuning-alexnet-with-tensorflow.html", "# # Split input and weights and convolve them separately", "# input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x)", "# weight_groups = tf.split(axis=3, num_or_size_splits=groups, value=weights)", "# output_groups = [convolve(i, k) for i, k in zip(input_groups, weight_groups)]", "# # Concat the convolved output together again", "# conv = tf.concat(axis=3, values=output_groups)", "def", "target_layer", "(", "x", ",", "groups", "=", "params", "[", "'group'", "]", ",", "stride_y", "=", "params", "[", "'strides'", "]", "[", "0", "]", ",", "stride_x", "=", "params", "[", "'strides'", "]", "[", "1", "]", ")", ":", "x", "=", "tf", ".", "transpose", "(", "x", ",", "[", "0", ",", "2", ",", "3", ",", "1", "]", ")", "def", "convolve_lambda", "(", "i", ",", "k", ")", ":", "return", "tf", ".", "nn", ".", "conv2d", "(", "i", ",", "k", ",", "strides", "=", "[", "1", ",", "stride_y", ",", "stride_x", ",", "1", "]", ",", "padding", "=", "'VALID'", ")", "input_groups", "=", "tf", ".", "split", "(", "axis", "=", "3", ",", 
"num_or_size_splits", "=", "groups", ",", "value", "=", "x", ")", "weight_groups", "=", "tf", ".", "split", "(", "axis", "=", "3", ",", "num_or_size_splits", "=", "groups", ",", "value", "=", "W", ".", "transpose", "(", "0", ",", "1", ",", "2", ",", "3", ")", ")", "output_groups", "=", "[", "convolve_lambda", "(", "i", ",", "k", ")", "for", "i", ",", "k", "in", "zip", "(", "input_groups", ",", "weight_groups", ")", "]", "layer", "=", "tf", ".", "concat", "(", "axis", "=", "3", ",", "values", "=", "output_groups", ")", "layer", "=", "tf", ".", "transpose", "(", "layer", ",", "[", "0", ",", "3", ",", "1", ",", "2", "]", ")", "return", "layer", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "input_name", "]", ")", "else", ":", "if", "bias_name", "in", "weights", ":", "biases", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "has_bias", "=", "True", "else", ":", "biases", "=", "None", "has_bias", "=", "False", "if", "has_bias", ":", "weights", "=", "[", "W", ",", "biases", "]", "else", ":", "weights", "=", "[", "W", "]", "conv", "=", "keras", ".", "layers", ".", "Conv2D", "(", "filters", "=", "out_channels", ",", "kernel_size", "=", "(", "height", ",", "width", ")", ",", "strides", "=", "(", "params", "[", "'strides'", "]", "[", "0", "]", ",", "params", "[", "'strides'", "]", "[", "1", "]", ")", ",", "padding", "=", "'valid'", ",", "weights", "=", "weights", ",", "use_bias", "=", "has_bias", ",", "activation", "=", "None", ",", "dilation_rate", "=", "params", "[", "'dilations'", "]", "[", "0", "]", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "conv", "(", "layers", "[", "input_name", "]", ")", "else", ":", "# 1D conv", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", "2", ",", "1", ",", 
"0", ")", "width", ",", "channels", ",", "n_filters", "=", "W", ".", "shape", "n_groups", "=", "params", "[", "'group'", "]", "if", "n_groups", ">", "1", ":", "raise", "AssertionError", "(", "'Cannot convert conv1d with groups != 1'", ")", "if", "bias_name", "in", "weights", ":", "biases", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "has_bias", "=", "True", "else", ":", "biases", "=", "None", "has_bias", "=", "False", "padding_name", "=", "tf_name", "+", "'_pad'", "padding_layer", "=", "keras", ".", "layers", ".", "ZeroPadding1D", "(", "padding", "=", "params", "[", "'pads'", "]", "[", "0", "]", ",", "name", "=", "padding_name", ")", "layers", "[", "padding_name", "]", "=", "padding_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")", "input_name", "=", "padding_name", "if", "has_bias", ":", "weights", "=", "[", "W", ",", "biases", "]", "else", ":", "weights", "=", "[", "W", "]", "conv", "=", "keras", ".", "layers", ".", "Conv1D", "(", "filters", "=", "channels", ",", "kernel_size", "=", "width", ",", "strides", "=", "params", "[", "'strides'", "]", ",", "padding", "=", "'valid'", ",", "weights", "=", "weights", ",", "use_bias", "=", "has_bias", ",", "activation", "=", "None", ",", "data_format", "=", "'channels_first'", ",", "dilation_rate", "=", "params", "[", "'dilations'", "]", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "conv", "(", "layers", "[", "input_name", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_convtranspose
Convert transposed convolution layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/convolution_layers.py
def convert_convtranspose(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a transposed convolution layer to keras.layers.Conv2DTranspose.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers

    Raises:
        AssertionError: for grouped or dilated transposed convolutions,
            for non-4D weights, and for asymmetric output padding.
    """
    print('Converting transposed convolution ...')

    if names == 'short':
        tf_name = 'C' + random_string(7)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    bias_name = '{0}.bias'.format(w_name)
    weights_name = '{0}.weight'.format(w_name)

    if len(weights[weights_name].numpy().shape) == 4:
        # PyTorch stores ConvTranspose2d weights channels-first;
        # keras wants (height, width, out_filters, in_channels).
        W = weights[weights_name].numpy().transpose(2, 3, 1, 0)
        height, width, n_filters, channels = W.shape

        n_groups = params['group']
        if n_groups > 1:
            # Error text fixed: this branch handles ConvTranspose2d, not conv1d.
            raise AssertionError('Cannot convert conv2dtranspose with groups != 1')

        if params['dilations'][0] > 1:
            raise AssertionError('Cannot convert conv2dtranspose with dilation_rate != 1')

        if bias_name in weights:
            biases = weights[bias_name].numpy()
            has_bias = True
        else:
            biases = None
            has_bias = False

        input_name = inputs[0]

        # Use a distinct local name instead of shadowing the `weights`
        # state_dict argument (the original rebound `weights` here).
        if has_bias:
            layer_weights = [W, biases]
        else:
            layer_weights = [W]

        conv = keras.layers.Conv2DTranspose(
            filters=n_filters,
            kernel_size=(height, width),
            strides=(params['strides'][0], params['strides'][1]),
            padding='valid',
            output_padding=0,
            weights=layer_weights,
            use_bias=has_bias,
            activation=None,
            dilation_rate=params['dilations'][0],
            bias_initializer='zeros', kernel_initializer='zeros',
            name=tf_name
        )

        layers[scope_name] = conv(layers[input_name])

        # Magic ad-hoc.
        # See the Keras issue: https://github.com/keras-team/keras/issues/6777
        layers[scope_name].set_shape(layers[scope_name]._keras_shape)

        pads = params['pads']
        if pads[0] > 0:
            # Only symmetric padding can be emulated by cropping.
            assert(len(pads) == 2 or (pads[2] == pads[0] and pads[3] == pads[1]))

            crop = keras.layers.Cropping2D(
                pads[:2],
                name=tf_name + '_crop'
            )
            layers[scope_name] = crop(layers[scope_name])
    else:
        raise AssertionError('Layer is not supported for now')
[ "Convert", "transposed", "convolution", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/convolution_layers.py#L217-L297
[ "def", "convert_convtranspose", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting transposed convolution ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'C'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "bias_name", "=", "'{0}.bias'", ".", "format", "(", "w_name", ")", "weights_name", "=", "'{0}.weight'", ".", "format", "(", "w_name", ")", "if", "len", "(", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "shape", ")", "==", "4", ":", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", "2", ",", "3", ",", "1", ",", "0", ")", "height", ",", "width", ",", "n_filters", ",", "channels", "=", "W", ".", "shape", "n_groups", "=", "params", "[", "'group'", "]", "if", "n_groups", ">", "1", ":", "raise", "AssertionError", "(", "'Cannot convert conv1d with groups != 1'", ")", "if", "params", "[", "'dilations'", "]", "[", "0", "]", ">", "1", ":", "raise", "AssertionError", "(", "'Cannot convert conv1d with dilation_rate != 1'", ")", "if", "bias_name", "in", "weights", ":", "biases", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "has_bias", "=", "True", "else", ":", "biases", "=", "None", "has_bias", "=", "False", "input_name", "=", "inputs", "[", "0", "]", "if", "has_bias", ":", "weights", "=", "[", "W", ",", "biases", "]", "else", ":", "weights", "=", "[", "W", "]", "conv", "=", "keras", ".", "layers", ".", "Conv2DTranspose", "(", "filters", "=", "n_filters", ",", "kernel_size", "=", "(", "height", ",", "width", ")", ",", "strides", "=", "(", "params", "[", "'strides'", "]", "[", "0", "]", ",", "params", "[", "'strides'", "]", "[", "1", "]", ")", ",", "padding", "=", "'valid'", ",", "output_padding", "=", "0", ",", "weights", "=", 
"weights", ",", "use_bias", "=", "has_bias", ",", "activation", "=", "None", ",", "dilation_rate", "=", "params", "[", "'dilations'", "]", "[", "0", "]", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "conv", "(", "layers", "[", "input_name", "]", ")", "# Magic ad-hoc.", "# See the Keras issue: https://github.com/keras-team/keras/issues/6777", "layers", "[", "scope_name", "]", ".", "set_shape", "(", "layers", "[", "scope_name", "]", ".", "_keras_shape", ")", "pads", "=", "params", "[", "'pads'", "]", "if", "pads", "[", "0", "]", ">", "0", ":", "assert", "(", "len", "(", "pads", ")", "==", "2", "or", "(", "pads", "[", "2", "]", "==", "pads", "[", "0", "]", "and", "pads", "[", "3", "]", "==", "pads", "[", "1", "]", ")", ")", "crop", "=", "keras", ".", "layers", ".", "Cropping2D", "(", "pads", "[", ":", "2", "]", ",", "name", "=", "tf_name", "+", "'_crop'", ")", "layers", "[", "scope_name", "]", "=", "crop", "(", "layers", "[", "scope_name", "]", ")", "else", ":", "raise", "AssertionError", "(", "'Layer is not supported for now'", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_sum
Convert sum. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/operation_layers.py
def convert_sum(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a global sum (reduction over every axis) into a keras Lambda.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting Sum ...')

    def target_layer(x):
        # Imported lazily so the closure stays serializable by keras.
        import keras.backend as K
        return K.sum(x)

    layers[scope_name] = keras.layers.Lambda(target_layer)(layers[inputs[0]])
[ "Convert", "sum", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/operation_layers.py#L10-L32
[ "def", "convert_sum", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting Sum ...'", ")", "def", "target_layer", "(", "x", ")", ":", "import", "keras", ".", "backend", "as", "K", "return", "K", ".", "sum", "(", "x", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_reduce_sum
Convert reduce_sum layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/operation_layers.py
def convert_reduce_sum(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a reduce_sum layer (sum over the axes given in params) to a Lambda.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting reduce_sum ...')

    keep_dims = params['keepdims'] > 0
    sum_axes = params['axes']

    # Bind the reduction options as defaults so the closure is self-contained.
    def target_layer(x, keepdims=keep_dims, axis=sum_axes):
        import keras.backend as K
        return K.sum(x, keepdims=keepdims, axis=axis)

    layers[scope_name] = keras.layers.Lambda(target_layer)(layers[inputs[0]])
[ "Convert", "reduce_sum", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/operation_layers.py#L35-L58
[ "def", "convert_reduce_sum", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting reduce_sum ...'", ")", "keepdims", "=", "params", "[", "'keepdims'", "]", ">", "0", "axis", "=", "params", "[", "'axes'", "]", "def", "target_layer", "(", "x", ",", "keepdims", "=", "keepdims", ",", "axis", "=", "axis", ")", ":", "import", "keras", ".", "backend", "as", "K", "return", "K", ".", "sum", "(", "x", ",", "keepdims", "=", "keepdims", ",", "axis", "=", "axis", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_concat
Convert concatenation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/operation_layers.py
def convert_concat(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a concatenation node to keras.layers.Concatenate.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting concat ...')
    inbound = [layers[name] for name in inputs]

    # With a single input there is nothing to concatenate: pass it through.
    if len(inbound) == 1:
        layers[scope_name] = inbound[0]
        return

    if names == 'short':
        tf_name = 'CAT' + random_string(5)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    concat_layer = keras.layers.Concatenate(name=tf_name, axis=params['axis'])
    layers[scope_name] = concat_layer(inbound)
[ "Convert", "concatenation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/operation_layers.py#L60-L89
[ "def", "convert_concat", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting concat ...'", ")", "concat_nodes", "=", "[", "layers", "[", "i", "]", "for", "i", "in", "inputs", "]", "if", "len", "(", "concat_nodes", ")", "==", "1", ":", "# no-op", "layers", "[", "scope_name", "]", "=", "concat_nodes", "[", "0", "]", "return", "if", "names", "==", "'short'", ":", "tf_name", "=", "'CAT'", "+", "random_string", "(", "5", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "cat", "=", "keras", ".", "layers", ".", "Concatenate", "(", "name", "=", "tf_name", ",", "axis", "=", "params", "[", "'axis'", "]", ")", "layers", "[", "scope_name", "]", "=", "cat", "(", "concat_nodes", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_slice
Convert slice operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/operation_layers.py
def convert_slice(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a slice operation along a single axis to a keras Lambda.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers

    Raises:
        AssertionError: when slicing by multiple axes or an axis outside 0..3.
    """
    print('Converting slice ...')

    if len(params['axes']) > 1:
        raise AssertionError('Cannot convert slice by multiple dimensions')

    if params['axes'][0] not in [0, 1, 2, 3]:
        raise AssertionError('Slice by dimension more than 3 or less than 0 is not supported')

    def target_layer(x, axis=int(params['axes'][0]),
                     start=int(params['starts'][0]), end=int(params['ends'][0])):
        # Take every earlier axis whole, then [start:end] on the target axis;
        # equivalent to e.g. x[:, :, start:end] for axis == 2.
        selector = [slice(None)] * axis + [slice(start, end)]
        return x[tuple(selector)]

    lambda_layer = keras.layers.Lambda(target_layer)
    layers[scope_name] = lambda_layer(layers[inputs[0]])
[ "Convert", "slice", "operation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/operation_layers.py#L92-L124
[ "def", "convert_slice", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting slice ...'", ")", "if", "len", "(", "params", "[", "'axes'", "]", ")", ">", "1", ":", "raise", "AssertionError", "(", "'Cannot convert slice by multiple dimensions'", ")", "if", "params", "[", "'axes'", "]", "[", "0", "]", "not", "in", "[", "0", ",", "1", ",", "2", ",", "3", "]", ":", "raise", "AssertionError", "(", "'Slice by dimension more than 3 or less than 0 is not supported'", ")", "def", "target_layer", "(", "x", ",", "axis", "=", "int", "(", "params", "[", "'axes'", "]", "[", "0", "]", ")", ",", "start", "=", "int", "(", "params", "[", "'starts'", "]", "[", "0", "]", ")", ",", "end", "=", "int", "(", "params", "[", "'ends'", "]", "[", "0", "]", ")", ")", ":", "if", "axis", "==", "0", ":", "return", "x", "[", "start", ":", "end", "]", "elif", "axis", "==", "1", ":", "return", "x", "[", ":", ",", "start", ":", "end", "]", "elif", "axis", "==", "2", ":", "return", "x", "[", ":", ",", ":", ",", "start", ":", "end", "]", "elif", "axis", "==", "3", ":", "return", "x", "[", ":", ",", ":", ",", ":", ",", "start", ":", "end", "]", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_clip
Convert clip operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/operation_layers.py
def convert_clip(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert a clip operation to ReLU(max_value) or a tf.clip_by_value Lambda.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting clip ...')

    lower = params['min']
    upper = params['max']

    if lower == 0:
        # clip(0, max) is exactly a capped ReLU, which keras supports natively.
        print("using ReLU({0})".format(upper))
        clip_layer = keras.layers.ReLU(max_value=upper)
    else:
        def target_layer(x, vmin=lower, vmax=upper):
            import tensorflow as tf
            return tf.clip_by_value(x, vmin, vmax)
        clip_layer = keras.layers.Lambda(target_layer)

    layers[scope_name] = clip_layer(layers[inputs[0]])
[ "Convert", "clip", "operation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/operation_layers.py#L127-L151
[ "def", "convert_clip", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting clip ...'", ")", "if", "params", "[", "'min'", "]", "==", "0", ":", "print", "(", "\"using ReLU({0})\"", ".", "format", "(", "params", "[", "'max'", "]", ")", ")", "layer", "=", "keras", ".", "layers", ".", "ReLU", "(", "max_value", "=", "params", "[", "'max'", "]", ")", "else", ":", "def", "target_layer", "(", "x", ",", "vmin", "=", "params", "[", "'min'", "]", ",", "vmax", "=", "params", "[", "'max'", "]", ")", ":", "import", "tensorflow", "as", "tf", "return", "tf", ".", "clip_by_value", "(", "x", ",", "vmin", ",", "vmax", ")", "layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_elementwise_add
Convert elementwise addition. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/elementwise_layers.py
def convert_elementwise_add(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert elementwise addition.

    Uses a tf.add Lambda when the node is marked for broadcasting (keras Add
    requires equal shapes), otherwise keras.layers.Add.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting elementwise_add ...')

    # Name selection was duplicated in both branches of the original;
    # hoisted here. The unused model0/model1 locals were removed.
    if names == 'short':
        tf_name = 'A' + random_string(7)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    if 'broadcast' in params:
        def target_layer(x):
            return tf.add(x[0], x[1])

        lambda_layer = keras.layers.Lambda(target_layer, name=tf_name)
        layers[scope_name] = lambda_layer([layers[inputs[0]], layers[inputs[1]]])
    else:
        add = keras.layers.Add(name=tf_name)
        layers[scope_name] = add([layers[inputs[0]], layers[inputs[1]]])
[ "Convert", "elementwise", "addition", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/elementwise_layers.py#L9-L54
[ "def", "convert_elementwise_add", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting elementwise_add ...'", ")", "if", "'broadcast'", "in", "params", ":", "model0", "=", "layers", "[", "inputs", "[", "0", "]", "]", "model1", "=", "layers", "[", "inputs", "[", "1", "]", "]", "if", "names", "==", "'short'", ":", "tf_name", "=", "'A'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "def", "target_layer", "(", "x", ")", ":", "layer", "=", "tf", ".", "add", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ")", "return", "layer", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "[", "layers", "[", "inputs", "[", "0", "]", "]", ",", "layers", "[", "inputs", "[", "1", "]", "]", "]", ")", "else", ":", "model0", "=", "layers", "[", "inputs", "[", "0", "]", "]", "model1", "=", "layers", "[", "inputs", "[", "1", "]", "]", "if", "names", "==", "'short'", ":", "tf_name", "=", "'A'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "add", "=", "keras", ".", "layers", ".", "Add", "(", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "add", "(", "[", "model0", ",", "model1", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_elementwise_mul
Convert elementwise multiplication. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/elementwise_layers.py
def convert_elementwise_mul(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert elementwise multiplication to a tf.multiply Lambda.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting elementwise_mul ...')

    if names == 'short':
        tf_name = 'M' + random_string(7)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    # The original bound model0/model1 locals it never used; removed.
    def target_layer(x):
        # tf.multiply broadcasts, unlike keras.layers.Multiply.
        return tf.multiply(x[0], x[1])

    lambda_layer = keras.layers.Lambda(target_layer, name=tf_name)
    layers[scope_name] = lambda_layer([layers[inputs[0]], layers[inputs[1]]])
[ "Convert", "elementwise", "multiplication", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/elementwise_layers.py#L57-L91
[ "def", "convert_elementwise_mul", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting elementwise_mul ...'", ")", "model0", "=", "layers", "[", "inputs", "[", "0", "]", "]", "model1", "=", "layers", "[", "inputs", "[", "1", "]", "]", "if", "names", "==", "'short'", ":", "tf_name", "=", "'M'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "def", "target_layer", "(", "x", ")", ":", "layer", "=", "tf", ".", "multiply", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ")", "return", "layer", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "[", "layers", "[", "inputs", "[", "0", "]", "]", ",", "layers", "[", "inputs", "[", "1", "]", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_elementwise_div
Convert elementwise multiplication. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/elementwise_layers.py
def convert_elementwise_div(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert elementwise division to a tf.div Lambda.

    (The original docstring said "multiplication" — a copy-paste from the
    mul converter; fixed.)

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting elementwise_div ...')

    if names == 'short':
        tf_name = 'D' + random_string(7)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    def target_layer(x):
        # NOTE(review): tf.div is deprecated in favor of tf.math.divide in
        # TF 2.x; kept as-is to preserve behavior on the TF 1.x versions this
        # converter appears to target — confirm before upgrading.
        return tf.div(x[0], x[1])

    lambda_layer = keras.layers.Lambda(target_layer, name=tf_name)
    layers[scope_name] = lambda_layer([layers[inputs[0]], layers[inputs[1]]])
[ "Convert", "elementwise", "multiplication", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/elementwise_layers.py#L94-L126
[ "def", "convert_elementwise_div", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting elementwise_div ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'D'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "def", "target_layer", "(", "x", ")", ":", "layer", "=", "tf", ".", "div", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ")", "return", "layer", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "[", "layers", "[", "inputs", "[", "0", "]", "]", ",", "layers", "[", "inputs", "[", "1", "]", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_elementwise_sub
Convert elementwise subtraction. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/elementwise_layers.py
def convert_elementwise_sub(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert elementwise subtraction to keras.layers.Subtract.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting elementwise_sub ...')

    if names == 'short':
        tf_name = 'S' + random_string(7)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    minuend = layers[inputs[0]]
    subtrahend = layers[inputs[1]]
    layers[scope_name] = keras.layers.Subtract(name=tf_name)([minuend, subtrahend])
[ "Convert", "elementwise", "subtraction", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/elementwise_layers.py#L129-L156
[ "def", "convert_elementwise_sub", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting elementwise_sub ...'", ")", "model0", "=", "layers", "[", "inputs", "[", "0", "]", "]", "model1", "=", "layers", "[", "inputs", "[", "1", "]", "]", "if", "names", "==", "'short'", ":", "tf_name", "=", "'S'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "sub", "=", "keras", ".", "layers", ".", "Subtract", "(", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "sub", "(", "[", "model0", ",", "model1", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_gemm
Convert Linear. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/linear_layers.py
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names): """ Convert Linear. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting Linear ...') if names == 'short': tf_name = 'FC' + random_string(6) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) bias_name = '{0}.bias'.format(w_name) weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] has_bias = False if bias_name in weights: bias = weights[bias_name].numpy() keras_weights = [W, bias] has_bias = True dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=has_bias, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]])
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names): """ Convert Linear. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting Linear ...') if names == 'short': tf_name = 'FC' + random_string(6) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) bias_name = '{0}.bias'.format(w_name) weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] has_bias = False if bias_name in weights: bias = weights[bias_name].numpy() keras_weights = [W, bias] has_bias = True dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=has_bias, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]])
[ "Convert", "Linear", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/linear_layers.py#L9-L49
[ "def", "convert_gemm", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting Linear ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'FC'", "+", "random_string", "(", "6", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "bias_name", "=", "'{0}.bias'", ".", "format", "(", "w_name", ")", "weights_name", "=", "'{0}.weight'", ".", "format", "(", "w_name", ")", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", ")", "input_channels", ",", "output_channels", "=", "W", ".", "shape", "keras_weights", "=", "[", "W", "]", "has_bias", "=", "False", "if", "bias_name", "in", "weights", ":", "bias", "=", "weights", "[", "bias_name", "]", ".", "numpy", "(", ")", "keras_weights", "=", "[", "W", ",", "bias", "]", "has_bias", "=", "True", "dense", "=", "keras", ".", "layers", ".", "Dense", "(", "output_channels", ",", "weights", "=", "keras_weights", ",", "use_bias", "=", "has_bias", ",", "name", "=", "tf_name", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", ")", "layers", "[", "scope_name", "]", "=", "dense", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_matmul
Convert matmul layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/linear_layers.py
def convert_matmul(params, w_name, scope_name, inputs, layers, weights, names): """ Convert matmul layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting matmul ...') if names == 'short': tf_name = 'MMUL' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) if len(inputs) == 1: weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]]) elif len(inputs) == 2: weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]]) else: raise AssertionError('Cannot convert matmul layer')
def convert_matmul(params, w_name, scope_name, inputs, layers, weights, names): """ Convert matmul layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting matmul ...') if names == 'short': tf_name = 'MMUL' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) if len(inputs) == 1: weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]]) elif len(inputs) == 2: weights_name = '{0}.weight'.format(w_name) W = weights[weights_name].numpy().transpose() input_channels, output_channels = W.shape keras_weights = [W] dense = keras.layers.Dense( output_channels, weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros', ) layers[scope_name] = dense(layers[inputs[0]]) else: raise AssertionError('Cannot convert matmul layer')
[ "Convert", "matmul", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/linear_layers.py#L52-L101
[ "def", "convert_matmul", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting matmul ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'MMUL'", "+", "random_string", "(", "4", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "if", "len", "(", "inputs", ")", "==", "1", ":", "weights_name", "=", "'{0}.weight'", ".", "format", "(", "w_name", ")", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", ")", "input_channels", ",", "output_channels", "=", "W", ".", "shape", "keras_weights", "=", "[", "W", "]", "dense", "=", "keras", ".", "layers", ".", "Dense", "(", "output_channels", ",", "weights", "=", "keras_weights", ",", "use_bias", "=", "False", ",", "name", "=", "tf_name", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", ")", "layers", "[", "scope_name", "]", "=", "dense", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")", "elif", "len", "(", "inputs", ")", "==", "2", ":", "weights_name", "=", "'{0}.weight'", ".", "format", "(", "w_name", ")", "W", "=", "weights", "[", "weights_name", "]", ".", "numpy", "(", ")", ".", "transpose", "(", ")", "input_channels", ",", "output_channels", "=", "W", ".", "shape", "keras_weights", "=", "[", "W", "]", "dense", "=", "keras", ".", "layers", ".", "Dense", "(", "output_channels", ",", "weights", "=", "keras_weights", ",", "use_bias", "=", "False", ",", "name", "=", "tf_name", ",", "bias_initializer", "=", "'zeros'", ",", "kernel_initializer", "=", "'zeros'", ",", ")", "layers", "[", "scope_name", "]", "=", "dense", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")", "else", ":", "raise", "AssertionError", "(", "'Cannot convert matmul layer'", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_constant
Convert constant layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/constant_layers.py
def convert_constant(params, w_name, scope_name, inputs, layers, weights, names): """ Convert constant layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting constant ...') params_list = params['value'].numpy() def target_layer(x, value=params_list): return tf.constant(value.tolist(), shape=value.shape) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name + '_np'] = params_list # ad-hoc layers[scope_name] = lambda_layer(layers[list(layers.keys())[0]])
def convert_constant(params, w_name, scope_name, inputs, layers, weights, names): """ Convert constant layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting constant ...') params_list = params['value'].numpy() def target_layer(x, value=params_list): return tf.constant(value.tolist(), shape=value.shape) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name + '_np'] = params_list # ad-hoc layers[scope_name] = lambda_layer(layers[list(layers.keys())[0]])
[ "Convert", "constant", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/constant_layers.py#L9-L31
[ "def", "convert_constant", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting constant ...'", ")", "params_list", "=", "params", "[", "'value'", "]", ".", "numpy", "(", ")", "def", "target_layer", "(", "x", ",", "value", "=", "params_list", ")", ":", "return", "tf", ".", "constant", "(", "value", ".", "tolist", "(", ")", ",", "shape", "=", "value", ".", "shape", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "+", "'_np'", "]", "=", "params_list", "# ad-hoc", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "list", "(", "layers", ".", "keys", "(", ")", ")", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_flatten
Convert reshape(view). Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_flatten(params, w_name, scope_name, inputs, layers, weights, names): """ Convert reshape(view). Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting flatten ...') if names == 'short': tf_name = 'R' + random_string(7) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) reshape = keras.layers.Reshape([-1], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]])
def convert_flatten(params, w_name, scope_name, inputs, layers, weights, names): """ Convert reshape(view). Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting flatten ...') if names == 'short': tf_name = 'R' + random_string(7) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) reshape = keras.layers.Reshape([-1], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]])
[ "Convert", "reshape", "(", "view", ")", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L9-L32
[ "def", "convert_flatten", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting flatten ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'R'", "+", "random_string", "(", "7", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "reshape", "=", "keras", ".", "layers", ".", "Reshape", "(", "[", "-", "1", "]", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "reshape", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_transpose
Convert transpose layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_transpose(params, w_name, scope_name, inputs, layers, weights, names): """ Convert transpose layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting transpose ...') if params['perm'][0] != 0: if inputs[0] in layers: print('!!! Cannot permute batch dimension. Result may be wrong !!!') layers[scope_name] = layers[inputs[0]] else: print('Skip weight matrix transpose, result may be wrong.') else: if names: tf_name = 'PERM' + random_string(4) else: tf_name = w_name + str(random.random()) permute = keras.layers.Permute(params['perm'][1:], name=tf_name) layers[scope_name] = permute(layers[inputs[0]])
def convert_transpose(params, w_name, scope_name, inputs, layers, weights, names): """ Convert transpose layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting transpose ...') if params['perm'][0] != 0: if inputs[0] in layers: print('!!! Cannot permute batch dimension. Result may be wrong !!!') layers[scope_name] = layers[inputs[0]] else: print('Skip weight matrix transpose, result may be wrong.') else: if names: tf_name = 'PERM' + random_string(4) else: tf_name = w_name + str(random.random()) permute = keras.layers.Permute(params['perm'][1:], name=tf_name) layers[scope_name] = permute(layers[inputs[0]])
[ "Convert", "transpose", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L35-L61
[ "def", "convert_transpose", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting transpose ...'", ")", "if", "params", "[", "'perm'", "]", "[", "0", "]", "!=", "0", ":", "if", "inputs", "[", "0", "]", "in", "layers", ":", "print", "(", "'!!! Cannot permute batch dimension. Result may be wrong !!!'", ")", "layers", "[", "scope_name", "]", "=", "layers", "[", "inputs", "[", "0", "]", "]", "else", ":", "print", "(", "'Skip weight matrix transpose, result may be wrong.'", ")", "else", ":", "if", "names", ":", "tf_name", "=", "'PERM'", "+", "random_string", "(", "4", ")", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "permute", "=", "keras", ".", "layers", ".", "Permute", "(", "params", "[", "'perm'", "]", "[", "1", ":", "]", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "permute", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_reshape
Convert reshape layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_reshape(params, w_name, scope_name, inputs, layers, weights, names): """ Convert reshape layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting reshape ...') if names == 'short': tf_name = 'RESH' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) if len(inputs) > 1: if layers[inputs[1]][0] == -1: print('Cannot deduct batch size! It will be omitted, but result may be wrong.') reshape = keras.layers.Reshape(layers[inputs[1] + '_np'], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]]) else: if inputs[0] in layers: reshape = keras.layers.Reshape(params['shape'][1:], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]]) else: print('Skip weight matrix transpose, but result may be wrong.')
def convert_reshape(params, w_name, scope_name, inputs, layers, weights, names): """ Convert reshape layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting reshape ...') if names == 'short': tf_name = 'RESH' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) if len(inputs) > 1: if layers[inputs[1]][0] == -1: print('Cannot deduct batch size! It will be omitted, but result may be wrong.') reshape = keras.layers.Reshape(layers[inputs[1] + '_np'], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]]) else: if inputs[0] in layers: reshape = keras.layers.Reshape(params['shape'][1:], name=tf_name) layers[scope_name] = reshape(layers[inputs[0]]) else: print('Skip weight matrix transpose, but result may be wrong.')
[ "Convert", "reshape", "layer", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L64-L96
[ "def", "convert_reshape", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting reshape ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'RESH'", "+", "random_string", "(", "4", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "if", "len", "(", "inputs", ")", ">", "1", ":", "if", "layers", "[", "inputs", "[", "1", "]", "]", "[", "0", "]", "==", "-", "1", ":", "print", "(", "'Cannot deduct batch size! It will be omitted, but result may be wrong.'", ")", "reshape", "=", "keras", ".", "layers", ".", "Reshape", "(", "layers", "[", "inputs", "[", "1", "]", "+", "'_np'", "]", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "reshape", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")", "else", ":", "if", "inputs", "[", "0", "]", "in", "layers", ":", "reshape", "=", "keras", ".", "layers", ".", "Reshape", "(", "params", "[", "'shape'", "]", "[", "1", ":", "]", ",", "name", "=", "tf_name", ")", "layers", "[", "scope_name", "]", "=", "reshape", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")", "else", ":", "print", "(", "'Skip weight matrix transpose, but result may be wrong.'", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_squeeze
Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_squeeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting squeeze ...') if len(params['axes']) > 1: raise AssertionError('Cannot convert squeeze by multiple dimensions') def target_layer(x, axis=int(params['axes'][0])): import tensorflow as tf return tf.squeeze(x, axis=axis) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
def convert_squeeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting squeeze ...') if len(params['axes']) > 1: raise AssertionError('Cannot convert squeeze by multiple dimensions') def target_layer(x, axis=int(params['axes'][0])): import tensorflow as tf return tf.squeeze(x, axis=axis) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
[ "Convert", "squeeze", "operation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L98-L121
[ "def", "convert_squeeze", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting squeeze ...'", ")", "if", "len", "(", "params", "[", "'axes'", "]", ")", ">", "1", ":", "raise", "AssertionError", "(", "'Cannot convert squeeze by multiple dimensions'", ")", "def", "target_layer", "(", "x", ",", "axis", "=", "int", "(", "params", "[", "'axes'", "]", "[", "0", "]", ")", ")", ":", "import", "tensorflow", "as", "tf", "return", "tf", ".", "squeeze", "(", "x", ",", "axis", "=", "axis", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_unsqueeze
Convert unsqueeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_unsqueeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert unsqueeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting unsqueeze ...') if names == 'short': tf_name = 'UNSQ' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) def target_layer(x): import keras return keras.backend.expand_dims(x) lambda_layer = keras.layers.Lambda(target_layer, name=tf_name + 'E') layers[scope_name] = lambda_layer(layers[inputs[0]])
def convert_unsqueeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert unsqueeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting unsqueeze ...') if names == 'short': tf_name = 'UNSQ' + random_string(4) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) def target_layer(x): import keras return keras.backend.expand_dims(x) lambda_layer = keras.layers.Lambda(target_layer, name=tf_name + 'E') layers[scope_name] = lambda_layer(layers[inputs[0]])
[ "Convert", "unsqueeze", "operation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L124-L151
[ "def", "convert_unsqueeze", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting unsqueeze ...'", ")", "if", "names", "==", "'short'", ":", "tf_name", "=", "'UNSQ'", "+", "random_string", "(", "4", ")", "elif", "names", "==", "'keep'", ":", "tf_name", "=", "w_name", "else", ":", "tf_name", "=", "w_name", "+", "str", "(", "random", ".", "random", "(", ")", ")", "def", "target_layer", "(", "x", ")", ":", "import", "keras", "return", "keras", ".", "backend", ".", "expand_dims", "(", "x", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ",", "name", "=", "tf_name", "+", "'E'", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764
valid
convert_shape
Convert shape operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
pytorch2keras/reshape_layers.py
def convert_shape(params, w_name, scope_name, inputs, layers, weights, names): """ Convert shape operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting shape ...') def target_layer(x): import tensorflow as tf return tf.shape(x) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
def convert_shape(params, w_name, scope_name, inputs, layers, weights, names): """ Convert shape operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting shape ...') def target_layer(x): import tensorflow as tf return tf.shape(x) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
[ "Convert", "shape", "operation", "." ]
nerox8664/pytorch2keras
python
https://github.com/nerox8664/pytorch2keras/blob/750eaf747323580e6732d0c5ba9f2f39cb096764/pytorch2keras/reshape_layers.py#L154-L174
[ "def", "convert_shape", "(", "params", ",", "w_name", ",", "scope_name", ",", "inputs", ",", "layers", ",", "weights", ",", "names", ")", ":", "print", "(", "'Converting shape ...'", ")", "def", "target_layer", "(", "x", ")", ":", "import", "tensorflow", "as", "tf", "return", "tf", ".", "shape", "(", "x", ")", "lambda_layer", "=", "keras", ".", "layers", ".", "Lambda", "(", "target_layer", ")", "layers", "[", "scope_name", "]", "=", "lambda_layer", "(", "layers", "[", "inputs", "[", "0", "]", "]", ")" ]
750eaf747323580e6732d0c5ba9f2f39cb096764