Function-level code and documentation records (language: python, split: train). Fields per record: repository_name, func_path_in_repository, func_name, whole_func_string, language, func_code_string, func_code_tokens, func_documentation_string, func_documentation_tokens, split_name, func_code_url. Each record below shows the repository, file path, and function name, followed by the function source and its source URL.
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager.get_tags

```python
def get_tags(self):
    """List all tags as Tag objects."""
    res = self.get_request('/tag')
    return [Tag(cloud_manager=self, **tag) for tag in res['tags']['tag']]
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L18-L21
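A minimal usage sketch; it assumes valid API credentials and that get_tags is available on CloudManager through this mixin:

```python
import upcloud_api

manager = upcloud_api.CloudManager('api_user', 'password')  # placeholder credentials
for tag in manager.get_tags():
    print(tag.name)  # assumes Tag exposes the API's 'name' field as an attribute
```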
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager.get_tag

```python
def get_tag(self, name):
    """Return the tag as a Tag object."""
    res = self.get_request('/tag/' + name)
    return Tag(cloud_manager=self, **res['tag'])
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L23-L26
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager.create_tag

```python
def create_tag(self, name, description=None, servers=None):
    """
    Create a new Tag. Only name is mandatory.

    Returns the created Tag object.
    """
    # default to an empty list here rather than using a mutable default argument
    servers = [str(server) for server in (servers or [])]
    body = {'tag': Tag(name, description, servers).to_dict()}
    res = self.request('POST', '/tag', body)
    return Tag(cloud_manager=self, **res['tag'])
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L28-L38
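A sketch of creating and re-fetching a tag, reusing the authenticated manager from the earlier sketch:

```python
# Assumes `manager` is an authenticated CloudManager (see the sketch above).
tag = manager.create_tag('production', description='production servers')
same_tag = manager.get_tag('production')
```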
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager._modify_tag

```python
def _modify_tag(self, name, description, servers, new_name):
    """
    PUT /tag/name.

    Returns a dict that can be used to create a Tag object.
    Private method used by the Tag class and TagManager.modify_tag.
    """
    body = {'tag': Tag(new_name, description, servers).to_dict()}
    res = self.request('PUT', '/tag/' + name, body)
    return res['tag']
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L40-L48
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager.modify_tag

```python
def modify_tag(self, name, description=None, servers=None, new_name=None):
    """
    PUT /tag/name. Returns a new Tag object based on the API response.
    """
    # _modify_tag already returns the unwrapped res['tag'] dict, so it is
    # passed to Tag directly; indexing it with ['tag'] again (as the original
    # code did) would raise a KeyError.
    res = self._modify_tag(name, description, servers, new_name)
    return Tag(cloud_manager=self, **res)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L50-L55
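A rename sketch; how fields left as None are treated is an assumption about Tag.to_dict(), not shown in these records:

```python
# Assumes `manager` is an authenticated CloudManager.
renamed = manager.modify_tag('production', new_name='prod')
```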
UpCloudLtd/upcloud-python-api | upcloud_api/cloud_manager/tag_mixin.py | TagManager.remove_tags

```python
def remove_tags(self, server, tags):
    """
    Remove tags from a server.

    - server: Server object or UUID string
    - tags: list of Tag objects or strings
    """
    uuid = str(server)
    tags = [str(tag) for tag in tags]
    url = '/server/{0}/untag/{1}'.format(uuid, ','.join(tags))
    return self.post_request(url)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/tag_mixin.py#L70-L81
UpCloudLtd/upcloud-python-api | upcloud_api/utils.py | assignIfExists

```python
def assignIfExists(opts, default=None, **kwargs):
    """
    Helper for assigning object attributes from API responses.

    Returns the value of the first key in opts that is present in kwargs,
    or default if none of them is.
    """
    for opt in opts:
        if opt in kwargs:
            return kwargs[opt]
    return default
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/utils.py#L7-L14
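Since the helper is pure, a self-contained example shows the alias fallback directly:

```python
# The response may use either 'storage_size' or 'size'; take the first match.
size = assignIfExists(('storage_size', 'size'), default=10, storage_size=100)
assert size == 100

missing = assignIfExists(('storage_size', 'size'), default=10)
assert missing == 10  # neither key present, so the default is returned
```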
UpCloudLtd/upcloud-python-api | upcloud_api/utils.py | try_it_n_times

```python
def try_it_n_times(operation, expected_error_codes, custom_error='operation failed', n=10):
    """
    Try a given operation (API call) n times.

    Raises if the API call fails with an error_code that is not expected.
    Raises if the API call has not succeeded within n attempts.
    Waits 3 seconds between each attempt.
    """
    for i in itertools.count():
        try:
            operation()
            break
        except UpCloudAPIError as e:
            if e.error_code not in expected_error_codes:
                raise e
            sleep(3)
            if i >= n - 1:
                raise UpCloudClientError(custom_error)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/utils.py#L17-L34
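A usage sketch; delete_storage and the error code are illustrative stand-ins, not verified API names:

```python
# Retry while the server still holds a lock on the storage (hypothetical
# operation and error code).
try_it_n_times(
    operation=lambda: manager.delete_storage(storage_uuid),
    expected_error_codes=['STORAGE_DELETION_FAILED'],
    custom_error='could not delete storage',
    n=5
)
```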
casebeer/python-hkdf | hkdf.py | hkdf_extract

```python
import hashlib
import hmac

def hkdf_extract(salt, input_key_material, hash=hashlib.sha512):
    '''
    Extract a pseudorandom key suitable for use with hkdf_expand
    from the input_key_material and a salt using HMAC with the
    provided hash (default SHA-512).

    salt should be a random, application-specific byte string. If
    salt is None or the empty string, an all-zeros string of the same
    length as the hash's digest size will be used instead per the RFC.

    See the HKDF draft RFC and paper for usage notes.
    '''
    hash_len = hash().digest_size
    if salt is None or len(salt) == 0:
        salt = bytearray((0,) * hash_len)
    # the upstream code wraps input_key_material in buffer(), which exists
    # only on Python 2; bytes() is the portable equivalent
    return hmac.new(bytes(salt), bytes(input_key_material), hash).digest()
```

Source: https://github.com/casebeer/python-hkdf/blob/cc3c9dbf0a271b27a7ac5cd04cc1485bbc3b4307/hkdf.py#L10-L25
casebeer/python-hkdf | hkdf.py | hkdf_expand

```python
import hashlib
import hmac

def hkdf_expand(pseudo_random_key, info=b"", length=32, hash=hashlib.sha512):
    '''
    Expand `pseudo_random_key` and `info` into a key of `length` bytes using
    HKDF's expand function based on HMAC with the provided hash (default
    SHA-512). See the HKDF draft RFC and paper for usage notes.
    '''
    hash_len = hash().digest_size
    length = int(length)
    if length > 255 * hash_len:
        raise Exception(
            "Cannot expand to more than 255 * %d = %d bytes using the specified hash function"
            % (hash_len, 255 * hash_len))
    blocks_needed = length // hash_len + (0 if length % hash_len == 0 else 1)  # ceil
    okm = b""
    output_block = b""
    for counter in range(blocks_needed):
        # T(n) = HMAC(PRK, T(n-1) | info | n), with the block counter starting
        # at 1 (buffer() in the upstream code is Python 2 only; bytes() works
        # on both)
        output_block = hmac.new(
            pseudo_random_key,
            bytes(output_block + info + bytearray((counter + 1,))),
            hash).digest()
        okm += output_block
    return okm[:length]
```

Source: https://github.com/casebeer/python-hkdf/blob/cc3c9dbf0a271b27a7ac5cd04cc1485bbc3b4307/hkdf.py#L27-L45
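The two functions above compose into the standard extract-then-expand flow; a runnable sketch with SHA-256 and illustrative inputs:

```python
import hashlib

ikm = b'\x0b' * 22                      # input keying material
salt = bytes(range(13))                 # 0x00..0x0c
info = b'application-specific context'  # illustrative context string

prk = hkdf_extract(salt, ikm, hash=hashlib.sha256)
okm = hkdf_expand(prk, info=info, length=42, hash=hashlib.sha256)
assert len(okm) == 42
```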
casebeer/python-hkdf | hkdf.py | Hkdf.expand

```python
def expand(self, info=b"", length=32):
    '''
    Generate output key material based on an `info` value.

    Arguments:
    - info - context to generate the OKM
    - length - length in bytes of the key to generate

    See the HKDF draft RFC for guidance.
    '''
    return hkdf_expand(self._prk, info, length, self._hash)
```

Source: https://github.com/casebeer/python-hkdf/blob/cc3c9dbf0a271b27a7ac5cd04cc1485bbc3b4307/hkdf.py#L61-L71
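A usage sketch for the wrapper class; the constructor signature Hkdf(salt, input_key_material, hash=...) is an assumption inferred from the module functions above, not shown in these records:

```python
import hashlib

# Hypothetical constructor signature: Hkdf(salt, input_key_material, hash=...)
kdf = Hkdf(b'some-salt', b'secret input keying material', hash=hashlib.sha256)
key = kdf.expand(info=b'encryption key', length=32)
assert len(key) == 32
```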
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | login_user_block

```python
def login_user_block(username, ssh_keys, create_password=True):
    """
    Helper function for creating Server.login_user blocks.

    (see: https://www.upcloud.com/api/8-servers/#create-server)
    """
    block = {
        'create_password': 'yes' if create_password is True else 'no',
        'ssh_keys': {
            'ssh_key': ssh_keys
        }
    }
    if username:
        block['username'] = username
    return block
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L10-L26
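For example, one user with a single SSH key and password creation disabled produces:

```python
block = login_user_block(
    username='upclouduser',
    ssh_keys=['ssh-rsa AAAAB3NzaC1yc2EAAAADAQ... user@host'],  # placeholder key
    create_password=False
)
# block == {
#     'create_password': 'no',
#     'ssh_keys': {'ssh_key': ['ssh-rsa AAAAB3NzaC1yc2EAAAADAQ... user@host']},
#     'username': 'upclouduser'
# }
```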
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server._reset

```python
def _reset(self, server, **kwargs):
    """
    Reset the server object with new values given as params.

    - server: a dict representing the server, e.g. the API response.
    - kwargs: any meta fields such as cloud_manager and populated.

    Note: storage_devices and ip_addresses may be given in server as dicts or
    in kwargs as lists containing Storage and IPAddress objects.
    """
    if server:
        # handle storage, ip_address dicts and tags if they exist
        Server._handle_server_subobjs(server, kwargs.get('cloud_manager'))

        for key in server:
            object.__setattr__(self, key, server[key])

    for key in kwargs:
        object.__setattr__(self, key, kwargs[key])
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L74-L92
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.populate

```python
def populate(self):
    """
    Sync changes from the API to the local object.

    Note: syncs ip_addresses and storage_devices too (/server/uuid endpoint)
    """
    server, IPAddresses, storages = self.cloud_manager.get_server_data(self.uuid)
    self._reset(
        server,
        ip_addresses=IPAddresses,
        storage_devices=storages,
        populated=True
    )
    return self
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L94-L107
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.save

```python
def save(self):
    """
    Sync local changes in the server's attributes to the API.

    Note: DOES NOT sync IPAddresses and storage_devices;
    use add_ip, add_storage, remove_ip, remove_storage instead.
    """
    # dict comprehension that also works with Python 2.6
    # http://stackoverflow.com/questions/21069668/alternative-to-dict-comprehension-prior-to-python-2-7
    kwargs = dict(
        (field, getattr(self, field))
        for field in self.updateable_fields
        if hasattr(self, field)
    )
    self.cloud_manager.modify_server(self.uuid, **kwargs)
    self._reset(kwargs)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L116-L132
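A sketch of the read-modify-write flow this enables, assuming 'title' is among updateable_fields and that plain attribute assignment is permitted on Server:

```python
server = manager.get_server(uuid)  # hypothetical fetch of a populated Server
server.title = 'renamed-server'    # 'title' assumed to be an updateable field
server.save()                      # PUTs only the changed, updateable fields
```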
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.shutdown

```python
def shutdown(self, hard=False, timeout=30):
    """
    Shutdown/stop the server. By default, issue a soft shutdown with a
    timeout of 30s; after the timeout, a hard shutdown is performed if the
    server has not stopped.

    Note: the API responds immediately (unlike in start), with state: started.
    This client will, however, set state to 'maintenance' to signal that the
    server is neither started nor stopped.
    """
    body = dict()
    body['stop_server'] = {
        'stop_type': 'hard' if hard else 'soft',
        'timeout': '{0}'.format(timeout)
    }
    path = '/server/{0}/stop'.format(self.uuid)
    self.cloud_manager.post_request(path, body)
    object.__setattr__(self, 'state', 'maintenance')
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L137-L155
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.start

```python
def start(self, timeout=120):
    """
    Start the server.

    Note: slow and blocking request. The API waits for confirmation from
    UpCloud's IaaS backend before responding.
    """
    path = '/server/{0}/start'.format(self.uuid)
    self.cloud_manager.post_request(path, timeout=timeout)
    object.__setattr__(self, 'state', 'started')
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L163-L171
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.restart

```python
def restart(self, hard=False, timeout=30, force=True):
    """
    Restart the server. By default, issue a soft restart with a timeout of
    30s; after the timeout, a hard restart is performed if the server has
    not stopped.

    Note: the API responds immediately (unlike in start), with state: started.
    This client will, however, set state to 'maintenance' to signal that the
    server is neither started nor stopped.
    """
    body = dict()
    body['restart_server'] = {
        'stop_type': 'hard' if hard else 'soft',
        'timeout': '{0}'.format(timeout),
        'timeout_action': 'destroy' if force else 'ignore'
    }
    path = '/server/{0}/restart'.format(self.uuid)
    self.cloud_manager.post_request(path, body)
    object.__setattr__(self, 'state', 'maintenance')
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L173-L193
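A lifecycle sketch tying stop and start together; `server` is assumed to come from a fetch such as manager.get_server(uuid):

```python
import time

server.shutdown(timeout=60)   # soft stop, escalating to a hard stop after 60 s
while server.state != 'stopped':
    time.sleep(5)
    server.populate()         # refresh state from the API
server.start()                # blocks until the backend confirms the start
```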
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.add_ip

```python
def add_ip(self, family='IPv4'):
    """Allocate a new (random) IP address to the Server."""
    IP = self.cloud_manager.attach_ip(self.uuid, family)
    self.ip_addresses.append(IP)
    return IP
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L195-L201
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.remove_ip

```python
def remove_ip(self, IPAddress):
    """Release the specified IP address from the server."""
    self.cloud_manager.release_ip(IPAddress.address)
    self.ip_addresses.remove(IPAddress)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L203-L208
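A sketch pairing the two calls; the returned IPAddress object is reused for the release:

```python
ip = server.add_ip(family='IPv6')  # attach a new public IPv6 address
server.remove_ip(ip)               # release it again
```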
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.add_storage

```python
def add_storage(self, storage=None, type='disk', address=None):
    """
    Attach the given storage to the Server. Default address is next available.
    """
    self.cloud_manager.attach_storage(server=self.uuid,
                                      storage=storage.uuid,
                                      storage_type=type,
                                      address=address)
    storage.address = address
    storage.type = type
    self.storage_devices.append(storage)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L210-L222
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.remove_storage

```python
def remove_storage(self, storage):
    """
    Remove Storage from a Server.

    The Storage must be a reference to an object in Server.storage_devices
    or the method will throw an Exception. A Storage from get_storage(uuid)
    will not work as it is missing the 'address' property.
    """
    if not hasattr(storage, 'address'):
        raise Exception(
            ('Storage does not have an address. '
             'Access the Storage via Server.storage_devices '
             'so they include an address. '
             '(This is due to how the API handles Storages)')
        )
    self.cloud_manager.detach_storage(server=self.uuid, address=storage.address)
    self.storage_devices.remove(storage)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L224-L242
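A sketch pairing attach and detach; remove_storage needs devices that carry an address, hence the populate() in between:

```python
server.add_storage(disk)      # `disk` assumed to come from a storage-creation call
server.populate()             # refresh so storage_devices carry addresses
server.remove_storage(server.storage_devices[-1])
```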
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.add_tags

```python
def add_tags(self, tags):
    """
    Add tags to a server. Accepts tags as strings or Tag objects.
    """
    if self.cloud_manager.assign_tags(self.uuid, tags):
        tags = self.tags + [str(tag) for tag in tags]
        object.__setattr__(self, 'tags', tags)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L264-L270
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.remove_tags

```python
def remove_tags(self, tags):
    """
    Remove tags from a server. Accepts tags as strings or Tag objects.
    """
    if self.cloud_manager.remove_tags(self, tags):
        new_tags = [tag for tag in self.tags if tag not in tags]
        object.__setattr__(self, 'tags', new_tags)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L272-L278
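A sketch of both tag calls; tags may be plain strings or Tag objects:

```python
server.add_tags(['web', 'production'])
server.remove_tags(['production'])  # local self.tags is updated on success
```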
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.configure_firewall

```python
def configure_firewall(self, FirewallRules):
    """
    Helper function for automatically adding several FirewallRules in series.
    """
    firewall_rule_bodies = [
        FirewallRule.to_dict()
        for FirewallRule in FirewallRules
    ]
    return self.cloud_manager.configure_firewall(self, firewall_rule_bodies)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L285-L293
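A sketch with a single inbound SSH rule; the FirewallRule field names below mirror UpCloud's firewall-rule schema but are assumptions here, not verified constructor parameters:

```python
from upcloud_api import FirewallRule  # import path assumed

allow_ssh = FirewallRule(
    direction='in', family='IPv4', protocol='tcp',
    destination_port_start='22', destination_port_end='22',
    action='accept'
)
server.configure_firewall([allow_ssh])
```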
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.prepare_post_body

```python
def prepare_post_body(self):
    """
    Prepare a JSON serializable dict from a Server instance with
    nested Storage instances.
    """
    body = dict()

    # mandatory
    body['server'] = {
        'hostname': self.hostname,
        'zone': self.zone,
        'title': self.title,
        'storage_devices': {}
    }

    # optional fields
    for optional_field in self.optional_fields:
        if hasattr(self, optional_field):
            body['server'][optional_field] = getattr(self, optional_field)

    # set password_delivery default as 'none' to prevent API from sending
    # emails (with credentials) about each created server
    if not hasattr(self, 'password_delivery'):
        body['server']['password_delivery'] = 'none'

    # collect storage devices and create a unique title (see: Storage.title
    # in API doc) for each of them
    body['server']['storage_devices'] = {
        'storage_device': []
    }
    storage_title_id = 0  # running number for unique storage titles
    for storage in self.storage_devices:
        if not hasattr(storage, 'os') or storage.os is None:
            storage_title_id += 1
        storage_body = storage.to_dict()

        # set up default titles for storages unless the user has specified
        # them at storage.title
        if not hasattr(storage, 'title') or not storage.title:
            if hasattr(storage, 'os') and storage.os:
                storage_body['title'] = self.hostname + ' OS disk'
            else:
                storage_body['title'] = self.hostname + ' storage disk ' + str(storage_title_id)

        # figure out the storage `action` parameter
        # public template
        if hasattr(storage, 'os') and storage.os:
            storage_body['action'] = 'clone'
            storage_body['storage'] = OperatingSystems.get_OS_UUID(storage.os)
        # private template
        elif hasattr(storage, 'uuid'):
            storage_body['action'] = 'clone'
            storage_body['storage'] = storage.uuid
        # create a new storage
        else:
            storage_body['action'] = 'create'

        body['server']['storage_devices']['storage_device'].append(storage_body)

    if hasattr(self, 'ip_addresses') and self.ip_addresses:
        body['server']['ip_addresses'] = {
            'ip_address': [ip.to_dict() for ip in self.ip_addresses]
        }

    return body
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L295-L368
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.to_dict

```python
def to_dict(self):
    """
    Prepare a JSON serializable dict for read-only purposes.

    Includes storages and IP addresses.
    Use prepare_post_body for POST and .save() for PUT.
    """
    fields = dict(vars(self).items())

    if self.populated:
        fields['ip_addresses'] = []
        fields['storage_devices'] = []

        for ip in self.ip_addresses:
            fields['ip_addresses'].append({
                'address': ip.address,
                'access': ip.access,
                'family': ip.family
            })

        for storage in self.storage_devices:
            fields['storage_devices'].append({
                'address': storage.address,
                'storage': storage.uuid,
                'storage_size': storage.size,
                'storage_title': storage.title,
                'type': storage.type,
            })

    del fields['populated']
    del fields['cloud_manager']
    return fields
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L370-L400
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.get_ip

```python
def get_ip(self, access='public', addr_family=None, strict=None):
    """
    Return the server's IP address.

    Params:
    - addr_family: 'IPv4', 'IPv6' or None. None prefers IPv4 but will
      return an IPv6 address if no IPv4 address is available.
    - access: 'public' or 'private'
    """
    if addr_family not in ['IPv4', 'IPv6', None]:
        raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None")
    if access not in ['private', 'public']:
        raise Exception("`access` must be 'public' or 'private'")

    if not hasattr(self, 'ip_addresses'):
        self.populate()

    # a server can have several public or private IPs
    ip_addrs = [
        ip_addr for ip_addr in self.ip_addresses
        if ip_addr.access == access
    ]

    # prefer addr_family (or IPv4 if none given)
    preferred_family = addr_family if addr_family else 'IPv4'
    for ip_addr in ip_addrs:
        if ip_addr.family == preferred_family:
            return ip_addr.address

    # any IP (of the right access) will do if available and addr_family is None
    return ip_addrs[0].address if ip_addrs and not addr_family else None
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L402-L433
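Example calls; which address comes back depends on what is attached to the server:

```python
public_v4 = server.get_ip()                    # public, prefers IPv4
private_ip = server.get_ip(access='private')   # private, IPv4 preferred
public_v6 = server.get_ip(addr_family='IPv6')  # public IPv6 only, else None
```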
UpCloudLtd/upcloud-python-api | upcloud_api/server.py | Server.get_public_ip

```python
def get_public_ip(self, addr_family=None, *args, **kwargs):
    """Alias for get_ip('public')."""
    return self.get_ip('public', addr_family, *args, **kwargs)
```

Source: https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L435-L437
UpCloudLtd/upcloud-python-api
upcloud_api/server.py
Server.get_private_ip
def get_private_ip(self, addr_family=None, *args, **kwargs): """Alias for get_ip('private')""" return self.get_ip('private', addr_family, *args, **kwargs)
python
def get_private_ip(self, addr_family=None, *args, **kwargs): """Alias for get_ip('private')""" return self.get_ip('private', addr_family, *args, **kwargs)
[ "def", "get_private_ip", "(", "self", ",", "addr_family", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "get_ip", "(", "'private'", ",", "addr_family", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Alias for get_ip('private')
[ "Alias", "for", "get_ip", "(", "private", ")" ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L439-L441
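Since both accessors above delegate to get_ip, the following pairs are equivalent by construction (same hypothetical server object as in the earlier sketch):

    assert server.get_public_ip() == server.get_ip('public')
    assert server.get_private_ip('IPv6') == server.get_ip('private', 'IPv6')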
UpCloudLtd/upcloud-python-api
upcloud_api/server.py
Server._wait_for_state_change
def _wait_for_state_change(self, target_states, update_interval=10):
    """
    Blocking wait until target_state reached. update_interval is in seconds.

    Warning: state change must begin before calling this method.
    """
    while self.state not in target_states:
        if self.state == 'error':
            raise Exception('server is in error state')

        # update server state every 10s
        sleep(update_interval)
        self.populate()
python
def _wait_for_state_change(self, target_states, update_interval=10):
    """
    Blocking wait until target_state reached. update_interval is in seconds.

    Warning: state change must begin before calling this method.
    """
    while self.state not in target_states:
        if self.state == 'error':
            raise Exception('server is in error state')

        # update server state every 10s
        sleep(update_interval)
        self.populate()
[ "def", "_wait_for_state_change", "(", "self", ",", "target_states", ",", "update_interval", "=", "10", ")", ":", "while", "self", ".", "state", "not", "in", "target_states", ":", "if", "self", ".", "state", "==", "'error'", ":", "raise", "Exception", "(", "'server is in error state'", ")", "# update server state every 10s", "sleep", "(", "update_interval", ")", "self", ".", "populate", "(", ")" ]
Blocking wait until target_state reached. update_interval is in seconds.

Warning: state change must begin before calling this method.
[ "Blocking", "wait", "until", "target_state", "reached", ".", "update_interval", "is", "in", "seconds", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L443-L455
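The same blocking-poll pattern, sketched as a standalone helper for clarity; the names and the 10-second default mirror the method above, but this is an illustration, not library API:

    import time

    def wait_for(get_state, target_states, interval=10):
        """Poll get_state() until it returns one of target_states."""
        state = get_state()
        while state not in target_states:
            if state == 'error':
                raise Exception('resource is in error state')
            time.sleep(interval)   # re-poll every `interval` seconds
            state = get_state()
        return state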
UpCloudLtd/upcloud-python-api
upcloud_api/server.py
Server.ensure_started
def ensure_started(self):
    """
    Start a server and wait (blocking wait) until it is fully started.
    """
    # server is either starting or stopping (or error)
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    if self.state == 'stopped':
        self.start()
        self._wait_for_state_change(['started'])

    if self.state == 'started':
        return True
    else:
        # something went wrong, fail explicitly
        raise Exception('unknown server state: ' + self.state)
python
def ensure_started(self):
    """
    Start a server and wait (blocking wait) until it is fully started.
    """
    # server is either starting or stopping (or error)
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    if self.state == 'stopped':
        self.start()
        self._wait_for_state_change(['started'])

    if self.state == 'started':
        return True
    else:
        # something went wrong, fail explicitly
        raise Exception('unknown server state: ' + self.state)
[ "def", "ensure_started", "(", "self", ")", ":", "# server is either starting or stopping (or error)", "if", "self", ".", "state", "in", "[", "'maintenance'", ",", "'error'", "]", ":", "self", ".", "_wait_for_state_change", "(", "[", "'stopped'", ",", "'started'", "]", ")", "if", "self", ".", "state", "==", "'stopped'", ":", "self", ".", "start", "(", ")", "self", ".", "_wait_for_state_change", "(", "[", "'started'", "]", ")", "if", "self", ".", "state", "==", "'started'", ":", "return", "True", "else", ":", "# something went wrong, fail explicitly", "raise", "Exception", "(", "'unknown server state: '", "+", "self", ".", "state", ")" ]
Start a server and wait (blocking wait) until it is fully started.
[ "Start", "a", "server", "and", "waits", "(", "blocking", "wait", ")", "until", "it", "is", "fully", "started", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L457-L473
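Usage sketch (same hypothetical server object as before): ensure_started blocks through 'maintenance' and 'stopped' states and returns True once the server reports 'started':

    if server.ensure_started():
        print(server.state)   # 'started' at this point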
UpCloudLtd/upcloud-python-api
upcloud_api/server.py
Server.stop_and_destroy
def stop_and_destroy(self, sync=True):
    """
    Destroy a server and its storages. Stops the server before destroying.

    Syncs the server state from the API; use sync=False to disable.
    """

    def _self_destruct():
        """destroy the server and all storages attached to it."""

        # try_it_n_times util is used as a convenience because
        # Servers and Storages can fluctuate between "maintenance" and their
        # original state due to several different reasons especially when
        # destroying infrastructure.

        # first destroy server
        try_it_n_times(operation=self.destroy,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='destroying server failed')

        # storages may be deleted instantly after server DELETE
        for storage in self.storage_devices:
            try_it_n_times(operation=storage.destroy,
                           expected_error_codes=['STORAGE_STATE_ILLEGAL'],
                           custom_error='destroying storage failed')

    if sync:
        self.populate()

    # server is either starting or stopping (or error)
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    if self.state == 'started':
        try_it_n_times(operation=self.stop,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='stopping server failed')
        self._wait_for_state_change(['stopped'])

    if self.state == 'stopped':
        _self_destruct()
    else:
        raise Exception('unknown server state: ' + self.state)
python
def stop_and_destroy(self, sync=True):
    """
    Destroy a server and its storages. Stops the server before destroying.

    Syncs the server state from the API; use sync=False to disable.
    """

    def _self_destruct():
        """destroy the server and all storages attached to it."""

        # try_it_n_times util is used as a convenience because
        # Servers and Storages can fluctuate between "maintenance" and their
        # original state due to several different reasons especially when
        # destroying infrastructure.

        # first destroy server
        try_it_n_times(operation=self.destroy,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='destroying server failed')

        # storages may be deleted instantly after server DELETE
        for storage in self.storage_devices:
            try_it_n_times(operation=storage.destroy,
                           expected_error_codes=['STORAGE_STATE_ILLEGAL'],
                           custom_error='destroying storage failed')

    if sync:
        self.populate()

    # server is either starting or stopping (or error)
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    if self.state == 'started':
        try_it_n_times(operation=self.stop,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='stopping server failed')
        self._wait_for_state_change(['stopped'])

    if self.state == 'stopped':
        _self_destruct()
    else:
        raise Exception('unknown server state: ' + self.state)
[ "def", "stop_and_destroy", "(", "self", ",", "sync", "=", "True", ")", ":", "def", "_self_destruct", "(", ")", ":", "\"\"\"destroy the server and all storages attached to it.\"\"\"", "# try_it_n_times util is used as a convenience because", "# Servers and Storages can fluctuate between \"maintenance\" and their", "# original state due to several different reasons especially when", "# destroying infrastructure.", "# first destroy server", "try_it_n_times", "(", "operation", "=", "self", ".", "destroy", ",", "expected_error_codes", "=", "[", "'SERVER_STATE_ILLEGAL'", "]", ",", "custom_error", "=", "'destroying server failed'", ")", "# storages may be deleted instantly after server DELETE", "for", "storage", "in", "self", ".", "storage_devices", ":", "try_it_n_times", "(", "operation", "=", "storage", ".", "destroy", ",", "expected_error_codes", "=", "[", "'STORAGE_STATE_ILLEGAL'", "]", ",", "custom_error", "=", "'destroying storage failed'", ")", "if", "sync", ":", "self", ".", "populate", "(", ")", "# server is either starting or stopping (or error)", "if", "self", ".", "state", "in", "[", "'maintenance'", ",", "'error'", "]", ":", "self", ".", "_wait_for_state_change", "(", "[", "'stopped'", ",", "'started'", "]", ")", "if", "self", ".", "state", "==", "'started'", ":", "try_it_n_times", "(", "operation", "=", "self", ".", "stop", ",", "expected_error_codes", "=", "[", "'SERVER_STATE_ILLEGAL'", "]", ",", "custom_error", "=", "'stopping server failed'", ")", "self", ".", "_wait_for_state_change", "(", "[", "'stopped'", "]", ")", "if", "self", ".", "state", "==", "'stopped'", ":", "_self_destruct", "(", ")", "else", ":", "raise", "Exception", "(", "'unknown server state: '", "+", "self", ".", "state", ")" ]
Destroy a server and its storages. Stops the server before destroying.

Syncs the server state from the API; use sync=False to disable.
[ "Destroy", "a", "server", "and", "its", "storages", ".", "Stops", "the", "server", "before", "destroying", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L475-L517
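Usage sketch: one call tears down the server and every storage attached to it; pass sync=False to skip the initial populate() refresh:

    server.stop_and_destroy()              # stop (if running), then destroy server + storages
    # server.stop_and_destroy(sync=False)  # trust the locally cached state instead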
okfn/ofs
ofs/local/storedjson.py
PersistentState.revert
def revert(self):
    """Revert the state to the version stored on disc."""
    if self.filepath:
        if path.isfile(self.filepath):
            serialised_file = open(self.filepath, "r")
            try:
                self.state = json.load(serialised_file)
            except ValueError:
                print("No JSON information could be read from the persistence file - could be empty: %s" % self.filepath)
                self.state = {}
            finally:
                serialised_file.close()
        else:
            print("The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.")
    else:
        print("Filepath to the persistence file is not set. State cannot be read.")
        return False
python
def revert(self):
    """Revert the state to the version stored on disc."""
    if self.filepath:
        if path.isfile(self.filepath):
            serialised_file = open(self.filepath, "r")
            try:
                self.state = json.load(serialised_file)
            except ValueError:
                print("No JSON information could be read from the persistence file - could be empty: %s" % self.filepath)
                self.state = {}
            finally:
                serialised_file.close()
        else:
            print("The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.")
    else:
        print("Filepath to the persistence file is not set. State cannot be read.")
        return False
[ "def", "revert", "(", "self", ")", ":", "if", "self", ".", "filepath", ":", "if", "path", ".", "isfile", "(", "self", ".", "filepath", ")", ":", "serialised_file", "=", "open", "(", "self", ".", "filepath", ",", "\"r\"", ")", "try", ":", "self", ".", "state", "=", "json", ".", "load", "(", "serialised_file", ")", "except", "ValueError", ":", "print", "(", "\"No JSON information could be read from the persistence file - could be empty: %s\"", "%", "self", ".", "filepath", ")", "self", ".", "state", "=", "{", "}", "finally", ":", "serialised_file", ".", "close", "(", ")", "else", ":", "print", "(", "\"The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.\"", ")", "else", ":", "print", "(", "\"Filepath to the persistence file is not set. State cannot be read.\"", ")", "return", "False" ]
Revert the state to the version stored on disc.
[ "Revert", "the", "state", "to", "the", "version", "stored", "on", "disc", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/storedjson.py#L33-L49
okfn/ofs
ofs/local/storedjson.py
PersistentState.sync
def sync(self):
    """Synchronise and update the stored state to the in-memory state."""
    if self.filepath:
        serialised_file = open(self.filepath, "w")
        json.dump(self.state, serialised_file)
        serialised_file.close()
    else:
        print("Filepath to the persistence file is not set. State cannot be synced to disc.")
python
def sync(self):
    """Synchronise and update the stored state to the in-memory state."""
    if self.filepath:
        serialised_file = open(self.filepath, "w")
        json.dump(self.state, serialised_file)
        serialised_file.close()
    else:
        print("Filepath to the persistence file is not set. State cannot be synced to disc.")
[ "def", "sync", "(", "self", ")", ":", "if", "self", ".", "filepath", ":", "serialised_file", "=", "open", "(", "self", ".", "filepath", ",", "\"w\"", ")", "json", ".", "dump", "(", "self", ".", "state", ",", "serialised_file", ")", "serialised_file", ".", "close", "(", ")", "else", ":", "print", "(", "\"Filepath to the persistence file is not set. State cannot be synced to disc.\"", ")" ]
Synchronise and update the stored state to the in-memory state.
[ "Synchronise", "and", "update", "the", "stored", "state", "to", "the", "in", "-", "memory", "state", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/storedjson.py#L51-L58
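A round-trip sketch of the sync/revert pair; how the PersistentState instance is constructed is assumed here (see the class's __init__), so the first line is a placeholder:

    state = PersistentState(...)       # hypothetical construction
    state.state['counter'] = 1         # mutate the in-memory dict
    state.sync()                       # write it to the JSON file on disc
    state.state['counter'] = 999       # a change we then discard...
    state.revert()                     # ...by reloading the file
    assert state.state['counter'] == 1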
UpCloudLtd/upcloud-python-api
upcloud_api/upcloud_resource.py
UpCloudResource._reset
def _reset(self, **kwargs):
    """
    Reset after repopulating from API (or when initializing).
    """
    # set object attributes from params
    for key in kwargs:
        setattr(self, key, kwargs[key])

    # set defaults (if need be) where the default is not None
    for attr in self.ATTRIBUTES:
        if not hasattr(self, attr) and self.ATTRIBUTES[attr] is not None:
            setattr(self, attr, self.ATTRIBUTES[attr])
python
def _reset(self, **kwargs):
    """
    Reset after repopulating from API (or when initializing).
    """
    # set object attributes from params
    for key in kwargs:
        setattr(self, key, kwargs[key])

    # set defaults (if need be) where the default is not None
    for attr in self.ATTRIBUTES:
        if not hasattr(self, attr) and self.ATTRIBUTES[attr] is not None:
            setattr(self, attr, self.ATTRIBUTES[attr])
[ "def", "_reset", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# set object attributes from params", "for", "key", "in", "kwargs", ":", "setattr", "(", "self", ",", "key", ",", "kwargs", "[", "key", "]", ")", "# set defaults (if need be) where the default is not None", "for", "attr", "in", "self", ".", "ATTRIBUTES", ":", "if", "not", "hasattr", "(", "self", ",", "attr", ")", "and", "self", ".", "ATTRIBUTES", "[", "attr", "]", "is", "not", "None", ":", "setattr", "(", "self", ",", "attr", ",", "self", ".", "ATTRIBUTES", "[", "attr", "]", ")" ]
Reset after repopulating from API (or when initializing).
[ "Reset", "after", "repopulating", "from", "API", "(", "or", "when", "initializing", ")", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/upcloud_resource.py#L27-L38
UpCloudLtd/upcloud-python-api
upcloud_api/upcloud_resource.py
UpCloudResource.to_dict
def to_dict(self):
    """
    Return a dict that can be serialised to JSON and sent to UpCloud's API.
    """
    return dict(
        (attr, getattr(self, attr))
        for attr in self.ATTRIBUTES
        if hasattr(self, attr)
    )
python
def to_dict(self):
    """
    Return a dict that can be serialised to JSON and sent to UpCloud's API.
    """
    return dict(
        (attr, getattr(self, attr))
        for attr in self.ATTRIBUTES
        if hasattr(self, attr)
    )
[ "def", "to_dict", "(", "self", ")", ":", "return", "dict", "(", "(", "attr", ",", "getattr", "(", "self", ",", "attr", ")", ")", "for", "attr", "in", "self", ".", "ATTRIBUTES", "if", "hasattr", "(", "self", ",", "attr", ")", ")" ]
Return a dict that can be serialised to JSON and sent to UpCloud's API.
[ "Return", "a", "dict", "that", "can", "be", "serialised", "to", "JSON", "and", "sent", "to", "UpCloud", "s", "API", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/upcloud_resource.py#L47-L55
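How _reset and to_dict interact, sketched with a hypothetical subclass; this assumes UpCloudResource's constructor forwards keyword arguments to _reset, which the two records above imply but do not show:

    from upcloud_api.upcloud_resource import UpCloudResource  # path per the record above

    class Example(UpCloudResource):
        ATTRIBUTES = {'name': None, 'zone': 'fi-hel1'}   # hypothetical resource

    e = Example(name='demo')
    print(e.zone)       # 'fi-hel1', filled in from the ATTRIBUTES defaults
    print(e.to_dict())  # {'name': 'demo', 'zone': 'fi-hel1'}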
okfn/ofs
ofs/remote/botostore.py
BotoOFS._require_bucket
def _require_bucket(self, bucket_name):
    """ Also try to create the bucket. """
    if not self.exists(bucket_name) and not self.claim_bucket(bucket_name):
        raise OFSException("Invalid bucket: %s" % bucket_name)
    return self._get_bucket(bucket_name)
python
def _require_bucket(self, bucket_name):
    """ Also try to create the bucket. """
    if not self.exists(bucket_name) and not self.claim_bucket(bucket_name):
        raise OFSException("Invalid bucket: %s" % bucket_name)
    return self._get_bucket(bucket_name)
[ "def", "_require_bucket", "(", "self", ",", "bucket_name", ")", ":", "if", "not", "self", ".", "exists", "(", "bucket_name", ")", "and", "not", "self", ".", "claim_bucket", "(", "bucket_name", ")", ":", "raise", "OFSException", "(", "\"Invalid bucket: %s\"", "%", "bucket_name", ")", "return", "self", ".", "_get_bucket", "(", "bucket_name", ")" ]
Also try to create the bucket.
[ "Also", "try", "to", "create", "the", "bucket", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L42-L46
okfn/ofs
ofs/remote/botostore.py
BotoOFS.del_stream
def del_stream(self, bucket, label):
    """ Will fail if the bucket or label don't exist """
    bucket = self._require_bucket(bucket)
    key = self._require_key(bucket, label)
    key.delete()
python
def del_stream(self, bucket, label):
    """ Will fail if the bucket or label don't exist """
    bucket = self._require_bucket(bucket)
    key = self._require_key(bucket, label)
    key.delete()
[ "def", "del_stream", "(", "self", ",", "bucket", ",", "label", ")", ":", "bucket", "=", "self", ".", "_require_bucket", "(", "bucket", ")", "key", "=", "self", ".", "_require_key", "(", "bucket", ",", "label", ")", "key", ".", "delete", "(", ")" ]
Will fail if the bucket or label don't exist
[ "Will", "fail", "if", "the", "bucket", "or", "label", "don", "t", "exist" ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L119-L123
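Usage sketch for the bucket/stream helpers via the S3 backend; the key, bucket and label names are placeholders, and put_stream's exact signature should be checked against the OFS interface:

    from io import BytesIO
    from ofs.remote.botostore import S3OFS

    o = S3OFS(aws_access_key_id='...', aws_secret_access_key='...')  # placeholders
    o.put_stream('my-bucket', 'hello.txt', BytesIO(b'hello world'))  # bucket auto-claimed
    o.del_stream('my-bucket', 'hello.txt')                           # raises if either is missing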
okfn/ofs
ofs/remote/botostore.py
BotoOFS.authenticate_request
def authenticate_request(self, method, bucket='', key='', headers=None):
    '''Authenticate a HTTP request by filling in Authorization field header.

    :param method: HTTP method (e.g. GET, PUT, POST)
    :param bucket: name of the bucket.
    :param key: name of key within bucket.
    :param headers: dictionary of additional HTTP headers.

    :return: boto.connection.HTTPRequest object with Authorization header
    filled (NB: will also have a Date field if none before and a User-Agent
    field will be set to Boto).
    '''
    # following is extracted from S3Connection.make_request and the method
    # it calls: AWSAuthConnection.make_request
    path = self.conn.calling_format.build_path_base(bucket, key)
    auth_path = self.conn.calling_format.build_auth_path(bucket, key)
    http_request = boto.connection.AWSAuthConnection.build_base_http_request(
        self.conn,
        method,
        path,
        auth_path,
        {},
        headers
    )
    http_request.authorize(connection=self.conn)
    return http_request
python
def authenticate_request(self, method, bucket='', key='', headers=None):
    '''Authenticate a HTTP request by filling in Authorization field header.

    :param method: HTTP method (e.g. GET, PUT, POST)
    :param bucket: name of the bucket.
    :param key: name of key within bucket.
    :param headers: dictionary of additional HTTP headers.

    :return: boto.connection.HTTPRequest object with Authorization header
    filled (NB: will also have a Date field if none before and a User-Agent
    field will be set to Boto).
    '''
    # following is extracted from S3Connection.make_request and the method
    # it calls: AWSAuthConnection.make_request
    path = self.conn.calling_format.build_path_base(bucket, key)
    auth_path = self.conn.calling_format.build_auth_path(bucket, key)
    http_request = boto.connection.AWSAuthConnection.build_base_http_request(
        self.conn,
        method,
        path,
        auth_path,
        {},
        headers
    )
    http_request.authorize(connection=self.conn)
    return http_request
[ "def", "authenticate_request", "(", "self", ",", "method", ",", "bucket", "=", "''", ",", "key", "=", "''", ",", "headers", "=", "None", ")", ":", "# following is extracted from S3Connection.make_request and the method", "# it calls: AWSAuthConnection.make_request", "path", "=", "self", ".", "conn", ".", "calling_format", ".", "build_path_base", "(", "bucket", ",", "key", ")", "auth_path", "=", "self", ".", "conn", ".", "calling_format", ".", "build_auth_path", "(", "bucket", ",", "key", ")", "http_request", "=", "boto", ".", "connection", ".", "AWSAuthConnection", ".", "build_base_http_request", "(", "self", ".", "conn", ",", "method", ",", "path", ",", "auth_path", ",", "{", "}", ",", "headers", ")", "http_request", ".", "authorize", "(", "connection", "=", "self", ".", "conn", ")", "return", "http_request" ]
Authenticate a HTTP request by filling in Authorization field header.

:param method: HTTP method (e.g. GET, PUT, POST)
:param bucket: name of the bucket.
:param key: name of key within bucket.
:param headers: dictionary of additional HTTP headers.

:return: boto.connection.HTTPRequest object with Authorization header
filled (NB: will also have a Date field if none before and a User-Agent
field will be set to Boto).
[ "Authenticate", "a", "HTTP", "request", "by", "filling", "in", "Authorization", "field", "header", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L172-L197
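Sketch of using the pre-signed request to issue the HTTP call yourself, reusing the `o` store object from the previous sketch; the attribute name req.headers and the bucket URL form are assumptions about boto's HTTPRequest and S3 addressing, not shown in the record:

    import requests

    req = o.authenticate_request('GET', 'my-bucket', 'hello.txt')
    resp = requests.get('https://my-bucket.s3.amazonaws.com/hello.txt',
                        headers=req.headers)   # Authorization/Date filled in by boto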
ckan/deadoralive
deadoralive/deadoralive.py
get_resources_to_check
def get_resources_to_check(client_site_url, apikey):
    """Return a list of resource IDs to check for broken links.

    Calls the client site's API to get a list of resource IDs.

    :raises CouldNotGetResourceIDsError: if getting the resource IDs fails
        for any reason

    """
    url = client_site_url + u"deadoralive/get_resources_to_check"
    response = requests.get(url, headers=dict(Authorization=apikey))
    if not response.ok:
        raise CouldNotGetResourceIDsError(
            u"Couldn't get resource IDs to check: {code} {reason}".format(
                code=response.status_code, reason=response.reason))
    return response.json()
python
def get_resources_to_check(client_site_url, apikey):
    """Return a list of resource IDs to check for broken links.

    Calls the client site's API to get a list of resource IDs.

    :raises CouldNotGetResourceIDsError: if getting the resource IDs fails
        for any reason

    """
    url = client_site_url + u"deadoralive/get_resources_to_check"
    response = requests.get(url, headers=dict(Authorization=apikey))
    if not response.ok:
        raise CouldNotGetResourceIDsError(
            u"Couldn't get resource IDs to check: {code} {reason}".format(
                code=response.status_code, reason=response.reason))
    return response.json()
[ "def", "get_resources_to_check", "(", "client_site_url", ",", "apikey", ")", ":", "url", "=", "client_site_url", "+", "u\"deadoralive/get_resources_to_check\"", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "dict", "(", "Authorization", "=", "apikey", ")", ")", "if", "not", "response", ".", "ok", ":", "raise", "CouldNotGetResourceIDsError", "(", "u\"Couldn't get resource IDs to check: {code} {reason}\"", ".", "format", "(", "code", "=", "response", ".", "status_code", ",", "reason", "=", "response", ".", "reason", ")", ")", "return", "response", ".", "json", "(", ")" ]
Return a list of resource IDs to check for broken links.

Calls the client site's API to get a list of resource IDs.

:raises CouldNotGetResourceIDsError: if getting the resource IDs fails
    for any reason
[ "Return", "a", "list", "of", "resource", "IDs", "to", "check", "for", "broken", "links", "." ]
train
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L26-L41
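Calling it against a hypothetical client site; note the function concatenates paths directly, so client_site_url must end with a slash:

    ids = get_resources_to_check('http://demo.ckan.org/', apikey='XXX')  # placeholders
    print(len(ids), 'resources to check')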
ckan/deadoralive
deadoralive/deadoralive.py
get_url_for_id
def get_url_for_id(client_site_url, apikey, resource_id):
    """Return the URL for the given resource ID.

    Contacts the client site's API to get the URL for the ID and returns it.

    :raises CouldNotGetURLError: if getting the URL fails for any reason

    """
    # TODO: Handle invalid responses from the client site.
    url = client_site_url + u"deadoralive/get_url_for_resource_id"
    params = {"resource_id": resource_id}
    response = requests.get(url, headers=dict(Authorization=apikey),
                            params=params)
    if not response.ok:
        raise CouldNotGetURLError(
            u"Couldn't get URL for resource {id}: {code} {reason}".format(
                id=resource_id, code=response.status_code,
                reason=response.reason))
    return response.json()
python
def get_url_for_id(client_site_url, apikey, resource_id):
    """Return the URL for the given resource ID.

    Contacts the client site's API to get the URL for the ID and returns it.

    :raises CouldNotGetURLError: if getting the URL fails for any reason

    """
    # TODO: Handle invalid responses from the client site.
    url = client_site_url + u"deadoralive/get_url_for_resource_id"
    params = {"resource_id": resource_id}
    response = requests.get(url, headers=dict(Authorization=apikey),
                            params=params)
    if not response.ok:
        raise CouldNotGetURLError(
            u"Couldn't get URL for resource {id}: {code} {reason}".format(
                id=resource_id, code=response.status_code,
                reason=response.reason))
    return response.json()
[ "def", "get_url_for_id", "(", "client_site_url", ",", "apikey", ",", "resource_id", ")", ":", "# TODO: Handle invalid responses from the client site.", "url", "=", "client_site_url", "+", "u\"deadoralive/get_url_for_resource_id\"", "params", "=", "{", "\"resource_id\"", ":", "resource_id", "}", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "dict", "(", "Authorization", "=", "apikey", ")", ",", "params", "=", "params", ")", "if", "not", "response", ".", "ok", ":", "raise", "CouldNotGetURLError", "(", "u\"Couldn't get URL for resource {id}: {code} {reason}\"", ".", "format", "(", "id", "=", "resource_id", ",", "code", "=", "response", ".", "status_code", ",", "reason", "=", "response", ".", "reason", ")", ")", "return", "response", ".", "json", "(", ")" ]
Return the URL for the given resource ID.

Contacts the client site's API to get the URL for the ID and returns it.

:raises CouldNotGetURLError: if getting the URL fails for any reason
[ "Return", "the", "URL", "for", "the", "given", "resource", "ID", "." ]
train
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L49-L68
ckan/deadoralive
deadoralive/deadoralive.py
check_url
def check_url(url):
    """Check whether the given URL is dead or alive.

    Returns a dict with four keys:

        "url": The URL that was checked (string)
        "alive": Whether the URL was working, True or False
        "status": The HTTP status code of the response from the URL,
            e.g. 200, 401, 500 (int)
        "reason": The reason for the success or failure of the check,
            e.g. "OK", "Unauthorized", "Internal Server Error" (string)

    The "status" may be None if we did not get a valid HTTP response,
    e.g. in the event of a timeout, DNS failure or invalid HTTP response.

    The "reason" will always be a string, but may be a requests library
    exception string rather than an HTTP reason string if we did not get a
    valid HTTP response.

    """
    result = {"url": url}
    try:
        response = requests.get(url)
        result["status"] = response.status_code
        result["reason"] = response.reason
        response.raise_for_status()  # Raise if status_code is not OK.
        result["alive"] = True
    except AttributeError as err:
        if err.message == "'NoneType' object has no attribute 'encode'":
            # requests seems to throw these for some invalid URLs.
            result["alive"] = False
            result["reason"] = "Invalid URL"
            result["status"] = None
        else:
            raise
    except requests.exceptions.RequestException as err:
        result["alive"] = False
        if "reason" not in result:
            result["reason"] = str(err)
        if "status" not in result:
            # This can happen if the response is invalid HTTP, if we get a DNS
            # failure, or a timeout, etc.
            result["status"] = None

    # We should always have these four fields in the result.
    assert "url" in result
    assert result.get("alive") in (True, False)
    assert "status" in result
    assert "reason" in result

    return result
python
def check_url(url):
    """Check whether the given URL is dead or alive.

    Returns a dict with four keys:

        "url": The URL that was checked (string)
        "alive": Whether the URL was working, True or False
        "status": The HTTP status code of the response from the URL,
            e.g. 200, 401, 500 (int)
        "reason": The reason for the success or failure of the check,
            e.g. "OK", "Unauthorized", "Internal Server Error" (string)

    The "status" may be None if we did not get a valid HTTP response,
    e.g. in the event of a timeout, DNS failure or invalid HTTP response.

    The "reason" will always be a string, but may be a requests library
    exception string rather than an HTTP reason string if we did not get a
    valid HTTP response.

    """
    result = {"url": url}
    try:
        response = requests.get(url)
        result["status"] = response.status_code
        result["reason"] = response.reason
        response.raise_for_status()  # Raise if status_code is not OK.
        result["alive"] = True
    except AttributeError as err:
        if err.message == "'NoneType' object has no attribute 'encode'":
            # requests seems to throw these for some invalid URLs.
            result["alive"] = False
            result["reason"] = "Invalid URL"
            result["status"] = None
        else:
            raise
    except requests.exceptions.RequestException as err:
        result["alive"] = False
        if "reason" not in result:
            result["reason"] = str(err)
        if "status" not in result:
            # This can happen if the response is invalid HTTP, if we get a DNS
            # failure, or a timeout, etc.
            result["status"] = None

    # We should always have these four fields in the result.
    assert "url" in result
    assert result.get("alive") in (True, False)
    assert "status" in result
    assert "reason" in result

    return result
[ "def", "check_url", "(", "url", ")", ":", "result", "=", "{", "\"url\"", ":", "url", "}", "try", ":", "response", "=", "requests", ".", "get", "(", "url", ")", "result", "[", "\"status\"", "]", "=", "response", ".", "status_code", "result", "[", "\"reason\"", "]", "=", "response", ".", "reason", "response", ".", "raise_for_status", "(", ")", "# Raise if status_code is not OK.", "result", "[", "\"alive\"", "]", "=", "True", "except", "AttributeError", "as", "err", ":", "if", "err", ".", "message", "==", "\"'NoneType' object has no attribute 'encode'\"", ":", "# requests seems to throw these for some invalid URLs.", "result", "[", "\"alive\"", "]", "=", "False", "result", "[", "\"reason\"", "]", "=", "\"Invalid URL\"", "result", "[", "\"status\"", "]", "=", "None", "else", ":", "raise", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "err", ":", "result", "[", "\"alive\"", "]", "=", "False", "if", "\"reason\"", "not", "in", "result", ":", "result", "[", "\"reason\"", "]", "=", "str", "(", "err", ")", "if", "\"status\"", "not", "in", "result", ":", "# This can happen if the response is invalid HTTP, if we get a DNS", "# failure, or a timeout, etc.", "result", "[", "\"status\"", "]", "=", "None", "# We should always have these four fields in the result.", "assert", "\"url\"", "in", "result", "assert", "result", ".", "get", "(", "\"alive\"", ")", "in", "(", "True", ",", "False", ")", "assert", "\"status\"", "in", "result", "assert", "\"reason\"", "in", "result", "return", "result" ]
Check whether the given URL is dead or alive.

Returns a dict with four keys:

    "url": The URL that was checked (string)
    "alive": Whether the URL was working, True or False
    "status": The HTTP status code of the response from the URL,
        e.g. 200, 401, 500 (int)
    "reason": The reason for the success or failure of the check,
        e.g. "OK", "Unauthorized", "Internal Server Error" (string)

The "status" may be None if we did not get a valid HTTP response,
e.g. in the event of a timeout, DNS failure or invalid HTTP response.

The "reason" will always be a string, but may be a requests library
exception string rather than an HTTP reason string if we did not get a
valid HTTP response.
[ "Check", "whether", "the", "given", "URL", "is", "dead", "or", "alive", "." ]
train
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L71-L121
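A sketch chaining get_url_for_id and check_url for a single hypothetical resource; the site URL, API key and resource ID are placeholders:

    url = get_url_for_id('http://demo.ckan.org/', 'XXX', 'resource-id-123')
    result = check_url(url)
    if not result['alive']:
        print('broken:', result['status'], result['reason'])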
ckan/deadoralive
deadoralive/deadoralive.py
upsert_result
def upsert_result(client_site_url, apikey, resource_id, result):
    """Post the given link check result to the client site."""
    # TODO: Handle exceptions and unexpected results.
    url = client_site_url + u"deadoralive/upsert"
    params = result.copy()
    params["resource_id"] = resource_id
    requests.post(url, headers=dict(Authorization=apikey), params=params)
python
def upsert_result(client_site_url, apikey, resource_id, result):
    """Post the given link check result to the client site."""
    # TODO: Handle exceptions and unexpected results.
    url = client_site_url + u"deadoralive/upsert"
    params = result.copy()
    params["resource_id"] = resource_id
    requests.post(url, headers=dict(Authorization=apikey), params=params)
[ "def", "upsert_result", "(", "client_site_url", ",", "apikey", ",", "resource_id", ",", "result", ")", ":", "# TODO: Handle exceptions and unexpected results.", "url", "=", "client_site_url", "+", "u\"deadoralive/upsert\"", "params", "=", "result", ".", "copy", "(", ")", "params", "[", "\"resource_id\"", "]", "=", "resource_id", "requests", ".", "post", "(", "url", ",", "headers", "=", "dict", "(", "Authorization", "=", "apikey", ")", ",", "params", "=", "params", ")" ]
Post the given link check result to the client site.
[ "Post", "the", "given", "link", "check", "result", "to", "the", "client", "site", "." ]
train
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L124-L131
ckan/deadoralive
deadoralive/deadoralive.py
get_check_and_report
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check,
                         get_url_for_id, check_url, upsert_result):
    """Get links from the client site, check them, and post the results back.

    Get resource IDs from the client site, get the URL for each resource ID
    from the client site, check each URL, and post the results back to the
    client site.

    This function can be called repeatedly to keep on getting more links from
    the client site and checking them.

    The functions that this function calls to carry out the various tasks are
    taken as parameters to this function for testing purposes - it makes it
    easy for tests to pass in mock functions. It also decouples the code
    nicely.

    :param client_site_url: the base URL of the client site
    :type client_site_url: string

    :param apikey: the API key to use when making requests to the client site
    :type apikey: string or None

    :param get_resource_ids_to_check: The function to call to get the list of
        resource IDs to be checked from the client site. See
        get_resource_ids_to_check() above for the interface that this function
        should implement.
    :type get_resource_ids_to_check: callable

    :param get_url_for_id: The function to call to get the URL for a given
        resource ID from the client site. See get_url_for_id() above for the
        interface that this function should implement.
    :type get_url_for_id: callable

    :param check_url: The function to call to check whether a URL is dead or
        alive. See check_url() above for the interface that this function
        should implement.
    :type check_url: callable

    :param upsert_result: The function to call to post a link check result to
        the client site. See upsert_result() above for the interface that this
        function should implement.
    :type upsert_result: callable

    """
    logger = _get_logger()

    resource_ids = get_resource_ids_to_check(client_site_url, apikey)

    for resource_id in resource_ids:
        try:
            url = get_url_for_id(client_site_url, apikey, resource_id)
        except CouldNotGetURLError:
            logger.info(u"This link checker was not authorized to access "
                        "resource {0}, skipping.".format(resource_id))
            continue
        result = check_url(url)
        status = result["status"]
        reason = result["reason"]
        if result["alive"]:
            logger.info(u"Checking URL {0} of resource {1} succeeded with "
                        "status {2}:".format(url, resource_id, status))
        else:
            logger.info(u"Checking URL {0} of resource {1} failed with error "
                        "{2}:".format(url, resource_id, reason))
        upsert_result(client_site_url, apikey, resource_id=resource_id,
                      result=result)
python
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check,
                         get_url_for_id, check_url, upsert_result):
    """Get links from the client site, check them, and post the results back.

    Get resource IDs from the client site, get the URL for each resource ID
    from the client site, check each URL, and post the results back to the
    client site.

    This function can be called repeatedly to keep on getting more links from
    the client site and checking them.

    The functions that this function calls to carry out the various tasks are
    taken as parameters to this function for testing purposes - it makes it
    easy for tests to pass in mock functions. It also decouples the code
    nicely.

    :param client_site_url: the base URL of the client site
    :type client_site_url: string

    :param apikey: the API key to use when making requests to the client site
    :type apikey: string or None

    :param get_resource_ids_to_check: The function to call to get the list of
        resource IDs to be checked from the client site. See
        get_resource_ids_to_check() above for the interface that this function
        should implement.
    :type get_resource_ids_to_check: callable

    :param get_url_for_id: The function to call to get the URL for a given
        resource ID from the client site. See get_url_for_id() above for the
        interface that this function should implement.
    :type get_url_for_id: callable

    :param check_url: The function to call to check whether a URL is dead or
        alive. See check_url() above for the interface that this function
        should implement.
    :type check_url: callable

    :param upsert_result: The function to call to post a link check result to
        the client site. See upsert_result() above for the interface that this
        function should implement.
    :type upsert_result: callable

    """
    logger = _get_logger()

    resource_ids = get_resource_ids_to_check(client_site_url, apikey)

    for resource_id in resource_ids:
        try:
            url = get_url_for_id(client_site_url, apikey, resource_id)
        except CouldNotGetURLError:
            logger.info(u"This link checker was not authorized to access "
                        "resource {0}, skipping.".format(resource_id))
            continue
        result = check_url(url)
        status = result["status"]
        reason = result["reason"]
        if result["alive"]:
            logger.info(u"Checking URL {0} of resource {1} succeeded with "
                        "status {2}:".format(url, resource_id, status))
        else:
            logger.info(u"Checking URL {0} of resource {1} failed with error "
                        "{2}:".format(url, resource_id, reason))
        upsert_result(client_site_url, apikey, resource_id=resource_id,
                      result=result)
[ "def", "get_check_and_report", "(", "client_site_url", ",", "apikey", ",", "get_resource_ids_to_check", ",", "get_url_for_id", ",", "check_url", ",", "upsert_result", ")", ":", "logger", "=", "_get_logger", "(", ")", "resource_ids", "=", "get_resource_ids_to_check", "(", "client_site_url", ",", "apikey", ")", "for", "resource_id", "in", "resource_ids", ":", "try", ":", "url", "=", "get_url_for_id", "(", "client_site_url", ",", "apikey", ",", "resource_id", ")", "except", "CouldNotGetURLError", ":", "logger", ".", "info", "(", "u\"This link checker was not authorized to access \"", "\"resource {0}, skipping.\"", ".", "format", "(", "resource_id", ")", ")", "continue", "result", "=", "check_url", "(", "url", ")", "status", "=", "result", "[", "\"status\"", "]", "reason", "=", "result", "[", "\"reason\"", "]", "if", "result", "[", "\"alive\"", "]", ":", "logger", ".", "info", "(", "u\"Checking URL {0} of resource {1} succeeded with \"", "\"status {2}:\"", ".", "format", "(", "url", ",", "resource_id", ",", "status", ")", ")", "else", ":", "logger", ".", "info", "(", "u\"Checking URL {0} of resource {1} failed with error \"", "\"{2}:\"", ".", "format", "(", "url", ",", "resource_id", ",", "reason", ")", ")", "upsert_result", "(", "client_site_url", ",", "apikey", ",", "resource_id", "=", "resource_id", ",", "result", "=", "result", ")" ]
Get links from the client site, check them, and post the results back.

Get resource IDs from the client site, get the URL for each resource ID
from the client site, check each URL, and post the results back to the
client site.

This function can be called repeatedly to keep on getting more links from
the client site and checking them.

The functions that this function calls to carry out the various tasks are
taken as parameters to this function for testing purposes - it makes it
easy for tests to pass in mock functions. It also decouples the code
nicely.

:param client_site_url: the base URL of the client site
:type client_site_url: string

:param apikey: the API key to use when making requests to the client site
:type apikey: string or None

:param get_resource_ids_to_check: The function to call to get the list of
    resource IDs to be checked from the client site. See
    get_resource_ids_to_check() above for the interface that this function
    should implement.
:type get_resource_ids_to_check: callable

:param get_url_for_id: The function to call to get the URL for a given
    resource ID from the client site. See get_url_for_id() above for the
    interface that this function should implement.
:type get_url_for_id: callable

:param check_url: The function to call to check whether a URL is dead or
    alive. See check_url() above for the interface that this function
    should implement.
:type check_url: callable

:param upsert_result: The function to call to post a link check result to
    the client site. See upsert_result() above for the interface that this
    function should implement.
:type upsert_result: callable
[ "Get", "links", "from", "the", "client", "site", "check", "them", "and", "post", "the", "results", "back", "." ]
train
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L148-L210
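Wiring the loop up with the module's own functions (note the parameter is named get_resource_ids_to_check while the module-level function is get_resources_to_check); the site URL and key are placeholders:

    get_check_and_report(
        client_site_url='http://demo.ckan.org/', apikey='XXX',
        get_resource_ids_to_check=get_resources_to_check,
        get_url_for_id=get_url_for_id,
        check_url=check_url,
        upsert_result=upsert_result,
    )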
okfn/ofs
ofs/local/zipfile.py
ZipExtFile.peek
def peek(self, n=1):
    """Returns buffered bytes without advancing the position."""
    if n > len(self._readbuffer) - self._offset:
        chunk = self.read(n)
        self._offset -= len(chunk)

    # Return up to 512 bytes to reduce allocation overhead for tight loops.
    return self._readbuffer[self._offset: self._offset + 512]
python
def peek(self, n=1):
    """Returns buffered bytes without advancing the position."""
    if n > len(self._readbuffer) - self._offset:
        chunk = self.read(n)
        self._offset -= len(chunk)

    # Return up to 512 bytes to reduce allocation overhead for tight loops.
    return self._readbuffer[self._offset: self._offset + 512]
[ "def", "peek", "(", "self", ",", "n", "=", "1", ")", ":", "if", "n", ">", "len", "(", "self", ".", "_readbuffer", ")", "-", "self", ".", "_offset", ":", "chunk", "=", "self", ".", "read", "(", "n", ")", "self", ".", "_offset", "-=", "len", "(", "chunk", ")", "# Return up to 512 bytes to reduce allocation overhead for tight loops.", "return", "self", ".", "_readbuffer", "[", "self", ".", "_offset", ":", "self", ".", "_offset", "+", "512", "]" ]
Returns buffered bytes without advancing the position.
[ "Returns", "buffered", "bytes", "without", "advancing", "the", "position", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L551-L558
okfn/ofs
ofs/local/zipfile.py
ZipExtFile.read
def read(self, n=-1):
    """Read and return up to n bytes. If the argument is omitted, None, or
    negative, data is read and returned until EOF is reached.
    """
    buf = b''
    while n < 0 or n is None or n > len(buf):
        data = self.read1(n)
        if len(data) == 0:
            return buf

        buf += data

    return buf
python
def read(self, n=-1):
    """Read and return up to n bytes. If the argument is omitted, None, or
    negative, data is read and returned until EOF is reached.
    """
    buf = b''
    while n < 0 or n is None or n > len(buf):
        data = self.read1(n)
        if len(data) == 0:
            return buf

        buf += data

    return buf
[ "def", "read", "(", "self", ",", "n", "=", "-", "1", ")", ":", "buf", "=", "b''", "while", "n", "<", "0", "or", "n", "is", "None", "or", "n", ">", "len", "(", "buf", ")", ":", "data", "=", "self", ".", "read1", "(", "n", ")", "if", "len", "(", "data", ")", "==", "0", ":", "return", "buf", "buf", "+=", "data", "return", "buf" ]
Read and return up to n bytes. If the argument is omitted, None, or
negative, data is read and returned until EOF is reached.
[ "Read", "and", "return", "up", "to", "n", "bytes", ".", "If", "the", "argument", "is", "omitted", "None", "or", "negative", "data", "is", "read", "and", "returned", "until", "EOF", "is", "reached", ".." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L563-L576
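The buffered-read pair in action; the archive and member names are placeholders, and this assumes the vendored ZipFile/ZipExtFile behave like the stdlib originals they are based on:

    zf = ZipFile('archive.zip')        # placeholder archive
    f = zf.open('member.txt')          # placeholder member
    head = f.peek(4)                   # at least 4 bytes, position unchanged
    data = f.read()                    # drains to EOF
    assert data.startswith(head)       # peeked bytes are read next
    zf.close()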
okfn/ofs
ofs/local/zipfile.py
ZipFile._RealGetContents
def _RealGetContents(self):
    """Read in the table of contents for the ZIP file."""
    fp = self.fp
    endrec = _EndRecData(fp)
    if not endrec:
        raise BadZipfile("File is not a zip file")
    if self.debug > 1:
        print(endrec)
    size_cd = endrec[_ECD_SIZE]             # bytes in central directory
    offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
    self.comment = endrec[_ECD_COMMENT]     # archive comment

    # "concat" is zero, unless zip was concatenated to another file
    concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
    if endrec[_ECD_SIGNATURE] == stringEndArchive64:
        # If Zip64 extension structures are present, account for them
        concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

    if self.debug > 2:
        inferred = concat + offset_cd
        print("given, inferred, offset", offset_cd, inferred, concat)
    # self.start_dir: Position of start of central directory
    self.start_dir = offset_cd + concat
    fp.seek(self.start_dir, 0)
    data = fp.read(size_cd)
    fp = cStringIO.StringIO(data)
    total = 0
    while total < size_cd:
        centdir = fp.read(sizeCentralDir)
        if centdir[0:4] != stringCentralDir:
            raise BadZipfile("Bad magic number for central directory")
        centdir = struct.unpack(structCentralDir, centdir)
        if self.debug > 2:
            print(centdir)
        filename = fp.read(centdir[_CD_FILENAME_LENGTH])
        # Create ZipInfo instance to store file information
        x = ZipInfo(filename)
        x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
        x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
        x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
        (x.create_version, x.create_system, x.extract_version, x.reserved,
         x.flag_bits, x.compress_type, t, d,
         x.CRC, x.compress_size, x.file_size) = centdir[1:12]
        x.volume, x.internal_attr, x.external_attr = centdir[15:18]
        # Convert date/time code to (year, month, day, hour, min, sec)
        x._raw_time = t
        x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                        t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

        x._decodeExtra()
        x.header_offset = x.header_offset + concat
        x.filename = x._decodeFilename()
        self.filelist.append(x)
        self.NameToInfo[x.filename] = x

        # update total bytes read from central directory
        total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                 + centdir[_CD_EXTRA_FIELD_LENGTH]
                 + centdir[_CD_COMMENT_LENGTH])

        if self.debug > 2:
            print("total", total)
python
def _RealGetContents(self):
    """Read in the table of contents for the ZIP file."""
    fp = self.fp
    endrec = _EndRecData(fp)
    if not endrec:
        raise BadZipfile("File is not a zip file")
    if self.debug > 1:
        print(endrec)
    size_cd = endrec[_ECD_SIZE]             # bytes in central directory
    offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
    self.comment = endrec[_ECD_COMMENT]     # archive comment

    # "concat" is zero, unless zip was concatenated to another file
    concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
    if endrec[_ECD_SIGNATURE] == stringEndArchive64:
        # If Zip64 extension structures are present, account for them
        concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

    if self.debug > 2:
        inferred = concat + offset_cd
        print("given, inferred, offset", offset_cd, inferred, concat)
    # self.start_dir: Position of start of central directory
    self.start_dir = offset_cd + concat
    fp.seek(self.start_dir, 0)
    data = fp.read(size_cd)
    fp = cStringIO.StringIO(data)
    total = 0
    while total < size_cd:
        centdir = fp.read(sizeCentralDir)
        if centdir[0:4] != stringCentralDir:
            raise BadZipfile("Bad magic number for central directory")
        centdir = struct.unpack(structCentralDir, centdir)
        if self.debug > 2:
            print(centdir)
        filename = fp.read(centdir[_CD_FILENAME_LENGTH])
        # Create ZipInfo instance to store file information
        x = ZipInfo(filename)
        x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
        x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
        x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
        (x.create_version, x.create_system, x.extract_version, x.reserved,
         x.flag_bits, x.compress_type, t, d,
         x.CRC, x.compress_size, x.file_size) = centdir[1:12]
        x.volume, x.internal_attr, x.external_attr = centdir[15:18]
        # Convert date/time code to (year, month, day, hour, min, sec)
        x._raw_time = t
        x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                        t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

        x._decodeExtra()
        x.header_offset = x.header_offset + concat
        x.filename = x._decodeFilename()
        self.filelist.append(x)
        self.NameToInfo[x.filename] = x

        # update total bytes read from central directory
        total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                 + centdir[_CD_EXTRA_FIELD_LENGTH]
                 + centdir[_CD_COMMENT_LENGTH])

        if self.debug > 2:
            print("total", total)
[ "def", "_RealGetContents", "(", "self", ")", ":", "fp", "=", "self", ".", "fp", "endrec", "=", "_EndRecData", "(", "fp", ")", "if", "not", "endrec", ":", "raise", "BadZipfile", "(", "\"File is not a zip file\"", ")", "if", "self", ".", "debug", ">", "1", ":", "print", "(", "endrec", ")", "size_cd", "=", "endrec", "[", "_ECD_SIZE", "]", "# bytes in central directory", "offset_cd", "=", "endrec", "[", "_ECD_OFFSET", "]", "# offset of central directory", "self", ".", "comment", "=", "endrec", "[", "_ECD_COMMENT", "]", "# archive comment", "# \"concat\" is zero, unless zip was concatenated to another file", "concat", "=", "endrec", "[", "_ECD_LOCATION", "]", "-", "size_cd", "-", "offset_cd", "if", "endrec", "[", "_ECD_SIGNATURE", "]", "==", "stringEndArchive64", ":", "# If Zip64 extension structures are present, account for them", "concat", "-=", "(", "sizeEndCentDir64", "+", "sizeEndCentDir64Locator", ")", "if", "self", ".", "debug", ">", "2", ":", "inferred", "=", "concat", "+", "offset_cd", "print", "(", "\"given, inferred, offset\"", ",", "offset_cd", ",", "inferred", ",", "concat", ")", "# self.start_dir: Position of start of central directory", "self", ".", "start_dir", "=", "offset_cd", "+", "concat", "fp", ".", "seek", "(", "self", ".", "start_dir", ",", "0", ")", "data", "=", "fp", ".", "read", "(", "size_cd", ")", "fp", "=", "cStringIO", ".", "StringIO", "(", "data", ")", "total", "=", "0", "while", "total", "<", "size_cd", ":", "centdir", "=", "fp", ".", "read", "(", "sizeCentralDir", ")", "if", "centdir", "[", "0", ":", "4", "]", "!=", "stringCentralDir", ":", "raise", "BadZipfile", "(", "\"Bad magic number for central directory\"", ")", "centdir", "=", "struct", ".", "unpack", "(", "structCentralDir", ",", "centdir", ")", "if", "self", ".", "debug", ">", "2", ":", "print", "(", "centdir", ")", "filename", "=", "fp", ".", "read", "(", "centdir", "[", "_CD_FILENAME_LENGTH", "]", ")", "# Create ZipInfo instance to store file information", "x", "=", "ZipInfo", "(", "filename", ")", "x", ".", "extra", "=", "fp", ".", "read", "(", "centdir", "[", "_CD_EXTRA_FIELD_LENGTH", "]", ")", "x", ".", "comment", "=", "fp", ".", "read", "(", "centdir", "[", "_CD_COMMENT_LENGTH", "]", ")", "x", ".", "header_offset", "=", "centdir", "[", "_CD_LOCAL_HEADER_OFFSET", "]", "(", "x", ".", "create_version", ",", "x", ".", "create_system", ",", "x", ".", "extract_version", ",", "x", ".", "reserved", ",", "x", ".", "flag_bits", ",", "x", ".", "compress_type", ",", "t", ",", "d", ",", "x", ".", "CRC", ",", "x", ".", "compress_size", ",", "x", ".", "file_size", ")", "=", "centdir", "[", "1", ":", "12", "]", "x", ".", "volume", ",", "x", ".", "internal_attr", ",", "x", ".", "external_attr", "=", "centdir", "[", "15", ":", "18", "]", "# Convert date/time code to (year, month, day, hour, min, sec)", "x", ".", "_raw_time", "=", "t", "x", ".", "date_time", "=", "(", "(", "d", ">>", "9", ")", "+", "1980", ",", "(", "d", ">>", "5", ")", "&", "0xF", ",", "d", "&", "0x1F", ",", "t", ">>", "11", ",", "(", "t", ">>", "5", ")", "&", "0x3F", ",", "(", "t", "&", "0x1F", ")", "*", "2", ")", "x", ".", "_decodeExtra", "(", ")", "x", ".", "header_offset", "=", "x", ".", "header_offset", "+", "concat", "x", ".", "filename", "=", "x", ".", "_decodeFilename", "(", ")", "self", ".", "filelist", ".", "append", "(", "x", ")", "self", ".", "NameToInfo", "[", "x", ".", "filename", "]", "=", "x", "# update total bytes read from central directory", "total", "=", "(", "total", "+", "sizeCentralDir", "+", "centdir", "[", 
"_CD_FILENAME_LENGTH", "]", "+", "centdir", "[", "_CD_EXTRA_FIELD_LENGTH", "]", "+", "centdir", "[", "_CD_COMMENT_LENGTH", "]", ")", "if", "self", ".", "debug", ">", "2", ":", "print", "(", "\"total\"", ",", "total", ")" ]
Read in the table of contents for the ZIP file.
[ "Read", "in", "the", "table", "of", "contents", "for", "the", "ZIP", "file", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L724-L785
okfn/ofs
ofs/local/zipfile.py
ZipFile.open
def open(self, name, mode="r", pwd=None):
    """Return file-like object for 'name'."""
    if mode not in ("r", "U", "rU"):
        raise RuntimeError('open() requires mode "r", "U", or "rU"')
    if not self.fp:
        raise RuntimeError(
            "Attempt to read ZIP archive that was already closed"
        )

    # Only open a new file for instances where we were not
    # given a file object in the constructor
    if self._filePassed:
        zef_file = self.fp
    else:
        zef_file = open(self.filename, 'rb')

    # Make sure we have an info object
    if isinstance(name, ZipInfo):
        # 'name' is already an info object
        zinfo = name
    else:
        # Get info object for name
        zinfo = self.getinfo(name)

    zef_file.seek(zinfo.header_offset, 0)

    # Skip the file header:
    fheader = zef_file.read(sizeFileHeader)
    if fheader[0:4] != stringFileHeader:
        raise BadZipfile("Bad magic number for file header")

    fheader = struct.unpack(structFileHeader, fheader)
    fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
    if fheader[_FH_EXTRA_FIELD_LENGTH]:
        zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

    if fname != zinfo.orig_filename.encode('utf-8'):
        raise BadZipfile(
            'File name in directory "%s" and header "%s" differ.' % (
                zinfo.orig_filename, fname)
        )

    # check for encrypted flag & handle password
    is_encrypted = zinfo.flag_bits & 0x1
    zd = None
    if is_encrypted:
        if not pwd:
            pwd = self.pwd
        if not pwd:
            raise RuntimeError("File %s is encrypted, " \
                               "password required for extraction" % name)

        zd = _ZipDecrypter(pwd)
        # The first 12 bytes in the cypher stream is an encryption header
        # used to strengthen the algorithm. The first 11 bytes are
        # completely random, while the 12th contains the MSB of the CRC,
        # or the MSB of the file time depending on the header type
        # and is used to check the correctness of the password.
        bytes = zef_file.read(12)
        h = map(zd, bytes[0:12])
        if zinfo.flag_bits & 0x8:
            # compare against the file type from extended local headers
            check_byte = (zinfo._raw_time >> 8) & 0xff
        else:
            # compare against the CRC otherwise
            check_byte = (zinfo.CRC >> 24) & 0xff
        if ord(h[11]) != check_byte:
            raise RuntimeError("Bad password for file", name)

    return ZipExtFile(zef_file, mode, zinfo, zd)
python
def open(self, name, mode="r", pwd=None):
    """Return file-like object for 'name'."""
    if mode not in ("r", "U", "rU"):
        raise RuntimeError('open() requires mode "r", "U", or "rU"')
    if not self.fp:
        raise RuntimeError(
            "Attempt to read ZIP archive that was already closed"
        )

    # Only open a new file for instances where we were not
    # given a file object in the constructor
    if self._filePassed:
        zef_file = self.fp
    else:
        zef_file = open(self.filename, 'rb')

    # Make sure we have an info object
    if isinstance(name, ZipInfo):
        # 'name' is already an info object
        zinfo = name
    else:
        # Get info object for name
        zinfo = self.getinfo(name)

    zef_file.seek(zinfo.header_offset, 0)

    # Skip the file header:
    fheader = zef_file.read(sizeFileHeader)
    if fheader[0:4] != stringFileHeader:
        raise BadZipfile("Bad magic number for file header")

    fheader = struct.unpack(structFileHeader, fheader)
    fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
    if fheader[_FH_EXTRA_FIELD_LENGTH]:
        zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

    if fname != zinfo.orig_filename.encode('utf-8'):
        raise BadZipfile(
            'File name in directory "%s" and header "%s" differ.' % (
                zinfo.orig_filename, fname)
        )

    # check for encrypted flag & handle password
    is_encrypted = zinfo.flag_bits & 0x1
    zd = None
    if is_encrypted:
        if not pwd:
            pwd = self.pwd
        if not pwd:
            raise RuntimeError("File %s is encrypted, " \
                               "password required for extraction" % name)

        zd = _ZipDecrypter(pwd)
        # The first 12 bytes in the cypher stream is an encryption header
        # used to strengthen the algorithm. The first 11 bytes are
        # completely random, while the 12th contains the MSB of the CRC,
        # or the MSB of the file time depending on the header type
        # and is used to check the correctness of the password.
        bytes = zef_file.read(12)
        h = map(zd, bytes[0:12])
        if zinfo.flag_bits & 0x8:
            # compare against the file type from extended local headers
            check_byte = (zinfo._raw_time >> 8) & 0xff
        else:
            # compare against the CRC otherwise
            check_byte = (zinfo.CRC >> 24) & 0xff
        if ord(h[11]) != check_byte:
            raise RuntimeError("Bad password for file", name)

    return ZipExtFile(zef_file, mode, zinfo, zd)
[ "def", "open", "(", "self", ",", "name", ",", "mode", "=", "\"r\"", ",", "pwd", "=", "None", ")", ":", "if", "mode", "not", "in", "(", "\"r\"", ",", "\"U\"", ",", "\"rU\"", ")", ":", "raise", "RuntimeError", "(", "'open() requires mode \"r\", \"U\", or \"rU\"'", ")", "if", "not", "self", ".", "fp", ":", "raise", "RuntimeError", "(", "\"Attempt to read ZIP archive that was already closed\"", ")", "# Only open a new file for instances where we were not", "# given a file object in the constructor", "if", "self", ".", "_filePassed", ":", "zef_file", "=", "self", ".", "fp", "else", ":", "zef_file", "=", "open", "(", "self", ".", "filename", ",", "'rb'", ")", "# Make sure we have an info object", "if", "isinstance", "(", "name", ",", "ZipInfo", ")", ":", "# 'name' is already an info object", "zinfo", "=", "name", "else", ":", "# Get info object for name", "zinfo", "=", "self", ".", "getinfo", "(", "name", ")", "zef_file", ".", "seek", "(", "zinfo", ".", "header_offset", ",", "0", ")", "# Skip the file header:", "fheader", "=", "zef_file", ".", "read", "(", "sizeFileHeader", ")", "if", "fheader", "[", "0", ":", "4", "]", "!=", "stringFileHeader", ":", "raise", "BadZipfile", "(", "\"Bad magic number for file header\"", ")", "fheader", "=", "struct", ".", "unpack", "(", "structFileHeader", ",", "fheader", ")", "fname", "=", "zef_file", ".", "read", "(", "fheader", "[", "_FH_FILENAME_LENGTH", "]", ")", "if", "fheader", "[", "_FH_EXTRA_FIELD_LENGTH", "]", ":", "zef_file", ".", "read", "(", "fheader", "[", "_FH_EXTRA_FIELD_LENGTH", "]", ")", "if", "fname", "!=", "zinfo", ".", "orig_filename", ".", "encode", "(", "'utf-8'", ")", ":", "raise", "BadZipfile", "(", "'File name in directory \"%s\" and header \"%s\" differ.'", "%", "(", "zinfo", ".", "orig_filename", ",", "fname", ")", ")", "# check for encrypted flag & handle password", "is_encrypted", "=", "zinfo", ".", "flag_bits", "&", "0x1", "zd", "=", "None", "if", "is_encrypted", ":", "if", "not", "pwd", ":", "pwd", "=", "self", ".", "pwd", "if", "not", "pwd", ":", "raise", "RuntimeError", "(", "\"File %s is encrypted, \"", "\"password required for extraction\"", "%", "name", ")", "zd", "=", "_ZipDecrypter", "(", "pwd", ")", "# The first 12 bytes in the cypher stream is an encryption header", "# used to strengthen the algorithm. The first 11 bytes are", "# completely random, while the 12th contains the MSB of the CRC,", "# or the MSB of the file time depending on the header type", "# and is used to check the correctness of the password.", "bytes", "=", "zef_file", ".", "read", "(", "12", ")", "h", "=", "map", "(", "zd", ",", "bytes", "[", "0", ":", "12", "]", ")", "if", "zinfo", ".", "flag_bits", "&", "0x8", ":", "# compare against the file type from extended local headers", "check_byte", "=", "(", "zinfo", ".", "_raw_time", ">>", "8", ")", "&", "0xff", "else", ":", "# compare against the CRC otherwise", "check_byte", "=", "(", "zinfo", ".", "CRC", ">>", "24", ")", "&", "0xff", "if", "ord", "(", "h", "[", "11", "]", ")", "!=", "check_byte", ":", "raise", "RuntimeError", "(", "\"Bad password for file\"", ",", "name", ")", "return", "ZipExtFile", "(", "zef_file", ",", "mode", ",", "zinfo", ",", "zd", ")" ]
Return file-like object for 'name'.
[ "Return", "file", "-", "like", "object", "for", "name", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L837-L906
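A minimal usage sketch for the `ZipFile.open` record above, assuming the vendored module imports as `ofs.local.zipfile` and a Python 2 interpreter (the code relies on `map` returning a list and on `xrange`); the archive name, member name, and password are illustrative, not part of the record:

# Hedged sketch: reading an encrypted member via the vendored zipfile.
from ofs.local import zipfile

zf = zipfile.ZipFile('archive.zip', 'r')                 # archive name assumed
member = zf.open('secret.txt', mode='r', pwd='s3cret')   # member/password assumed
print(member.read())                                     # decrypts via _ZipDecrypter
zf.close()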
okfn/ofs
ofs/local/zipfile.py
ZipFile.remove
def remove(self, member):
        """Remove a member from the archive."""

        # Make sure we have an info object
        if isinstance(member, ZipInfo):
            # 'member' is already an info object
            zinfo = member
        else:
            # Get info object for name
            zinfo = self.getinfo(member)

        # compute the location of the file data in the local file header,
        # by adding the lengths of the records before it
        zlen = len(zinfo.FileHeader()) + zinfo.compress_size
        fileidx = self.filelist.index(zinfo)
        fileofs = sum(
            [len(self.filelist[f].FileHeader()) + self.filelist[f].compress_size
             for f in xrange(0, fileidx)]
            )

        self.fp.seek(fileofs + zlen)
        after = self.fp.read()
        self.fp.seek(fileofs)
        self.fp.write(after)
        self.fp.seek(-zlen, 2)
        self.fp.truncate()
        self._didModify = True
        self.filelist.remove(zinfo)
        del self.NameToInfo[member]
python
def remove(self, member):
        """Remove a member from the archive."""

        # Make sure we have an info object
        if isinstance(member, ZipInfo):
            # 'member' is already an info object
            zinfo = member
        else:
            # Get info object for name
            zinfo = self.getinfo(member)

        # compute the location of the file data in the local file header,
        # by adding the lengths of the records before it
        zlen = len(zinfo.FileHeader()) + zinfo.compress_size
        fileidx = self.filelist.index(zinfo)
        fileofs = sum(
            [len(self.filelist[f].FileHeader()) + self.filelist[f].compress_size
             for f in xrange(0, fileidx)]
            )

        self.fp.seek(fileofs + zlen)
        after = self.fp.read()
        self.fp.seek(fileofs)
        self.fp.write(after)
        self.fp.seek(-zlen, 2)
        self.fp.truncate()
        self._didModify = True
        self.filelist.remove(zinfo)
        del self.NameToInfo[member]
[ "def", "remove", "(", "self", ",", "member", ")", ":", "# Make sure we have an info object", "if", "isinstance", "(", "member", ",", "ZipInfo", ")", ":", "# 'member' is already an info object", "zinfo", "=", "member", "else", ":", "# Get info object for name", "zinfo", "=", "self", ".", "getinfo", "(", "member", ")", "# compute the location of the file data in the local file header,", "# by adding the lengths of the records before it", "zlen", "=", "len", "(", "zinfo", ".", "FileHeader", "(", ")", ")", "+", "zinfo", ".", "compress_size", "fileidx", "=", "self", ".", "filelist", ".", "index", "(", "zinfo", ")", "fileofs", "=", "sum", "(", "[", "len", "(", "self", ".", "filelist", "[", "f", "]", ".", "FileHeader", "(", ")", ")", "+", "self", ".", "filelist", "[", "f", "]", ".", "compress_size", "for", "f", "in", "xrange", "(", "0", ",", "fileidx", ")", "]", ")", "self", ".", "fp", ".", "seek", "(", "fileofs", "+", "zlen", ")", "after", "=", "self", ".", "fp", ".", "read", "(", ")", "self", ".", "fp", ".", "seek", "(", "fileofs", ")", "self", ".", "fp", ".", "write", "(", "after", ")", "self", ".", "fp", ".", "seek", "(", "-", "zlen", ",", "2", ")", "self", ".", "fp", ".", "truncate", "(", ")", "self", ".", "_didModify", "=", "True", "self", ".", "filelist", ".", "remove", "(", "zinfo", ")", "del", "self", ".", "NameToInfo", "[", "member", "]" ]
Remove a member from the archive.
[ "Remove", "a", "member", "from", "the", "archive", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L1118-L1146
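`ZipFile.remove` works by byte-shifting: it reads everything after the member's local header plus compressed data, writes it back over the member, truncates, and drops the ZipInfo entry. A hedged usage sketch; the file names are assumptions:

# Sketch only: the archive must be opened with a writable file object.
from ofs.local import zipfile

zf = zipfile.ZipFile('archive.zip', 'a')  # 'a' keeps self.fp read/write
zf.remove('old_entry.txt')                # shifts trailing bytes, truncates
zf.close()                                # rewrites the central directory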
okfn/ofs
ofs/local/zipfile.py
PyZipFile._get_codename
def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        if os.path.isfile(file_pyo) and \
           os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print("Compiling", file_py)
            try:
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError as err:
                print(err.msg)
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
python
def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        if os.path.isfile(file_pyo) and \
           os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print("Compiling", file_py)
            try:
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError as err:
                print(err.msg)
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
[ "def", "_get_codename", "(", "self", ",", "pathname", ",", "basename", ")", ":", "file_py", "=", "pathname", "+", "\".py\"", "file_pyc", "=", "pathname", "+", "\".pyc\"", "file_pyo", "=", "pathname", "+", "\".pyo\"", "if", "os", ".", "path", ".", "isfile", "(", "file_pyo", ")", "and", "os", ".", "stat", "(", "file_pyo", ")", ".", "st_mtime", ">=", "os", ".", "stat", "(", "file_py", ")", ".", "st_mtime", ":", "fname", "=", "file_pyo", "# Use .pyo file", "elif", "not", "os", ".", "path", ".", "isfile", "(", "file_pyc", ")", "or", "os", ".", "stat", "(", "file_pyc", ")", ".", "st_mtime", "<", "os", ".", "stat", "(", "file_py", ")", ".", "st_mtime", ":", "import", "py_compile", "if", "self", ".", "debug", ":", "print", "(", "\"Compiling\"", ",", "file_py", ")", "try", ":", "py_compile", ".", "compile", "(", "file_py", ",", "file_pyc", ",", "None", ",", "True", ")", "except", "py_compile", ".", "PyCompileError", "as", "err", ":", "print", "(", "err", ".", "msg", ")", "fname", "=", "file_pyc", "else", ":", "fname", "=", "file_pyc", "archivename", "=", "os", ".", "path", ".", "split", "(", "fname", ")", "[", "1", "]", "if", "basename", ":", "archivename", "=", "\"%s/%s\"", "%", "(", "basename", ",", "archivename", ")", "return", "(", "fname", ",", "archivename", ")" ]
Return (filename, archivename) for the path. Given a module name path, return the correct file path and archive name, compiling if necessary. For example, given /python/lib/string, return (/python/lib/string.pyc, string).
[ "Return", "(", "filename", "archivename", ")", "for", "the", "path", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L1334-L1362
tanwanirahul/django-batch-requests
batch_requests/settings.py
import_class
def import_class(class_path):
    '''
        Imports the class for the given class name.
    '''
    module_name, class_name = class_path.rsplit(".", 1)
    module = import_module(module_name)
    claz = getattr(module, class_name)
    return claz
python
def import_class(class_path):
    '''
        Imports the class for the given class name.
    '''
    module_name, class_name = class_path.rsplit(".", 1)
    module = import_module(module_name)
    claz = getattr(module, class_name)
    return claz
[ "def", "import_class", "(", "class_path", ")", ":", "module_name", ",", "class_name", "=", "class_path", ".", "rsplit", "(", "\".\"", ",", "1", ")", "module", "=", "import_module", "(", "module_name", ")", "claz", "=", "getattr", "(", "module", ",", "class_name", ")", "return", "claz" ]
Imports the class for the given class name.
[ "Imports", "the", "class", "for", "the", "given", "class", "name", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/settings.py#L27-L34
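Since `import_class` only splits on the last dot and delegates to `importlib.import_module`, any importable dotted path resolves the same way; a sketch using a standard-library class so nothing project-specific is assumed:

# Sketch: dynamic lookup with import_class (stdlib example).
from batch_requests.settings import import_class

OrderedDict = import_class("collections.OrderedDict")
print(OrderedDict([("a", 1)]))  # -> OrderedDict([('a', 1)])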
tanwanirahul/django-batch-requests
batch_requests/settings.py
BatchRequestSettings._executor
def _executor(self):
        '''
            Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
        '''
        if self.EXECUTE_PARALLEL is False:
            executor_path = "batch_requests.concurrent.executor.SequentialExecutor"
            executor_class = import_class(executor_path)
            return executor_class()
        else:
            executor_path = self.CONCURRENT_EXECUTOR
            executor_class = import_class(executor_path)
            return executor_class(self.NUM_WORKERS)
python
def _executor(self):
        '''
            Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
        '''
        if self.EXECUTE_PARALLEL is False:
            executor_path = "batch_requests.concurrent.executor.SequentialExecutor"
            executor_class = import_class(executor_path)
            return executor_class()
        else:
            executor_path = self.CONCURRENT_EXECUTOR
            executor_class = import_class(executor_path)
            return executor_class(self.NUM_WORKERS)
[ "def", "_executor", "(", "self", ")", ":", "if", "self", ".", "EXECUTE_PARALLEL", "is", "False", ":", "executor_path", "=", "\"batch_requests.concurrent.executor.SequentialExecutor\"", "executor_class", "=", "import_class", "(", "executor_path", ")", "return", "executor_class", "(", ")", "else", ":", "executor_path", "=", "self", ".", "CONCURRENT_EXECUTOR", "executor_class", "=", "import_class", "(", "executor_path", ")", "return", "executor_class", "(", "self", ".", "NUM_WORKERS", ")" ]
Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
[ "Creating", "an", "ExecutorPool", "is", "a", "costly", "operation", ".", "Executor", "needs", "to", "be", "instantiated", "only", "once", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/settings.py#L48-L59
okfn/ofs
ofs/command.py
OFS.make_label
def make_label(self, path):
        """ this borrows too much from the internals of ofs
        maybe expose different parts of the api?
        """
        from datetime import datetime
        from StringIO import StringIO
        path = path.lstrip("/")
        bucket, label = path.split("/", 1)
        bucket = self.ofs._require_bucket(bucket)
        key = self.ofs._get_key(bucket, label)
        if key is None:
            key = bucket.new_key(label)
            self.ofs._update_key_metadata(key, {
                '_creation_time': str(datetime.utcnow())
            })
            key.set_contents_from_file(StringIO(''))
        key.close()
python
def make_label(self, path):
        """ this borrows too much from the internals of ofs
        maybe expose different parts of the api?
        """
        from datetime import datetime
        from StringIO import StringIO
        path = path.lstrip("/")
        bucket, label = path.split("/", 1)
        bucket = self.ofs._require_bucket(bucket)
        key = self.ofs._get_key(bucket, label)
        if key is None:
            key = bucket.new_key(label)
            self.ofs._update_key_metadata(key, {
                '_creation_time': str(datetime.utcnow())
            })
            key.set_contents_from_file(StringIO(''))
        key.close()
[ "def", "make_label", "(", "self", ",", "path", ")", ":", "from", "datetime", "import", "datetime", "from", "StringIO", "import", "StringIO", "path", "=", "path", ".", "lstrip", "(", "\"/\"", ")", "bucket", ",", "label", "=", "path", ".", "split", "(", "\"/\"", ",", "1", ")", "bucket", "=", "self", ".", "ofs", ".", "_require_bucket", "(", "bucket", ")", "key", "=", "self", ".", "ofs", ".", "_get_key", "(", "bucket", ",", "label", ")", "if", "key", "is", "None", ":", "key", "=", "bucket", ".", "new_key", "(", "label", ")", "self", ".", "ofs", ".", "_update_key_metadata", "(", "key", ",", "{", "'_creation_time'", ":", "str", "(", "datetime", ".", "utcnow", "(", ")", ")", "}", ")", "key", ".", "set_contents_from_file", "(", "StringIO", "(", "''", ")", ")", "key", ".", "close", "(", ")" ]
this borrows too much from the internals of ofs maybe expose different parts of the api?
[ "this", "borrows", "too", "much", "from", "the", "internals", "of", "ofs", "maybe", "expose", "different", "parts", "of", "the", "api?" ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L57-L73
okfn/ofs
ofs/command.py
OFS.get_proxy_config
def get_proxy_config(self, headers, path):
        """
        stub. this really needs to be a call to the remote
        restful interface to get the appropriate host and headers
        to use for this upload
        """
        self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
        from pprint import pprint
        pprint(headers)
        host = self.ofs.conn.server_name()
        return host, headers
python
def get_proxy_config(self, headers, path):
        """
        stub. this really needs to be a call to the remote
        restful interface to get the appropriate host and headers
        to use for this upload
        """
        self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
        from pprint import pprint
        pprint(headers)
        host = self.ofs.conn.server_name()
        return host, headers
[ "def", "get_proxy_config", "(", "self", ",", "headers", ",", "path", ")", ":", "self", ".", "ofs", ".", "conn", ".", "add_aws_auth_header", "(", "headers", ",", "'PUT'", ",", "path", ")", "from", "pprint", "import", "pprint", "pprint", "(", "headers", ")", "host", "=", "self", ".", "ofs", ".", "conn", ".", "server_name", "(", ")", "return", "host", ",", "headers" ]
stub. this really needs to be a call to the remote restful interface to get the appropriate host and headers to use for this upload
[ "stub", ".", "this", "really", "needs", "to", "be", "a", "call", "to", "the", "remote", "restful", "interface", "to", "get", "the", "appropriate", "host", "and", "headers", "to", "use", "for", "this", "upload" ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L75-L85
okfn/ofs
ofs/command.py
OFS.proxy_upload
def proxy_upload(self, path, filename,
                     content_type=None, content_encoding=None,
                     cb=None, num_cb=None):
        """
        This is the main function that uploads. We assume the bucket
        and key (== path) exists. What we do here is simple.
        Calculate the headers we will need, (e.g. md5, content-type,
        etc). Then we ask the self.get_proxy_config method to fill in
        the authentication information and tell us which remote host
        we should talk to for the upload. From there, the rest is
        ripped from boto.key.Key.send_file
        """
        from boto.connection import AWSAuthConnection
        import mimetypes
        from hashlib import md5
        import base64

        BufferSize = 65536 ## set to something very small to make sure
                           ## chunking is working properly
        fp = open(filename)

        headers = { 'Content-Type': content_type }
        if content_type is None:
            content_type = mimetypes.guess_type(filename)[0] or "text/plain"
        headers['Content-Type'] = content_type
        if content_encoding is not None:
            headers['Content-Encoding'] = content_encoding

        m = md5()
        fp.seek(0)
        s = fp.read(BufferSize)
        while s:
            m.update(s)
            s = fp.read(BufferSize)
        self.size = fp.tell()
        fp.seek(0)

        self.md5 = m.hexdigest()
        headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
        headers['Content-Length'] = str(self.size)

        headers['Expect'] = '100-Continue'

        host, headers = self.get_proxy_config(headers, path)

        ### how to do this same thing with curl instead...
        print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (
            filename,
            " -H ".join("'%s: %s'" % (k,v) for k,v in headers.items()),
            host, path
            ))

        def sender(http_conn, method, path, data, headers):
            http_conn.putrequest(method, path)
            for key in headers:
                http_conn.putheader(key, headers[key])
            http_conn.endheaders()
            fp.seek(0)
            http_conn.set_debuglevel(0) ### XXX set to e.g. 4 to see what going on
            if cb:
                if num_cb > 2:
                    cb_count = self.size / BufferSize / (num_cb-2)
                elif num_cb < 0:
                    cb_count = -1
                else:
                    cb_count = 0
                i = total_bytes = 0
                cb(total_bytes, self.size)
            l = fp.read(BufferSize)
            while len(l) > 0:
                http_conn.send(l)
                if cb:
                    total_bytes += len(l)
                    i += 1
                    if i == cb_count or cb_count == -1:
                        cb(total_bytes, self.size)
                        i = 0
                l = fp.read(BufferSize)
            if cb:
                cb(total_bytes, self.size)
            response = http_conn.getresponse()
            body = response.read()
            fp.seek(0)
            if response.status == 500 or response.status == 503 or \
                    response.getheader('location'):
                # we'll try again
                return response
            elif response.status >= 200 and response.status <= 299:
                self.etag = response.getheader('etag')
                if self.etag != '"%s"' % self.md5:
                    raise Exception('ETag from S3 did not match computed MD5')
                return response
            else:
                #raise provider.storage_response_error(
                #    response.status, response.reason, body)
                raise Exception(response.status, response.reason, body)

        awsc = AWSAuthConnection(host,
                                 aws_access_key_id="key_id",
                                 aws_secret_access_key="secret")
        awsc._mexe('PUT', path, None, headers, sender=sender)
python
def proxy_upload(self, path, filename,
                     content_type=None, content_encoding=None,
                     cb=None, num_cb=None):
        """
        This is the main function that uploads. We assume the bucket
        and key (== path) exists. What we do here is simple.
        Calculate the headers we will need, (e.g. md5, content-type,
        etc). Then we ask the self.get_proxy_config method to fill in
        the authentication information and tell us which remote host
        we should talk to for the upload. From there, the rest is
        ripped from boto.key.Key.send_file
        """
        from boto.connection import AWSAuthConnection
        import mimetypes
        from hashlib import md5
        import base64

        BufferSize = 65536 ## set to something very small to make sure
                           ## chunking is working properly
        fp = open(filename)

        headers = { 'Content-Type': content_type }
        if content_type is None:
            content_type = mimetypes.guess_type(filename)[0] or "text/plain"
        headers['Content-Type'] = content_type
        if content_encoding is not None:
            headers['Content-Encoding'] = content_encoding

        m = md5()
        fp.seek(0)
        s = fp.read(BufferSize)
        while s:
            m.update(s)
            s = fp.read(BufferSize)
        self.size = fp.tell()
        fp.seek(0)

        self.md5 = m.hexdigest()
        headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
        headers['Content-Length'] = str(self.size)

        headers['Expect'] = '100-Continue'

        host, headers = self.get_proxy_config(headers, path)

        ### how to do this same thing with curl instead...
        print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (
            filename,
            " -H ".join("'%s: %s'" % (k,v) for k,v in headers.items()),
            host, path
            ))

        def sender(http_conn, method, path, data, headers):
            http_conn.putrequest(method, path)
            for key in headers:
                http_conn.putheader(key, headers[key])
            http_conn.endheaders()
            fp.seek(0)
            http_conn.set_debuglevel(0) ### XXX set to e.g. 4 to see what going on
            if cb:
                if num_cb > 2:
                    cb_count = self.size / BufferSize / (num_cb-2)
                elif num_cb < 0:
                    cb_count = -1
                else:
                    cb_count = 0
                i = total_bytes = 0
                cb(total_bytes, self.size)
            l = fp.read(BufferSize)
            while len(l) > 0:
                http_conn.send(l)
                if cb:
                    total_bytes += len(l)
                    i += 1
                    if i == cb_count or cb_count == -1:
                        cb(total_bytes, self.size)
                        i = 0
                l = fp.read(BufferSize)
            if cb:
                cb(total_bytes, self.size)
            response = http_conn.getresponse()
            body = response.read()
            fp.seek(0)
            if response.status == 500 or response.status == 503 or \
                    response.getheader('location'):
                # we'll try again
                return response
            elif response.status >= 200 and response.status <= 299:
                self.etag = response.getheader('etag')
                if self.etag != '"%s"' % self.md5:
                    raise Exception('ETag from S3 did not match computed MD5')
                return response
            else:
                #raise provider.storage_response_error(
                #    response.status, response.reason, body)
                raise Exception(response.status, response.reason, body)

        awsc = AWSAuthConnection(host,
                                 aws_access_key_id="key_id",
                                 aws_secret_access_key="secret")
        awsc._mexe('PUT', path, None, headers, sender=sender)
[ "def", "proxy_upload", "(", "self", ",", "path", ",", "filename", ",", "content_type", "=", "None", ",", "content_encoding", "=", "None", ",", "cb", "=", "None", ",", "num_cb", "=", "None", ")", ":", "from", "boto", ".", "connection", "import", "AWSAuthConnection", "import", "mimetypes", "from", "hashlib", "import", "md5", "import", "base64", "BufferSize", "=", "65536", "## set to something very small to make sure", "## chunking is working properly", "fp", "=", "open", "(", "filename", ")", "headers", "=", "{", "'Content-Type'", ":", "content_type", "}", "if", "content_type", "is", "None", ":", "content_type", "=", "mimetypes", ".", "guess_type", "(", "filename", ")", "[", "0", "]", "or", "\"text/plain\"", "headers", "[", "'Content-Type'", "]", "=", "content_type", "if", "content_encoding", "is", "not", "None", ":", "headers", "[", "'Content-Encoding'", "]", "=", "content_encoding", "m", "=", "md5", "(", ")", "fp", ".", "seek", "(", "0", ")", "s", "=", "fp", ".", "read", "(", "BufferSize", ")", "while", "s", ":", "m", ".", "update", "(", "s", ")", "s", "=", "fp", ".", "read", "(", "BufferSize", ")", "self", ".", "size", "=", "fp", ".", "tell", "(", ")", "fp", ".", "seek", "(", "0", ")", "self", ".", "md5", "=", "m", ".", "hexdigest", "(", ")", "headers", "[", "'Content-MD5'", "]", "=", "base64", ".", "encodestring", "(", "m", ".", "digest", "(", ")", ")", ".", "rstrip", "(", "'\\n'", ")", "headers", "[", "'Content-Length'", "]", "=", "str", "(", "self", ".", "size", ")", "headers", "[", "'Expect'", "]", "=", "'100-Continue'", "host", ",", "headers", "=", "self", ".", "get_proxy_config", "(", "headers", ",", "path", ")", "### how to do this same thing with curl instead...", "print", "(", "\"curl -i --trace-ascii foo.log -T %s -H %s https://%s%s\"", "%", "(", "filename", ",", "\" -H \"", ".", "join", "(", "\"'%s: %s'\"", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "headers", ".", "items", "(", ")", ")", ",", "host", ",", "path", ")", ")", "def", "sender", "(", "http_conn", ",", "method", ",", "path", ",", "data", ",", "headers", ")", ":", "http_conn", ".", "putrequest", "(", "method", ",", "path", ")", "for", "key", "in", "headers", ":", "http_conn", ".", "putheader", "(", "key", ",", "headers", "[", "key", "]", ")", "http_conn", ".", "endheaders", "(", ")", "fp", ".", "seek", "(", "0", ")", "http_conn", ".", "set_debuglevel", "(", "0", ")", "### XXX set to e.g. 
4 to see what going on", "if", "cb", ":", "if", "num_cb", ">", "2", ":", "cb_count", "=", "self", ".", "size", "/", "BufferSize", "/", "(", "num_cb", "-", "2", ")", "elif", "num_cb", "<", "0", ":", "cb_count", "=", "-", "1", "else", ":", "cb_count", "=", "0", "i", "=", "total_bytes", "=", "0", "cb", "(", "total_bytes", ",", "self", ".", "size", ")", "l", "=", "fp", ".", "read", "(", "BufferSize", ")", "while", "len", "(", "l", ")", ">", "0", ":", "http_conn", ".", "send", "(", "l", ")", "if", "cb", ":", "total_bytes", "+=", "len", "(", "l", ")", "i", "+=", "1", "if", "i", "==", "cb_count", "or", "cb_count", "==", "-", "1", ":", "cb", "(", "total_bytes", ",", "self", ".", "size", ")", "i", "=", "0", "l", "=", "fp", ".", "read", "(", "BufferSize", ")", "if", "cb", ":", "cb", "(", "total_bytes", ",", "self", ".", "size", ")", "response", "=", "http_conn", ".", "getresponse", "(", ")", "body", "=", "response", ".", "read", "(", ")", "fp", ".", "seek", "(", "0", ")", "if", "response", ".", "status", "==", "500", "or", "response", ".", "status", "==", "503", "or", "response", ".", "getheader", "(", "'location'", ")", ":", "# we'll try again", "return", "response", "elif", "response", ".", "status", ">=", "200", "and", "response", ".", "status", "<=", "299", ":", "self", ".", "etag", "=", "response", ".", "getheader", "(", "'etag'", ")", "if", "self", ".", "etag", "!=", "'\"%s\"'", "%", "self", ".", "md5", ":", "raise", "Exception", "(", "'ETag from S3 did not match computed MD5'", ")", "return", "response", "else", ":", "#raise provider.storage_response_error(", "# response.status, response.reason, body)", "raise", "Exception", "(", "response", ".", "status", ",", "response", ".", "reason", ",", "body", ")", "awsc", "=", "AWSAuthConnection", "(", "host", ",", "aws_access_key_id", "=", "\"key_id\"", ",", "aws_secret_access_key", "=", "\"secret\"", ")", "awsc", ".", "_mexe", "(", "'PUT'", ",", "path", ",", "None", ",", "headers", ",", "sender", "=", "sender", ")" ]
This is the main function that uploads. We assume the bucket and key (== path) exists. What we do here is simple. Calculate the headers we will need, (e.g. md5, content-type, etc). Then we ask the self.get_proxy_config method to fill in the authentication information and tell us which remote host we should talk to for the upload. From there, the rest is ripped from boto.key.Key.send_file
[ "This", "is", "the", "main", "function", "that", "uploads", ".", "We", "assume", "the", "bucket", "and", "key", "(", "==", "path", ")", "exists", ".", "What", "we", "do", "here", "is", "simple", ".", "Calculate", "the", "headers", "we", "will", "need", "(", "e", ".", "g", ".", "md5", "content", "-", "type", "etc", ")", ".", "Then", "we", "ask", "the", "self", ".", "get_proxy_config", "method", "to", "fill", "in", "the", "authentication", "information", "and", "tell", "us", "which", "remote", "host", "we", "should", "talk", "to", "for", "the", "upload", ".", "From", "there", "the", "rest", "is", "ripped", "from", "boto", ".", "key", ".", "Key", ".", "send_file" ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L87-L187
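The `cb`/`num_cb` pair in `proxy_upload` follows boto's progress-callback convention: `cb(bytes_sent, total_bytes)` fires roughly `num_cb` times during the streamed PUT. A hedged caller sketch; `cmd`, the bucket path, and the local file name are assumptions, not part of the record:

# Sketch: a boto-style progress callback for proxy_upload.
def progress(bytes_sent, total_bytes):
    print("%d / %d bytes sent" % (bytes_sent, total_bytes))

cmd.proxy_upload('/mybucket/mylabel', 'local.bin',   # path/file assumed
                 content_type='application/octet-stream',
                 cb=progress, num_cb=10)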
KKBOX/OpenAPI-Python
kkbox_developer_sdk/mood_station_fetcher.py
KKBOXMoodStationFetcher.fetch_all_mood_stations
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches all mood stations.

        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
        '''
        url = 'https://api.kkbox.com/v1.1/mood-stations'
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
python
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches all mood stations.

        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
        '''
        url = 'https://api.kkbox.com/v1.1/mood-stations'
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
[ "def", "fetch_all_mood_stations", "(", "self", ",", "terr", "=", "KKBOXTerritory", ".", "TAIWAN", ")", ":", "url", "=", "'https://api.kkbox.com/v1.1/mood-stations'", "url", "+=", "'?'", "+", "url_parse", ".", "urlencode", "(", "{", "'territory'", ":", "terr", "}", ")", "return", "self", ".", "http", ".", "_post_data", "(", "url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")" ]
Fetches all mood stations. :param terr: the current territory. :return: API response. :rtype: dict See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
[ "Fetches", "all", "mood", "stations", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/mood_station_fetcher.py#L13-L25
KKBOX/OpenAPI-Python
kkbox_developer_sdk/mood_station_fetcher.py
KKBOXMoodStationFetcher.fetch_mood_station
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches a mood station by given ID.

        :param station_id: the station ID
        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
        '''
        url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
python
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches a mood station by given ID.

        :param station_id: the station ID
        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
        '''
        url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
[ "def", "fetch_mood_station", "(", "self", ",", "station_id", ",", "terr", "=", "KKBOXTerritory", ".", "TAIWAN", ")", ":", "url", "=", "'https://api.kkbox.com/v1.1/mood-stations/%s'", "%", "station_id", "url", "+=", "'?'", "+", "url_parse", ".", "urlencode", "(", "{", "'territory'", ":", "terr", "}", ")", "return", "self", ".", "http", ".", "_post_data", "(", "url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")" ]
Fetches a mood station by given ID. :param station_id: the station ID :param terr: the current territory. :return: API response. :rtype: dict See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
[ "Fetches", "a", "mood", "station", "by", "given", "ID", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/mood_station_fetcher.py#L28-L41
KKBOX/OpenAPI-Python
kkbox_developer_sdk/fetcher.py
Fetcher.fetch_next_page
def fetch_next_page(self, data):
        '''
        Fetches next page based on previously fetched data.
        Will get the next page url from data['paging']['next'].

        :param data: previously fetched API response.
        :type data: dict
        :return: API response.
        :rtype: dict
        '''
        next_url = data['paging']['next']
        if next_url != None:
            next_data = self.http._post_data(next_url, None, self.http._headers_with_access_token())
            return next_data
        else:
            return None
python
def fetch_next_page(self, data):
        '''
        Fetches next page based on previously fetched data.
        Will get the next page url from data['paging']['next'].

        :param data: previously fetched API response.
        :type data: dict
        :return: API response.
        :rtype: dict
        '''
        next_url = data['paging']['next']
        if next_url != None:
            next_data = self.http._post_data(next_url, None, self.http._headers_with_access_token())
            return next_data
        else:
            return None
[ "def", "fetch_next_page", "(", "self", ",", "data", ")", ":", "next_url", "=", "data", "[", "'paging'", "]", "[", "'next'", "]", "if", "next_url", "!=", "None", ":", "next_data", "=", "self", ".", "http", ".", "_post_data", "(", "next_url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")", "return", "next_data", "else", ":", "return", "None" ]
Fetches next page based on previously fetched data. Will get the next page url from data['paging']['next']. :param data: previously fetched API response. :type data: dict :return: API response. :rtype: dict
[ "Fetches", "next", "page", "based", "on", "previously", "fetched", "data", ".", "Will", "get", "the", "next", "page", "url", "from", "data", "[", "paging", "]", "[", "next", "]", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/fetcher.py#L29-L44
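Because `fetch_next_page` returns `None` once `data['paging']['next']` is exhausted, all pages can be drained with one loop. A hedged sketch; the fetcher construction and the `'data'` key are assumptions based on the usual KKBOX Open API response shape:

# Sketch: draining paginated KKBOX results via fetch_next_page.
page = fetcher.fetch_all_mood_stations()     # fetcher instance assumed
while page is not None:
    for station in page.get('data', []):     # 'data' key is an assumption
        print(station)
    page = fetcher.fetch_next_page(page)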
KKBOX/OpenAPI-Python
kkbox_developer_sdk/fetcher.py
Fetcher.fetch_data
def fetch_data(self, url):
        '''
        Fetches data from specific url.

        :return: The response.
        :rtype: dict
        '''
        return self.http._post_data(url, None, self.http._headers_with_access_token())
python
def fetch_data(self, url):
        '''
        Fetches data from specific url.

        :return: The response.
        :rtype: dict
        '''
        return self.http._post_data(url, None, self.http._headers_with_access_token())
[ "def", "fetch_data", "(", "self", ",", "url", ")", ":", "return", "self", ".", "http", ".", "_post_data", "(", "url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")" ]
Fetches data from specific url. :return: The response. :rtype: dict
[ "Fetches", "data", "from", "specific", "url", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/fetcher.py#L46-L53
KKBOX/OpenAPI-Python
kkbox_developer_sdk/shared_playlist_fetcher.py
KKBOXSharedPlaylistFetcher.fetch_shared_playlist
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches a shared playlist by given ID.

        :param playlist_id: the playlist ID.
        :type playlist_id: str
        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
        '''
        url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
python
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
        '''
        Fetches a shared playlist by given ID.

        :param playlist_id: the playlist ID.
        :type playlist_id: str
        :param terr: the current territory.
        :return: API response.
        :rtype: dict

        See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
        '''
        url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
        url += '?' + url_parse.urlencode({'territory': terr})
        return self.http._post_data(url, None, self.http._headers_with_access_token())
[ "def", "fetch_shared_playlist", "(", "self", ",", "playlist_id", ",", "terr", "=", "KKBOXTerritory", ".", "TAIWAN", ")", ":", "url", "=", "'https://api.kkbox.com/v1.1/shared-playlists/%s'", "%", "playlist_id", "url", "+=", "'?'", "+", "url_parse", ".", "urlencode", "(", "{", "'territory'", ":", "terr", "}", ")", "return", "self", ".", "http", ".", "_post_data", "(", "url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")" ]
Fetches a shared playlist by given ID. :param playlist_id: the playlist ID. :type playlist_id: str :param terr: the current territory. :return: API response. :rtype: dict See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
[ "Fetches", "a", "shared", "playlist", "by", "given", "ID", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/shared_playlist_fetcher.py#L13-L27
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/firewall_mixin.py
FirewallManager.get_firewall_rule
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
        """
        Return a FirewallRule object based on server uuid and rule position.
        """
        url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
        res = self.get_request(url)
        return FirewallRule(**res['firewall_rule'])
python
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
        """
        Return a FirewallRule object based on server uuid and rule position.
        """
        url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
        res = self.get_request(url)
        return FirewallRule(**res['firewall_rule'])
[ "def", "get_firewall_rule", "(", "self", ",", "server_uuid", ",", "firewall_rule_position", ",", "server_instance", "=", "None", ")", ":", "url", "=", "'/server/{0}/firewall_rule/{1}'", ".", "format", "(", "server_uuid", ",", "firewall_rule_position", ")", "res", "=", "self", ".", "get_request", "(", "url", ")", "return", "FirewallRule", "(", "*", "*", "res", "[", "'firewall_rule'", "]", ")" ]
Return a FirewallRule object based on server uuid and rule position.
[ "Return", "a", "FirewallRule", "object", "based", "on", "server", "uuid", "and", "rule", "position", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L20-L26
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/firewall_mixin.py
FirewallManager.get_firewall_rules
def get_firewall_rules(self, server):
        """
        Return all FirewallRule objects based on a server instance or uuid.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        url = '/server/{0}/firewall_rule'.format(server_uuid)
        res = self.get_request(url)

        return [
            FirewallRule(server=server_instance, **firewall_rule)
            for firewall_rule in res['firewall_rules']['firewall_rule']
        ]
python
def get_firewall_rules(self, server):
        """
        Return all FirewallRule objects based on a server instance or uuid.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        url = '/server/{0}/firewall_rule'.format(server_uuid)
        res = self.get_request(url)

        return [
            FirewallRule(server=server_instance, **firewall_rule)
            for firewall_rule in res['firewall_rules']['firewall_rule']
        ]
[ "def", "get_firewall_rules", "(", "self", ",", "server", ")", ":", "server_uuid", ",", "server_instance", "=", "uuid_and_instance", "(", "server", ")", "url", "=", "'/server/{0}/firewall_rule'", ".", "format", "(", "server_uuid", ")", "res", "=", "self", ".", "get_request", "(", "url", ")", "return", "[", "FirewallRule", "(", "server", "=", "server_instance", ",", "*", "*", "firewall_rule", ")", "for", "firewall_rule", "in", "res", "[", "'firewall_rules'", "]", "[", "'firewall_rule'", "]", "]" ]
Return all FirewallRule objects based on a server instance or uuid.
[ "Return", "all", "FirewallRule", "objects", "based", "on", "a", "server", "instance", "or", "uuid", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L28-L40
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/firewall_mixin.py
FirewallManager.create_firewall_rule
def create_firewall_rule(self, server, firewall_rule_body):
        """
        Create a new firewall rule for a given server uuid.
        The rule can be given as a dict or with FirewallRule.prepare_post_body().

        Returns a FirewallRule object.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        url = '/server/{0}/firewall_rule'.format(server_uuid)
        body = {'firewall_rule': firewall_rule_body}
        res = self.post_request(url, body)

        return FirewallRule(server=server_instance, **res['firewall_rule'])
python
def create_firewall_rule(self, server, firewall_rule_body):
        """
        Create a new firewall rule for a given server uuid.
        The rule can be given as a dict or with FirewallRule.prepare_post_body().

        Returns a FirewallRule object.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        url = '/server/{0}/firewall_rule'.format(server_uuid)
        body = {'firewall_rule': firewall_rule_body}
        res = self.post_request(url, body)

        return FirewallRule(server=server_instance, **res['firewall_rule'])
[ "def", "create_firewall_rule", "(", "self", ",", "server", ",", "firewall_rule_body", ")", ":", "server_uuid", ",", "server_instance", "=", "uuid_and_instance", "(", "server", ")", "url", "=", "'/server/{0}/firewall_rule'", ".", "format", "(", "server_uuid", ")", "body", "=", "{", "'firewall_rule'", ":", "firewall_rule_body", "}", "res", "=", "self", ".", "post_request", "(", "url", ",", "body", ")", "return", "FirewallRule", "(", "server", "=", "server_instance", ",", "*", "*", "res", "[", "'firewall_rule'", "]", ")" ]
Create a new firewall rule for a given server uuid. The rule can be given as a dict or with FirewallRule.prepare_post_body(). Returns a FirewallRule object.
[ "Create", "a", "new", "firewall", "rule", "for", "a", "given", "server", "uuid", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L42-L55
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/firewall_mixin.py
FirewallManager.delete_firewall_rule
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
        """
        Delete a firewall rule based on a server uuid and rule position.
        """
        url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
        return self.request('DELETE', url)
python
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
        """
        Delete a firewall rule based on a server uuid and rule position.
        """
        url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
        return self.request('DELETE', url)
[ "def", "delete_firewall_rule", "(", "self", ",", "server_uuid", ",", "firewall_rule_position", ")", ":", "url", "=", "'/server/{0}/firewall_rule/{1}'", ".", "format", "(", "server_uuid", ",", "firewall_rule_position", ")", "return", "self", ".", "request", "(", "'DELETE'", ",", "url", ")" ]
Delete a firewall rule based on a server uuid and rule position.
[ "Delete", "a", "firewall", "rule", "based", "on", "a", "server", "uuid", "and", "rule", "position", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L57-L62
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/firewall_mixin.py
FirewallManager.configure_firewall
def configure_firewall(self, server, firewall_rule_bodies):
        """
        Helper for calling create_firewall_rule in series
        for a list of firewall_rule_bodies.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        return [
            self.create_firewall_rule(server_uuid, rule)
            for rule in firewall_rule_bodies
        ]
python
def configure_firewall(self, server, firewall_rule_bodies):
        """
        Helper for calling create_firewall_rule in series
        for a list of firewall_rule_bodies.
        """
        server_uuid, server_instance = uuid_and_instance(server)

        return [
            self.create_firewall_rule(server_uuid, rule)
            for rule in firewall_rule_bodies
        ]
[ "def", "configure_firewall", "(", "self", ",", "server", ",", "firewall_rule_bodies", ")", ":", "server_uuid", ",", "server_instance", "=", "uuid_and_instance", "(", "server", ")", "return", "[", "self", ".", "create_firewall_rule", "(", "server_uuid", ",", "rule", ")", "for", "rule", "in", "firewall_rule_bodies", "]" ]
Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies.
[ "Helper", "for", "calling", "create_firewall_rule", "in", "series", "for", "a", "list", "of", "firewall_rule_bodies", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L64-L73
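`configure_firewall` maps `create_firewall_rule` over a list of rule bodies, so each body must already match UpCloud's firewall_rule schema. A hedged sketch; the field values are illustrative and `manager`/`server_uuid` are assumptions, not part of the record:

# Sketch: one inbound SSH-allow rule body for configure_firewall.
rules = [
    {
        'position': '1',
        'direction': 'in',
        'family': 'IPv4',
        'protocol': 'tcp',
        'destination_port_start': '22',
        'destination_port_end': '22',
        'action': 'accept',
    },
]
manager.configure_firewall(server_uuid, rules)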
csirtgadgets/csirtgsdk-py
csirtgsdk/sinkhole.py
Sinkhole.post
def post(self, data):
        """
        POSTs a raw SMTP message to the Sinkhole API

        :param data: raw content to be submitted [STRING]
        :return: { list of predictions }
        """
        uri = '{}/sinkhole'.format(self.client.remote)

        self.logger.debug(uri)

        if PYVERSION == 2:
            try:
                data = data.decode('utf-8')
            except Exception:
                data = data.decode('latin-1')

        data = {
            'message': data
        }

        body = self.client.post(uri, data)
        return body
python
def post(self, data):
        """
        POSTs a raw SMTP message to the Sinkhole API

        :param data: raw content to be submitted [STRING]
        :return: { list of predictions }
        """
        uri = '{}/sinkhole'.format(self.client.remote)

        self.logger.debug(uri)

        if PYVERSION == 2:
            try:
                data = data.decode('utf-8')
            except Exception:
                data = data.decode('latin-1')

        data = {
            'message': data
        }

        body = self.client.post(uri, data)
        return body
[ "def", "post", "(", "self", ",", "data", ")", ":", "uri", "=", "'{}/sinkhole'", ".", "format", "(", "self", ".", "client", ".", "remote", ")", "self", ".", "logger", ".", "debug", "(", "uri", ")", "if", "PYVERSION", "==", "2", ":", "try", ":", "data", "=", "data", ".", "decode", "(", "'utf-8'", ")", "except", "Exception", ":", "data", "=", "data", ".", "decode", "(", "'latin-1'", ")", "data", "=", "{", "'message'", ":", "data", "}", "body", "=", "self", ".", "client", ".", "post", "(", "uri", ",", "data", ")", "return", "body" ]
POSTs a raw SMTP message to the Sinkhole API :param data: raw content to be submitted [STRING] :return: { list of predictions }
[ "POSTs", "a", "raw", "SMTP", "message", "to", "the", "Sinkhole", "API" ]
train
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/sinkhole.py#L20-L41
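A hedged sketch of driving `Sinkhole.post`; the client construction follows the usual csirtgsdk pattern, but the import path, token, and message file are placeholders rather than values taken from this record:

# Sketch: submitting a raw SMTP message for prediction.
from csirtgsdk.client import Client
from csirtgsdk.sinkhole import Sinkhole

client = Client(token='<your token>')      # placeholder token
raw = open('message.eml', 'rb').read()     # file name assumed
print(Sinkhole(client).post(raw))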
tanwanirahul/django-batch-requests
batch_requests/utils.py
pre_process_method_headers
def pre_process_method_headers(method, headers):
    '''
        Returns the lowered method. Capitalize headers, prepend HTTP_ and change - to _.
    '''
    method = method.lower()

    # Standard WSGI supported headers
    _wsgi_headers = ["content_length", "content_type", "query_string", "remote_addr",
                     "remote_host", "remote_user", "request_method", "server_name",
                     "server_port"]

    _transformed_headers = {}

    # For every header, replace - to _, prepend http_ if necessary and convert
    # to upper case.
    for header, value in headers.items():

        header = header.replace("-", "_")
        header = "http_{header}".format(
            header=header) if header.lower() not in _wsgi_headers else header
        _transformed_headers.update({header.upper(): value})

    return method, _transformed_headers
python
def pre_process_method_headers(method, headers):
    '''
        Returns the lowered method. Capitalize headers, prepend HTTP_ and change - to _.
    '''
    method = method.lower()

    # Standard WSGI supported headers
    _wsgi_headers = ["content_length", "content_type", "query_string", "remote_addr",
                     "remote_host", "remote_user", "request_method", "server_name",
                     "server_port"]

    _transformed_headers = {}

    # For every header, replace - to _, prepend http_ if necessary and convert
    # to upper case.
    for header, value in headers.items():

        header = header.replace("-", "_")
        header = "http_{header}".format(
            header=header) if header.lower() not in _wsgi_headers else header
        _transformed_headers.update({header.upper(): value})

    return method, _transformed_headers
[ "def", "pre_process_method_headers", "(", "method", ",", "headers", ")", ":", "method", "=", "method", ".", "lower", "(", ")", "# Standard WSGI supported headers", "_wsgi_headers", "=", "[", "\"content_length\"", ",", "\"content_type\"", ",", "\"query_string\"", ",", "\"remote_addr\"", ",", "\"remote_host\"", ",", "\"remote_user\"", ",", "\"request_method\"", ",", "\"server_name\"", ",", "\"server_port\"", "]", "_transformed_headers", "=", "{", "}", "# For every header, replace - to _, prepend http_ if necessary and convert", "# to upper case.", "for", "header", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "header", "=", "header", ".", "replace", "(", "\"-\"", ",", "\"_\"", ")", "header", "=", "\"http_{header}\"", ".", "format", "(", "header", "=", "header", ")", "if", "header", ".", "lower", "(", ")", "not", "in", "_wsgi_headers", "else", "header", "_transformed_headers", ".", "update", "(", "{", "header", ".", "upper", "(", ")", ":", "value", "}", ")", "return", "method", ",", "_transformed_headers" ]
Returns the lowered method. Capitalize headers, prepend HTTP_ and change - to _.
[ "Returns", "the", "lowered", "method", ".", "Capitalize", "headers", "prepend", "HTTP_", "and", "change", "-", "to", "_", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L47-L70
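To make the transform concrete: non-WSGI headers get dashes swapped for underscores, an `http_` prefix, and upper-casing, while the standard WSGI keys are only upper-cased. A sketch derived directly from the function body above:

# Sketch: expected input/output of pre_process_method_headers.
method, hdrs = pre_process_method_headers(
    "GET", {"X-Api-Key": "k", "Content-Type": "application/json"})
# method == "get"
# hdrs   == {"HTTP_X_API_KEY": "k", "CONTENT_TYPE": "application/json"}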
tanwanirahul/django-batch-requests
batch_requests/utils.py
headers_to_include_from_request
def headers_to_include_from_request(curr_request):
    '''
        Define headers that needs to be included from the current request.
    '''
    return {
        h: v for h, v in curr_request.META.items() if h in _settings.HEADERS_TO_INCLUDE}
python
def headers_to_include_from_request(curr_request):
    '''
        Define headers that needs to be included from the current request.
    '''
    return {
        h: v for h, v in curr_request.META.items() if h in _settings.HEADERS_TO_INCLUDE}
[ "def", "headers_to_include_from_request", "(", "curr_request", ")", ":", "return", "{", "h", ":", "v", "for", "h", ",", "v", "in", "curr_request", ".", "META", ".", "items", "(", ")", "if", "h", "in", "_settings", ".", "HEADERS_TO_INCLUDE", "}" ]
Define headers that needs to be included from the current request.
[ "Define", "headers", "that", "needs", "to", "be", "included", "from", "the", "current", "request", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L73-L78
tanwanirahul/django-batch-requests
batch_requests/utils.py
get_wsgi_request_object
def get_wsgi_request_object(curr_request, method, url, headers, body):
    '''
        Based on the given request parameters, constructs and returns the WSGI request object.
    '''
    x_headers = headers_to_include_from_request(curr_request)
    method, t_headers = pre_process_method_headers(method, headers)

    # Add default content type.
    if "CONTENT_TYPE" not in t_headers:
        t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})

    # Override existing batch requests headers with the new headers passed for this request.
    x_headers.update(t_headers)

    content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)

    # Get hold of request factory to construct the request.
    _request_factory = BatchRequestFactory()
    _request_provider = getattr(_request_factory, method)

    secure = _settings.USE_HTTPS
    request = _request_provider(url, data=body, secure=secure,
                                content_type=content_type, **x_headers)
    return request
python
def get_wsgi_request_object(curr_request, method, url, headers, body):
    '''
        Based on the given request parameters, constructs and returns the WSGI request object.
    '''
    x_headers = headers_to_include_from_request(curr_request)
    method, t_headers = pre_process_method_headers(method, headers)

    # Add default content type.
    if "CONTENT_TYPE" not in t_headers:
        t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})

    # Override existing batch requests headers with the new headers passed for this request.
    x_headers.update(t_headers)

    content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)

    # Get hold of request factory to construct the request.
    _request_factory = BatchRequestFactory()
    _request_provider = getattr(_request_factory, method)

    secure = _settings.USE_HTTPS
    request = _request_provider(url, data=body, secure=secure,
                                content_type=content_type, **x_headers)
    return request
[ "def", "get_wsgi_request_object", "(", "curr_request", ",", "method", ",", "url", ",", "headers", ",", "body", ")", ":", "x_headers", "=", "headers_to_include_from_request", "(", "curr_request", ")", "method", ",", "t_headers", "=", "pre_process_method_headers", "(", "method", ",", "headers", ")", "# Add default content type.", "if", "\"CONTENT_TYPE\"", "not", "in", "t_headers", ":", "t_headers", ".", "update", "(", "{", "\"CONTENT_TYPE\"", ":", "_settings", ".", "DEFAULT_CONTENT_TYPE", "}", ")", "# Override existing batch requests headers with the new headers passed for this request.", "x_headers", ".", "update", "(", "t_headers", ")", "content_type", "=", "x_headers", ".", "get", "(", "\"CONTENT_TYPE\"", ",", "_settings", ".", "DEFAULT_CONTENT_TYPE", ")", "# Get hold of request factory to construct the request.", "_request_factory", "=", "BatchRequestFactory", "(", ")", "_request_provider", "=", "getattr", "(", "_request_factory", ",", "method", ")", "secure", "=", "_settings", ".", "USE_HTTPS", "request", "=", "_request_provider", "(", "url", ",", "data", "=", "body", ",", "secure", "=", "secure", ",", "content_type", "=", "content_type", ",", "*", "*", "x_headers", ")", "return", "request" ]
Based on the given request parameters, constructs and returns the WSGI request object.
[ "Based", "on", "the", "given", "request", "parameters", "constructs", "and", "returns", "the", "WSGI", "request", "object", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L81-L106
tanwanirahul/django-batch-requests
batch_requests/utils.py
BatchRequestFactory._base_environ
def _base_environ(self, **request):
        '''
            Override the default values for the wsgi environment variables.
        '''
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('localhost'),
            'SERVER_PORT': str('8000'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': True,
            'wsgi.run_once': False,
        }
        environ.update(self.defaults)
        environ.update(request)
        return environ
python
def _base_environ(self, **request):
        '''
            Override the default values for the wsgi environment variables.
        '''
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('localhost'),
            'SERVER_PORT': str('8000'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': True,
            'wsgi.run_once': False,
        }
        environ.update(self.defaults)
        environ.update(request)
        return environ
[ "def", "_base_environ", "(", "self", ",", "*", "*", "request", ")", ":", "# This is a minimal valid WSGI environ dictionary, plus:", "# - HTTP_COOKIE: for cookie support,", "# - REMOTE_ADDR: often useful, see #8551.", "# See http://www.python.org/dev/peps/pep-3333/#environ-variables", "environ", "=", "{", "'HTTP_COOKIE'", ":", "self", ".", "cookies", ".", "output", "(", "header", "=", "''", ",", "sep", "=", "'; '", ")", ",", "'PATH_INFO'", ":", "str", "(", "'/'", ")", ",", "'REMOTE_ADDR'", ":", "str", "(", "'127.0.0.1'", ")", ",", "'REQUEST_METHOD'", ":", "str", "(", "'GET'", ")", ",", "'SCRIPT_NAME'", ":", "str", "(", "''", ")", ",", "'SERVER_NAME'", ":", "str", "(", "'localhost'", ")", ",", "'SERVER_PORT'", ":", "str", "(", "'8000'", ")", ",", "'SERVER_PROTOCOL'", ":", "str", "(", "'HTTP/1.1'", ")", ",", "'wsgi.version'", ":", "(", "1", ",", "0", ")", ",", "'wsgi.url_scheme'", ":", "str", "(", "'http'", ")", ",", "'wsgi.input'", ":", "FakePayload", "(", "b''", ")", ",", "'wsgi.errors'", ":", "self", ".", "errors", ",", "'wsgi.multiprocess'", ":", "True", ",", "'wsgi.multithread'", ":", "True", ",", "'wsgi.run_once'", ":", "False", ",", "}", "environ", ".", "update", "(", "self", ".", "defaults", ")", "environ", ".", "update", "(", "request", ")", "return", "environ" ]
Override the default values for the wsgi environment variables.
[ "Override", "the", "default", "values", "for", "the", "wsgi", "environment", "variables", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L16-L44
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/base.py
BaseAPI.request
def request(self, method, endpoint, body=None, timeout=-1):
        """
        Perform a request with a given body to a given endpoint in UpCloud's API.

        Handles errors with __error_middleware.
        """
        if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
            raise Exception('Invalid/Forbidden HTTP method')

        url = '/' + self.api_v + endpoint
        headers = {
            'Authorization': self.token,
            'Content-Type': 'application/json'
        }

        if body:
            json_body_or_None = json.dumps(body)
        else:
            json_body_or_None = None

        call_timeout = timeout if timeout != -1 else self.timeout

        APIcall = getattr(requests, method.lower())
        res = APIcall('https://api.upcloud.com' + url,
                      data=json_body_or_None,
                      headers=headers,
                      timeout=call_timeout)

        if res.text:
            res_json = res.json()
        else:
            res_json = {}

        return self.__error_middleware(res, res_json)
python
def request(self, method, endpoint, body=None, timeout=-1):
        """
        Perform a request with a given body to a given endpoint in UpCloud's API.

        Handles errors with __error_middleware.
        """
        if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
            raise Exception('Invalid/Forbidden HTTP method')

        url = '/' + self.api_v + endpoint
        headers = {
            'Authorization': self.token,
            'Content-Type': 'application/json'
        }

        if body:
            json_body_or_None = json.dumps(body)
        else:
            json_body_or_None = None

        call_timeout = timeout if timeout != -1 else self.timeout

        APIcall = getattr(requests, method.lower())
        res = APIcall('https://api.upcloud.com' + url,
                      data=json_body_or_None,
                      headers=headers,
                      timeout=call_timeout)

        if res.text:
            res_json = res.json()
        else:
            res_json = {}

        return self.__error_middleware(res, res_json)
[ "def", "request", "(", "self", ",", "method", ",", "endpoint", ",", "body", "=", "None", ",", "timeout", "=", "-", "1", ")", ":", "if", "method", "not", "in", "set", "(", "[", "'GET'", ",", "'POST'", ",", "'PUT'", ",", "'DELETE'", "]", ")", ":", "raise", "Exception", "(", "'Invalid/Forbidden HTTP method'", ")", "url", "=", "'/'", "+", "self", ".", "api_v", "+", "endpoint", "headers", "=", "{", "'Authorization'", ":", "self", ".", "token", ",", "'Content-Type'", ":", "'application/json'", "}", "if", "body", ":", "json_body_or_None", "=", "json", ".", "dumps", "(", "body", ")", "else", ":", "json_body_or_None", "=", "None", "call_timeout", "=", "timeout", "if", "timeout", "!=", "-", "1", "else", "self", ".", "timeout", "APIcall", "=", "getattr", "(", "requests", ",", "method", ".", "lower", "(", ")", ")", "res", "=", "APIcall", "(", "'https://api.upcloud.com'", "+", "url", ",", "data", "=", "json_body_or_None", ",", "headers", "=", "headers", ",", "timeout", "=", "call_timeout", ")", "if", "res", ".", "text", ":", "res_json", "=", "res", ".", "json", "(", ")", "else", ":", "res_json", "=", "{", "}", "return", "self", ".", "__error_middleware", "(", "res", ",", "res_json", ")" ]
Perform a request with a given body to a given endpoint in UpCloud's API. Handles errors with __error_middleware.
[ "Perform", "a", "request", "with", "a", "given", "body", "to", "a", "given", "endpoint", "in", "UpCloud", "s", "API", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L21-L54
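BaseAPI.request dispatches on the verb by looking up the matching module-level function in requests via getattr. A minimal self-contained sketch of that dispatch idea (the '1.3' API version segment in the example URL is an assumption; without credentials the call should simply come back 401):

    import requests

    def call(method, url, **kwargs):
        # Same idea as BaseAPI.request: map the verb onto requests.get/post/...
        if method not in {'GET', 'POST', 'PUT', 'DELETE'}:
            raise ValueError('Invalid/Forbidden HTTP method')
        return getattr(requests, method.lower())(url, **kwargs)

    resp = call('GET', 'https://api.upcloud.com/1.3/server', timeout=10)
    print(resp.status_code)  # expect 401 without an Authorization header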
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/base.py
BaseAPI.post_request
def post_request(self, endpoint, body=None, timeout=-1): """ Perform a POST request to a given endpoint in UpCloud's API. """ return self.request('POST', endpoint, body, timeout)
python
def post_request(self, endpoint, body=None, timeout=-1): """ Perform a POST request to a given endpoint in UpCloud's API. """ return self.request('POST', endpoint, body, timeout)
[ "def", "post_request", "(", "self", ",", "endpoint", ",", "body", "=", "None", ",", "timeout", "=", "-", "1", ")", ":", "return", "self", ".", "request", "(", "'POST'", ",", "endpoint", ",", "body", ",", "timeout", ")" ]
Perform a POST request to a given endpoint in UpCloud's API.
[ "Perform", "a", "POST", "request", "to", "a", "given", "endpoint", "in", "UpCloud", "s", "API", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L62-L66
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/base.py
BaseAPI.__error_middleware
def __error_middleware(self, res, res_json): """ Middleware that raises an exception when HTTP status code is an error code. """ if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]): err_dict = res_json.get('error', {}) raise UpCloudAPIError(error_code=err_dict.get('error_code'), error_message=err_dict.get('error_message')) return res_json
python
def __error_middleware(self, res, res_json): """ Middleware that raises an exception when HTTP status code is an error code. """ if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]): err_dict = res_json.get('error', {}) raise UpCloudAPIError(error_code=err_dict.get('error_code'), error_message=err_dict.get('error_message')) return res_json
[ "def", "__error_middleware", "(", "self", ",", "res", ",", "res_json", ")", ":", "if", "(", "res", ".", "status_code", "in", "[", "400", ",", "401", ",", "402", ",", "403", ",", "404", ",", "405", ",", "406", ",", "409", "]", ")", ":", "err_dict", "=", "res_json", ".", "get", "(", "'error'", ",", "{", "}", ")", "raise", "UpCloudAPIError", "(", "error_code", "=", "err_dict", ".", "get", "(", "'error_code'", ")", ",", "error_message", "=", "err_dict", ".", "get", "(", "'error_message'", ")", ")", "return", "res_json" ]
Middleware that raises an exception when HTTP status code is an error code.
[ "Middleware", "that", "raises", "an", "exception", "when", "HTTP", "status", "code", "is", "an", "error", "code", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L68-L77
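Because __error_middleware raises UpCloudAPIError for 4xx responses, callers can branch on the exception instead of inspecting status codes. A hedged usage sketch; the CloudManager constructor, the import paths, and the exception attribute names (assumed to mirror the keyword arguments in the raise above) are not confirmed by these records:

    from upcloud_api import CloudManager, UpCloudAPIError  # import paths assumed

    manager = CloudManager('username', 'password')  # hypothetical constructor
    try:
        manager.get_request('/server/no-such-uuid')
    except UpCloudAPIError as err:
        print(err.error_code, err.error_message)  # attribute names assumed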
okfn/ofs
ofs/remote/swiftstore.py
SwiftOFS.put_stream
def put_stream(self, bucket, label, stream_object, params={}): ''' Create a new file in Swift object storage. ''' self.claim_bucket(bucket) self.connection.put_object(bucket, label, stream_object, headers=self._convert_to_meta(params))
python
def put_stream(self, bucket, label, stream_object, params={}): ''' Create a new file in Swift object storage. ''' self.claim_bucket(bucket) self.connection.put_object(bucket, label, stream_object, headers=self._convert_to_meta(params))
[ "def", "put_stream", "(", "self", ",", "bucket", ",", "label", ",", "stream_object", ",", "params", "=", "{", "}", ")", ":", "self", ".", "claim_bucket", "(", "bucket", ")", "self", ".", "connection", ".", "put_object", "(", "bucket", ",", "label", ",", "stream_object", ",", "headers", "=", "self", ".", "_convert_to_meta", "(", "params", ")", ")" ]
Create a new file in Swift object storage.
[ "Create", "a", "new", "file", "in", "Swift", "object", "storage", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/swiftstore.py#L116-L120
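A put_stream usage sketch for SwiftOFS. The constructor keyword arguments here are hypothetical, since this record only shows the method, not how the swiftclient connection is configured:

    import io
    from ofs.remote.swiftstore import SwiftOFS

    store = SwiftOFS(auth_url='https://keystone.example/v3',  # hypothetical kwargs
                     username='demo', password='secret')
    store.put_stream('reports', '2024-q1.csv', io.BytesIO(b'a,b\n1,2\n'),
                     params={'department': 'ops'})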
ralphhaygood/sklearn-gbmi
sklearn_gbmi/sklearn_gbmi.py
h
def h(gbm, array_or_frame, indices_or_columns = 'all'): """ PURPOSE Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting model among the variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables. EXAMPLES Friedman and Popescu's (2008) formulas (44) and (46) correspond to h(F, x, [j, k]) and h(F, x, [j, k, l]) respectively. NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm. """ if indices_or_columns == 'all': if gbm.max_depth < array_or_frame.shape[1]: raise \ Exception( "gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'." .format(gbm.max_depth, array_or_frame.shape[1]) ) else: if gbm.max_depth < len(indices_or_columns): raise \ Exception( "gbm.max_depth == {}, so indices_or_columns must contain at most {} {}." .format(gbm.max_depth, gbm.max_depth, "element" if gbm.max_depth == 1 else "elements") ) check_args_contd(array_or_frame, indices_or_columns) arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns) width = arr.shape[1] f_vals = {} for n in range(width, 0, -1): for inds in itertools.combinations(range(width), n): f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds) return compute_h_val(f_vals, arr, tuple(range(width)))
python
def h(gbm, array_or_frame, indices_or_columns = 'all'): """ PURPOSE Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting model among the variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables. EXAMPLES Friedman and Popescu's (2008) formulas (44) and (46) correspond to h(F, x, [j, k]) and h(F, x, [j, k, l]) respectively. NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm. """ if indices_or_columns == 'all': if gbm.max_depth < array_or_frame.shape[1]: raise \ Exception( "gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'." .format(gbm.max_depth, array_or_frame.shape[1]) ) else: if gbm.max_depth < len(indices_or_columns): raise \ Exception( "gbm.max_depth == {}, so indices_or_columns must contain at most {} {}." .format(gbm.max_depth, gbm.max_depth, "element" if gbm.max_depth == 1 else "elements") ) check_args_contd(array_or_frame, indices_or_columns) arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns) width = arr.shape[1] f_vals = {} for n in range(width, 0, -1): for inds in itertools.combinations(range(width), n): f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds) return compute_h_val(f_vals, arr, tuple(range(width)))
[ "def", "h", "(", "gbm", ",", "array_or_frame", ",", "indices_or_columns", "=", "'all'", ")", ":", "if", "indices_or_columns", "==", "'all'", ":", "if", "gbm", ".", "max_depth", "<", "array_or_frame", ".", "shape", "[", "1", "]", ":", "raise", "Exception", "(", "\"gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'.\"", ".", "format", "(", "gbm", ".", "max_depth", ",", "array_or_frame", ".", "shape", "[", "1", "]", ")", ")", "else", ":", "if", "gbm", ".", "max_depth", "<", "len", "(", "indices_or_columns", ")", ":", "raise", "Exception", "(", "\"gbm.max_depth == {}, so indices_or_columns must contain at most {} {}.\"", ".", "format", "(", "gbm", ".", "max_depth", ",", "gbm", ".", "max_depth", ",", "\"element\"", "if", "gbm", ".", "max_depth", "==", "1", "else", "\"elements\"", ")", ")", "check_args_contd", "(", "array_or_frame", ",", "indices_or_columns", ")", "arr", ",", "model_inds", "=", "get_arr_and_model_inds", "(", "array_or_frame", ",", "indices_or_columns", ")", "width", "=", "arr", ".", "shape", "[", "1", "]", "f_vals", "=", "{", "}", "for", "n", "in", "range", "(", "width", ",", "0", ",", "-", "1", ")", ":", "for", "inds", "in", "itertools", ".", "combinations", "(", "range", "(", "width", ")", ",", "n", ")", ":", "f_vals", "[", "inds", "]", "=", "compute_f_vals", "(", "gbm", ",", "model_inds", ",", "arr", ",", "inds", ")", "return", "compute_h_val", "(", "f_vals", ",", "arr", ",", "tuple", "(", "range", "(", "width", ")", ")", ")" ]
PURPOSE Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting model among the variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables. EXAMPLES Friedman and Popescu's (2008) formulas (44) and (46) correspond to h(F, x, [j, k]) and h(F, x, [j, k, l]) respectively. NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm.
[ "PURPOSE" ]
train
https://github.com/ralphhaygood/sklearn-gbmi/blob/23a1e7fd50e53d6261379f22a337d8fa4ee6aabe/sklearn_gbmi/sklearn_gbmi.py#L16-L95
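To make the H statistic concrete, here is a hedged end-to-end sketch on synthetic data with a built-in x0*x1 interaction. The top-level import assumes the package re-exports h (the record path is sklearn_gbmi/sklearn_gbmi.py); exact H values depend on the fitted model:

    import numpy as np
    from sklearn.ensemble import GradientBoostingRegressor
    from sklearn_gbmi import h  # assumes a top-level re-export of h

    rng = np.random.default_rng(0)
    x = rng.uniform(size=(1000, 3))
    # Target with a genuine x0*x1 interaction plus an additive x2 term.
    y = x[:, 0] * x[:, 1] + x[:, 2] + 0.01 * rng.normal(size=1000)

    # max_depth == 2 >= len(indices), so the guard in h() is satisfied.
    gbm = GradientBoostingRegressor(max_depth=2, n_estimators=100).fit(x, y)
    print(h(gbm, x, [0, 1]))  # should come out clearly larger than the next line
    print(h(gbm, x, [0, 2]))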
ralphhaygood/sklearn-gbmi
sklearn_gbmi/sklearn_gbmi.py
h_all_pairs
def h_all_pairs(gbm, array_or_frame, indices_or_columns = 'all'): """ PURPOSE Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient- boosting model between each pair of variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of variables or NaN if a computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables. EXAMPLE Friedman and Popescu's (2008) formula (44) for every j and k corresponds to h_all_pairs(F, x) NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm. """ if gbm.max_depth < 2: raise Exception("gbm.max_depth must be at least 2.") check_args_contd(array_or_frame, indices_or_columns) arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns) width = arr.shape[1] f_vals = {} for n in [2, 1]: for inds in itertools.combinations(range(width), n): f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds) h_vals = {} for inds in itertools.combinations(range(width), 2): h_vals[inds] = compute_h_val(f_vals, arr, inds) if indices_or_columns != 'all': h_vals = {tuple(model_inds[(inds,)]): h_vals[inds] for inds in h_vals.keys()} if not isinstance(array_or_frame, np.ndarray): all_cols = array_or_frame.columns.values h_vals = {tuple(all_cols[(inds,)]): h_vals[inds] for inds in h_vals.keys()} return h_vals
python
def h_all_pairs(gbm, array_or_frame, indices_or_columns = 'all'): """ PURPOSE Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient- boosting model between each pair of variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of variables or NaN if a computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables. EXAMPLE Friedman and Popescu's (2008) formula (44) for every j and k corresponds to h_all_pairs(F, x) NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm. """ if gbm.max_depth < 2: raise Exception("gbm.max_depth must be at least 2.") check_args_contd(array_or_frame, indices_or_columns) arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns) width = arr.shape[1] f_vals = {} for n in [2, 1]: for inds in itertools.combinations(range(width), n): f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds) h_vals = {} for inds in itertools.combinations(range(width), 2): h_vals[inds] = compute_h_val(f_vals, arr, inds) if indices_or_columns != 'all': h_vals = {tuple(model_inds[(inds,)]): h_vals[inds] for inds in h_vals.keys()} if not isinstance(array_or_frame, np.ndarray): all_cols = array_or_frame.columns.values h_vals = {tuple(all_cols[(inds,)]): h_vals[inds] for inds in h_vals.keys()} return h_vals
[ "def", "h_all_pairs", "(", "gbm", ",", "array_or_frame", ",", "indices_or_columns", "=", "'all'", ")", ":", "if", "gbm", ".", "max_depth", "<", "2", ":", "raise", "Exception", "(", "\"gbm.max_depth must be at least 2.\"", ")", "check_args_contd", "(", "array_or_frame", ",", "indices_or_columns", ")", "arr", ",", "model_inds", "=", "get_arr_and_model_inds", "(", "array_or_frame", ",", "indices_or_columns", ")", "width", "=", "arr", ".", "shape", "[", "1", "]", "f_vals", "=", "{", "}", "for", "n", "in", "[", "2", ",", "1", "]", ":", "for", "inds", "in", "itertools", ".", "combinations", "(", "range", "(", "width", ")", ",", "n", ")", ":", "f_vals", "[", "inds", "]", "=", "compute_f_vals", "(", "gbm", ",", "model_inds", ",", "arr", ",", "inds", ")", "h_vals", "=", "{", "}", "for", "inds", "in", "itertools", ".", "combinations", "(", "range", "(", "width", ")", ",", "2", ")", ":", "h_vals", "[", "inds", "]", "=", "compute_h_val", "(", "f_vals", ",", "arr", ",", "inds", ")", "if", "indices_or_columns", "!=", "'all'", ":", "h_vals", "=", "{", "tuple", "(", "model_inds", "[", "(", "inds", ",", ")", "]", ")", ":", "h_vals", "[", "inds", "]", "for", "inds", "in", "h_vals", ".", "keys", "(", ")", "}", "if", "not", "isinstance", "(", "array_or_frame", ",", "np", ".", "ndarray", ")", ":", "all_cols", "=", "array_or_frame", ".", "columns", ".", "values", "h_vals", "=", "{", "tuple", "(", "all_cols", "[", "(", "inds", ",", ")", "]", ")", ":", "h_vals", "[", "inds", "]", "for", "inds", "in", "h_vals", ".", "keys", "(", ")", "}", "return", "h_vals" ]
PURPOSE Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient- boosting model between each pair of variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat. 2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1. ARGUMENTS gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here). array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas .DataFrame). indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a pandas data frame. If it is 'all', then all columns of array_or_frame are used. RETURNS A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of variables or NaN if a computation is spoiled by weak main effects and rounding errors. H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables. EXAMPLE Friedman and Popescu's (2008) formula (44) for every j and k corresponds to h_all_pairs(F, x) NOTES 1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths of main effects are available as gbm.feature_importances_ once gbm has been fitted. 2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in the target function. To forestall such spurious interactions, check for strong correlations among variables before fitting gbm.
[ "PURPOSE" ]
train
https://github.com/ralphhaygood/sklearn-gbmi/blob/23a1e7fd50e53d6261379f22a337d8fa4ee6aabe/sklearn_gbmi/sklearn_gbmi.py#L98-L169
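Continuing the sketch above, the pairwise variant scores every two-variable combination in one call; with a NumPy array input the keys are index pairs:

    from sklearn_gbmi import h_all_pairs  # same top-level import assumption

    pair_scores = h_all_pairs(gbm, x)  # gbm and x from the previous sketch
    for pair, score in sorted(pair_scores.items()):
        print(pair, score)  # e.g. (0, 1) expected to score highest here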
csirtgadgets/csirtgsdk-py
csirtgsdk/predict.py
Predict.get
def get(self, q, limit=None): """ Performs a search against the predict endpoint :param q: query to be searched for [STRING] :return: { score: [0|1] } """ uri = '{}/predict?q={}'.format(self.client.remote, q) self.logger.debug(uri) body = self.client.get(uri) return body['score']
python
def get(self, q, limit=None): """ Performs a search against the predict endpoint :param q: query to be searched for [STRING] :return: { score: [0|1] } """ uri = '{}/predict?q={}'.format(self.client.remote, q) self.logger.debug(uri) body = self.client.get(uri) return body['score']
[ "def", "get", "(", "self", ",", "q", ",", "limit", "=", "None", ")", ":", "uri", "=", "'{}/predict?q={}'", ".", "format", "(", "self", ".", "client", ".", "remote", ",", "q", ")", "self", ".", "logger", ".", "debug", "(", "uri", ")", "body", "=", "self", ".", "client", ".", "get", "(", "uri", ")", "return", "body", "[", "'score'", "]" ]
Performs a search against the predict endpoint :param q: query to be searched for [STRING] :return: { score: [0|1] }
[ "Performs", "a", "search", "against", "the", "predict", "endpoint" ]
train
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/predict.py#L18-L29
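A hedged client-side sketch for the predict endpoint. The Client import path and the Predict constructor signature are assumptions; only the get method is confirmed by this record:

    from csirtgsdk.client import Client   # import path assumed
    from csirtgsdk.predict import Predict

    predictor = Predict(Client())  # constructor signature assumed
    print(predictor.get('example.com'))  # 0 or 1, per the docstring above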
okfn/ofs
ofs/local/zipstore.py
ZOFS.exists
def exists(self, bucket, label): '''Whether a given bucket:label object already exists.''' fn = self._zf(bucket, label) try: self.z.getinfo(fn) return True except KeyError: return False
python
def exists(self, bucket, label): '''Whether a given bucket:label object already exists.''' fn = self._zf(bucket, label) try: self.z.getinfo(fn) return True except KeyError: return False
[ "def", "exists", "(", "self", ",", "bucket", ",", "label", ")", ":", "fn", "=", "self", ".", "_zf", "(", "bucket", ",", "label", ")", "try", ":", "self", ".", "z", ".", "getinfo", "(", "fn", ")", "return", "True", "except", "KeyError", ":", "return", "False" ]
Whether a given bucket:label object already exists.
[ "Whether", "a", "given", "bucket", ":", "label", "object", "already", "exists", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L112-L119
okfn/ofs
ofs/local/zipstore.py
ZOFS.list_labels
def list_labels(self, bucket): '''List labels for the given bucket. Due to zipfiles' inherent arbitrary ordering, this is an expensive operation, as it walks the entire archive searching for individual 'buckets' :param bucket: bucket to list labels for. :return: iterator for the labels in the specified bucket. ''' for name in self.z.namelist(): container, label = self._nf(name.encode("utf-8")) if container == bucket and label != MD_FILE: yield label
python
def list_labels(self, bucket): '''List labels for the given bucket. Due to zipfiles' inherent arbitrary ordering, this is an expensive operation, as it walks the entire archive searching for individual 'buckets' :param bucket: bucket to list labels for. :return: iterator for the labels in the specified bucket. ''' for name in self.z.namelist(): container, label = self._nf(name.encode("utf-8")) if container == bucket and label != MD_FILE: yield label
[ "def", "list_labels", "(", "self", ",", "bucket", ")", ":", "for", "name", "in", "self", ".", "z", ".", "namelist", "(", ")", ":", "container", ",", "label", "=", "self", ".", "_nf", "(", "name", ".", "encode", "(", "\"utf-8\"", ")", ")", "if", "container", "==", "bucket", "and", "label", "!=", "MD_FILE", ":", "yield", "label" ]
List labels for the given bucket. Due to zipfiles' inherent arbitrary ordering, this is an expensive operation, as it walks the entire archive searching for individual 'buckets' :param bucket: bucket to list labels for. :return: iterator for the labels in the specified bucket.
[ "List", "labels", "for", "the", "given", "bucket", ".", "Due", "to", "zipfiles", "inherent", "arbitrary", "ordering", "this", "is", "an", "expensive", "operation", "as", "it", "walks", "the", "entire", "archive", "searching", "for", "individual", "buckets" ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L131-L142
okfn/ofs
ofs/local/zipstore.py
ZOFS.list_buckets
def list_buckets(self): '''List all buckets managed by this OFS instance. Like list_labels, this also walks the entire archive, yielding the bucketnames. A local set is retained so that duplicates aren't returned, so this will temporarily pull the entire list into memory even though this is a generator, and will slow as more buckets are added to the set. :return: iterator for the buckets. ''' buckets = set() for name in self.z.namelist(): bucket, _ = self._nf(name) if bucket not in buckets: buckets.add(bucket) yield bucket
python
def list_buckets(self): '''List all buckets managed by this OFS instance. Like list_labels, this also walks the entire archive, yielding the bucketnames. A local set is retained so that duplicates aren't returned, so this will temporarily pull the entire list into memory even though this is a generator, and will slow as more buckets are added to the set. :return: iterator for the buckets. ''' buckets = set() for name in self.z.namelist(): bucket, _ = self._nf(name) if bucket not in buckets: buckets.add(bucket) yield bucket
[ "def", "list_buckets", "(", "self", ")", ":", "buckets", "=", "set", "(", ")", "for", "name", "in", "self", ".", "z", ".", "namelist", "(", ")", ":", "bucket", ",", "_", "=", "self", ".", "_nf", "(", "name", ")", "if", "bucket", "not", "in", "buckets", ":", "buckets", ".", "add", "(", "bucket", ")", "yield", "bucket" ]
List all buckets managed by this OFS instance. Like list_labels, this also walks the entire archive, yielding the bucketnames. A local set is retained so that duplicates aren't returned, so this will temporarily pull the entire list into memory even though this is a generator, and will slow as more buckets are added to the set. :return: iterator for the buckets.
[ "List", "all", "buckets", "managed", "by", "this", "OFS", "instance", ".", "Like", "list_labels", "this", "also", "walks", "the", "entire", "archive", "yielding", "the", "bucketnames", ".", "A", "local", "set", "is", "retained", "so", "that", "duplicates", "aren", "t", "returned", "so", "this", "will", "temporarily", "pull", "the", "entire", "list", "into", "memory", "even", "though", "this", "is", "a", "generator", "and", "will", "slow", "as", "more", "buckets", "are", "added", "to", "the", "set", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L144-L157
okfn/ofs
ofs/local/zipstore.py
ZOFS.get_stream
def get_stream(self, bucket, label, as_stream=True): '''Get a bitstream for the given bucket:label combination. :param bucket: the bucket to use. :return: bitstream as a file-like object ''' if self.mode == "w": raise OFSException("Cannot read from archive in 'w' mode") elif self.exists(bucket, label): fn = self._zf(bucket, label) if as_stream: return self.z.open(fn) else: return self.z.read(fn) else: raise OFSFileNotFound
python
def get_stream(self, bucket, label, as_stream=True): '''Get a bitstream for the given bucket:label combination. :param bucket: the bucket to use. :return: bitstream as a file-like object ''' if self.mode == "w": raise OFSException("Cannot read from archive in 'w' mode") elif self.exists(bucket, label): fn = self._zf(bucket, label) if as_stream: return self.z.open(fn) else: return self.z.read(fn) else: raise OFSFileNotFound
[ "def", "get_stream", "(", "self", ",", "bucket", ",", "label", ",", "as_stream", "=", "True", ")", ":", "if", "self", ".", "mode", "==", "\"w\"", ":", "raise", "OFSException", "(", "\"Cannot read from archive in 'w' mode\"", ")", "elif", "self", ".", "exists", "(", "bucket", ",", "label", ")", ":", "fn", "=", "self", ".", "_zf", "(", "bucket", ",", "label", ")", "if", "as_stream", ":", "return", "self", ".", "z", ".", "open", "(", "fn", ")", "else", ":", "return", "self", ".", "z", ".", "read", "(", "fn", ")", "else", ":", "raise", "OFSFileNotFound" ]
Get a bitstream for the given bucket:label combination. :param bucket: the bucket to use. :return: bitstream as a file-like object
[ "Get", "a", "bitstream", "for", "the", "given", "bucket", ":", "label", "combination", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L159-L174
okfn/ofs
ofs/local/zipstore.py
ZOFS.get_url
def get_url(self, bucket, label): '''Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream. :param bucket: the bucket to use. :param label: the label of the resource to get :return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label' ''' if self.exists(bucket, label): root = "zip:file://%s" % os.path.abspath(self.zipfile) fn = self._zf(bucket, label) return "!/".join([root, fn]) else: raise OFSFileNotFound
python
def get_url(self, bucket, label): '''Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream. :param bucket: the bucket to use. :param label: the label of the resource to get :return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label' ''' if self.exists(bucket, label): root = "zip:file://%s" % os.path.abspath(self.zipfile) fn = self._zf(bucket, label) return "!/".join([root, fn]) else: raise OFSFileNotFound
[ "def", "get_url", "(", "self", ",", "bucket", ",", "label", ")", ":", "if", "self", ".", "exists", "(", "bucket", ",", "label", ")", ":", "root", "=", "\"zip:file://%s\"", "%", "os", ".", "path", ".", "abspath", "(", "self", ".", "zipfile", ")", "fn", "=", "self", ".", "_zf", "(", "bucket", ",", "label", ")", "return", "\"!/\"", ".", "join", "(", "[", "root", ",", "fn", "]", ")", "else", ":", "raise", "OFSFileNotFound" ]
Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream. :param bucket: the bucket to use. :param label: the label of the resource to get :return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label'
[ "Get", "a", "URL", "that", "should", "point", "at", "the", "bucket", ":", "labelled", "resource", ".", "Aimed", "to", "aid", "web", "apps", "by", "allowing", "them", "to", "redirect", "to", "an", "open", "resource", "rather", "than", "proxy", "the", "bitstream", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L176-L188
okfn/ofs
ofs/local/zipstore.py
ZOFS.put_stream
def put_stream(self, bucket, label, stream_object, params=None, replace=True, add_md=True): '''Put a bitstream (stream_object) for the specified bucket:label identifier. :param bucket: as standard :param label: as standard :param stream_object: file-like object to read from or bytestring. :param params: update metadata with these params (see `update_metadata`) ''' if self.mode == "r": raise OFSException("Cannot write into archive in 'r' mode") else: params = params or {} fn = self._zf(bucket, label) params['_creation_date'] = datetime.now().isoformat().split(".")[0] ## '2010-07-08T19:56:47' params['_label'] = label if self.exists(bucket, label) and replace==True: # Add then Replace? Let's see if that works... #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) zinfo = self.z.getinfo(fn) size, chksum = self._write(self.z, bucket, label, stream_object) self._del_stream(zinfo) #z.close() params['_content_length'] = size if chksum: params['_checksum'] = chksum else: #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) size, chksum = self._write(self.z, bucket, label, stream_object) #z.close() params['_content_length'] = size if chksum: params['_checksum'] = chksum if add_md: params = self.update_metadata(bucket, label, params) return params
python
def put_stream(self, bucket, label, stream_object, params=None, replace=True, add_md=True): '''Put a bitstream (stream_object) for the specified bucket:label identifier. :param bucket: as standard :param label: as standard :param stream_object: file-like object to read from or bytestring. :param params: update metadata with these params (see `update_metadata`) ''' if self.mode == "r": raise OFSException("Cannot write into archive in 'r' mode") else: params = params or {} fn = self._zf(bucket, label) params['_creation_date'] = datetime.now().isoformat().split(".")[0] ## '2010-07-08T19:56:47' params['_label'] = label if self.exists(bucket, label) and replace==True: # Add then Replace? Let's see if that works... #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) zinfo = self.z.getinfo(fn) size, chksum = self._write(self.z, bucket, label, stream_object) self._del_stream(zinfo) #z.close() params['_content_length'] = size if chksum: params['_checksum'] = chksum else: #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) size, chksum = self._write(self.z, bucket, label, stream_object) #z.close() params['_content_length'] = size if chksum: params['_checksum'] = chksum if add_md: params = self.update_metadata(bucket, label, params) return params
[ "def", "put_stream", "(", "self", ",", "bucket", ",", "label", ",", "stream_object", ",", "params", "=", "None", ",", "replace", "=", "True", ",", "add_md", "=", "True", ")", ":", "if", "self", ".", "mode", "==", "\"r\"", ":", "raise", "OFSException", "(", "\"Cannot write into archive in 'r' mode\"", ")", "else", ":", "params", "=", "params", "or", "{", "}", "fn", "=", "self", ".", "_zf", "(", "bucket", ",", "label", ")", "params", "[", "'_creation_date'", "]", "=", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", ".", "split", "(", "\".\"", ")", "[", "0", "]", "## '2010-07-08T19:56:47'", "params", "[", "'_label'", "]", "=", "label", "if", "self", ".", "exists", "(", "bucket", ",", "label", ")", "and", "replace", "==", "True", ":", "# Add then Replace? Let's see if that works...", "#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)", "zinfo", "=", "self", ".", "z", ".", "getinfo", "(", "fn", ")", "size", ",", "chksum", "=", "self", ".", "_write", "(", "self", ".", "z", ",", "bucket", ",", "label", ",", "stream_object", ")", "self", ".", "_del_stream", "(", "zinfo", ")", "#z.close()", "params", "[", "'_content_length'", "]", "=", "size", "if", "chksum", ":", "params", "[", "'_checksum'", "]", "=", "chksum", "else", ":", "#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)", "size", ",", "chksum", "=", "self", ".", "_write", "(", "self", ".", "z", ",", "bucket", ",", "label", ",", "stream_object", ")", "#z.close()", "params", "[", "'_content_length'", "]", "=", "size", "if", "chksum", ":", "params", "[", "'_checksum'", "]", "=", "chksum", "if", "add_md", ":", "params", "=", "self", ".", "update_metadata", "(", "bucket", ",", "label", ",", "params", ")", "return", "params" ]
Put a bitstream (stream_object) for the specified bucket:label identifier. :param bucket: as standard :param label: as standard :param stream_object: file-like object to read from or bytestring. :param params: update metadata with these params (see `update_metadata`)
[ "Put", "a", "bitstream", "(", "stream_object", ")", "for", "the", "specified", "bucket", ":", "label", "identifier", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L190-L224
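A round-trip sketch for ZOFS. The constructor call is hypothetical (these records only show that instances carry .zipfile and .mode attributes, with 'r'/'w'/'a' modes checked); passing a bytestring is grounded in update_metadata below, which itself hands json.dumps(...).encode('utf-8') to put_stream:

    from ofs.local.zipstore import ZOFS

    store = ZOFS('archive.zip', mode='a')  # hypothetical constructor call
    params = store.put_stream('bucket1', 'hello.txt', b'hello world',
                              params={'author': 'me'})
    print(params['_label'], params['_content_length'])

    data = store.get_stream('bucket1', 'hello.txt', as_stream=False)
    assert data == b'hello world'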
okfn/ofs
ofs/local/zipstore.py
ZOFS.del_stream
def del_stream(self, bucket, label): '''Delete a bitstream. This needs more testing - file deletion in a zipfile is problematic. Alternate method is to create second zipfile without the files in question, which is not a nice method for large zip archives. ''' if self.exists(bucket, label): name = self._zf(bucket, label) #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) self._del_stream(name)
python
def del_stream(self, bucket, label): '''Delete a bitstream. This needs more testing - file deletion in a zipfile is problematic. Alternate method is to create second zipfile without the files in question, which is not a nice method for large zip archives. ''' if self.exists(bucket, label): name = self._zf(bucket, label) #z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64) self._del_stream(name)
[ "def", "del_stream", "(", "self", ",", "bucket", ",", "label", ")", ":", "if", "self", ".", "exists", "(", "bucket", ",", "label", ")", ":", "name", "=", "self", ".", "_zf", "(", "bucket", ",", "label", ")", "#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)", "self", ".", "_del_stream", "(", "name", ")" ]
Delete a bitstream. This needs more testing - file deletion in a zipfile is problematic. Alternate method is to create second zipfile without the files in question, which is not a nice method for large zip archives.
[ "Delete", "a", "bitstream", ".", "This", "needs", "more", "testing", "-", "file", "deletion", "in", "a", "zipfile", "is", "problematic", ".", "Alternate", "method", "is", "to", "create", "second", "zipfile", "without", "the", "files", "in", "question", "which", "is", "not", "a", "nice", "method", "for", "large", "zip", "archives", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L238-L246
okfn/ofs
ofs/local/zipstore.py
ZOFS.get_metadata
def get_metadata(self, bucket, label): '''Get the metadata for this bucket:label identifier. ''' if self.mode !="w": try: jsn = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... return {} except OFSException as e: raise OFSException(e) if label in jsn: return jsn[label] else: return {} else: raise OFSException("Cannot read md from archive in 'w' mode")
python
def get_metadata(self, bucket, label): '''Get the metadata for this bucket:label identifier. ''' if self.mode !="w": try: jsn = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... return {} except OFSException as e: raise OFSException(e) if label in jsn: return jsn[label] else: return {} else: raise OFSException("Cannot read md from archive in 'w' mode")
[ "def", "get_metadata", "(", "self", ",", "bucket", ",", "label", ")", ":", "if", "self", ".", "mode", "!=", "\"w\"", ":", "try", ":", "jsn", "=", "self", ".", "_get_bucket_md", "(", "bucket", ")", "except", "OFSFileNotFound", ":", "# No MD found...", "return", "{", "}", "except", "OFSException", "as", "e", ":", "raise", "OFSException", "(", "e", ")", "if", "label", "in", "jsn", ":", "return", "jsn", "[", "label", "]", "else", ":", "return", "{", "}", "else", ":", "raise", "OFSException", "(", "\"Cannot read md from archive in 'w' mode\"", ")" ]
Get the metadata for this bucket:label identifier.
[ "Get", "the", "metadata", "for", "this", "bucket", ":", "label", "identifier", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L265-L281
okfn/ofs
ofs/local/zipstore.py
ZOFS.update_metadata
def update_metadata(self, bucket, label, params): '''Update the metadata with the provided dictionary of params. :param params: dictionary of key values (json serializable). ''' if self.mode !="r": try: payload = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... create it payload = {} for l in self.list_labels(bucket): payload[l] = {} payload[l]['_label'] = l if not self.quiet: print("Had to create md file for %s" % bucket) except OFSException as e: raise OFSException(e) if not label in payload: payload[label] = {} payload[label].update(params) self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'), params={}, replace=True, add_md=False) return payload[label] else: raise OFSException("Cannot update MD in archive in 'r' mode")
python
def update_metadata(self, bucket, label, params): '''Update the metadata with the provided dictionary of params. :param params: dictionary of key values (json serializable). ''' if self.mode !="r": try: payload = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... create it payload = {} for l in self.list_labels(bucket): payload[l] = {} payload[l]['_label'] = l if not self.quiet: print("Had to create md file for %s" % bucket) except OFSException as e: raise OFSException(e) if not label in payload: payload[label] = {} payload[label].update(params) self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'), params={}, replace=True, add_md=False) return payload[label] else: raise OFSException("Cannot update MD in archive in 'r' mode")
[ "def", "update_metadata", "(", "self", ",", "bucket", ",", "label", ",", "params", ")", ":", "if", "self", ".", "mode", "!=", "\"r\"", ":", "try", ":", "payload", "=", "self", ".", "_get_bucket_md", "(", "bucket", ")", "except", "OFSFileNotFound", ":", "# No MD found... create it", "payload", "=", "{", "}", "for", "l", "in", "self", ".", "list_labels", "(", "bucket", ")", ":", "payload", "[", "l", "]", "=", "{", "}", "payload", "[", "l", "]", "[", "'_label'", "]", "=", "l", "if", "not", "self", ".", "quiet", ":", "print", "(", "\"Had to create md file for %s\"", "%", "bucket", ")", "except", "OFSException", "as", "e", ":", "raise", "OFSException", "(", "e", ")", "if", "not", "label", "in", "payload", ":", "payload", "[", "label", "]", "=", "{", "}", "payload", "[", "label", "]", ".", "update", "(", "params", ")", "self", ".", "put_stream", "(", "bucket", ",", "MD_FILE", ",", "json", ".", "dumps", "(", "payload", ")", ".", "encode", "(", "'utf-8'", ")", ",", "params", "=", "{", "}", ",", "replace", "=", "True", ",", "add_md", "=", "False", ")", "return", "payload", "[", "label", "]", "else", ":", "raise", "OFSException", "(", "\"Cannot update MD in archive in 'r' mode\"", ")" ]
Update the metadata with the provided dictionary of params. :param params: dictionary of key values (json serializable).
[ "Update", "the", "metadata", "with", "the", "provided", "dictionary", "of", "params", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L283-L307
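Continuing the ZOFS sketch above: update_metadata merges the given params into the bucket's metadata file and returns the label's merged record, which get_metadata then reads back:

    store.update_metadata('bucket1', 'hello.txt', {'mimetype': 'text/plain'})
    md = store.get_metadata('bucket1', 'hello.txt')
    print(md['_label'], md.get('mimetype'), md.get('_creation_date'))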
okfn/ofs
ofs/local/zipstore.py
ZOFS.del_metadata_keys
def del_metadata_keys(self, bucket, label, keys): '''Delete the metadata corresponding to the specified keys. ''' if self.mode !="r": try: payload = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... raise OFSFileNotFound("Couldn't find an md file for %s bucket" % bucket) except OFSException as e: raise OFSException(e) if label in payload: for key in [x for x in keys if x in payload[label]]: del payload[label][key] self.put_stream(bucket, MD_FILE, json.dumps(payload), params={}, replace=True, add_md=False) else: raise OFSException("Cannot update MD in archive in 'r' mode")
python
def del_metadata_keys(self, bucket, label, keys): '''Delete the metadata corresponding to the specified keys. ''' if self.mode !="r": try: payload = self._get_bucket_md(bucket) except OFSFileNotFound: # No MD found... raise OFSFileNotFound("Couldn't find an md file for %s bucket" % bucket) except OFSException as e: raise OFSException(e) if label in payload: for key in [x for x in keys if x in payload[label]]: del payload[label][key] self.put_stream(bucket, MD_FILE, json.dumps(payload), params={}, replace=True, add_md=False) else: raise OFSException("Cannot update MD in archive in 'r' mode")
[ "def", "del_metadata_keys", "(", "self", ",", "bucket", ",", "label", ",", "keys", ")", ":", "if", "self", ".", "mode", "!=", "\"r\"", ":", "try", ":", "payload", "=", "self", ".", "_get_bucket_md", "(", "bucket", ")", "except", "OFSFileNotFound", ":", "# No MD found...", "raise", "OFSFileNotFound", "(", "\"Couldn't find an md file for %s bucket\"", "%", "bucket", ")", "except", "OFSException", "as", "e", ":", "raise", "OFSException", "(", "e", ")", "if", "label", "in", "payload", ":", "for", "key", "in", "[", "x", "for", "x", "in", "keys", "if", "x", "in", "payload", "[", "label", "]", "]", ":", "del", "payload", "[", "label", "]", "[", "key", "]", "self", ".", "put_stream", "(", "bucket", ",", "MD_FILE", ",", "json", ".", "dumps", "(", "payload", ")", ",", "params", "=", "{", "}", ",", "replace", "=", "True", ",", "add_md", "=", "False", ")", "else", ":", "raise", "OFSException", "(", "\"Cannot update MD in archive in 'r' mode\"", ")" ]
Delete the metadata corresponding to the specified keys.
[ "Delete", "the", "metadata", "corresponding", "to", "the", "specified", "keys", "." ]
train
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L309-L325
tanwanirahul/django-batch-requests
batch_requests/views.py
get_response
def get_response(wsgi_request): ''' Given a WSGI request, makes a call to a corresponding view function and returns the response. ''' service_start_time = datetime.now() # Get the view / handler for this request view, args, kwargs = resolve(wsgi_request.path_info) kwargs.update({"request": wsgi_request}) # Let the view do its task. try: resp = view(*args, **kwargs) except Exception as exc: resp = HttpResponseServerError(content=str(exc)) headers = dict(resp._headers.values()) # Convert HTTP response into simple dict type. d_resp = {"status_code": resp.status_code, "reason_phrase": resp.reason_phrase, "headers": headers} try: d_resp.update({"body": resp.content}) except ContentNotRenderedError: resp.render() d_resp.update({"body": resp.content}) # Check if we need to send across the duration header. if _settings.ADD_DURATION_HEADER: d_resp['headers'].update({_settings.DURATION_HEADER_NAME: (datetime.now() - service_start_time).seconds}) return d_resp
python
def get_response(wsgi_request): ''' Given a WSGI request, makes a call to a corresponding view function and returns the response. ''' service_start_time = datetime.now() # Get the view / handler for this request view, args, kwargs = resolve(wsgi_request.path_info) kwargs.update({"request": wsgi_request}) # Let the view do its task. try: resp = view(*args, **kwargs) except Exception as exc: resp = HttpResponseServerError(content=str(exc)) headers = dict(resp._headers.values()) # Convert HTTP response into simple dict type. d_resp = {"status_code": resp.status_code, "reason_phrase": resp.reason_phrase, "headers": headers} try: d_resp.update({"body": resp.content}) except ContentNotRenderedError: resp.render() d_resp.update({"body": resp.content}) # Check if we need to send across the duration header. if _settings.ADD_DURATION_HEADER: d_resp['headers'].update({_settings.DURATION_HEADER_NAME: (datetime.now() - service_start_time).seconds}) return d_resp
[ "def", "get_response", "(", "wsgi_request", ")", ":", "service_start_time", "=", "datetime", ".", "now", "(", ")", "# Get the view / handler for this request", "view", ",", "args", ",", "kwargs", "=", "resolve", "(", "wsgi_request", ".", "path_info", ")", "kwargs", ".", "update", "(", "{", "\"request\"", ":", "wsgi_request", "}", ")", "# Let the view do its task.", "try", ":", "resp", "=", "view", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "resp", "=", "HttpResponseServerError", "(", "content", "=", "str", "(", "exc", ")", ")", "headers", "=", "dict", "(", "resp", ".", "_headers", ".", "values", "(", ")", ")", "# Convert HTTP response into simple dict type.", "d_resp", "=", "{", "\"status_code\"", ":", "resp", ".", "status_code", ",", "\"reason_phrase\"", ":", "resp", ".", "reason_phrase", ",", "\"headers\"", ":", "headers", "}", "try", ":", "d_resp", ".", "update", "(", "{", "\"body\"", ":", "resp", ".", "content", "}", ")", "except", "ContentNotRenderedError", ":", "resp", ".", "render", "(", ")", "d_resp", ".", "update", "(", "{", "\"body\"", ":", "resp", ".", "content", "}", ")", "# Check if we need to send across the duration header.", "if", "_settings", ".", "ADD_DURATION_HEADER", ":", "d_resp", "[", "'headers'", "]", ".", "update", "(", "{", "_settings", ".", "DURATION_HEADER_NAME", ":", "(", "datetime", ".", "now", "(", ")", "-", "service_start_time", ")", ".", "seconds", "}", ")", "return", "d_resp" ]
Given a WSGI request, makes a call to a corresponding view function and returns the response.
[ "Given", "a", "WSGI", "request", "makes", "a", "call", "to", "a", "corresponding", "view", "function", "and", "returns", "the", "response", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L22-L53
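For reference, the dict built by get_response for one sub-request looks like this (values illustrative; the duration key is configurable via settings, so the name shown here is made up):

    sub_response = {
        "status_code": 200,
        "reason_phrase": "OK",
        "headers": {"Content-Type": "application/json", "duration": 0},
        "body": b'{"ok": true}',
    }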
tanwanirahul/django-batch-requests
batch_requests/views.py
get_wsgi_requests
def get_wsgi_requests(request): ''' For the given batch request, extract the individual requests and create WSGIRequest object for each. ''' valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"] requests = json.loads(request.body) if type(requests) not in (list, tuple): raise BadBatchRequest("The body of batch request should always be list!") # Max limit check. no_requests = len(requests) if no_requests > _settings.MAX_LIMIT: raise BadBatchRequest("You can batch a maximum of %d requests." % (_settings.MAX_LIMIT)) # We could mutate the current request with the respective parameters, but mutation is ghost in the dark, # so let's avoid. Construct the new WSGI request object for each request. def construct_wsgi_from_data(data): ''' Given the data in the format of url, method, body and headers, construct a new WSGIRequest object. ''' url = data.get("url", None) method = data.get("method", None) if url is None or method is None: raise BadBatchRequest("Request definition should have url, method defined.") if method.lower() not in valid_http_methods: raise BadBatchRequest("Invalid request method.") body = data.get("body", "") headers = data.get("headers", {}) return get_wsgi_request_object(request, method, url, headers, body) return [construct_wsgi_from_data(data) for data in requests]
python
def get_wsgi_requests(request): ''' For the given batch request, extract the individual requests and create WSGIRequest object for each. ''' valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"] requests = json.loads(request.body) if type(requests) not in (list, tuple): raise BadBatchRequest("The body of batch request should always be list!") # Max limit check. no_requests = len(requests) if no_requests > _settings.MAX_LIMIT: raise BadBatchRequest("You can batch a maximum of %d requests." % (_settings.MAX_LIMIT)) # We could mutate the current request with the respective parameters, but mutation is ghost in the dark, # so let's avoid. Construct the new WSGI request object for each request. def construct_wsgi_from_data(data): ''' Given the data in the format of url, method, body and headers, construct a new WSGIRequest object. ''' url = data.get("url", None) method = data.get("method", None) if url is None or method is None: raise BadBatchRequest("Request definition should have url, method defined.") if method.lower() not in valid_http_methods: raise BadBatchRequest("Invalid request method.") body = data.get("body", "") headers = data.get("headers", {}) return get_wsgi_request_object(request, method, url, headers, body) return [construct_wsgi_from_data(data) for data in requests]
[ "def", "get_wsgi_requests", "(", "request", ")", ":", "valid_http_methods", "=", "[", "\"get\"", ",", "\"post\"", ",", "\"put\"", ",", "\"patch\"", ",", "\"delete\"", ",", "\"head\"", ",", "\"options\"", ",", "\"connect\"", ",", "\"trace\"", "]", "requests", "=", "json", ".", "loads", "(", "request", ".", "body", ")", "if", "type", "(", "requests", ")", "not", "in", "(", "list", ",", "tuple", ")", ":", "raise", "BadBatchRequest", "(", "\"The body of batch request should always be list!\"", ")", "# Max limit check.", "no_requests", "=", "len", "(", "requests", ")", "if", "no_requests", ">", "_settings", ".", "MAX_LIMIT", ":", "raise", "BadBatchRequest", "(", "\"You can batch a maximum of %d requests.\"", "%", "(", "_settings", ".", "MAX_LIMIT", ")", ")", "# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,", "# so let's avoid. Construct the new WSGI request object for each request.", "def", "construct_wsgi_from_data", "(", "data", ")", ":", "'''\n Given the data in the format of url, method, body and headers, construct a new\n WSGIRequest object.\n '''", "url", "=", "data", ".", "get", "(", "\"url\"", ",", "None", ")", "method", "=", "data", ".", "get", "(", "\"method\"", ",", "None", ")", "if", "url", "is", "None", "or", "method", "is", "None", ":", "raise", "BadBatchRequest", "(", "\"Request definition should have url, method defined.\"", ")", "if", "method", ".", "lower", "(", ")", "not", "in", "valid_http_methods", ":", "raise", "BadBatchRequest", "(", "\"Invalid request method.\"", ")", "body", "=", "data", ".", "get", "(", "\"body\"", ",", "\"\"", ")", "headers", "=", "data", ".", "get", "(", "\"headers\"", ",", "{", "}", ")", "return", "get_wsgi_request_object", "(", "request", ",", "method", ",", "url", ",", "headers", ",", "body", ")", "return", "[", "construct_wsgi_from_data", "(", "data", ")", "for", "data", "in", "requests", "]" ]
For the given batch request, extract the individual requests and create WSGIRequest object for each.
[ "For", "the", "given", "batch", "request", "extract", "the", "individual", "requests", "and", "create", "WSGIRequest", "object", "for", "each", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L56-L94
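An example batch body that passes the validation above: a JSON list of dicts where url and method are required and body and headers are optional (the URLs here are made up):

    import json

    batch_body = json.dumps([
        {"url": "/api/users/", "method": "get"},
        {"url": "/api/users/", "method": "post",
         "body": json.dumps({"name": "alice"}),
         "headers": {"Content-Type": "application/json"}},
    ])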
tanwanirahul/django-batch-requests
batch_requests/views.py
handle_batch_requests
def handle_batch_requests(request, *args, **kwargs): ''' A view function to handle the overall processing of batch requests. ''' batch_start_time = datetime.now() try: # Get the Individual WSGI requests. wsgi_requests = get_wsgi_requests(request) except BadBatchRequest as brx: return HttpResponseBadRequest(content=str(brx)) # Fire these WSGI requests, and collect the response for the same. response = execute_requests(wsgi_requests) # Everything's done, return the response. resp = HttpResponse( content=json.dumps(response), content_type="application/json") if _settings.ADD_DURATION_HEADER: resp.__setitem__(_settings.DURATION_HEADER_NAME, str((datetime.now() - batch_start_time).seconds)) return resp
python
def handle_batch_requests(request, *args, **kwargs):
    '''
    A view function to handle the overall processing of batch requests.
    '''
    batch_start_time = datetime.now()

    try:
        # Get the Individual WSGI requests.
        wsgi_requests = get_wsgi_requests(request)
    except BadBatchRequest as brx:
        return HttpResponseBadRequest(content=brx.message)

    # Fire these WSGI requests, and collect the response for the same.
    response = execute_requests(wsgi_requests)

    # Everything's done, return the response.
    resp = HttpResponse(
        content=json.dumps(response), content_type="application/json")

    if _settings.ADD_DURATION_HEADER:
        resp.__setitem__(_settings.DURATION_HEADER_NAME, str((datetime.now() - batch_start_time).seconds))
    return resp
[ "def", "handle_batch_requests", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "batch_start_time", "=", "datetime", ".", "now", "(", ")", "try", ":", "# Get the Individual WSGI requests.", "wsgi_requests", "=", "get_wsgi_requests", "(", "request", ")", "except", "BadBatchRequest", "as", "brx", ":", "return", "HttpResponseBadRequest", "(", "content", "=", "brx", ".", "message", ")", "# Fire these WSGI requests, and collect the response for the same.", "response", "=", "execute_requests", "(", "wsgi_requests", ")", "# Evrything's done, return the response.", "resp", "=", "HttpResponse", "(", "content", "=", "json", ".", "dumps", "(", "response", ")", ",", "content_type", "=", "\"application/json\"", ")", "if", "_settings", ".", "ADD_DURATION_HEADER", ":", "resp", ".", "__setitem__", "(", "_settings", ".", "DURATION_HEADER_NAME", ",", "str", "(", "(", "datetime", ".", "now", "(", ")", "-", "batch_start_time", ")", ".", "seconds", ")", ")", "return", "resp" ]
A view function to handle the overall processing of batch requests.
[ "A", "view", "function", "to", "handle", "the", "overall", "processing", "of", "batch", "requests", "." ]
train
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L108-L128
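A minimal wiring sketch, assuming a Django 1.x-era URLconf; the /api/v1/batch/ path is an arbitrary choice, not mandated by the library. Note that brx.message in the view assumes Python 2 exception semantics; on Python 3, str(brx) would be needed instead.

# urls.py: hypothetical wiring, not shipped with the library
from django.conf.urls import url

from batch_requests.views import handle_batch_requests

urlpatterns = [
    url(r'^api/v1/batch/$', handle_batch_requests),
]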
KKBOX/OpenAPI-Python
kkbox_developer_sdk/search_fetcher.py
KKBOXSearchFetcher.search
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
    '''
    Searches within KKBOX's database.

    :param keyword: the keyword.
    :type keyword: str
    :param types: the search types.
    :type types: list
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
    '''
    url = 'https://api.kkbox.com/v1.1/search'
    url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
    if len(types) > 0:
        url += '&type=' + ','.join(types)
    return self.http._post_data(url, None, self.http._headers_with_access_token())
python
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
    '''
    Searches within KKBOX's database.

    :param keyword: the keyword.
    :type keyword: str
    :param types: the search types.
    :type types: list
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
    '''
    url = 'https://api.kkbox.com/v1.1/search'
    url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
    if len(types) > 0:
        url += '&type=' + ','.join(types)
    return self.http._post_data(url, None, self.http._headers_with_access_token())
[ "def", "search", "(", "self", ",", "keyword", ",", "types", "=", "[", "]", ",", "terr", "=", "KKBOXTerritory", ".", "TAIWAN", ")", ":", "url", "=", "'https://api.kkbox.com/v1.1/search'", "url", "+=", "'?'", "+", "url_parse", ".", "urlencode", "(", "{", "'q'", ":", "keyword", ",", "'territory'", ":", "terr", "}", ")", "if", "len", "(", "types", ")", ">", "0", ":", "url", "+=", "'&type='", "+", "','", ".", "join", "(", "types", ")", "return", "self", ".", "http", ".", "_post_data", "(", "url", ",", "None", ",", "self", ".", "http", ".", "_headers_with_access_token", "(", ")", ")" ]
Searches within KKBOX's database.

:param keyword: the keyword.
:type keyword: str
:param types: the search types.
:type types: list
:param terr: the current territory.
:return: API response.
:rtype: dict

See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
[ "Searches", "within", "KKBOX", "s", "database", "." ]
train
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/search_fetcher.py#L24-L42
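A usage sketch. The constructor arguments are an assumption (this record only shows the search method itself), but the query building follows directly from the code: multiple types are comma-joined into a single type parameter. The mutable default types=[] is harmless here because the list is only read, never mutated.

from kkbox_developer_sdk.search_fetcher import KKBOXSearchFetcher

fetcher = KKBOXSearchFetcher(auth)  # `auth` assumed to carry a valid access token
results = fetcher.search('love', types=['track', 'album'])
# the query string is appended to the URL (q, territory, type=track,album);
# the request is then issued via the fetcher's HTTP helper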
UpCloudLtd/upcloud-python-api
upcloud_api/ip_address.py
IPAddress.save
def save(self):
    """
    IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
    """
    body = {'ip_address': {'ptr_record': self.ptr_record}}
    data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
    self._reset(**data['ip_address'])
python
def save(self):
    """
    IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
    """
    body = {'ip_address': {'ptr_record': self.ptr_record}}
    data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
    self._reset(**data['ip_address'])
[ "def", "save", "(", "self", ")", ":", "body", "=", "{", "'ip_address'", ":", "{", "'ptr_record'", ":", "self", ".", "ptr_record", "}", "}", "data", "=", "self", ".", "cloud_manager", ".", "request", "(", "'PUT'", ",", "'/ip_address/'", "+", "self", ".", "address", ",", "body", ")", "self", ".", "_reset", "(", "*", "*", "data", "[", "'ip_address'", "]", ")" ]
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
[ "IPAddress", "can", "only", "change", "its", "PTR", "record", ".", "Saves", "the", "current", "state", "PUT", "/", "ip_address", "/", "uuid", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/ip_address.py#L32-L38
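A sketch of updating a PTR record. How the IPAddress is obtained is an assumption (the lookup helper below is hypothetical); the save flow itself follows the method above:

from upcloud_api import CloudManager

manager = CloudManager('username', 'password')  # credentials are placeholders
ip = manager.get_ip('80.69.175.100')            # hypothetical lookup helper
ip.ptr_record = 'hostname.example.com'
ip.save()  # PUT /ip_address/80.69.175.100, then refreshes fields from the response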
UpCloudLtd/upcloud-python-api
upcloud_api/ip_address.py
IPAddress._create_ip_address_objs
def _create_ip_address_objs(ip_addresses, cloud_manager):
    """
    Create IPAddress objects from API response data.

    Also associates CloudManager with the objects.
    """
    # ip-addresses might be provided as a flat array or as the following dict:
    # {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}
    if 'ip_addresses' in ip_addresses:
        ip_addresses = ip_addresses['ip_addresses']
    if 'ip_address' in ip_addresses:
        ip_addresses = ip_addresses['ip_address']

    return [
        IPAddress(cloud_manager=cloud_manager, **ip_addr)
        for ip_addr in ip_addresses
    ]
python
def _create_ip_address_objs(ip_addresses, cloud_manager):
    """
    Create IPAddress objects from API response data.

    Also associates CloudManager with the objects.
    """
    # ip-addresses might be provided as a flat array or as the following dict:
    # {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}
    if 'ip_addresses' in ip_addresses:
        ip_addresses = ip_addresses['ip_addresses']
    if 'ip_address' in ip_addresses:
        ip_addresses = ip_addresses['ip_address']

    return [
        IPAddress(cloud_manager=cloud_manager, **ip_addr)
        for ip_addr in ip_addresses
    ]
[ "def", "_create_ip_address_objs", "(", "ip_addresses", ",", "cloud_manager", ")", ":", "# ip-addresses might be provided as a flat array or as a following dict:", "# {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}", "if", "'ip_addresses'", "in", "ip_addresses", ":", "ip_addresses", "=", "ip_addresses", "[", "'ip_addresses'", "]", "if", "'ip_address'", "in", "ip_addresses", ":", "ip_addresses", "=", "ip_addresses", "[", "'ip_address'", "]", "return", "[", "IPAddress", "(", "cloud_manager", "=", "cloud_manager", ",", "*", "*", "ip_addr", ")", "for", "ip_addr", "in", "ip_addresses", "]" ]
Create IPAddress objects from API response data. Also associates CloudManager with the objects.
[ "Create", "IPAddress", "objects", "from", "API", "response", "data", ".", "Also", "associates", "CloudManager", "with", "the", "objects", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/ip_address.py#L54-L71
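Both response shapes named in the comment normalize to the same result. A sketch with made-up data, assuming the helper is reachable as a static helper on IPAddress, as its qualified name suggests, and that `manager` is a CloudManager as in the earlier sketch:

nested = {'ip_addresses': {'ip_address': [{'address': '10.0.0.1'}]}}
flat = [{'address': '10.0.0.1'}]

# Both calls yield a one-element list of IPAddress objects bound to `manager`.
objs_a = IPAddress._create_ip_address_objs(nested, cloud_manager=manager)
objs_b = IPAddress._create_ip_address_objs(flat, cloud_manager=manager)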
UpCloudLtd/upcloud-python-api
upcloud_api/tag.py
Tag._reset
def _reset(self, **kwargs):
    """
    Reset the object's attributes.

    Accepts servers as either unflattened or flattened UUID strings or Server objects.
    """
    super(Tag, self)._reset(**kwargs)

    # backup name for changing it (look: Tag.save)
    self._api_name = self.name

    # flatten { servers: { server: [] } }
    if 'server' in self.servers:
        self.servers = kwargs['servers']['server']

    # convert UUIDs into server objects
    if self.servers and isinstance(self.servers[0], six.string_types):
        self.servers = [Server(uuid=server, populated=False) for server in self.servers]
python
def _reset(self, **kwargs):
    """
    Reset the object's attributes.

    Accepts servers as either unflattened or flattened UUID strings or Server objects.
    """
    super(Tag, self)._reset(**kwargs)

    # backup name for changing it (look: Tag.save)
    self._api_name = self.name

    # flatten { servers: { server: [] } }
    if 'server' in self.servers:
        self.servers = kwargs['servers']['server']

    # convert UUIDs into server objects
    if self.servers and isinstance(self.servers[0], six.string_types):
        self.servers = [Server(uuid=server, populated=False) for server in self.servers]
[ "def", "_reset", "(", "self", ",", "*", "*", "kwargs", ")", ":", "super", "(", "Tag", ",", "self", ")", ".", "_reset", "(", "*", "*", "kwargs", ")", "# backup name for changing it (look: Tag.save)", "self", ".", "_api_name", "=", "self", ".", "name", "# flatten { servers: { server: [] } }", "if", "'server'", "in", "self", ".", "servers", ":", "self", ".", "servers", "=", "kwargs", "[", "'servers'", "]", "[", "'server'", "]", "# convert UUIDs into server objects", "if", "self", ".", "servers", "and", "isinstance", "(", "self", ".", "servers", "[", "0", "]", ",", "six", ".", "string_types", ")", ":", "self", ".", "servers", "=", "[", "Server", "(", "uuid", "=", "server", ",", "populated", "=", "False", ")", "for", "server", "in", "self", ".", "servers", "]" ]
Reset the object's attributes.

Accepts servers as either unflattened or flattened UUID strings or Server objects.
[ "Reset", "the", "objects", "attributes", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/tag.py#L30-L47
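The two accepted shapes for servers, sketched with placeholder UUIDs. _reset is normally called internally (for instance when the object is constructed or refreshed from an API response), so this is illustrative rather than public API:

# unflattened, as the API returns it
tag._reset(name='dev', description='', servers={'server': ['uuid-1', 'uuid-2']})

# already-flattened list of UUID strings
tag._reset(name='dev', description='', servers=['uuid-1', 'uuid-2'])

# either way, tag.servers ends up as unpopulated Server objects:
# [Server(uuid='uuid-1', populated=False), Server(uuid='uuid-2', populated=False)]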
csirtgadgets/csirtgsdk-py
csirtgsdk/client/http.py
HTTP._get
def _get(self, uri, params={}):
    """
    HTTP GET function

    :param uri: REST endpoint
    :param params: optional HTTP params to pass to the endpoint
    :return: list of results (usually a list of dicts)

    Example:
        ret = cli.get('/search', params={ 'q': 'example.org' })
    """
    if not uri.startswith(self.remote):
        uri = '{}{}'.format(self.remote, uri)

    return self._make_request(uri, params)
python
def _get(self, uri, params={}):
    """
    HTTP GET function

    :param uri: REST endpoint
    :param params: optional HTTP params to pass to the endpoint
    :return: list of results (usually a list of dicts)

    Example:
        ret = cli.get('/search', params={ 'q': 'example.org' })
    """
    if not uri.startswith(self.remote):
        uri = '{}{}'.format(self.remote, uri)

    return self._make_request(uri, params)
[ "def", "_get", "(", "self", ",", "uri", ",", "params", "=", "{", "}", ")", ":", "if", "not", "uri", ".", "startswith", "(", "self", ".", "remote", ")", ":", "uri", "=", "'{}{}'", ".", "format", "(", "self", ".", "remote", ",", "uri", ")", "return", "self", ".", "_make_request", "(", "uri", ",", "params", ")" ]
HTTP GET function

:param uri: REST endpoint
:param params: optional HTTP params to pass to the endpoint
:return: list of results (usually a list of dicts)

Example:
    ret = cli.get('/search', params={ 'q': 'example.org' })
[ "HTTP", "GET", "function" ]
train
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/client/http.py#L102-L117
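One behavior worth noting: thanks to the startswith check, relative and absolute URIs resolve to the same request. A sketch, with a hypothetical remote value and `cli` assumed to be an instance of this HTTP client:

# assuming cli.remote == 'https://csirtg.io/api'
cli._get('/search', params={'q': 'example.org'})
cli._get('https://csirtg.io/api/search', params={'q': 'example.org'})
# both call _make_request against https://csirtg.io/api/search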
csirtgadgets/csirtgsdk-py
csirtgsdk/client/http.py
HTTP._post
def _post(self, uri, data):
    """
    HTTP POST function

    :param uri: REST endpoint to POST to
    :param data: list of dicts to be passed to the endpoint
    :return: list of dicts, usually will be a list of objects or id's

    Example:
        ret = cli.post('/indicators', { 'indicator': 'example.com' })
    """
    if not uri.startswith(self.remote):
        uri = '{}/{}'.format(self.remote, uri)

    self.logger.debug(uri)

    return self._make_request(uri, data=data)
python
def _post(self, uri, data):
    """
    HTTP POST function

    :param uri: REST endpoint to POST to
    :param data: list of dicts to be passed to the endpoint
    :return: list of dicts, usually will be a list of objects or id's

    Example:
        ret = cli.post('/indicators', { 'indicator': 'example.com' })
    """
    if not uri.startswith(self.remote):
        uri = '{}/{}'.format(self.remote, uri)

    self.logger.debug(uri)

    return self._make_request(uri, data=data)
[ "def", "_post", "(", "self", ",", "uri", ",", "data", ")", ":", "if", "not", "uri", ".", "startswith", "(", "self", ".", "remote", ")", ":", "uri", "=", "'{}/{}'", ".", "format", "(", "self", ".", "remote", ",", "uri", ")", "self", ".", "logger", ".", "debug", "(", "uri", ")", "return", "self", ".", "_make_request", "(", "uri", ",", "data", "=", "data", ")" ]
HTTP POST function

:param uri: REST endpoint to POST to
:param data: list of dicts to be passed to the endpoint
:return: list of dicts, usually will be a list of objects or id's

Example:
    ret = cli.post('/indicators', { 'indicator': 'example.com' })
[ "HTTP", "POST", "function" ]
train
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/client/http.py#L119-L135
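Unlike _get, this method joins with '{}/{}', so a uri with a leading slash would produce a double slash after the remote. A sketch, with a hypothetical endpoint and remote:

# assuming cli.remote == 'https://csirtg.io/api'
cli._post('indicators', data=[{'indicator': 'example.com'}])
# fine: https://csirtg.io/api/indicators
# but '/indicators' would become https://csirtg.io/api//indicators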
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/server_mixin.py
ServerManager.get_servers
def get_servers(self, populate=False, tags_has_one=None, tags_has_all=None):
    """
    Return a list of (populated or unpopulated) Server instances.

    - populate = False (default) => 1 API request, returns unpopulated Server instances.
    - populate = True => Does 1 + n API requests (n = # of servers), returns populated Server instances.

    New in 0.3.0: the list can be filtered with tags:
    - tags_has_one: list of Tag objects or strings
      returns servers that have at least one of the given tags
    - tags_has_all: list of Tag objects or strings
      returns servers that have all of the tags
    """
    if tags_has_all and tags_has_one:
        raise Exception('only one of (tags_has_all, tags_has_one) is allowed.')

    request = '/server'
    if tags_has_all:
        tags_has_all = [str(tag) for tag in tags_has_all]
        taglist = ':'.join(tags_has_all)
        request = '/server/tag/{0}'.format(taglist)

    if tags_has_one:
        tags_has_one = [str(tag) for tag in tags_has_one]
        taglist = ','.join(tags_has_one)
        request = '/server/tag/{0}'.format(taglist)

    servers = self.get_request(request)['servers']['server']

    server_list = list()
    for server in servers:
        server_list.append(Server(server, cloud_manager=self))

    if populate:
        for server_instance in server_list:
            server_instance.populate()

    return server_list
python
def get_servers(self, populate=False, tags_has_one=None, tags_has_all=None):
    """
    Return a list of (populated or unpopulated) Server instances.

    - populate = False (default) => 1 API request, returns unpopulated Server instances.
    - populate = True => Does 1 + n API requests (n = # of servers), returns populated Server instances.

    New in 0.3.0: the list can be filtered with tags:
    - tags_has_one: list of Tag objects or strings
      returns servers that have at least one of the given tags
    - tags_has_all: list of Tag objects or strings
      returns servers that have all of the tags
    """
    if tags_has_all and tags_has_one:
        raise Exception('only one of (tags_has_all, tags_has_one) is allowed.')

    request = '/server'
    if tags_has_all:
        tags_has_all = [str(tag) for tag in tags_has_all]
        taglist = ':'.join(tags_has_all)
        request = '/server/tag/{0}'.format(taglist)

    if tags_has_one:
        tags_has_one = [str(tag) for tag in tags_has_one]
        taglist = ','.join(tags_has_one)
        request = '/server/tag/{0}'.format(taglist)

    servers = self.get_request(request)['servers']['server']

    server_list = list()
    for server in servers:
        server_list.append(Server(server, cloud_manager=self))

    if populate:
        for server_instance in server_list:
            server_instance.populate()

    return server_list
[ "def", "get_servers", "(", "self", ",", "populate", "=", "False", ",", "tags_has_one", "=", "None", ",", "tags_has_all", "=", "None", ")", ":", "if", "tags_has_all", "and", "tags_has_one", ":", "raise", "Exception", "(", "'only one of (tags_has_all, tags_has_one) is allowed.'", ")", "request", "=", "'/server'", "if", "tags_has_all", ":", "tags_has_all", "=", "[", "str", "(", "tag", ")", "for", "tag", "in", "tags_has_all", "]", "taglist", "=", "':'", ".", "join", "(", "tags_has_all", ")", "request", "=", "'/server/tag/{0}'", ".", "format", "(", "taglist", ")", "if", "tags_has_one", ":", "tags_has_one", "=", "[", "str", "(", "tag", ")", "for", "tag", "in", "tags_has_one", "]", "taglist", "=", "','", ".", "join", "(", "tags_has_one", ")", "request", "=", "'/server/tag/{0}'", ".", "format", "(", "taglist", ")", "servers", "=", "self", ".", "get_request", "(", "request", ")", "[", "'servers'", "]", "[", "'server'", "]", "server_list", "=", "list", "(", ")", "for", "server", "in", "servers", ":", "server_list", ".", "append", "(", "Server", "(", "server", ",", "cloud_manager", "=", "self", ")", ")", "if", "populate", ":", "for", "server_instance", "in", "server_list", ":", "server_instance", ".", "populate", "(", ")", "return", "server_list" ]
Return a list of (populated or unpopulated) Server instances.

- populate = False (default) => 1 API request, returns unpopulated Server instances.
- populate = True => Does 1 + n API requests (n = # of servers), returns populated Server instances.

New in 0.3.0: the list can be filtered with tags:
- tags_has_one: list of Tag objects or strings
  returns servers that have at least one of the given tags
- tags_has_all: list of Tag objects or strings
  returns servers that have all of the tags
[ "Return", "a", "list", "of", "(", "populated", "or", "unpopulated", ")", "Server", "instances", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L15-L54
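A usage sketch, with `manager` assumed to be an authenticated CloudManager as in the earlier example. The join characters in the code give the tag endpoints their semantics: ',' for at-least-one, ':' for all:

servers = manager.get_servers()                                        # 1 API call, unpopulated
dev_or_staging = manager.get_servers(tags_has_one=['dev', 'staging'])  # /server/tag/dev,staging
dev_and_db = manager.get_servers(tags_has_all=['dev', 'db'])           # /server/tag/dev:db
populated = manager.get_servers(populate=True)                         # 1 + n API calls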
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/server_mixin.py
ServerManager.get_server
def get_server(self, UUID):
    """
    Return a (populated) Server instance.
    """
    server, IPAddresses, storages = self.get_server_data(UUID)

    return Server(
        server,
        ip_addresses=IPAddresses,
        storage_devices=storages,
        populated=True,
        cloud_manager=self
    )
python
def get_server(self, UUID):
    """
    Return a (populated) Server instance.
    """
    server, IPAddresses, storages = self.get_server_data(UUID)

    return Server(
        server,
        ip_addresses=IPAddresses,
        storage_devices=storages,
        populated=True,
        cloud_manager=self
    )
[ "def", "get_server", "(", "self", ",", "UUID", ")", ":", "server", ",", "IPAddresses", ",", "storages", "=", "self", ".", "get_server_data", "(", "UUID", ")", "return", "Server", "(", "server", ",", "ip_addresses", "=", "IPAddresses", ",", "storage_devices", "=", "storages", ",", "populated", "=", "True", ",", "cloud_manager", "=", "self", ")" ]
Return a (populated) Server instance.
[ "Return", "a", "(", "populated", ")", "Server", "instance", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L56-L68
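A one-call sketch; the UUID is a placeholder:

server = manager.get_server('009d64ef-31d1-4684-93d5-cacd54a761af')  # placeholder UUID
# server.ip_addresses and server.storage_devices arrive populated in the same call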
UpCloudLtd/upcloud-python-api
upcloud_api/cloud_manager/server_mixin.py
ServerManager.get_server_by_ip
def get_server_by_ip(self, ip_address):
    """
    Return a (populated) Server instance by its IP.

    Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
    """
    data = self.get_request('/ip_address/{0}'.format(ip_address))
    UUID = data['ip_address']['server']
    return self.get_server(UUID)
python
def get_server_by_ip(self, ip_address):
    """
    Return a (populated) Server instance by its IP.

    Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
    """
    data = self.get_request('/ip_address/{0}'.format(ip_address))
    UUID = data['ip_address']['server']
    return self.get_server(UUID)
[ "def", "get_server_by_ip", "(", "self", ",", "ip_address", ")", ":", "data", "=", "self", ".", "get_request", "(", "'/ip_address/{0}'", ".", "format", "(", "ip_address", ")", ")", "UUID", "=", "data", "[", "'ip_address'", "]", "[", "'server'", "]", "return", "self", ".", "get_server", "(", "UUID", ")" ]
Return a (populated) Server instance by its IP. Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
[ "Return", "a", "(", "populated", ")", "Server", "instance", "by", "its", "IP", "." ]
train
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L70-L78
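A sketch; the address is a placeholder. Two API calls happen under the hood: the IP lookup, then the populated server fetch:

server = manager.get_server_by_ip('185.20.30.40')  # placeholder address
# GET /ip_address/185.20.30.40 resolves the UUID, then get_server(UUID) populates it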