| source_code (string, length 45–7.24k) | function_name (string, length 1–111) | messages (list) |
|---|---|---|
def x(self):
if not self.job:
raise Exception('Job not loaded yet. Use load(id) first.')
return JobModel(self.job_id, self.job, self.home_config['storage_dir'])
|
get_job_model
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if not self.job:\n raise Exception('Job not loaded yet. Use load(id) first.')\n return JobModel(self.job_id, self.job, self.home_config['storage_dir'])\n",
"role": "user"
},
{
"content": "get_job_model",
"role": "assistant"
}
] |
def x(self):
blacklist = ['.git', 'aetros']
working_tree = self.git.work_tree
    def recursive(path='.'):
        if os.path.basename(path) in blacklist:
            return []
if os.path.isdir(path):
files = []
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files = recursive(file)
files += added_files
return files
else:
if path.endswith('.pyc'):
return []
if is_ignored(path, self.config['ignore']):
return []
return [os.path.relpath(path, working_tree)]
return recursive(working_tree)
|
file_list
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n blacklist = ['.git', 'aetros']\n working_tree = self.git.work_tree\n\n def x(path='.'):\n if os.path.basename(path) in blacklist:\n return 0, 0\n if os.path.isdir(path):\n files = []\n for file in os.listdir(path):\n if path and path != '.':\n file = path + '/' + file\n added_files = recursive(file)\n files += added_files\n return files\n else:\n if path.endswith('.pyc'):\n return []\n if is_ignored(path, self.config['ignore']):\n return []\n return [os.path.relpath(path, working_tree)]\n return recursive(working_tree)\n",
"role": "user"
},
{
"content": "file_list",
"role": "assistant"
}
] |
def x(self, working_tree, report=False):
blacklist = ['.git']
    def add_resursiv(path='.', report=report):
if os.path.basename(path) in blacklist:
return 0, 0
if working_tree + '/aetros' == path:
return 0, 0
if os.path.isdir(path):
files = 0
size = 0
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files, added_size = add_resursiv(file)
files += added_files
size += added_size
return files, size
else:
if path.endswith('.pyc'):
return 0, 0
relative_path = os.path.relpath(path, working_tree)
if is_ignored(relative_path, self.config['ignore']):
return 0, 0
self.logger.debug('added file to job ' + relative_path)
if report:
print('Added job file: ' + relative_path)
            self.git.add_file_path_in_work_tree(path, working_tree, verbose=False)
return 1, os.path.getsize(path)
return add_resursiv(working_tree, report=report)
|
add_files
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, working_tree, report=False):\n blacklist = ['.git']\n\n def x(path='.', report=report):\n if os.path.basename(path) in blacklist:\n return 0, 0\n if working_tree + '/aetros' == path:\n return 0, 0\n if os.path.isdir(path):\n files = 0\n size = 0\n for file in os.listdir(path):\n if path and path != '.':\n file = path + '/' + file\n added_files, added_size = add_resursiv(file)\n files += added_files\n size += added_size\n return files, size\n else:\n if path.endswith('.pyc'):\n return 0, 0\n relative_path = os.path.relpath(path, working_tree)\n if is_ignored(relative_path, self.config['ignore']):\n return 0, 0\n self.logger.debug('added file to job ' + relative_path)\n if report:\n print('Added job file: ' + relative_path)\n self.git.add_file_path_in_work_tree(path, working_tree, verbose\n =False)\n return 1, os.path.getsize(path)\n return add_resursiv(working_tree, report=report)\n",
"role": "user"
},
{
"content": "add_files",
"role": "assistant"
}
] |
def x(self, x, path, dimensions=None, header_with_dimensions=True):
if path.endswith('.txt'):
if not os.path.exists(path):
raise Exception('Given word2vec file does not exist: ' + path)
f = open(path, 'r')
if not header_with_dimensions and not dimensions:
raise Exception(
            'Either the word2vec file should contain the dimensions as header or it needs to be specified manually using the dimensions=[x,y] argument.'
)
if header_with_dimensions:
line = f.readline()
if ' ' not in line:
raise Exception(
'Given word2vec file should have in first line the dimensions, e.g.: 1000 200'
)
dimensions = np.fromstring(line, dtype=np.uint, sep=' ').tolist()
labels = ''
vectors = ''
line_pos = 1 if header_with_dimensions else 0
if len(dimensions) != 2:
raise Exception(
'dimensions invalid shape. e.g. [200, 32] => 200 rows, 32 cols.'
)
for line in iter(f.readline, ''):
line_pos += 1
space_pos = line.find(' ')
if -1 == space_pos:
message = (
'Given word2vec does not have correct format in line ' +
str(line_pos))
message += '\nGot: ' + str(line)
raise Exception(message)
labels += line[:space_pos] + '\n'
vectors += line[space_pos + 1:] + ' '
vectors = np.fromstring(vectors, dtype=np.float32, sep=' ').tobytes()
else:
raise Exception(
'Given word2vec is not a .txt file. Other file formats are not supported.'
)
info = {'dimensions': dimensions}
name = os.path.basename(path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/' + str(x) + '/embedding/'
with self.git.batch_commit('INSIGHT_EMBEDDING ' + str(x)):
self.git.commit_file('WORD2VEC', remote_path + name +
'/tensor.bytes', vectors)
self.git.commit_file('WORD2VEC', remote_path + name +
'/metadata.tsv', labels)
self.git.commit_file('WORD2VEC', remote_path + name + '/info.json',
simplejson.dumps(info))
|
add_embedding_word2vec
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, x, path, dimensions=None, header_with_dimensions=True):\n if path.endswith('.txt'):\n if not os.path.exists(path):\n raise Exception('Given word2vec file does not exist: ' + path)\n f = open(path, 'r')\n if not header_with_dimensions and not dimensions:\n raise Exception(\n 'Either the word2vec file should contain the dimensions as header or it needs to bespecified manually using dimensions=[x,y] argument.'\n )\n if header_with_dimensions:\n line = f.readline()\n if ' ' not in line:\n raise Exception(\n 'Given word2vec file should have in first line the dimensions, e.g.: 1000 200'\n )\n dimensions = np.fromstring(line, dtype=np.uint, sep=' ').tolist()\n labels = ''\n vectors = ''\n line_pos = 1 if header_with_dimensions else 0\n if len(dimensions) != 2:\n raise Exception(\n 'dimensions invalid shape. e.g. [200, 32] => 200 rows, 32 cols.'\n )\n for line in iter(f.readline, ''):\n line_pos += 1\n space_pos = line.find(' ')\n if -1 == space_pos:\n message = (\n 'Given word2vec does not have correct format in line ' +\n str(line_pos))\n message += '\\nGot: ' + str(line)\n raise Exception(message)\n labels += line[:space_pos] + '\\n'\n vectors += line[space_pos + 1:] + ' '\n vectors = np.fromstring(vectors, dtype=np.float32, sep=' ').tobytes()\n else:\n raise Exception(\n 'Given word2vec is not a .txt file. Other file formats are not supported.'\n )\n info = {'dimensions': dimensions}\n name = os.path.basename(path)\n self._ensure_insight(x)\n remote_path = 'aetros/job/insight/' + str(x) + '/embedding/'\n with self.git.batch_commit('INSIGHT_EMBEDDING ' + str(x)):\n self.git.commit_file('WORD2VEC', remote_path + name +\n '/tensor.bytes', vectors)\n self.git.commit_file('WORD2VEC', remote_path + name +\n '/metadata.tsv', labels)\n self.git.commit_file('WORD2VEC', remote_path + name + '/info.json',\n simplejson.dumps(info))\n",
"role": "user"
},
{
"content": "add_embedding_word2vec",
"role": "assistant"
}
] |
def x(self, x, dimensions, vectors_path, metadata=None, image_shape=None,
image=None):
if not os.path.exists(vectors_path):
raise Exception('Given embedding vectors file does not exist: ' +
vectors_path)
if metadata and not os.path.exists(metadata):
raise Exception('Given embedding metadata file does not exist: ' +
metadata)
name = os.path.basename(vectors_path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/' + str(x) + '/embedding/'
info = {'dimensions': dimensions, 'image_shape': image_shape, 'image':
os.path.basename(image) if image else None}
with self.git.lock_write():
self.git.add_file_path(remote_path + name + '/tensor.bytes',
vectors_path)
self.git.add_file_path(remote_path + name + '/metadata.tsv', metadata)
        self.git.add_file(remote_path + name + '/info.json', simplejson.dumps(info))
if image:
            self.git.add_file(remote_path + name + '/' + os.path.basename(image), image)
self.git.commit_index('INSIGHT_EMBEDDING ' + str(x))
|
add_embedding_path
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, x, dimensions, vectors_path, metadata=None, image_shape=None,\n image=None):\n if not os.path.exists(vectors_path):\n raise Exception('Given embedding vectors file does not exist: ' +\n vectors_path)\n if metadata and not os.path.exists(metadata):\n raise Exception('Given embedding metadata file does not exist: ' +\n metadata)\n name = os.path.basename(vectors_path)\n self._ensure_insight(x)\n remote_path = 'aetros/job/insight/' + str(x) + '/embedding/'\n info = {'dimensions': dimensions, 'image_shape': image_shape, 'image': \n os.path.basename(image) if image else None}\n with self.git.lock_write():\n self.git.add_file_path(remote_path + name + '/tensor.bytes',\n vectors_path)\n self.git.add_file_path(remote_path + name + '/metadata.tsv', metadata)\n self.git.add_file(remote_path + name + '/info.json', simplejson.\n dumps(info))\n if image:\n self.git.add_file(remote_path + name + '/' + os.path.basename(\n image), image)\n self.git.commit_index('INSIGHT_EMBEDDING ' + str(x))\n",
"role": "user"
},
{
"content": "add_embedding_path",
"role": "assistant"
}
] |
def x(fileinfo):
current_year = datetime.datetime.now().strftime('%Y')
files = []
for line in fileinfo:
parts = re.split('^([\\-dbclps])' + '([\\-rwxs]{9})\\s+' +
'(\\d+)\\s+' + '([a-zA-Z0-9_-]+)\\s+' + '([a-zA-Z0-9_-]+)\\s+' +
'(\\d+)\\s+' + '(\\w{3}\\s+\\d{1,2})\\s+' +
'(\\d{1,2}:\\d{1,2}|\\d{4})\\s+' + '(.+)$', line)
date = parts[7]
time = parts[8] if ':' in parts[8] else '00:00'
year = parts[8] if ':' not in parts[8] else current_year
dt_obj = parser.parse('%s %s %s' % (date, year, time))
        files.append({'directory': parts[1], 'perms': parts[2],
            'items': parts[3], 'owner': parts[4], 'group': parts[5],
            'size': int(parts[6]), 'date': date, 'time': time,
            'year': year, 'name': parts[9], 'datetime': dt_obj})
return files
|
split_file_info
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fileinfo):\n current_year = datetime.datetime.now().strftime('%Y')\n files = []\n for line in fileinfo:\n parts = re.split('^([\\\\-dbclps])' + '([\\\\-rwxs]{9})\\\\s+' +\n '(\\\\d+)\\\\s+' + '([a-zA-Z0-9_-]+)\\\\s+' + '([a-zA-Z0-9_-]+)\\\\s+' +\n '(\\\\d+)\\\\s+' + '(\\\\w{3}\\\\s+\\\\d{1,2})\\\\s+' +\n '(\\\\d{1,2}:\\\\d{1,2}|\\\\d{4})\\\\s+' + '(.+)$', line)\n date = parts[7]\n time = parts[8] if ':' in parts[8] else '00:00'\n year = parts[8] if ':' not in parts[8] else current_year\n dt_obj = parser.parse('%s %s %s' % (date, year, time))\n files.append({'directory': parts[1], 'perms': parts[2], 'items':\n parts[3], 'owner': parts[4], 'group': parts[5], 'size': int(\n parts[6]), 'date': date, 'time': time, 'year': year, 'name':\n parts[9], 'datetime': dt_obj})\n return files\n",
"role": "user"
},
{
"content": "split_file_info",
"role": "assistant"
}
] |
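A minimal sketch of the parsing step in the `split_file_info` row above, applied to one Unix-style `ls -l` line; the sample line is an assumption, and `parser` is taken to be `dateutil.parser` as the call suggests:

```python
import re
import datetime
from dateutil import parser  # assumption: the row's `parser` is dateutil's

line = '-rw-r--r--   1 alice  staff      1024 Jan 15 10:30 notes.txt'
parts = re.split(
    '^([\\-dbclps])([\\-rwxs]{9})\\s+(\\d+)\\s+([a-zA-Z0-9_-]+)\\s+'
    '([a-zA-Z0-9_-]+)\\s+(\\d+)\\s+(\\w{3}\\s+\\d{1,2})\\s+'
    '(\\d{1,2}:\\d{1,2}|\\d{4})\\s+(.+)$', line)
# re.split with capture groups returns the groups between empty edges:
# ['', '-', 'rw-r--r--', '1', 'alice', 'staff', '1024', 'Jan 15', '10:30', 'notes.txt', '']
date, time_ = parts[7], parts[8]
year = datetime.datetime.now().strftime('%Y')  # listings that show a time omit the year
print(parser.parse('%s %s %s' % (date, year, time_)))  # e.g. 2025-01-15 10:30:00
```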
def x(self, remote, local=None):
if isinstance(local, file_type):
local_file = local
elif local is None:
local_file = buffer_type()
else:
local_file = open(local, 'wb')
self.conn.retrbinary('RETR %s' % remote, local_file.write)
if isinstance(local, file_type):
pass
elif local is None:
contents = local_file.getvalue()
local_file.close()
return contents
else:
local_file.close()
return None
|
get
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, remote, local=None):\n if isinstance(local, file_type):\n local_file = local\n elif local is None:\n local_file = buffer_type()\n else:\n local_file = open(local, 'wb')\n self.conn.retrbinary('RETR %s' % remote, local_file.write)\n if isinstance(local, file_type):\n pass\n elif local is None:\n contents = local_file.getvalue()\n local_file.close()\n return contents\n else:\n local_file.close()\n return None\n",
"role": "user"
},
{
"content": "get",
"role": "assistant"
}
] |
def x(self, local, remote, contents=None, quiet=False):
remote_dir = os.path.dirname(remote)
    remote_file = os.path.basename(local) if remote.endswith('/') else os.path.basename(remote)
if contents:
local_file = buffer_type(contents)
elif isinstance(local, file_type):
local_file = local
else:
local_file = open(local, 'rb')
current = self.conn.pwd()
self.descend(remote_dir, force=True)
size = 0
try:
self.conn.storbinary('STOR %s' % remote_file, local_file)
size = self.conn.size(remote_file)
except:
if not quiet:
raise
finally:
local_file.close()
self.conn.cwd(current)
return size
|
put
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, local, remote, contents=None, quiet=False):\n remote_dir = os.path.dirname(remote)\n remote_file = os.path.basename(local) if remote.endswith('/'\n ) else os.path.basename(remote)\n if contents:\n local_file = buffer_type(contents)\n elif isinstance(local, file_type):\n local_file = local\n else:\n local_file = open(local, 'rb')\n current = self.conn.pwd()\n self.descend(remote_dir, force=True)\n size = 0\n try:\n self.conn.storbinary('STOR %s' % remote_file, local_file)\n size = self.conn.size(remote_file)\n except:\n if not quiet:\n raise\n finally:\n local_file.close()\n self.conn.cwd(current)\n return size\n",
"role": "user"
},
{
"content": "put",
"role": "assistant"
}
] |
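The `put` row assumes `self.conn` is an `ftplib.FTP` connection and `buffer_type` wraps raw bytes. A standalone sketch of the same upload logic against plain `ftplib`; the host, credentials, and the pre-existing remote directory are assumptions:

```python
import os
from ftplib import FTP
from io import BytesIO  # a plausible stand-in for the row's `buffer_type`

def put(conn, local, remote, contents=None):
    # remote ending in '/' keeps the local basename, mirroring the row above
    remote_file = os.path.basename(local) if remote.endswith('/') else os.path.basename(remote)
    remote_dir = os.path.dirname(remote)
    local_file = BytesIO(contents) if contents else open(local, 'rb')
    current = conn.pwd()
    try:
        if remote_dir:
            conn.cwd(remote_dir)  # unlike descend(force=True), assumes the dir exists
        conn.storbinary('STOR %s' % remote_file, local_file)
        return conn.size(remote_file)
    finally:
        local_file.close()
        conn.cwd(current)

conn = FTP('ftp.example.com')  # placeholder host
conn.login('alice', 'secret')  # placeholder credentials
print(put(conn, None, 'backups/hello.txt', contents=b'hello'))  # 5, if the server supports SIZE
```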
def x(self, src, dst, ignore=None):
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
try:
self.conn.mkd(dst)
except error_perm:
pass
errors = []
for name in names:
if name in ignored_names:
continue
src_name = os.path.join(src, name)
dst_name = os.path.join(dst, name)
try:
if os.path.islink(src_name):
pass
elif os.path.isdir(src_name):
self.upload_tree(src_name, dst_name, ignore)
else:
self.put(src_name, dst_name)
except Exception as why:
errors.append((src_name, dst_name, str(why)))
return dst
|
upload_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, src, dst, ignore=None):\n names = os.listdir(src)\n if ignore is not None:\n ignored_names = ignore(src, names)\n else:\n ignored_names = set()\n try:\n self.conn.mkd(dst)\n except error_perm:\n pass\n errors = []\n for name in names:\n if name in ignored_names:\n continue\n src_name = os.path.join(src, name)\n dst_name = os.path.join(dst, name)\n try:\n if os.path.islink(src_name):\n pass\n elif os.path.isdir(src_name):\n self.upload_tree(src_name, dst_name, ignore)\n else:\n self.put(src_name, dst_name)\n except Exception as why:\n errors.append((src_name, dst_name, str(why)))\n return dst\n",
"role": "user"
},
{
"content": "upload_tree",
"role": "assistant"
}
] |
def x(self, remote='.', extra=False, remove_relative_paths=False):
if extra:
self.tmp_output = []
self.conn.dir(remote, self._collector)
directory_list = split_file_info(self.tmp_output)
else:
directory_list = self.conn.nlst(remote)
if remove_relative_paths:
return list(filter(self.is_not_relative_path, directory_list))
return directory_list
|
list
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, remote='.', extra=False, remove_relative_paths=False):\n if extra:\n self.tmp_output = []\n self.conn.dir(remote, self._collector)\n directory_list = split_file_info(self.tmp_output)\n else:\n directory_list = self.conn.nlst(remote)\n if remove_relative_paths:\n return list(filter(self.is_not_relative_path, directory_list))\n return directory_list\n",
"role": "user"
},
{
"content": "list",
"role": "assistant"
}
] |
def x(self, remote, force=False):
remote_dirs = remote.split('/')
for directory in remote_dirs:
try:
self.conn.cwd(directory)
except Exception:
if force:
self.conn.mkd(directory)
self.conn.cwd(directory)
return self.conn.pwd()
|
descend
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, remote, force=False):\n remote_dirs = remote.split('/')\n for directory in remote_dirs:\n try:\n self.conn.cwd(directory)\n except Exception:\n if force:\n self.conn.mkd(directory)\n self.conn.cwd(directory)\n return self.conn.pwd()\n",
"role": "user"
},
{
"content": "descend",
"role": "assistant"
}
] |
def x(self, remote):
try:
self.conn.delete(remote)
except Exception:
return False
else:
return True
|
delete
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, remote):\n try:\n self.conn.delete(remote)\n except Exception:\n return False\n else:\n return True\n",
"role": "user"
},
{
"content": "delete",
"role": "assistant"
}
] |
def x(self, remote):
try:
self.conn.cwd(remote)
except Exception:
return False
else:
return self.pwd()
|
cd
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, remote):\n try:\n self.conn.cwd(remote)\n except Exception:\n return False\n else:\n return self.pwd()\n",
"role": "user"
},
{
"content": "cd",
"role": "assistant"
}
] |
def x(logger, full_id, fetch=True, env=None, volumes=None, cpus=None,
memory=None, gpu_devices=None, offline=False):
owner, name, id = unpack_full_job_id(full_id)
if isinstance(sys.stdout, GeneralLogger):
sys.stdout.clear_buffer()
job_backend = JobBackend(model_name=owner + '/' + name)
if fetch:
job_backend.fetch(id)
job_backend.restart(id)
job_backend.start(collect_system=False, offline=offline)
job_backend.set_status('PREPARE', add_section=False)
job = job_backend.get_job_model()
if not cpus:
cpus = job.get_cpu()
if not memory:
memory = job.get_memory()
if not gpu_devices and job.get_gpu():
gpu_devices = []
for i in range(0, job.get_gpu()):
gpu_devices.append(i)
    start_command(logger, job_backend, env, volumes, cpus=cpus,
        memory=memory, gpu_devices=gpu_devices, offline=offline)
|
start
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(logger, full_id, fetch=True, env=None, volumes=None, cpus=None,\n memory=None, gpu_devices=None, offline=False):\n owner, name, id = unpack_full_job_id(full_id)\n if isinstance(sys.stdout, GeneralLogger):\n sys.stdout.clear_buffer()\n job_backend = JobBackend(model_name=owner + '/' + name)\n if fetch:\n job_backend.fetch(id)\n job_backend.restart(id)\n job_backend.start(collect_system=False, offline=offline)\n job_backend.set_status('PREPARE', add_section=False)\n job = job_backend.get_job_model()\n if not cpus:\n cpus = job.get_cpu()\n if not memory:\n memory = job.get_memory()\n if not gpu_devices and job.get_gpu():\n gpu_devices = []\n for i in range(0, job.get_gpu()):\n gpu_devices.append(i)\n start_command(logger, job_backend, env, volumes, cpus=cpus, memory=\n memory, gpu_devices=gpu_devices, offline=offline)\n",
"role": "user"
},
{
"content": "start",
"role": "assistant"
}
] |
def x(im, flatten=False, mode=None):
if not Image.isImageType(im):
raise TypeError('Input is not a PIL image.')
if mode is not None:
if mode != im.mode:
im = im.convert(mode)
elif im.mode == 'P':
if 'transparency' in im.info:
im = im.convert('RGBA')
else:
im = im.convert('RGB')
if flatten:
im = im.convert('F')
elif im.mode == '1':
im = im.convert('L')
a = array(im)
return a
|
fromimage
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(im, flatten=False, mode=None):\n if not Image.isImageType(im):\n raise TypeError('Input is not a PIL image.')\n if mode is not None:\n if mode != im.mode:\n im = im.convert(mode)\n elif im.mode == 'P':\n if 'transparency' in im.info:\n im = im.convert('RGBA')\n else:\n im = im.convert('RGB')\n if flatten:\n im = im.convert('F')\n elif im.mode == '1':\n im = im.convert('L')\n a = array(im)\n return a\n",
"role": "user"
},
{
"content": "fromimage",
"role": "assistant"
}
] |
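A short usage sketch for the `fromimage` row, which mirrors the old `scipy.misc.fromimage` with `array` being `numpy.array`; the tiny test image is an assumption:

```python
import numpy as np
from PIL import Image

img = Image.new('RGB', (4, 2), color=(255, 0, 0))  # 4 wide, 2 high
a = np.array(img)            # what the row's final `array(im)` produces
print(a.shape, a.dtype)      # (2, 4, 3) uint8 -- rows x cols x channels

# flatten=True routes through mode 'F' (single-channel 32-bit float):
f = np.array(img.convert('F'))
print(f.shape, f.dtype)      # (2, 4) float32
```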
def x(data, cmin=None, cmax=None, high=255, low=0):
if data.dtype == uint8:
return data
if high > 255:
raise ValueError('`high` should be less than or equal to 255.')
if low < 0:
raise ValueError('`low` should be greater than or equal to 0.')
if high < low:
raise ValueError('`high` should be greater than or equal to `low`.')
if cmin is None:
cmin = data.min()
if cmax is None:
cmax = data.max()
cscale = cmax - cmin
if cscale < 0:
raise ValueError('`cmax` should be larger than `cmin`.')
elif cscale == 0:
cscale = 1
scale = float(high - low) / cscale
bytedata = (data - cmin) * scale + low
return (bytedata.clip(low, high) + 0.5).astype(uint8)
|
bytescale
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, cmin=None, cmax=None, high=255, low=0):\n if data.dtype == uint8:\n return data\n if high > 255:\n raise ValueError('`high` should be less than or equal to 255.')\n if low < 0:\n raise ValueError('`low` should be greater than or equal to 0.')\n if high < low:\n raise ValueError('`high` should be greater than or equal to `low`.')\n if cmin is None:\n cmin = data.min()\n if cmax is None:\n cmax = data.max()\n cscale = cmax - cmin\n if cscale < 0:\n raise ValueError('`cmax` should be larger than `cmin`.')\n elif cscale == 0:\n cscale = 1\n scale = float(high - low) / cscale\n bytedata = (data - cmin) * scale + low\n return (bytedata.clip(low, high) + 0.5).astype(uint8)\n",
"role": "user"
},
{
"content": "bytescale",
"role": "assistant"
}
] |
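A worked example of the `bytescale` arithmetic, with input values chosen for the example: here `cmin=0.0` and `cmax=1.0`, so the scale is `(255 - 0) / 1.0 = 255`, and the final `+ 0.5` before the `uint8` cast rounds half-up:

```python
import numpy as np

data = np.array([0.0, 0.5, 1.0])
# bytedata = (data - cmin) * scale + low = [0, 127.5, 255]
print((np.clip(data * 255.0, 0, 255) + 0.5).astype(np.uint8))  # [  0 128 255]
```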
def x(arr, high=255, low=0, cmin=None, cmax=None, pal=None, mode=None,
channel_axis=None):
data = asarray(arr)
if iscomplexobj(data):
raise ValueError('Cannot convert a complex-valued array.')
shape = list(data.shape)
valid = len(shape) == 2 or len(shape) == 3 and (3 in shape or 4 in shape)
if not valid:
raise ValueError(
"'arr' does not have a suitable array shape for any mode.")
if len(shape) == 2:
shape = shape[1], shape[0]
if mode == 'F':
data32 = data.astype(numpy.float32)
image = Image.frombytes(mode, shape, data32.tostring())
return image
if mode in [None, 'L', 'P']:
        bytedata = bytescale(data, high=high, low=low, cmin=cmin, cmax=cmax)
image = Image.frombytes('L', shape, bytedata.tostring())
if pal is not None:
image.putpalette(asarray(pal, dtype=uint8).tostring())
elif mode == 'P':
            pal = arange(0, 256, 1, dtype=uint8)[:, newaxis] * ones((3,), dtype=uint8)[newaxis, :]
image.putpalette(asarray(pal, dtype=uint8).tostring())
return image
if mode == '1':
bytedata = data > high
image = Image.frombytes('1', shape, bytedata.tostring())
return image
if cmin is None:
cmin = amin(ravel(data))
if cmax is None:
cmax = amax(ravel(data))
data = (data * 1.0 - cmin) * (high - low) / (cmax - cmin) + low
if mode == 'I':
data32 = data.astype(numpy.uint32)
image = Image.frombytes(mode, shape, data32.tostring())
else:
raise ValueError(_errstr)
return image
if channel_axis is None:
if 3 in shape:
ca = numpy.flatnonzero(asarray(shape) == 3)[0]
else:
ca = numpy.flatnonzero(asarray(shape) == 4)
if len(ca):
ca = ca[0]
else:
raise ValueError('Could not find channel dimension.')
else:
ca = channel_axis
numch = shape[ca]
if numch not in [3, 4]:
raise ValueError('Channel axis dimension is not valid.')
bytedata = bytescale(data, high=high, low=low, cmin=cmin, cmax=cmax)
if ca == 2:
strdata = bytedata.tostring()
shape = shape[1], shape[0]
elif ca == 1:
strdata = transpose(bytedata, (0, 2, 1)).tostring()
shape = shape[2], shape[0]
elif ca == 0:
strdata = transpose(bytedata, (1, 2, 0)).tostring()
shape = shape[2], shape[1]
if mode is None:
if numch == 3:
mode = 'RGB'
else:
mode = 'RGBA'
if mode not in ['RGB', 'RGBA', 'YCbCr', 'CMYK']:
raise ValueError(_errstr)
if mode in ['RGB', 'YCbCr']:
if numch != 3:
raise ValueError('Invalid array shape for mode.')
if mode in ['RGBA', 'CMYK']:
if numch != 4:
raise ValueError('Invalid array shape for mode.')
image = Image.frombytes(mode, shape, strdata)
return image
|
toimage
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(arr, high=255, low=0, cmin=None, cmax=None, pal=None, mode=None,\n channel_axis=None):\n data = asarray(arr)\n if iscomplexobj(data):\n raise ValueError('Cannot convert a complex-valued array.')\n shape = list(data.shape)\n valid = len(shape) == 2 or len(shape) == 3 and (3 in shape or 4 in shape)\n if not valid:\n raise ValueError(\n \"'arr' does not have a suitable array shape for any mode.\")\n if len(shape) == 2:\n shape = shape[1], shape[0]\n if mode == 'F':\n data32 = data.astype(numpy.float32)\n image = Image.frombytes(mode, shape, data32.tostring())\n return image\n if mode in [None, 'L', 'P']:\n bytedata = bytescale(data, high=high, low=low, cmin=cmin, cmax=cmax\n )\n image = Image.frombytes('L', shape, bytedata.tostring())\n if pal is not None:\n image.putpalette(asarray(pal, dtype=uint8).tostring())\n elif mode == 'P':\n pal = arange(0, 256, 1, dtype=uint8)[:, newaxis] * ones((3,\n ), dtype=uint8)[newaxis, :]\n image.putpalette(asarray(pal, dtype=uint8).tostring())\n return image\n if mode == '1':\n bytedata = data > high\n image = Image.frombytes('1', shape, bytedata.tostring())\n return image\n if cmin is None:\n cmin = amin(ravel(data))\n if cmax is None:\n cmax = amax(ravel(data))\n data = (data * 1.0 - cmin) * (high - low) / (cmax - cmin) + low\n if mode == 'I':\n data32 = data.astype(numpy.uint32)\n image = Image.frombytes(mode, shape, data32.tostring())\n else:\n raise ValueError(_errstr)\n return image\n if channel_axis is None:\n if 3 in shape:\n ca = numpy.flatnonzero(asarray(shape) == 3)[0]\n else:\n ca = numpy.flatnonzero(asarray(shape) == 4)\n if len(ca):\n ca = ca[0]\n else:\n raise ValueError('Could not find channel dimension.')\n else:\n ca = channel_axis\n numch = shape[ca]\n if numch not in [3, 4]:\n raise ValueError('Channel axis dimension is not valid.')\n bytedata = bytescale(data, high=high, low=low, cmin=cmin, cmax=cmax)\n if ca == 2:\n strdata = bytedata.tostring()\n shape = shape[1], shape[0]\n elif ca == 1:\n strdata = transpose(bytedata, (0, 2, 1)).tostring()\n shape = shape[2], shape[0]\n elif ca == 0:\n strdata = transpose(bytedata, (1, 2, 0)).tostring()\n shape = shape[2], shape[1]\n if mode is None:\n if numch == 3:\n mode = 'RGB'\n else:\n mode = 'RGBA'\n if mode not in ['RGB', 'RGBA', 'YCbCr', 'CMYK']:\n raise ValueError(_errstr)\n if mode in ['RGB', 'YCbCr']:\n if numch != 3:\n raise ValueError('Invalid array shape for mode.')\n if mode in ['RGBA', 'CMYK']:\n if numch != 4:\n raise ValueError('Invalid array shape for mode.')\n image = Image.frombytes(mode, shape, strdata)\n return image\n",
"role": "user"
},
{
"content": "toimage",
"role": "assistant"
}
] |
def x(arr, size, interp='bilinear', mode=None):
im = toimage(arr, mode=mode)
ts = type(size)
if issubdtype(ts, int):
percent = size / 100.0
size = tuple((array(im.size) * percent).astype(int))
elif issubdtype(type(size), float):
size = tuple((array(im.size) * size).astype(int))
else:
size = size[1], size[0]
    func = {'nearest': 0, 'lanczos': 1, 'bilinear': 2, 'bicubic': 3, 'cubic': 3}
imnew = im.resize(size, resample=func[interp])
return fromimage(imnew)
|
imresize
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(arr, size, interp='bilinear', mode=None):\n im = toimage(arr, mode=mode)\n ts = type(size)\n if issubdtype(ts, int):\n percent = size / 100.0\n size = tuple((array(im.size) * percent).astype(int))\n elif issubdtype(type(size), float):\n size = tuple((array(im.size) * size).astype(int))\n else:\n size = size[1], size[0]\n func = {'nearest': 0, 'lanczos': 1, 'bilinear': 2, 'bicubic': 3, 'cubic': 3\n }\n imnew = im.resize(size, resample=func[interp])\n return fromimage(imnew)\n",
"role": "user"
},
{
"content": "imresize",
"role": "assistant"
}
] |
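The `imresize` row reproduces the removed `scipy.misc.imresize`, so the `size` argument carries three conventions: an int is a percentage, a float a fraction, and a tuple `(height, width)` is swapped to PIL's `(width, height)`. A sketch of the tuple case using PIL directly; the array shape is an assumption:

```python
import numpy as np
from PIL import Image

img = np.zeros((100, 200), dtype=np.uint8)   # height=100, width=200
# imresize(img, (32, 64)) boils down to:
out = np.array(Image.fromarray(img).resize((64, 32), resample=2))  # 2 = bilinear
print(out.shape)  # (32, 64)
```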
def x(self, channel):
if self.connection_tries > 10:
time.sleep(10)
if self.in_connecting[channel]:
return False
self.in_connecting[channel] = True
self.logger.debug('[%s] Wanna connect ...' % (channel,))
try:
if self.is_connected(channel) or self.online is False:
if self.is_connected(channel):
self.logger.debug('[%s] Already connected' % (channel,))
if self.online is False:
self.logger.debug('[%s] self.online=False' % (channel,))
return True
self.channel_lock[channel].acquire()
self.connected[channel] = None
self.registered[channel] = None
self.ssh_stream[channel] = False
self.ssh_channel[channel] = False
messages = None
stderrdata = ''
try:
if not self.ssh_stream[channel]:
self.logger.debug('[%s] Open ssh connection' % (channel,))
self.ssh_stream[channel] = create_ssh_stream(self.config,
exit_on_failure=False)
self.logger.debug('[%s] open channel' % (channel,))
            self.ssh_channel[channel] = self.ssh_stream[channel].get_transport().open_session()
self.ssh_channel[channel].exec_command('stream')
except (KeyboardInterrupt, SystemExit):
raise
except Exception as e:
self.connected[channel] = False
self.registered[channel] = False
self.logger.debug('[%s] connection failed: %s' % (channel, str(e)))
return False
finally:
self.channel_lock[channel].release()
if self.ssh_channel[channel]:
messages = self.wait_for_at_least_one_message(channel)
if not messages:
                stderrdata = self.ssh_channel[channel].recv_stderr().decode('utf-8').strip()
self.connected[channel] = False
self.registered[channel] = False
else:
self.logger.debug('[%s] opened and received %d messages' % (
channel, len(messages)))
self.connected[channel] = True
                self.registered[channel] = self.on_connect(self.was_connected_once[channel], channel)
self.connected_since[channel] = time.time()
                if channel == '' and self.registered[channel] and self.was_connected_once[channel]:
self.logger.info('Successfully reconnected.')
if not self.registered[channel]:
try:
self.ssh_channel[channel] and self.ssh_channel[channel].close()
except:
pass
try:
self.ssh_stream[channel] and self.ssh_stream[channel].close()
except:
pass
self.logger.debug(
'[%s] Client: registration failed. stderrdata: %s' % (
channel, stderrdata))
self.connected[channel] = False
try:
                self.logger.debug(
                    '[%s] Client: ssh_stream close due to registration failure'
                    % (channel,))
self.ssh_stream[channel].close()
except (KeyboardInterrupt, SystemExit):
raise
self.connection_tries += 1
            if not self.was_connected_once[channel] and self.go_offline_on_first_failed_attempt:
self.go_offline()
if stderrdata:
if ('Connection refused' not in stderrdata and
'Permission denied' not in stderrdata):
self.logger.error(stderrdata)
if 'Permission denied' in stderrdata:
if self.connection_tries < 3:
self.logger.warning(
'Access denied. Did you setup your SSH public key correctly and saved it in your AETROS Trainer user account?'
)
self.close()
sys.exit(1)
self.connection_error(channel,
'Connection error during connecting to %s: %s' % (self.host,
str(stderrdata)))
else:
self.was_connected_once[channel] = True
except Exception as error:
self.connection_error(channel, error)
finally:
self.in_connecting[channel] = False
return self.is_connected(channel)
|
connect
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, channel):\n if self.connection_tries > 10:\n time.sleep(10)\n if self.in_connecting[channel]:\n return False\n self.in_connecting[channel] = True\n self.logger.debug('[%s] Wanna connect ...' % (channel,))\n try:\n if self.is_connected(channel) or self.online is False:\n if self.is_connected(channel):\n self.logger.debug('[%s] Already connected' % (channel,))\n if self.online is False:\n self.logger.debug('[%s] self.online=False' % (channel,))\n return True\n self.channel_lock[channel].acquire()\n self.connected[channel] = None\n self.registered[channel] = None\n self.ssh_stream[channel] = False\n self.ssh_channel[channel] = False\n messages = None\n stderrdata = ''\n try:\n if not self.ssh_stream[channel]:\n self.logger.debug('[%s] Open ssh connection' % (channel,))\n self.ssh_stream[channel] = create_ssh_stream(self.config,\n exit_on_failure=False)\n self.logger.debug('[%s] open channel' % (channel,))\n self.ssh_channel[channel] = self.ssh_stream[channel].get_transport(\n ).open_session()\n self.ssh_channel[channel].exec_command('stream')\n except (KeyboardInterrupt, SystemExit):\n raise\n except Exception as e:\n self.connected[channel] = False\n self.registered[channel] = False\n self.logger.debug('[%s] connection failed: %s' % (channel, str(e)))\n return False\n finally:\n self.channel_lock[channel].release()\n if self.ssh_channel[channel]:\n messages = self.wait_for_at_least_one_message(channel)\n if not messages:\n stderrdata = self.ssh_channel[channel].recv_stderr().decode('utf-8'\n ).strip()\n self.connected[channel] = False\n self.registered[channel] = False\n else:\n self.logger.debug('[%s] opened and received %d messages' % (\n channel, len(messages)))\n self.connected[channel] = True\n self.registered[channel] = self.on_connect(self.\n was_connected_once[channel], channel)\n self.connected_since[channel] = time.time()\n if channel == '' and self.registered[channel\n ] and self.was_connected_once[channel]:\n self.logger.info('Successfully reconnected.')\n if not self.registered[channel]:\n try:\n self.ssh_channel[channel] and self.ssh_channel[channel].close()\n except:\n pass\n try:\n self.ssh_stream[channel] and self.ssh_stream[channel].close()\n except:\n pass\n self.logger.debug(\n '[%s] Client: registration failed. stderrdata: %s' % (\n channel, stderrdata))\n self.connected[channel] = False\n try:\n self.logger.debug(\n '[%s] Client: ssh_tream close due to registration failure'\n % (channel,))\n self.ssh_stream[channel].close()\n except (KeyboardInterrupt, SystemExit):\n raise\n self.connection_tries += 1\n if not self.was_connected_once[channel\n ] and self.go_offline_on_first_failed_attempt:\n self.go_offline()\n if stderrdata:\n if ('Connection refused' not in stderrdata and \n 'Permission denied' not in stderrdata):\n self.logger.error(stderrdata)\n if 'Permission denied' in stderrdata:\n if self.connection_tries < 3:\n self.logger.warning(\n 'Access denied. Did you setup your SSH public key correctly and saved it in your AETROS Trainer user account?'\n )\n self.close()\n sys.exit(1)\n self.connection_error(channel, \n 'Connection error during connecting to %s: %s' % (self.host,\n str(stderrdata)))\n else:\n self.was_connected_once[channel] = True\n except Exception as error:\n self.connection_error(channel, error)\n finally:\n self.in_connecting[channel] = False\n return self.is_connected(channel)\n",
"role": "user"
},
{
"content": "connect",
"role": "assistant"
}
] |
def x(self, channel):
self.stop_on_empty_queue[channel] = True
write_thread = self.thread_write_instances[channel]
thread_join_non_blocking(write_thread)
|
end_channel
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, channel):\n self.stop_on_empty_queue[channel] = True\n write_thread = self.thread_write_instances[channel]\n thread_join_non_blocking(write_thread)\n",
"role": "user"
},
{
"content": "end_channel",
"role": "assistant"
}
] |
def x(self):
if self.active and self.online is not False:
self.logger.debug('client sends last %s messages ...' % ([(str(i) +
':' + str(len(x))) for i, x in six.iteritems(self.queues)],))
for channel, messages in six.iteritems(self.queues):
for idx, message in enumerate(messages):
                self.logger.debug('[%s] %d: %s' % (channel, idx, str(message)[0:120]))
for channel in six.iterkeys(self.ssh_channel):
if channel != '':
self._end_channel(channel)
self._end_channel('')
|
wait_sending_last_messages
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.active and self.online is not False:\n self.logger.debug('client sends last %s messages ...' % ([(str(i) +\n ':' + str(len(x))) for i, x in six.iteritems(self.queues)],))\n for channel, messages in six.iteritems(self.queues):\n for idx, message in enumerate(messages):\n self.logger.debug('[%s] %d: %s' % (channel, idx, str(\n message)[0:120]))\n for channel in six.iterkeys(self.ssh_channel):\n if channel != '':\n self._end_channel(channel)\n self._end_channel('')\n",
"role": "user"
},
{
"content": "wait_sending_last_messages",
"role": "assistant"
}
] |
def x(self, channels, report=True, clear_end=True):
state = {'message': ''}
self.logger.debug('wait_until_queue_empty: report=%s %s' % (str(report),
str([(channel + ':' + str(len(self.queues[channel]))) for channel in
channels])))
queues = []
for channel in channels:
queues += self.queues[channel][:]
    def print_progress():
if report:
self.logger.debug('all_empty=%s' % (str(all_empty),))
sys.__stderr__.write('\x08' * len(state['message']))
sys.__stderr__.write('\x1b[K')
            state['message'] = '%.2f kB/s // %.2fkB of %.2fkB // %.2f%%' % (
                self.bytes_speed / 1024, self.bytes_sent / 1024,
                self.bytes_total / 1024,
                self.bytes_sent / self.bytes_total * 100 if self.bytes_total else 0)
sys.__stderr__.write(state['message'])
sys.__stderr__.flush()
while True:
all_empty = all(m['_sent'] for m in queues)
print_progress()
if all_empty:
break
time.sleep(0.2)
print_progress()
if report and clear_end:
sys.__stderr__.write('\x08' * len(state['message']))
sys.__stderr__.write('\x1b[K')
sys.__stderr__.flush()
|
wait_until_queue_empty
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, channels, report=True, clear_end=True):\n state = {'message': ''}\n self.logger.debug('wait_until_queue_empty: report=%s %s' % (str(report),\n str([(channel + ':' + str(len(self.queues[channel]))) for channel in\n channels])))\n queues = []\n for channel in channels:\n queues += self.queues[channel][:]\n\n def x():\n if report:\n self.logger.debug('all_empty=%s' % (str(all_empty),))\n sys.__stderr__.write('\\x08' * len(state['message']))\n sys.__stderr__.write('\\x1b[K')\n state['message'] = '%.2f kB/s // %.2fkB of %.2fkB // %.2f%%' % (\n self.bytes_speed / 1024, self.bytes_sent / 1024, self.\n bytes_total / 1024, self.bytes_sent / self.bytes_total * \n 100 if self.bytes_total else 0)\n sys.__stderr__.write(state['message'])\n sys.__stderr__.flush()\n while True:\n all_empty = all(m['_sent'] for m in queues)\n print_progress()\n if all_empty:\n break\n time.sleep(0.2)\n print_progress()\n if report and clear_end:\n sys.__stderr__.write('\\x08' * len(state['message']))\n sys.__stderr__.write('\\x1b[K')\n sys.__stderr__.flush()\n",
"role": "user"
},
{
"content": "wait_until_queue_empty",
"role": "assistant"
}
] |
def x(self, message, channel):
if not self.is_connected(channel):
return False
message['_sending'] = True
if '_data' in message:
data = message['_data']
else:
data = msgpack.packb(message, default=invalid_json_values)
self.bytes_total += len(data)
message['_bytes_sent'] = 0
message['_id'] = -1
if is_debug2():
        sys.__stderr__.write('[%s] send message: %s\n' % (channel, str(msgpack.unpackb(data))[0:180]))
try:
while data:
start = time.time()
bytes_sent = self.ssh_channel[channel].send(data)
data = data[bytes_sent:]
message['_bytes_sent'] += bytes_sent
self.bytes_sent += bytes_sent
end = time.time()
self.write_speeds.append(bytes_sent / (end - start))
speeds_len = len(self.write_speeds)
if speeds_len:
self.bytes_speed = sum(self.write_speeds) / speeds_len
if speeds_len > 10:
self.write_speeds = self.write_speeds[5:]
message['_sent'] = True
return True
except (KeyboardInterrupt, SystemExit):
if message['_sent']:
return message['_bytes_sent']
return False
except Exception as error:
self.connection_error(channel, error)
return False
|
send_message
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, message, channel):\n if not self.is_connected(channel):\n return False\n message['_sending'] = True\n if '_data' in message:\n data = message['_data']\n else:\n data = msgpack.packb(message, default=invalid_json_values)\n self.bytes_total += len(data)\n message['_bytes_sent'] = 0\n message['_id'] = -1\n if is_debug2():\n sys.__stderr__.write('[%s] send message: %s\\n' % (channel, str(\n msgpack.unpackb(data))[0:180]))\n try:\n while data:\n start = time.time()\n bytes_sent = self.ssh_channel[channel].send(data)\n data = data[bytes_sent:]\n message['_bytes_sent'] += bytes_sent\n self.bytes_sent += bytes_sent\n end = time.time()\n self.write_speeds.append(bytes_sent / (end - start))\n speeds_len = len(self.write_speeds)\n if speeds_len:\n self.bytes_speed = sum(self.write_speeds) / speeds_len\n if speeds_len > 10:\n self.write_speeds = self.write_speeds[5:]\n message['_sent'] = True\n return True\n except (KeyboardInterrupt, SystemExit):\n if message['_sent']:\n return message['_bytes_sent']\n return False\n except Exception as error:\n self.connection_error(channel, error)\n return False\n",
"role": "user"
},
{
"content": "send_message",
"role": "assistant"
}
] |
def x(self, channel):
unpacker = msgpack.Unpacker(encoding='utf-8')
while True:
try:
start = time.time()
chunk = self.ssh_channel[channel].recv(1024)
end = time.time()
self.read_speeds.append(len(chunk) / (end - start))
if len(self.read_speeds) > 20:
self.read_speeds = self.read_speeds[10:]
if chunk == b'':
                self.connection_error(channel, 'Connection broken')
return False
except Exception as error:
self.connection_error(channel, error)
raise
unpacker.feed(chunk)
messages = [m for m in unpacker]
if messages:
return messages
|
wait_for_at_least_one_message
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, channel):\n unpacker = msgpack.Unpacker(encoding='utf-8')\n while True:\n try:\n start = time.time()\n chunk = self.ssh_channel[channel].recv(1024)\n end = time.time()\n self.read_speeds.append(len(chunk) / (end - start))\n if len(self.read_speeds) > 20:\n self.read_speeds = self.read_speeds[10:]\n if chunk == b'':\n self.connection_error(channel, 'Connection broken w')\n return False\n except Exception as error:\n self.connection_error(channel, error)\n raise\n unpacker.feed(chunk)\n messages = [m for m in unpacker]\n if messages:\n return messages\n",
"role": "user"
},
{
"content": "wait_for_at_least_one_message",
"role": "assistant"
}
] |
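Both read rows lean on msgpack's streaming `Unpacker`: feed it raw chunks and iterate out only the messages that arrived complete. A minimal demonstration (note the row's `encoding='utf-8'` is the pre-1.0 msgpack spelling; current releases decode strings by default):

```python
import msgpack

unpacker = msgpack.Unpacker()
payload = msgpack.packb({'op': 'register'}) + msgpack.packb({'op': 'ping'})

unpacker.feed(payload[:7])   # partial first message (13 bytes long)
print(list(unpacker))        # [] -- nothing complete yet
unpacker.feed(payload[7:])
print(list(unpacker))        # [{'op': 'register'}, {'op': 'ping'}]
```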
def x(self, channel):
if not self.ssh_channel[channel].recv_ready():
return
try:
start = time.time()
chunk = self.ssh_channel[channel].recv(1024)
end = time.time()
self.read_speeds.append(len(chunk) / (end - start))
if len(self.read_speeds) > 20:
self.read_speeds = self.read_speeds[10:]
except Exception as error:
self.connection_error(channel, error)
raise
if chunk == b'':
self.connection_error(channel, 'Connection broken')
return None
self.read_unpacker.feed(chunk)
messages = [m for m in self.read_unpacker]
return messages if messages else None
|
read
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, channel):\n if not self.ssh_channel[channel].recv_ready():\n return\n try:\n start = time.time()\n chunk = self.ssh_channel[channel].recv(1024)\n end = time.time()\n self.read_speeds.append(len(chunk) / (end - start))\n if len(self.read_speeds) > 20:\n self.read_speeds = self.read_speeds[10:]\n except Exception as error:\n self.connection_error(channel, error)\n raise\n if chunk == b'':\n self.connection_error(channel, 'Connection broken')\n return None\n self.read_unpacker.feed(chunk)\n messages = [m for m in self.read_unpacker]\n return messages if messages else None\n",
"role": "user"
},
{
"content": "read",
"role": "assistant"
}
] |
def x():
if hasattr(signal, 'CTRL_C_EVENT'):
os.kill(os.getpid(), signal.CTRL_C_EVENT)
else:
pgid = os.getpgid(os.getpid())
if pgid == 1:
os.kill(os.getpid(), signal.SIGINT)
else:
os.killpg(os.getpgid(os.getpid()), signal.SIGINT)
|
raise_sigint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n if hasattr(signal, 'CTRL_C_EVENT'):\n os.kill(os.getpid(), signal.CTRL_C_EVENT)\n else:\n pgid = os.getpgid(os.getpid())\n if pgid == 1:\n os.kill(os.getpid(), signal.SIGINT)\n else:\n os.killpg(os.getpgid(os.getpid()), signal.SIGINT)\n",
"role": "user"
},
{
"content": "raise_sigint",
"role": "assistant"
}
] |
def x(size_bytes, precision=0):
if size_bytes == 1:
return '1 byte'
suffixes_table = [('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB',
2), ('PB', 2)]
num = float(size_bytes)
for suffix, precision in suffixes_table:
if num < 1024.0:
break
num /= 1024.0
if precision == 0:
formatted_size = '%d' % num
else:
formatted_size = str(round(num, ndigits=precision))
return '%s %s' % (formatted_size, suffix)
|
human_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(size_bytes, precision=0):\n if size_bytes == 1:\n return '1 byte'\n suffixes_table = [('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB',\n 2), ('PB', 2)]\n num = float(size_bytes)\n for suffix, precision in suffixes_table:\n if num < 1024.0:\n break\n num /= 1024.0\n if precision == 0:\n formatted_size = '%d' % num\n else:\n formatted_size = str(round(num, ndigits=precision))\n return '%s %s' % (formatted_size, suffix)\n",
"role": "user"
},
{
"content": "human_size",
"role": "assistant"
}
] |
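Tracing the `human_size` loop by hand: the per-suffix precision in the table deliberately shadows the `precision` parameter. A compact restatement with a few hand-verified values:

```python
def human_size(size_bytes, precision=0):
    if size_bytes == 1:
        return '1 byte'
    num = float(size_bytes)
    for suffix, precision in [('bytes', 0), ('KB', 0), ('MB', 1),
                              ('GB', 2), ('TB', 2), ('PB', 2)]:
        if num < 1024.0:
            break
        num /= 1024.0
    text = '%d' % num if precision == 0 else str(round(num, ndigits=precision))
    return '%s %s' % (text, suffix)

print(human_size(1))      # 1 byte
print(human_size(1536))   # 1 KB     ('%d' truncates 1.5 at KB precision 0)
print(human_size(10**9))  # 953.7 MB (the MB row switches to precision 1)
```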
def x(x, scale=True):
from PIL import Image
if x.ndim != 3:
raise Exception('Unsupported shape : ', str(x.shape),
'. Need (channels, width, height)')
if scale:
x += max(-np.min(x), 0)
x /= np.max(x)
x *= 255
if x.shape[0] == 3:
if x.dtype != 'uint8':
x = x.astype('uint8')
return Image.fromarray(x.astype('uint8'), 'RGB')
elif x.shape[0] == 1:
if x.dtype != 'uint8':
x = x.astype('uint8')
return Image.fromarray(x.reshape(x.shape[1], x.shape[2]), 'L')
else:
raise Exception('Unsupported channel number: ', x.shape[0])
|
array_to_img
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, scale=True):\n from PIL import Image\n if x.ndim != 3:\n raise Exception('Unsupported shape : ', str(x.shape),\n '. Need (channels, width, height)')\n if scale:\n x += max(-np.min(x), 0)\n x /= np.max(x)\n x *= 255\n if x.shape[0] == 3:\n if x.dtype != 'uint8':\n x = x.astype('uint8')\n return Image.fromarray(x.astype('uint8'), 'RGB')\n elif x.shape[0] == 1:\n if x.dtype != 'uint8':\n x = x.astype('uint8')\n return Image.fromarray(x.reshape(x.shape[1], x.shape[2]), 'L')\n else:\n raise Exception('Unsupported channel number: ', x.shape[0])\n",
"role": "user"
},
{
"content": "array_to_img",
"role": "assistant"
}
] |
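The `array_to_img` row expects channels-first input `(channels, height, width)`, while `PIL.Image.fromarray` wants channels-last, so a working RGB call site transposes first. A sketch of the scaling plus conversion; the random input is an assumption:

```python
import numpy as np
from PIL import Image

x = np.random.rand(3, 32, 32)   # channels-first float image
x += max(-np.min(x), 0)         # the row's scale=True branch
x /= np.max(x)
x *= 255

img = Image.fromarray(x.astype('uint8').transpose(1, 2, 0), 'RGB')
print(img.size)  # (32, 32)
```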
def x(self, number):
self.nb_val_samples = number
diff_to_batch = number % self.get_batch_size()
if diff_to_batch > 0:
self.nb_val_samples += self.get_batch_size() - diff_to_batch
import keras
if '1' != keras.__version__[0]:
self.nb_val_samples = self.nb_val_samples // self.get_batch_size()
|
set_generator_validation_nb
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, number):\n self.nb_val_samples = number\n diff_to_batch = number % self.get_batch_size()\n if diff_to_batch > 0:\n self.nb_val_samples += self.get_batch_size() - diff_to_batch\n import keras\n if '1' != keras.__version__[0]:\n self.nb_val_samples = self.nb_val_samples // self.get_batch_size()\n",
"role": "user"
},
{
"content": "set_generator_validation_nb",
"role": "assistant"
}
] |
def x(self, number):
self.samples_per_epoch = number
diff_to_batch = number % self.get_batch_size()
if diff_to_batch > 0:
self.samples_per_epoch += self.get_batch_size() - diff_to_batch
|
set_generator_training_nb
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, number):\n self.samples_per_epoch = number\n diff_to_batch = number % self.get_batch_size()\n if diff_to_batch > 0:\n self.samples_per_epoch += self.get_batch_size() - diff_to_batch\n",
"role": "user"
},
{
"content": "set_generator_training_nb",
"role": "assistant"
}
] |
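Both generator rows round the sample count up to the next batch multiple. Worked through with an assumed batch size of 32:

```python
batch_size = 32                            # assumption for the example
number = 1000
diff_to_batch = number % batch_size        # 1000 % 32 = 8
if diff_to_batch > 0:
    number += batch_size - diff_to_batch   # 1000 + 24
print(number)                              # 1024, the next multiple of 32
```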
def x(self, buffer, read_line=None):
bid = id(buffer)
self.attach_last_messages[bid] = b''
    def reader():
current_line = b''
        def handle_line(buf):
            if buf == b'':
                return
if read_line and callable(read_line):
res = read_line(buf)
if res is False:
return False
elif res is not None:
buf = res
if hasattr(buf, 'encode'):
buf = buf.encode('utf-8')
self.attach_last_messages[bid] += buf
if len(self.attach_last_messages[bid]) > 21 * 1024:
                self.attach_last_messages[bid] = self.attach_last_messages[bid][-20 * 1024:]
self.write(buf)
flush_char = b'\n'
while True:
try:
chunk = buffer.read(1)
if chunk == b'':
if current_line:
handle_line(current_line)
return
current_line += chunk
while flush_char in current_line:
pos = current_line.find(flush_char)
line = current_line[:pos + 1]
current_line = current_line[pos + 1:]
handle_line(line)
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
sys.__stderr__.write(traceback.format_exc() + '\n')
sys.__stderr__.flush()
thread = Thread(target=reader)
thread.daemon = True
thread.start()
    def wait():
thread_join_non_blocking(thread)
self.send_buffer()
return wait
|
attach
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, buffer, read_line=None):\n bid = id(buffer)\n self.attach_last_messages[bid] = b''\n\n def x():\n current_line = b''\n\n def x(buf):\n if chunk == b'':\n return\n if read_line and callable(read_line):\n res = read_line(buf)\n if res is False:\n return False\n elif res is not None:\n buf = res\n if hasattr(buf, 'encode'):\n buf = buf.encode('utf-8')\n self.attach_last_messages[bid] += buf\n if len(self.attach_last_messages[bid]) > 21 * 1024:\n self.attach_last_messages[bid] = self.attach_last_messages[bid\n ][-20 * 1024:]\n self.write(buf)\n flush_char = b'\\n'\n while True:\n try:\n chunk = buffer.read(1)\n if chunk == b'':\n if current_line:\n handle_line(current_line)\n return\n current_line += chunk\n while flush_char in current_line:\n pos = current_line.find(flush_char)\n line = current_line[:pos + 1]\n current_line = current_line[pos + 1:]\n handle_line(line)\n except (KeyboardInterrupt, SystemExit):\n raise\n except Exception:\n sys.__stderr__.write(traceback.format_exc() + '\\n')\n sys.__stderr__.flush()\n thread = Thread(target=reader)\n thread.daemon = True\n thread.start()\n\n def x():\n thread_join_non_blocking(thread)\n self.send_buffer()\n return wait\n",
"role": "user"
},
{
"content": "attach",
"role": "assistant"
}
] |
def x(image, ratio):
if not isinstance(image, np.ndarray):
raise ValueError('Expected ndarray')
if ratio < 1:
raise ValueError('Ratio must be greater than 1 (ratio=%f)' % ratio)
width = int(math.floor(image.shape[1] * ratio))
height = int(math.floor(image.shape[0] * ratio))
channels = image.shape[2]
out = np.ndarray((height, width, channels), dtype=np.uint8)
for x, y in np.ndindex((width, height)):
        out[y, x] = image[int(math.floor(y / ratio)), int(math.floor(x / ratio))]
return out
|
upscale
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image, ratio):\n if not isinstance(image, np.ndarray):\n raise ValueError('Expected ndarray')\n if ratio < 1:\n raise ValueError('Ratio must be greater than 1 (ratio=%f)' % ratio)\n width = int(math.floor(image.shape[1] * ratio))\n height = int(math.floor(image.shape[0] * ratio))\n channels = image.shape[2]\n out = np.ndarray((height, width, channels), dtype=np.uint8)\n for x, y in np.ndindex((width, height)):\n out[y, x] = image[int(math.floor(y / ratio)), int(math.floor(x /\n ratio))]\n return out\n",
"role": "user"
},
{
"content": "upscale",
"role": "assistant"
}
] |
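A tiny worked example of the `upscale` row's nearest-neighbor loop: each output pixel maps back to `floor(coord / ratio)` in the source. Input values are chosen for the example:

```python
import math
import numpy as np

image = np.array([[[0], [255]]], dtype=np.uint8)   # 1x2, single channel
ratio = 2.0
height, width = int(1 * ratio), int(2 * ratio)
out = np.ndarray((height, width, 1), dtype=np.uint8)
for x, y in np.ndindex((width, height)):
    out[y, x] = image[int(math.floor(y / ratio)), int(math.floor(x / ratio))]
print(out[:, :, 0])
# [[  0   0 255 255]
#  [  0   0 255 255]]
```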
def x(image, height, width, channels=None, resize_mode=None):
if resize_mode is None:
resize_mode = 'squash'
if resize_mode not in ['crop', 'squash', 'fill', 'half_crop']:
raise ValueError('resize_mode "%s" not supported' % resize_mode)
if channels not in [None, 1, 3]:
raise ValueError('unsupported number of channels: %s' % channels)
if isinstance(image, PIL.Image.Image):
if channels is None:
image_mode = image.mode
if image_mode == 'L':
channels = 1
elif image_mode == 'RGB':
channels = 3
else:
raise ValueError('unknown image mode "%s"' % image_mode)
elif channels == 1:
image_mode = 'L'
elif channels == 3:
image_mode = 'RGB'
if image.mode != image_mode:
image = image.convert(image_mode)
image = np.array(image)
elif isinstance(image, np.ndarray):
if image.dtype != np.uint8:
image = image.astype(np.uint8)
if image.ndim == 3 and image.shape[2] == 1:
image = image.reshape(image.shape[:2])
if channels is None:
if image.ndim == 2:
channels = 1
elif image.ndim == 3 and image.shape[2] == 3:
channels = 3
else:
raise ValueError('invalid image shape: %s' % (image.shape,))
elif channels == 1:
if image.ndim != 2:
if image.ndim == 3 and image.shape[2] == 3:
                    image = np.dot(image, [0.299, 0.587, 0.114]).astype(np.uint8)
                else:
                    raise ValueError('invalid image shape: %s' % (image.shape,))
elif channels == 3:
if image.ndim == 2:
image = np.repeat(image, 3).reshape(image.shape + (3,))
elif image.shape[2] != 3:
raise ValueError('invalid image shape: %s' % (image.shape,))
else:
raise ValueError(
'resize_image() expected a PIL.Image.Image or a numpy.ndarray')
if image.shape[0] == height and image.shape[1] == width:
return image
interp = 'bilinear'
width_ratio = float(image.shape[1]) / width
height_ratio = float(image.shape[0]) / height
if resize_mode == 'squash' or width_ratio == height_ratio:
return imresize(image, (height, width), interp=interp)
elif resize_mode == 'crop':
if width_ratio > height_ratio:
resize_height = height
resize_width = int(round(image.shape[1] / height_ratio))
else:
resize_width = width
resize_height = int(round(image.shape[0] / width_ratio))
image = imresize(image, (resize_height, resize_width), interp=interp)
if width_ratio > height_ratio:
start = int(round((resize_width - width) / 2.0))
return image[:, start:start + width]
else:
start = int(round((resize_height - height) / 2.0))
return image[start:start + height, :]
else:
if resize_mode == 'fill':
if width_ratio > height_ratio:
resize_width = width
resize_height = int(round(image.shape[0] / width_ratio))
if (height - resize_height) % 2 == 1:
resize_height += 1
else:
resize_height = height
resize_width = int(round(image.shape[1] / height_ratio))
if (width - resize_width) % 2 == 1:
resize_width += 1
            image = imresize(image, (resize_height, resize_width), interp=interp)
elif resize_mode == 'half_crop':
new_ratio = (width_ratio + height_ratio) / 2.0
resize_width = int(round(image.shape[1] / new_ratio))
resize_height = int(round(image.shape[0] / new_ratio))
            if width_ratio > height_ratio and (height - resize_height) % 2 == 1:
                resize_height += 1
            elif width_ratio < height_ratio and (width - resize_width) % 2 == 1:
                resize_width += 1
            image = imresize(image, (resize_height, resize_width), interp=interp)
if width_ratio > height_ratio:
start = int(round((resize_width - width) / 2.0))
image = image[:, start:start + width]
else:
start = int(round((resize_height - height) / 2.0))
image = image[start:start + height, :]
else:
raise Exception('unrecognized resize_mode "%s"' % resize_mode)
if width_ratio > height_ratio:
            padding = (height - resize_height) // 2
noise_size = padding, width
if channels > 1:
noise_size += channels,
noise = np.random.randint(0, 255, noise_size).astype('uint8')
image = np.concatenate((noise, image, noise), axis=0)
else:
            padding = (width - resize_width) // 2
noise_size = height, padding
if channels > 1:
noise_size += channels,
noise = np.random.randint(0, 255, noise_size).astype('uint8')
image = np.concatenate((noise, image, noise), axis=1)
return image
|
resize_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image, height, width, channels=None, resize_mode=None):\n if resize_mode is None:\n resize_mode = 'squash'\n if resize_mode not in ['crop', 'squash', 'fill', 'half_crop']:\n raise ValueError('resize_mode \"%s\" not supported' % resize_mode)\n if channels not in [None, 1, 3]:\n raise ValueError('unsupported number of channels: %s' % channels)\n if isinstance(image, PIL.Image.Image):\n if channels is None:\n image_mode = image.mode\n if image_mode == 'L':\n channels = 1\n elif image_mode == 'RGB':\n channels = 3\n else:\n raise ValueError('unknown image mode \"%s\"' % image_mode)\n elif channels == 1:\n image_mode = 'L'\n elif channels == 3:\n image_mode = 'RGB'\n if image.mode != image_mode:\n image = image.convert(image_mode)\n image = np.array(image)\n elif isinstance(image, np.ndarray):\n if image.dtype != np.uint8:\n image = image.astype(np.uint8)\n if image.ndim == 3 and image.shape[2] == 1:\n image = image.reshape(image.shape[:2])\n if channels is None:\n if image.ndim == 2:\n channels = 1\n elif image.ndim == 3 and image.shape[2] == 3:\n channels = 3\n else:\n raise ValueError('invalid image shape: %s' % (image.shape,))\n elif channels == 1:\n if image.ndim != 2:\n if image.ndim == 3 and image.shape[2] == 3:\n image = np.dot(image, [0.299, 0.587, 0.114]).astype(np.\n uint8)\n else:\n raise ValueError('invalid image shape: %s' % (image.shape,)\n )\n elif channels == 3:\n if image.ndim == 2:\n image = np.repeat(image, 3).reshape(image.shape + (3,))\n elif image.shape[2] != 3:\n raise ValueError('invalid image shape: %s' % (image.shape,))\n else:\n raise ValueError(\n 'resize_image() expected a PIL.Image.Image or a numpy.ndarray')\n if image.shape[0] == height and image.shape[1] == width:\n return image\n interp = 'bilinear'\n width_ratio = float(image.shape[1]) / width\n height_ratio = float(image.shape[0]) / height\n if resize_mode == 'squash' or width_ratio == height_ratio:\n return imresize(image, (height, width), interp=interp)\n elif resize_mode == 'crop':\n if width_ratio > height_ratio:\n resize_height = height\n resize_width = int(round(image.shape[1] / height_ratio))\n else:\n resize_width = width\n resize_height = int(round(image.shape[0] / width_ratio))\n image = imresize(image, (resize_height, resize_width), interp=interp)\n if width_ratio > height_ratio:\n start = int(round((resize_width - width) / 2.0))\n return image[:, start:start + width]\n else:\n start = int(round((resize_height - height) / 2.0))\n return image[start:start + height, :]\n else:\n if resize_mode == 'fill':\n if width_ratio > height_ratio:\n resize_width = width\n resize_height = int(round(image.shape[0] / width_ratio))\n if (height - resize_height) % 2 == 1:\n resize_height += 1\n else:\n resize_height = height\n resize_width = int(round(image.shape[1] / height_ratio))\n if (width - resize_width) % 2 == 1:\n resize_width += 1\n image = imresize(image, (resize_height, resize_width), interp=\n interp)\n elif resize_mode == 'half_crop':\n new_ratio = (width_ratio + height_ratio) / 2.0\n resize_width = int(round(image.shape[1] / new_ratio))\n resize_height = int(round(image.shape[0] / new_ratio))\n if width_ratio > height_ratio and (height - resize_height\n ) % 2 == 1:\n resize_height += 1\n elif width_ratio < height_ratio and (width - resize_width\n ) % 2 == 1:\n resize_width += 1\n image = imresize(image, (resize_height, resize_width), interp=\n interp)\n if width_ratio > height_ratio:\n start = int(round((resize_width - width) / 2.0))\n image = image[:, start:start + width]\n else:\n 
start = int(round((resize_height - height) / 2.0))\n image = image[start:start + height, :]\n else:\n raise Exception('unrecognized resize_mode \"%s\"' % resize_mode)\n if width_ratio > height_ratio:\n padding = (height - resize_height) / 2\n noise_size = padding, width\n if channels > 1:\n noise_size += channels,\n noise = np.random.randint(0, 255, noise_size).astype('uint8')\n image = np.concatenate((noise, image, noise), axis=0)\n else:\n padding = (width - resize_width) / 2\n noise_size = height, padding\n if channels > 1:\n noise_size += channels,\n noise = np.random.randint(0, 255, noise_size).astype('uint8')\n image = np.concatenate((noise, image, noise), axis=1)\n return image\n",
"role": "user"
},
{
"content": "resize_image",
"role": "assistant"
}
] |
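A quick usage sketch for the resize_image record above -- a hypothetical call, assuming the function is importable under that name and that numpy plus the module's imresize dependency are available:

    import numpy as np
    # Random 300x500 RGB test image.
    img = np.random.randint(0, 255, (300, 500, 3)).astype('uint8')
    # 'crop' keeps the aspect ratio and center-crops the longer side;
    # 'fill' and 'half_crop' pad the border with random noise instead.
    out = resize_image(img, 224, 224, channels=3, resize_mode='crop')
    assert out.shape == (224, 224, 3)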
def x(image):
if image is None:
return None
elif isinstance(image, PIL.Image.Image):
pass
elif isinstance(image, np.ndarray):
image = PIL.Image.fromarray(image)
else:
raise ValueError('image must be a PIL.Image or a np.ndarray')
fmt = image.format
if not fmt:
fmt = 'jpeg'
else:
fmt = fmt.lower()
    import base64
    import io
    string_buf = io.BytesIO()
    image.save(string_buf, format=fmt)
    data = base64.b64encode(string_buf.getvalue()).decode('ascii')
return 'data:image/%s;base64,%s' % (fmt, data)
|
embed_image_html
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image):\n if image is None:\n return None\n elif isinstance(image, PIL.Image.Image):\n pass\n elif isinstance(image, np.ndarray):\n image = PIL.Image.fromarray(image)\n else:\n raise ValueError('image must be a PIL.Image or a np.ndarray')\n fmt = image.format\n if not fmt:\n fmt = 'jpeg'\n else:\n fmt = fmt.lower()\n string_buf = StringIO()\n image.save(string_buf, format=fmt)\n data = string_buf.getvalue().encode('base64').replace('\\n', '')\n return 'data:image/%s;base64,%s' % (fmt, data)\n",
"role": "user"
},
{
"content": "embed_image_html",
"role": "assistant"
}
] |
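A minimal sketch of calling the embed_image_html record above (hypothetical usage; assumes the function is importable and PIL/numpy are present):

    import numpy as np
    img = np.zeros((8, 8, 3), dtype=np.uint8)  # tiny black square
    uri = embed_image_html(img)
    # The result is a data URI such as 'data:image/jpeg;base64,/9j/...'
    # that can be placed directly into an <img src="..."> attribute.
    assert uri.startswith('data:image/jpeg;base64,')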
def x(image, bboxes, color='red', width=1):
def x(bbox, n):
l = min(bbox[0][0], bbox[1][0])
r = max(bbox[0][0], bbox[1][0])
t = min(bbox[0][1], bbox[1][1])
b = max(bbox[0][1], bbox[1][1])
return (l - n, t - n), (r + n, b + n)
from PIL import Image, ImageDraw
draw = ImageDraw.Draw(image)
for bbox in bboxes:
for n in range(width):
draw.rectangle(expanded_bbox(bbox, n), outline=color)
return image
|
add_bboxes_to_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image, bboxes, color='red', width=1):\n\n def x(bbox, n):\n l = min(bbox[0][0], bbox[1][0])\n r = max(bbox[0][0], bbox[1][0])\n t = min(bbox[0][1], bbox[1][1])\n b = max(bbox[0][1], bbox[1][1])\n return (l - n, t - n), (r + n, b + n)\n from PIL import Image, ImageDraw\n draw = ImageDraw.Draw(image)\n for bbox in bboxes:\n for n in range(width):\n draw.rectangle(expanded_bbox(bbox, n), outline=color)\n return image\n",
"role": "user"
},
{
"content": "add_bboxes_to_image",
"role": "assistant"
}
] |
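A short, hedged example for the add_bboxes_to_image record above; the bbox format ((x1, y1), (x2, y2)) is inferred from the min/max handling in its inner expanded_bbox helper:

    from PIL import Image
    img = Image.new('RGB', (100, 100))
    # Draw one 2px-wide red rectangle from (10, 10) to (40, 40).
    img = add_bboxes_to_image(img, [((10, 10), (40, 40))], color='red', width=2)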
def x(data, allow_heatmap=True, normalize=True, min_img_dim=100, max_width=
1200, channel_order='RGB', colormap='jet'):
if channel_order not in ['RGB', 'BGR']:
raise ValueError('Unsupported channel_order %s' % channel_order)
if data.ndim == 1:
data = data[:, np.newaxis, np.newaxis]
elif data.ndim == 2:
data = data.reshape((data.shape[0] * data.shape[1], 1, 1))
elif data.ndim == 3:
if data.shape[0] == 3:
if channel_order == 'BGR':
data = data[[2, 1, 0], ...]
data = data.transpose(1, 2, 0)
data = data[np.newaxis, ...]
else:
pass
elif data.ndim == 4:
if data.shape[0] == 3:
data = data.transpose(1, 2, 3, 0)
if channel_order == 'BGR':
data = data[:, :, :, [2, 1, 0]]
elif data.shape[1] == 3:
data = data.transpose(0, 2, 3, 1)
if channel_order == 'BGR':
data = data[:, :, :, [2, 1, 0]]
else:
data = data.reshape((data.shape[0] * data.shape[1], data.shape[
2], data.shape[3]))
else:
raise RuntimeError('unrecognized data shape: %s' % (data.shape,))
return get_layer_vis_square_raw(data, allow_heatmap, normalize,
min_img_dim, max_width, colormap)
|
get_layer_vis_square
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, allow_heatmap=True, normalize=True, min_img_dim=100, max_width=\n 1200, channel_order='RGB', colormap='jet'):\n if channel_order not in ['RGB', 'BGR']:\n raise ValueError('Unsupported channel_order %s' % channel_order)\n if data.ndim == 1:\n data = data[:, np.newaxis, np.newaxis]\n elif data.ndim == 2:\n data = data.reshape((data.shape[0] * data.shape[1], 1, 1))\n elif data.ndim == 3:\n if data.shape[0] == 3:\n if channel_order == 'BGR':\n data = data[[2, 1, 0], ...]\n data = data.transpose(1, 2, 0)\n data = data[np.newaxis, ...]\n else:\n pass\n elif data.ndim == 4:\n if data.shape[0] == 3:\n data = data.transpose(1, 2, 3, 0)\n if channel_order == 'BGR':\n data = data[:, :, :, [2, 1, 0]]\n elif data.shape[1] == 3:\n data = data.transpose(0, 2, 3, 1)\n if channel_order == 'BGR':\n data = data[:, :, :, [2, 1, 0]]\n else:\n data = data.reshape((data.shape[0] * data.shape[1], data.shape[\n 2], data.shape[3]))\n else:\n raise RuntimeError('unrecognized data shape: %s' % (data.shape,))\n return get_layer_vis_square_raw(data, allow_heatmap, normalize,\n min_img_dim, max_width, colormap)\n",
"role": "user"
},
{
"content": "get_layer_vis_square",
"role": "assistant"
}
] |
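A hedged usage sketch for the get_layer_vis_square record above (assuming the function and its get_layer_vis_square_raw helper are importable):

    import numpy as np
    # 12 RGB feature maps in channels-first (N, C, H, W) layout.
    data = np.random.rand(12, 3, 8, 8).astype('float32')
    # Transposed to NHWC internally, then tiled into one square grid image.
    vis = get_layer_vis_square(data, channel_order='RGB')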
def x(self):
if os.getenv('AETROS_GIT_INDEX_FILE'):
self.index_path = os.getenv('AETROS_GIT_INDEX_FILE')
return
import tempfile
h, path = tempfile.mkstemp('aetros-git', '', self.temp_path)
self.index_path = path
os.close(h)
os.unlink(self.index_path)
self.logger.debug('GIT_INDEX_FILE created at ' + self.index_path)
|
prepare_index_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if os.getenv('AETROS_GIT_INDEX_FILE'):\n self.index_path = os.getenv('AETROS_GIT_INDEX_FILE')\n return\n import tempfile\n h, path = tempfile.mkstemp('aetros-git', '', self.temp_path)\n self.index_path = path\n os.close(h)\n os.unlink(self.index_path)\n self.logger.debug('GIT_INDEX_FILE created at ' + self.index_path)\n",
"role": "user"
},
{
"content": "prepare_index_file",
"role": "assistant"
}
] |
def x(self, job_id, checkout=False):
self.job_id = job_id
self.logger.debug('Git fetch job reference %s' % (self.ref_head,))
out, code, err = self.command_exec(['ls-remote', 'origin', self.ref_head])
if code:
self.logger.error('Could not find the job ' + job_id +
' on the server. Are you online and does the job exist?')
sys.exit(1)
try:
self.command_exec(['fetch', '-f', '-n', 'origin', self.ref_head +
':' + self.ref_head])
except Exception:
self.logger.error('Could not load job information for ' + job_id +
'. You need to be online to start pre-configured jobs.')
raise
self.read_job(job_id, checkout)
|
fetch_job
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, job_id, checkout=False):\n self.job_id = job_id\n self.logger.debug('Git fetch job reference %s' % (self.ref_head,))\n out, code, err = self.command_exec(['ls-remote', 'origin', self.ref_head])\n if code:\n self.logger.error('Could not find the job ' + job_id +\n ' on the server. Are you online and does the job exist?')\n sys.exit(1)\n try:\n self.command_exec(['fetch', '-f', '-n', 'origin', self.ref_head +\n ':' + self.ref_head])\n except Exception:\n self.logger.error('Could not load job information for ' + job_id +\n '. You need to be online to start pre-configured jobs.')\n raise\n self.read_job(job_id, checkout)\n",
"role": "user"
},
{
"content": "fetch_job",
"role": "assistant"
}
] |
def x(self, job_id, checkout=False):
self.job_id = job_id
commit = self.get_head_commit()
self.logger.debug('Job ref points to ' + commit)
self.command_exec(['read-tree', self.ref_head])
if checkout:
self.logger.debug('Working directory in ' + self.work_tree)
if os.path.exists(self.work_tree):
shutil.rmtree(self.work_tree)
os.makedirs(self.work_tree)
self.command_exec(['--work-tree', self.work_tree, 'checkout', self.
ref_head, '--', '.'])
|
read_job
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, job_id, checkout=False):\n self.job_id = job_id\n commit = self.get_head_commit()\n self.logger.debug('Job ref points to ' + commit)\n self.command_exec(['read-tree', self.ref_head])\n if checkout:\n self.logger.debug('Working directory in ' + self.work_tree)\n if os.path.exists(self.work_tree):\n shutil.rmtree(self.work_tree)\n os.makedirs(self.work_tree)\n self.command_exec(['--work-tree', self.work_tree, 'checkout', self.\n ref_head, '--', '.'])\n",
"role": "user"
},
{
"content": "read_job",
"role": "assistant"
}
] |
def x(self, data):
self.add_file('aetros/job.json', simplejson.dumps(data, indent=4))
tree_id = self.write_tree()
self.job_id = self.command_exec(['commit-tree', '-m', 'JOB_CREATED',
tree_id])[0].decode('utf-8').strip()
out, code, err = self.command_exec(['show-ref', self.ref_head],
allowed_to_fail=True)
if not code:
self.logger.warning(
'Generated job id already exists, because exact same experiment values given. Ref '
+ self.ref_head)
self.command_exec(['update-ref', self.ref_head, self.job_id])
if not os.path.exists(self.work_tree):
os.makedirs(self.work_tree)
self.command_exec(['--work-tree', self.work_tree, 'checkout', self.
ref_head, '--', '.'])
return self.job_id
|
create_job_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data):\n self.add_file('aetros/job.json', simplejson.dumps(data, indent=4))\n tree_id = self.write_tree()\n self.job_id = self.command_exec(['commit-tree', '-m', 'JOB_CREATED',\n tree_id])[0].decode('utf-8').strip()\n out, code, err = self.command_exec(['show-ref', self.ref_head],\n allowed_to_fail=True)\n if not code:\n self.logger.warning(\n 'Generated job id already exists, because exact same experiment values given. Ref '\n + self.ref_head)\n self.command_exec(['update-ref', self.ref_head, self.job_id])\n if not os.path.exists(self.work_tree):\n os.makedirs(self.work_tree)\n self.command_exec(['--work-tree', self.work_tree, 'checkout', self.\n ref_head, '--', '.'])\n return self.job_id\n",
"role": "user"
},
{
"content": "create_job_id",
"role": "assistant"
}
] |
def x(self):
self.active_thread = True
self.active_push = True
self.thread_push_instance = Thread(target=self.thread_push)
self.thread_push_instance.daemon = True
self.thread_push_instance.start()
|
start_push_sync
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.active_thread = True\n self.active_push = True\n self.thread_push_instance = Thread(target=self.thread_push)\n self.thread_push_instance.daemon = True\n self.thread_push_instance.start()\n",
"role": "user"
},
{
"content": "start_push_sync",
"role": "assistant"
}
] |
def x(self):
self.active_thread = False
if self.thread_push_instance and self.thread_push_instance.isAlive():
self.thread_push_instance.join()
with self.batch_commit('STREAM_END'):
for path, handle in six.iteritems(self.streamed_files.copy()):
full_path = os.path.normpath(self.temp_path + '/stream-blob/' +
self.job_id + '/' + path)
self.logger.debug('Git stream end for file: ' + full_path)
del self.streamed_files[path]
try:
self.stream_files_lock.acquire()
if not handle.closed:
handle.flush()
handle.close()
finally:
self.stream_files_lock.release()
with open(full_path, 'r') as f:
self.commit_file(path, path, f.read())
if not self.keep_stream_files:
os.unlink(full_path)
with self.batch_commit('STORE_END'):
for path, bar in six.iteritems(self.store_files.copy()):
full_path = os.path.normpath(self.temp_path + '/store-blob/' +
self.job_id + '/' + path)
self.logger.debug('Git store end for file: ' + full_path)
del self.store_files[path]
try:
self.stream_files_lock.acquire()
self.commit_file(path, path, open(full_path, 'r').read())
finally:
self.stream_files_lock.release()
if not self.keep_stream_files:
os.unlink(full_path)
|
stop
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.active_thread = False\n if self.thread_push_instance and self.thread_push_instance.isAlive():\n self.thread_push_instance.join()\n with self.batch_commit('STREAM_END'):\n for path, handle in six.iteritems(self.streamed_files.copy()):\n full_path = os.path.normpath(self.temp_path + '/stream-blob/' +\n self.job_id + '/' + path)\n self.logger.debug('Git stream end for file: ' + full_path)\n del self.streamed_files[path]\n try:\n self.stream_files_lock.acquire()\n if not handle.closed:\n handle.flush()\n handle.close()\n finally:\n self.stream_files_lock.release()\n with open(full_path, 'r') as f:\n self.commit_file(path, path, f.read())\n if not self.keep_stream_files:\n os.unlink(full_path)\n with self.batch_commit('STORE_END'):\n for path, bar in six.iteritems(self.store_files.copy()):\n full_path = os.path.normpath(self.temp_path + '/store-blob/' +\n self.job_id + '/' + path)\n self.logger.debug('Git store end for file: ' + full_path)\n del self.store_files[path]\n try:\n self.stream_files_lock.acquire()\n self.commit_file(path, path, open(full_path, 'r').read())\n finally:\n self.stream_files_lock.release()\n if not self.keep_stream_files:\n os.unlink(full_path)\n",
"role": "user"
},
{
"content": "stop",
"role": "assistant"
}
] |
def x(self, message):
class controlled_execution:
def x(self, git, message):
self.git = git
self.message = message
def x(self):
self.git.git_batch_commit = True
if self.git.job_id:
self.git.read_tree(self.git.ref_head)
def x(self, type, value, traceback):
self.git.git_batch_commit = False
if not self.git.git_batch_commit_messages:
return
commit_message = self.message
if self.git.git_batch_commit_messages:
commit_message = commit_message + '\n\n' + '\n'.join(self.
git.git_batch_commit_messages)
self.git.git_batch_commit_messages = []
self.git.commit_index(commit_message)
return controlled_execution(self, message)
|
batch_commit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, message):\n\n\n class controlled_execution:\n\n def x(self, git, message):\n self.git = git\n self.message = message\n\n def x(self):\n self.git.git_batch_commit = True\n if self.git.job_id:\n self.git.read_tree(self.git.ref_head)\n\n def x(self, type, value, traceback):\n self.git.git_batch_commit = False\n if not self.git.git_batch_commit_messages:\n return\n commit_message = self.message\n if self.git.git_batch_commit_messages:\n commit_message = commit_message + '\\n\\n' + '\\n'.join(self.\n git.git_batch_commit_messages)\n self.git.git_batch_commit_messages = []\n self.git.commit_index(commit_message)\n return controlled_execution(self, message)\n",
"role": "user"
},
{
"content": "batch_commit",
"role": "assistant"
}
] |
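An illustrative sketch of the batch_commit context manager above, paired with the commit_file record later in this table (names assumed; `git` stands for an instance of the wrapper class these methods belong to):

    # Inside the block, commit_file only stages files and collects messages;
    # on exit, everything is folded into a single commit.
    with git.batch_commit('JOB_UPDATE'):
        git.commit_file('set status', 'aetros/job/status.json', 'TRAINING')
        git.commit_file('set epoch', 'aetros/job/epoch.json', '1')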
def x(self, path, data, fast_lane=True):
self.stream_files_lock.acquire()
try:
full_path = os.path.normpath(self.temp_path + '/store-blob/' + self
.job_id + '/' + path)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
if hasattr(data, 'encode'):
data = data.encode('utf-8', 'replace')
already_set = path in self.store_files and self.store_files[path
] == data
if is_debug3():
sys.__stderr__.write(
'git:store_file(%s, %s, %s), already_set=%s\n' % (str(path),
str(data)[0:180], str(fast_lane), str(already_set)))
if already_set:
return
open(full_path, 'wb').write(data)
self.store_files[path] = data
if self.client.online is not False:
self.client.send({'type': 'store-blob', 'path': path, 'data':
data}, channel='' if fast_lane else 'files')
finally:
self.stream_files_lock.release()
|
store_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, data, fast_lane=True):\n self.stream_files_lock.acquire()\n try:\n full_path = os.path.normpath(self.temp_path + '/store-blob/' + self\n .job_id + '/' + path)\n if not os.path.exists(os.path.dirname(full_path)):\n os.makedirs(os.path.dirname(full_path))\n if hasattr(data, 'encode'):\n data = data.encode('utf-8', 'replace')\n already_set = path in self.store_files and self.store_files[path\n ] == data\n if is_debug3():\n sys.__stderr__.write(\n 'git:store_file(%s, %s, %s), already_set=%s\\n' % (str(path),\n str(data)[0:180], str(fast_lane), str(already_set)))\n if already_set:\n return\n open(full_path, 'wb').write(data)\n self.store_files[path] = data\n if self.client.online is not False:\n self.client.send({'type': 'store-blob', 'path': path, 'data':\n data}, channel='' if fast_lane else 'files')\n finally:\n self.stream_files_lock.release()\n",
"role": "user"
},
{
"content": "store_file",
"role": "assistant"
}
] |
def x(self, path, fast_lane=True):
full_path = os.path.normpath(self.temp_path + '/stream-blob/' + self.
job_id + '/' + path)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
handle = open(full_path, 'wb')
self.streamed_files[path] = handle
class Stream:
def x(self, git):
self.git = git
def x(self, data):
if path not in self.git.streamed_files:
return
if hasattr(data, 'encode'):
data = data.encode('utf-8', 'replace')
try:
self.git.stream_files_lock.acquire()
if not handle.closed:
handle.write(data)
handle.flush()
except IOError as e:
handle.close()
if 'No space left' in e.__str__():
sys.stderr.write(traceback.format_exc() + '\n')
self.git.logger.error(e.__str__())
finally:
self.git.stream_files_lock.release()
if self.git.client.online is not False:
self.git.client.send({'type': 'stream-blob', 'path': path,
'data': data}, channel='' if fast_lane else 'files')
return Stream(self)
|
stream_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, fast_lane=True):\n full_path = os.path.normpath(self.temp_path + '/stream-blob/' + self.\n job_id + '/' + path)\n if not os.path.exists(os.path.dirname(full_path)):\n os.makedirs(os.path.dirname(full_path))\n handle = open(full_path, 'wb')\n self.streamed_files[path] = handle\n\n\n class Stream:\n\n def x(self, git):\n self.git = git\n\n def x(self, data):\n if path not in self.git.streamed_files:\n return\n if hasattr(data, 'encode'):\n data = data.encode('utf-8', 'replace')\n try:\n self.git.stream_files_lock.acquire()\n if not handle.closed:\n handle.write(data)\n handle.flush()\n except IOError as e:\n handle.close()\n if 'No space left' in e.__str__():\n sys.stderr.write(traceback.format_exc() + '\\n')\n self.git.logger.error(e.__str__())\n finally:\n self.git.stream_files_lock.release()\n if self.git.client.online is not False:\n self.git.client.send({'type': 'stream-blob', 'path': path,\n 'data': data}, channel='' if fast_lane else 'files')\n return Stream(self)\n",
"role": "user"
},
{
"content": "stream_file",
"role": "assistant"
}
] |
def x(self, mode, blob_id, path):
self.command_exec(['update-index', '--add', '--cacheinfo', mode,
blob_id, path])
|
add_index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, mode, blob_id, path):\n self.command_exec(['update-index', '--add', '--cacheinfo', mode,\n blob_id, path])\n",
"role": "user"
},
{
"content": "add_index",
"role": "assistant"
}
] |
def x(self, git_path, content):
blob_id = self.write_blob(content)
self.add_index('100644', blob_id, git_path)
|
add_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, git_path, content):\n blob_id = self.write_blob(content)\n self.add_index('100644', blob_id, git_path)\n",
"role": "user"
},
{
"content": "add_file",
"role": "assistant"
}
] |
def x(self, path, work_tree, verbose=True):
args = ['--work-tree', work_tree, 'add', '-f']
if verbose:
args.append('--verbose')
args.append(path)
self.command_exec(args, show_output=verbose)
|
add_file_path_in_work_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, work_tree, verbose=True):\n args = ['--work-tree', work_tree, 'add', '-f']\n if verbose:\n args.append('--verbose')\n args.append(path)\n self.command_exec(args, show_output=verbose)\n",
"role": "user"
},
{
"content": "add_file_path_in_work_tree",
"role": "assistant"
}
] |
def x(self, message, path, content):
if self.git_batch_commit:
self.add_file(path, content)
self.git_batch_commit_messages.append(message)
else:
with self.lock_write():
if self.job_id:
self.read_tree(self.ref_head)
self.add_file(path, content)
return self.commit_index(message)
|
commit_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, message, path, content):\n if self.git_batch_commit:\n self.add_file(path, content)\n self.git_batch_commit_messages.append(message)\n else:\n with self.lock_write():\n if self.job_id:\n self.read_tree(self.ref_head)\n self.add_file(path, content)\n return self.commit_index(message)\n",
"role": "user"
},
{
"content": "commit_file",
"role": "assistant"
}
] |
def x(self, latest_commit_sha):
base = ['git', '--bare', '--git-dir', self.git_path]
object_shas = []
summary = {'commits': [], 'trees': [], 'files': []}
def x(commit):
if commit in self.synced_object_shas or commit in object_shas:
return None, None
self.synced_object_shas[commit] = True
summary['commits'].append(commit)
object_shas.append(commit)
object_content = subprocess.check_output(base + ['cat-file', '-p',
commit]).decode('utf-8').strip()
parents = []
tree = ''
for line in object_content.splitlines():
if line.startswith('tree '):
tree = line[len('tree '):]
if line.startswith('parent '):
parents.append(line[len('parent '):])
return parents, tree
def x(tree):
if tree in self.synced_object_shas or tree in object_shas:
return
self.synced_object_shas[tree] = True
summary['trees'].append(tree)
object_shas.append(tree)
object_content = subprocess.check_output(base + ['ls-tree', '-r',
'-t', tree]).decode('utf-8').strip()
for line in object_content.splitlines():
exploded = line.split(' ')
if len(exploded) < 3:
sys.stderr.write(
'Error: Wrong line format of ls-tree for %s: %s\n' % (
tree, line))
sys.exit(1)
object_to_add = str(exploded[2][:40])
path = str(exploded[2][41:])
if (object_to_add in self.synced_object_shas or object_to_add in
object_shas):
continue
object_shas.append(object_to_add)
self.synced_object_shas[object_to_add] = True
summary['files'].append([object_to_add, path])
commits_to_check = [latest_commit_sha]
while len(commits_to_check):
sha = commits_to_check.pop(0)
parents, tree = read_parents_and_tree_from(sha)
if parents:
for parent in parents:
if parent not in commits_to_check:
commits_to_check.append(parent)
if tree:
collect_files_from_tree(tree)
is_debug2() and self.logger.debug('shas_to_check %d: %s ' % (len(
object_shas), str(object_shas)))
if not object_shas:
return [], summary
try:
is_debug2() and self.logger.debug('Do git-cat-file-check.sh')
ssh_stream = create_ssh_stream(read_home_config(), exit_on_failure=
False)
channel = ssh_stream.get_transport().open_session()
channel.exec_command('git-cat-file-check.sh "%s"' % (self.
model_name + '.git',))
channel.sendall('\n'.join(object_shas))
channel.shutdown_write()
def x(c):
content = b''
while True:
try:
chunk = c.recv(1024)
if chunk == b'':
break
content += chunk
except (KeyboardInterrupt, SystemExit):
return
return content
missing_objects = readall(channel).decode('utf-8').splitlines()
channel.close()
ssh_stream.close()
for stype in six.iterkeys(summary):
ids = summary[stype][:]
for sha in ids:
if stype == 'files':
if sha[0] not in missing_objects:
summary[stype].remove(sha)
elif sha not in missing_objects:
summary[stype].remove(sha)
return missing_objects, summary
except (KeyboardInterrupt, SystemExit):
raise
except Exception as e:
self.logger.error('Failed to generate diff_objects: %s' % (str(e),))
for sha in object_shas:
if sha in self.synced_object_shas:
del self.synced_object_shas[sha]
return None, None
|
diff_objects
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, latest_commit_sha):\n base = ['git', '--bare', '--git-dir', self.git_path]\n object_shas = []\n summary = {'commits': [], 'trees': [], 'files': []}\n\n def x(commit):\n if commit in self.synced_object_shas or commit in object_shas:\n return None, None\n self.synced_object_shas[commit] = True\n summary['commits'].append(commit)\n object_shas.append(commit)\n object_content = subprocess.check_output(base + ['cat-file', '-p',\n commit]).decode('utf-8').strip()\n parents = []\n tree = ''\n for line in object_content.splitlines():\n if line.startswith('tree '):\n tree = line[len('tree '):]\n if line.startswith('parent '):\n parents.append(line[len('parent '):])\n return parents, tree\n\n def x(tree):\n if tree in self.synced_object_shas or tree in object_shas:\n return\n self.synced_object_shas[tree] = True\n summary['trees'].append(tree)\n object_shas.append(tree)\n object_content = subprocess.check_output(base + ['ls-tree', '-r',\n '-t', tree]).decode('utf-8').strip()\n for line in object_content.splitlines():\n exploded = line.split(' ')\n if len(exploded) < 3:\n sys.stderr.write(\n 'Error: Wrong line format of ls-tree for %s: %s\\n' % (\n tree, line))\n sys.exit(1)\n object_to_add = str(exploded[2][:40])\n path = str(exploded[2][41:])\n if (object_to_add in self.synced_object_shas or object_to_add in\n object_shas):\n continue\n object_shas.append(object_to_add)\n self.synced_object_shas[object_to_add] = True\n summary['files'].append([object_to_add, path])\n commits_to_check = [latest_commit_sha]\n while len(commits_to_check):\n sha = commits_to_check.pop(0)\n parents, tree = read_parents_and_tree_from(sha)\n if parents:\n for parent in parents:\n if parent not in commits_to_check:\n commits_to_check.append(parent)\n if tree:\n collect_files_from_tree(tree)\n is_debug2() and self.logger.debug('shas_to_check %d: %s ' % (len(\n object_shas), str(object_shas)))\n if not object_shas:\n return [], summary\n try:\n is_debug2() and self.logger.debug('Do git-cat-file-check.sh')\n ssh_stream = create_ssh_stream(read_home_config(), exit_on_failure=\n False)\n channel = ssh_stream.get_transport().open_session()\n channel.exec_command('git-cat-file-check.sh \"%s\"' % (self.\n model_name + '.git',))\n channel.sendall('\\n'.join(object_shas))\n channel.shutdown_write()\n\n def x(c):\n content = b''\n while True:\n try:\n chunk = c.recv(1024)\n if chunk == b'':\n break\n content += chunk\n except (KeyboardInterrupt, SystemExit):\n return\n return content\n missing_objects = readall(channel).decode('utf-8').splitlines()\n channel.close()\n ssh_stream.close()\n for stype in six.iterkeys(summary):\n ids = summary[stype][:]\n for sha in ids:\n if stype == 'files':\n if sha[0] not in missing_objects:\n summary[stype].remove(sha)\n elif sha not in missing_objects:\n summary[stype].remove(sha)\n return missing_objects, summary\n except (KeyboardInterrupt, SystemExit):\n raise\n except Exception as e:\n self.logger.error('Failed to generate diff_objects: %s' % (str(e),))\n for sha in object_shas:\n if sha in self.synced_object_shas:\n del self.synced_object_shas[sha]\n return None, None\n",
"role": "user"
},
{
"content": "diff_objects",
"role": "assistant"
}
] |
def x(self, message):
tree_id = self.write_tree()
args = ['commit-tree', tree_id, '-p', self.ref_head]
commit = self.command_exec(args, message)[0].decode('utf-8').strip()
self.command_exec(['update-ref', self.ref_head, commit])
return commit
|
commit_index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, message):\n tree_id = self.write_tree()\n args = ['commit-tree', tree_id, '-p', self.ref_head]\n commit = self.command_exec(args, message)[0].decode('utf-8').strip()\n self.command_exec(['update-ref', self.ref_head, commit])\n return commit\n",
"role": "user"
},
{
"content": "commit_index",
"role": "assistant"
}
] |
def x(self, path):
try:
out, code, err = self.command_exec(['cat-file', '-p', self.ref_head +
':' + path])
if not code:
return out.decode('utf-8')
except Exception:
pass
return None
|
contents
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path):\n try:\n out, code, err = self.command_exec(['cat-file', '-p', self.ref_head +\n ':' + path])\n if not code:\n return out.decode('utf-8')\n except Exception:\n pass\n return None\n",
"role": "user"
},
{
"content": "contents",
"role": "assistant"
}
] |
def x(job_backend, trainer, keras_callback):
job_backend.set_status('STARTING')
job_model = job_backend.get_job_model()
model_provider = job_model.get_model_provider()
job_backend.set_status('LOAD DATA')
datasets = job_model.get_datasets(trainer)
print('trainer.input_shape = %s\n' % (simplejson.dumps(trainer.
input_shape, default=invalid_json_values),))
print('trainer.classes = %s\n' % (simplejson.dumps(trainer.classes,
default=invalid_json_values),))
multiple_inputs = len(datasets) > 1
insights_x = [] if multiple_inputs else None
for dataset_name in job_model.get_input_dataset_names():
dataset = datasets[dataset_name]
if is_generator(dataset['X_train']):
batch_x, batch_y = dataset['X_train'].next()
if multiple_inputs:
insights_x.append(batch_x[0])
else:
insights_x = batch_x[0]
elif multiple_inputs:
insights_x.append(dataset['X_train'][0])
else:
insights_x = dataset['X_train'][0]
keras_callback.insights_x = insights_x
print('Insights sample shape', keras_callback.insights_x.shape)
keras_callback.write("Possible data keys '%s'\n" % "','".join(list(
datasets.keys())))
data_train = model_provider.get_training_data(trainer, datasets)
data_validation = model_provider.get_validation_data(trainer, datasets)
keras_callback.set_validation_data(data_validation, trainer.nb_val_samples)
trainer.set_status('CONSTRUCT')
model = model_provider.get_model(trainer)
trainer.set_model(model)
trainer.set_status('COMPILING')
loss = model_provider.get_loss(trainer)
optimizer = model_provider.get_optimizer(trainer)
model_provider.compile(trainer, model, loss, optimizer)
model.summary()
trainer.callbacks.append(keras_callback)
model_provider.train(trainer, model, data_train, data_validation)
|
job_start
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(job_backend, trainer, keras_callback):\n job_backend.set_status('STARTING')\n job_model = job_backend.get_job_model()\n model_provider = job_model.get_model_provider()\n job_backend.set_status('LOAD DATA')\n datasets = job_model.get_datasets(trainer)\n print('trainer.input_shape = %s\\n' % (simplejson.dumps(trainer.\n input_shape, default=invalid_json_values),))\n print('trainer.classes = %s\\n' % (simplejson.dumps(trainer.classes,\n default=invalid_json_values),))\n multiple_inputs = len(datasets) > 1\n insights_x = [] if multiple_inputs else None\n for dataset_name in job_model.get_input_dataset_names():\n dataset = datasets[dataset_name]\n if is_generator(dataset['X_train']):\n batch_x, batch_y = dataset['X_train'].next()\n if multiple_inputs:\n insights_x.append(batch_x[0])\n else:\n insights_x = batch_x[0]\n elif multiple_inputs:\n insights_x.append(dataset['X_train'][0])\n else:\n insights_x = dataset['X_train'][0]\n keras_callback.insights_x = insights_x\n print('Insights sample shape', keras_callback.insights_x.shape)\n keras_callback.write(\"Possible data keys '%s'\\n\" % \"','\".join(list(\n datasets.keys())))\n data_train = model_provider.get_training_data(trainer, datasets)\n data_validation = model_provider.get_validation_data(trainer, datasets)\n keras_callback.set_validation_data(data_validation, trainer.nb_val_samples)\n trainer.set_status('CONSTRUCT')\n model = model_provider.get_model(trainer)\n trainer.set_model(model)\n trainer.set_status('COMPILING')\n loss = model_provider.get_loss(trainer)\n optimizer = model_provider.get_optimizer(trainer)\n model_provider.compile(trainer, model, loss, optimizer)\n model.summary()\n trainer.callbacks.append(keras_callback)\n model_provider.train(trainer, model, data_train, data_validation)\n",
"role": "user"
},
{
"content": "job_start",
"role": "assistant"
}
] |
def x(job_model, dataset, node, trainer):
concurrent = psutil.cpu_count()
dataset_config = dataset['config']
controller = {'running': True}
q = Queue(concurrent)
result = {'X_train': [], 'Y_train': [], 'X_test': [], 'Y_test': []}
images = []
max = 0
path = job_model.get_dataset_downloads_dir(dataset)
if 'path' in dataset['config']:
path = dataset['config']['path']
classes_count = 0
category_map = {}
classes = []
trainer.set_status('LOAD IMAGES INTO MEMORY')
try:
for i in range(concurrent):
t = ImageReadWorker(q, job_model, node, path, images, controller)
t.daemon = True
t.start()
for validation_or_training in ['validation', 'training']:
if os.path.isdir(os.path.normpath(path + '/' +
validation_or_training)):
for category_name in os.listdir(os.path.normpath(path + '/' +
validation_or_training)):
if os.path.isdir(os.path.normpath(path + '/' +
validation_or_training + '/' + category_name)):
if category_name not in category_map:
category_map[category_name] = classes_count
if ('classes' in dataset_config and 'category_' in
category_name):
category_idx = int(category_name.replace(
'category_', ''))
category_map[category_name] = category_idx
target_category = dataset_config['classes'][
category_idx]
classes.append(target_category['title'] or
'Class %s' % (category_idx,))
else:
classes.append(category_name)
classes_count += 1
for id in os.listdir(os.path.normpath(path + '/' +
validation_or_training + '/' + category_name)):
file_path = os.path.join(path,
validation_or_training, category_name, id)
q.put([file_path, validation_or_training ==
'validation', category_name])
max += 1
q.join()
controller['running'] = False
train_images = []
test_images = []
for v in images:
image, validation, category_dir = v
if validation is True:
test_images.append([image, category_map[category_dir]])
else:
train_images.append([image, category_map[category_dir]])
train_datagen = None
augmentation = bool(get_option(dataset_config, 'augmentation', False))
if augmentation:
train_datagen = get_image_data_augmentor_from_dataset(dataset)
train = InMemoryDataGenerator(train_datagen, train_images,
classes_count, job_model.job['config']['batchSize'])
test = InMemoryDataGenerator(None, test_images, classes_count,
job_model.job['config']['batchSize'])
nb_sample = len(train_images)
trainer.set_info('Dataset size', {'training': nb_sample,
'validation': len(test_images)})
trainer.set_generator_training_nb(nb_sample)
trainer.set_generator_validation_nb(len(test_images))
trainer.logger.info(
'Found %d classes, %d images (%d in training [%saugmented], %d in validation). Read all images into memory from %s'
% (classes_count, max, len(train_images), 'not ' if
augmentation is False else '', len(test_images), path))
if classes_count == 0:
trainer.logger.warning(
'Could not find any classes. Does the directory contains images?'
)
sys.exit(1)
trainer.output_size = classes_count
trainer.set_info('classes', classes)
trainer.classes = classes
result['X_train'] = train
result['Y_train'] = train
result['X_test'] = test
result['Y_test'] = test
return result
except KeyboardInterrupt:
controller['running'] = False
sys.exit(1)
|
read_images_in_memory
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(job_model, dataset, node, trainer):\n concurrent = psutil.cpu_count()\n dataset_config = dataset['config']\n controller = {'running': True}\n q = Queue(concurrent)\n result = {'X_train': [], 'Y_train': [], 'X_test': [], 'Y_test': []}\n images = []\n max = 0\n path = job_model.get_dataset_downloads_dir(dataset)\n if 'path' in dataset['config']:\n path = dataset['config']['path']\n classes_count = 0\n category_map = {}\n classes = []\n trainer.set_status('LOAD IMAGES INTO MEMORY')\n try:\n for i in range(concurrent):\n t = ImageReadWorker(q, job_model, node, path, images, controller)\n t.daemon = True\n t.start()\n for validation_or_training in ['validation', 'training']:\n if os.path.isdir(os.path.normpath(path + '/' +\n validation_or_training)):\n for category_name in os.listdir(os.path.normpath(path + '/' +\n validation_or_training)):\n if os.path.isdir(os.path.normpath(path + '/' +\n validation_or_training + '/' + category_name)):\n if category_name not in category_map:\n category_map[category_name] = classes_count\n if ('classes' in dataset_config and 'category_' in\n category_name):\n category_idx = int(category_name.replace(\n 'category_', ''))\n category_map[category_name] = category_idx\n target_category = dataset_config['classes'][\n category_idx]\n classes.append(target_category['title'] or \n 'Class %s' % (category_idx,))\n else:\n classes.append(category_name)\n classes_count += 1\n for id in os.listdir(os.path.normpath(path + '/' +\n validation_or_training + '/' + category_name)):\n file_path = os.path.join(path,\n validation_or_training, category_name, id)\n q.put([file_path, validation_or_training ==\n 'validation', category_name])\n max += 1\n q.join()\n controller['running'] = False\n train_images = []\n test_images = []\n for v in images:\n image, validation, category_dir = v\n if validation is True:\n test_images.append([image, category_map[category_dir]])\n else:\n train_images.append([image, category_map[category_dir]])\n train_datagen = None\n augmentation = bool(get_option(dataset_config, 'augmentation', False))\n if augmentation:\n train_datagen = get_image_data_augmentor_from_dataset(dataset)\n train = InMemoryDataGenerator(train_datagen, train_images,\n classes_count, job_model.job['config']['batchSize'])\n test = InMemoryDataGenerator(None, test_images, classes_count,\n job_model.job['config']['batchSize'])\n nb_sample = len(train_images)\n trainer.set_info('Dataset size', {'training': nb_sample,\n 'validation': len(test_images)})\n trainer.set_generator_training_nb(nb_sample)\n trainer.set_generator_validation_nb(len(test_images))\n trainer.logger.info(\n 'Found %d classes, %d images (%d in training [%saugmented], %d in validation). Read all images into memory from %s'\n % (classes_count, max, len(train_images), 'not ' if \n augmentation is False else '', len(test_images), path))\n if classes_count == 0:\n trainer.logger.warning(\n 'Could not find any classes. Does the directory contains images?'\n )\n sys.exit(1)\n trainer.output_size = classes_count\n trainer.set_info('classes', classes)\n trainer.classes = classes\n result['X_train'] = train\n result['Y_train'] = train\n result['X_test'] = test\n result['Y_test'] = test\n return result\n except KeyboardInterrupt:\n controller['running'] = False\n sys.exit(1)\n",
"role": "user"
},
{
"content": "read_images_in_memory",
"role": "assistant"
}
] |
def x():
libcudart = get_libcudart()
devices = {}
for i in range(0, get_installed_devices()):
gpu = get_device_properties(i)
pciBusId = ctypes.create_string_buffer(64)
libcudart.cudaDeviceGetPCIBusId(ctypes.byref(pciBusId), 64, i)
full_id = pciBusId.value.decode('utf-8')
gpu['fullId'] = full_id
devices[full_id] = gpu
ordered = []
i = 0
for key in sorted(devices):
devices[key]['id'] = i
ordered.append(devices[key])
i += 1
del libcudart
return ordered
|
get_ordered_devices
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n libcudart = get_libcudart()\n devices = {}\n for i in range(0, get_installed_devices()):\n gpu = get_device_properties(i)\n pciBusId = ctypes.create_string_buffer(64)\n libcudart.cudaDeviceGetPCIBusId(ctypes.byref(pciBusId), 64, i)\n full_id = pciBusId.value.decode('utf-8')\n gpu['fullId'] = full_id\n devices[full_id] = gpu\n ordered = []\n i = 0\n for key in sorted(devices):\n devices[key]['id'] = i\n ordered.append(devices[key])\n i += 1\n del libcudart\n return ordered\n",
"role": "user"
},
{
"content": "get_ordered_devices",
"role": "assistant"
}
] |
def x(l):
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(l, key=alphanum_key)
|
sorted_nicely
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(l):\n convert = lambda text: int(text) if text.isdigit() else text\n alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]\n return sorted(l, key=alphanum_key)\n",
"role": "user"
},
{
"content": "sorted_nicely",
"role": "assistant"
}
] |
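The sorted_nicely record above implements a natural sort; a small self-contained check (assuming the function is in scope):

    files = ['run10.log', 'run2.log', 'run1.log']
    assert sorted(files) == ['run1.log', 'run10.log', 'run2.log']         # lexicographic
    assert sorted_nicely(files) == ['run1.log', 'run2.log', 'run10.log']  # numeric-aware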
def x(self, sub_letter=None):
string = str(self)
if '.' not in string:
return string
if not string[-1].isalpha():
if sub_letter is not None:
return string.replace('.', sub_letter)
return string
letter = string[-1]
return string.replace('.', letter)[:-1]
|
to_pn
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, sub_letter=None):\n string = str(self)\n if '.' not in string:\n return string\n if not string[-1].isalpha():\n if sub_letter is not None:\n return string.replace('.', sub_letter)\n return string\n letter = string[-1]\n return string.replace('.', letter)[:-1]\n",
"role": "user"
},
{
"content": "to_pn",
"role": "assistant"
}
] |
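Behaviour of the to_pn record above, traced from its branches (hypothetical pin-name values, not from the source):

    # '1A'  -> '1A'   (no dot: returned unchanged)
    # '1.2' -> '1_2'  (dot, non-letter suffix, sub_letter='_' supplied)
    # '1.A' -> '1A'   (trailing letter replaces the dot and is dropped from the end)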
def x(li):
size = len(repr(li))
keep = li
toss = []
n = len(li)
    decrement_by = max(n // 10, 10)
while size >= MAX_SIZE and n > 0:
n -= decrement_by
toss = li[:-n]
keep = li[-n:]
size = len(repr(keep))
return keep, toss
|
reduce_list_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(li):\n size = len(repr(li))\n keep = li\n toss = []\n n = len(li)\n decrement_by = max(n / 10, 10)\n while size >= MAX_SIZE and n > 0:\n n -= decrement_by\n toss = li[:-n]\n keep = li[-n:]\n size = len(repr(keep))\n return keep, toss\n",
"role": "user"
},
{
"content": "reduce_list_size",
"role": "assistant"
}
] |
def x(request, cache_key, start_index=0):
path = request.get_full_path()
headers = OrderedDict()
for k, v in sorted(request.META.items()):
if k == 'HTTP_COOKIE' and CONSIDER_COOKIES:
cookie = SimpleCookie()
cookie.load(v)
headers['cookie'] = '; '.join([('%s=%s' % (k, morsel.value)) for
k, morsel in sorted(cookie.items()) if k in CONSIDER_COOKIES])
elif k.startswith('HTTP_'):
k = k[5:].replace('_', '-').lower()
if k in CONSIDER_HEADERS:
headers[k] = v
to_set_get_keys = []
to_set_paths_get_keys = []
to_set_content_types_get_keys = []
to_set_content_types_paths_get_keys = []
to_set = {}
to_set_paths = {}
to_set_content_types = {}
to_set_content_types_paths = {}
to_delete = []
to_set_objects = []
for ctid, obj_pk in request._ultracache[start_index:]:
key = 'ucache-%s-%s' % (ctid, obj_pk)
if key not in to_set_get_keys:
to_set_get_keys.append(key)
key = 'ucache-pth-%s-%s' % (ctid, obj_pk)
if key not in to_set_paths_get_keys:
to_set_paths_get_keys.append(key)
key = 'ucache-ct-%s' % ctid
if key not in to_set_content_types_get_keys:
to_set_content_types_get_keys.append(key)
key = 'ucache-ct-pth-%s' % ctid
if key not in to_set_content_types_paths_get_keys:
to_set_content_types_paths_get_keys.append(key)
tu = ctid, obj_pk
if tu not in to_set_objects:
to_set_objects.append(tu)
di = cache.get_many(to_set_get_keys)
for key in to_set_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set[key] = keep
to_delete.extend(toss)
if cache_key not in keep:
if key not in to_set:
to_set[key] = keep
to_set[key] = to_set[key] + [cache_key]
if to_set == di:
to_set = {}
di = cache.get_many(to_set_paths_get_keys)
for key in to_set_paths_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_paths[key] = keep
if [path, headers] not in keep:
if key not in to_set_paths:
to_set_paths[key] = keep
to_set_paths[key] = to_set_paths[key] + [[path, headers]]
if to_set_paths == di:
to_set_paths = {}
di = cache.get_many(to_set_content_types_get_keys)
for key in to_set_content_types_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_content_types[key] = keep
to_delete.extend(toss)
if cache_key not in keep:
if key not in to_set_content_types:
to_set_content_types[key] = keep
to_set_content_types[key] = to_set_content_types[key] + [cache_key]
if to_set_content_types == di:
to_set_content_types = {}
di = cache.get_many(to_set_content_types_paths_get_keys)
for key in to_set_content_types_paths_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_content_types_paths[key] = keep
if [path, headers] not in keep:
if key not in to_set_content_types_paths:
to_set_content_types_paths[key] = keep
to_set_content_types_paths[key] = to_set_content_types_paths[key
] + [[path, headers]]
if to_set_content_types_paths == di:
to_set_content_types_paths = {}
if to_delete:
try:
cache.delete_many(to_delete)
except NotImplementedError:
for k in to_delete:
cache.delete(k)
di = {}
di.update(to_set)
del to_set
di.update(to_set_paths)
del to_set_paths
di.update(to_set_content_types)
del to_set_content_types
di.update(to_set_content_types_paths)
del to_set_content_types_paths
if to_set_objects:
di[cache_key + '-objs'] = to_set_objects
if di:
try:
cache.set_many(di, 86400)
except NotImplementedError:
for k, v in di.items():
cache.set(k, v, 86400)
|
cache_meta
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request, cache_key, start_index=0):\n path = request.get_full_path()\n headers = OrderedDict()\n for k, v in sorted(request.META.items()):\n if k == 'HTTP_COOKIE' and CONSIDER_COOKIES:\n cookie = SimpleCookie()\n cookie.load(v)\n headers['cookie'] = '; '.join([('%s=%s' % (k, morsel.value)) for\n k, morsel in sorted(cookie.items()) if k in CONSIDER_COOKIES])\n elif k.startswith('HTTP_'):\n k = k[5:].replace('_', '-').lower()\n if k in CONSIDER_HEADERS:\n headers[k] = v\n to_set_get_keys = []\n to_set_paths_get_keys = []\n to_set_content_types_get_keys = []\n to_set_content_types_paths_get_keys = []\n to_set = {}\n to_set_paths = {}\n to_set_content_types = {}\n to_set_content_types_paths = {}\n to_delete = []\n to_set_objects = []\n for ctid, obj_pk in request._ultracache[start_index:]:\n key = 'ucache-%s-%s' % (ctid, obj_pk)\n if key not in to_set_get_keys:\n to_set_get_keys.append(key)\n key = 'ucache-pth-%s-%s' % (ctid, obj_pk)\n if key not in to_set_paths_get_keys:\n to_set_paths_get_keys.append(key)\n key = 'ucache-ct-%s' % ctid\n if key not in to_set_content_types_get_keys:\n to_set_content_types_get_keys.append(key)\n key = 'ucache-ct-pth-%s' % ctid\n if key not in to_set_content_types_paths_get_keys:\n to_set_content_types_paths_get_keys.append(key)\n tu = ctid, obj_pk\n if tu not in to_set_objects:\n to_set_objects.append(tu)\n di = cache.get_many(to_set_get_keys)\n for key in to_set_get_keys:\n v = di.get(key, None)\n keep = []\n if v is not None:\n keep, toss = reduce_list_size(v)\n if toss:\n to_set[key] = keep\n to_delete.extend(toss)\n if cache_key not in keep:\n if key not in to_set:\n to_set[key] = keep\n to_set[key] = to_set[key] + [cache_key]\n if to_set == di:\n to_set = {}\n di = cache.get_many(to_set_paths_get_keys)\n for key in to_set_paths_get_keys:\n v = di.get(key, None)\n keep = []\n if v is not None:\n keep, toss = reduce_list_size(v)\n if toss:\n to_set_paths[key] = keep\n if [path, headers] not in keep:\n if key not in to_set_paths:\n to_set_paths[key] = keep\n to_set_paths[key] = to_set_paths[key] + [[path, headers]]\n if to_set_paths == di:\n to_set_paths = {}\n di = cache.get_many(to_set_content_types_get_keys)\n for key in to_set_content_types_get_keys:\n v = di.get(key, None)\n keep = []\n if v is not None:\n keep, toss = reduce_list_size(v)\n if toss:\n to_set_content_types[key] = keep\n to_delete.extend(toss)\n if cache_key not in keep:\n if key not in to_set_content_types:\n to_set_content_types[key] = keep\n to_set_content_types[key] = to_set_content_types[key] + [cache_key]\n if to_set_content_types == di:\n to_set_content_types = {}\n di = cache.get_many(to_set_content_types_paths_get_keys)\n for key in to_set_content_types_paths_get_keys:\n v = di.get(key, None)\n keep = []\n if v is not None:\n keep, toss = reduce_list_size(v)\n if toss:\n to_set_content_types_paths[key] = keep\n if [path, headers] not in keep:\n if key not in to_set_content_types_paths:\n to_set_content_types_paths[key] = keep\n to_set_content_types_paths[key] = to_set_content_types_paths[key\n ] + [[path, headers]]\n if to_set_content_types_paths == di:\n to_set_content_types_paths = {}\n if to_delete:\n try:\n cache.delete_many(to_delete)\n except NotImplementedError:\n for k in to_delete:\n cache.delete(k)\n di = {}\n di.update(to_set)\n del to_set\n di.update(to_set_paths)\n del to_set_paths\n di.update(to_set_content_types)\n del to_set_content_types\n di.update(to_set_content_types_paths)\n del to_set_content_types_paths\n if to_set_objects:\n di[cache_key + 
'-objs'] = to_set_objects\n if di:\n try:\n cache.set_many(di, 86400)\n except NotImplementedError:\n for k, v in di.items():\n cache.set(k, v, 86400)\n",
"role": "user"
},
{
"content": "cache_meta",
"role": "assistant"
}
] |
def x() ->Role:
LOGGER.debug('AnchorSmith.least_role >>>')
rv = Role.TRUSTEE.token()
LOGGER.debug('AnchorSmith.least_role <<< %s', rv)
return rv
|
least_role
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x() ->Role:\n LOGGER.debug('AnchorSmith.least_role >>>')\n rv = Role.TRUSTEE.token()\n LOGGER.debug('AnchorSmith.least_role <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "least_role",
"role": "assistant"
}
] |
async def x(self, did: str, verkey: str=None, alias: str=None, role:
Role=None) ->None:
"""
Send input anchor's cryptonym (including DID, verification key, plus optional alias and role)
to the distributed ledger.
Raise BadLedgerTxn on failure, BadIdentifier for bad DID, or BadRole for bad role.
:param did: anchor DID to send to ledger
:param verkey: optional anchor verification key
:param alias: optional alias
:param role: anchor role on the ledger (default value of USER)
"""
LOGGER.debug(
'AnchorSmith.send_nym >>> did: %s, verkey: %s, alias: %s, role: %s',
did, verkey, alias, role)
if not ok_did(did):
LOGGER.debug('AnchorSmith.send_nym <!< Bad DID %s', did)
raise BadIdentifier('Bad DID {}'.format(did))
req_json = await ledger.build_nym_request(self.did, did, verkey, alias,
(role or Role.USER).token())
await self._sign_submit(req_json)
LOGGER.debug('AnchorSmith.send_nym <<<')
|
send_nym
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def send_nym(self, did: str, verkey: str=None, alias: str=None, role:\n Role=None) ->None:\n \"\"\"\n Send input anchor's cryptonym (including DID, verification key, plus optional alias and role)\n to the distributed ledger.\n\n Raise BadLedgerTxn on failure, BadIdentifier for bad DID, or BadRole for bad role.\n\n :param did: anchor DID to send to ledger\n :param verkey: optional anchor verification key\n :param alias: optional alias\n :param role: anchor role on the ledger (default value of USER)\n \"\"\"\n LOGGER.debug(\n 'AnchorSmith.send_nym >>> did: %s, verkey: %s, alias: %s, role: %s',\n did, verkey, alias, role)\n if not ok_did(did):\n LOGGER.debug('AnchorSmith.send_nym <!< Bad DID %s', did)\n raise BadIdentifier('Bad DID {}'.format(did))\n req_json = await ledger.build_nym_request(self.did, did, verkey, alias,\n (role or Role.USER).token())\n await self._sign_submit(req_json)\n LOGGER.debug('AnchorSmith.send_nym <<<')\n",
"role": "user"
},
{
"content": "send_nym",
"role": "assistant"
}
] |
def x() ->None:
print()
print('Usage: setnym.py <config-ini>')
print()
print('where <config-ini> represents the path to the configuration file.')
print()
print(
'The operation submits a nym to a trustee anchor to send to the ledger,'
)
print('if the ledger does not have it already as configured.')
print()
print('The configuration file has sections and entries as follows:')
print(' * section [Node Pool]:')
print(
' - name: the name of the node pool to which the operation applies')
print(' - genesis.txn.path: the path to the genesis transaction file')
print(' for the node pool (may omit if node pool already exists)')
print(' * section [Trustee Anchor]:')
print(" - name: the trustee anchor's (wallet) name")
print(" - wallet.type: (default blank) the trustee anchor's wallet type"
)
print(" - wallet.access: (default blank) the trustee anchor's")
print(' wallet access credential (password) value')
print(' * section [VON Anchor]:')
print(
' - role: the role to request in the send-nym transaction; specify:'
)
print(
' - (default) empty value for user with no additional write privileges'
)
print(
' - TRUST_ANCHOR for VON anchor with write privileges for indy artifacts'
)
print(
' - TRUSTEE for VON anchor sending further cryptonyms to the ledger'
)
print(" - name: the VON anchor's (wallet) name")
print(
" - seed: the VON anchor's seed (optional, for wallet creation only)"
)
print(
" - did: the VON anchor's DID (optional, for wallet creation only)")
print(
        ' - wallet.create: whether to create the wallet if it does not yet exist'
)
print(' (value True/False, 1/0, or Yes/No)')
print(" - wallet.type: (default blank) the VON anchor's wallet type")
print(" - wallet.access: (default blank) the VON anchor's")
print(' wallet access credential (password) value.')
print()
|
usage
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x() ->None:\n print()\n print('Usage: setnym.py <config-ini>')\n print()\n print('where <config-ini> represents the path to the configuration file.')\n print()\n print(\n 'The operation submits a nym to a trustee anchor to send to the ledger,'\n )\n print('if the ledger does not have it already as configured.')\n print()\n print('The configuration file has sections and entries as follows:')\n print(' * section [Node Pool]:')\n print(\n ' - name: the name of the node pool to which the operation applies')\n print(' - genesis.txn.path: the path to the genesis transaction file')\n print(' for the node pool (may omit if node pool already exists)')\n print(' * section [Trustee Anchor]:')\n print(\" - name: the trustee anchor's (wallet) name\")\n print(\" - wallet.type: (default blank) the trustee anchor's wallet type\"\n )\n print(\" - wallet.access: (default blank) the trustee anchor's\")\n print(' wallet access credential (password) value')\n print(' * section [VON Anchor]:')\n print(\n ' - role: the role to request in the send-nym transaction; specify:'\n )\n print(\n ' - (default) empty value for user with no additional write privileges'\n )\n print(\n ' - TRUST_ANCHOR for VON anchor with write privileges for indy artifacts'\n )\n print(\n ' - TRUSTEE for VON anchor sending further cryptonyms to the ledger'\n )\n print(\" - name: the VON anchor's (wallet) name\")\n print(\n \" - seed: the VON anchor's seed (optional, for wallet creation only)\"\n )\n print(\n \" - did: the VON anchor's DID (optional, for wallet creation only)\")\n print(\n ' - wallet.create: whether create the wallet if it does not yet exist'\n )\n print(' (value True/False, 1/0, or Yes/No)')\n print(\" - wallet.type: (default blank) the VON anchor's wallet type\")\n print(\" - wallet.access: (default blank) the VON anchor's\")\n print(' wallet access credential (password) value.')\n print()\n",
"role": "user"
},
{
"content": "usage",
"role": "assistant"
}
] |
async def _set_wallets(an_data: dict) ->dict:
"""
Set wallets as configured for setnym operation.
:param an_data: dict mapping profiles to anchor data
:return: dict mapping anchor names to wallet objects
"""
w_mgr = WalletManager()
rv = {}
for profile in an_data:
w_cfg = {'id': an_data[profile].name}
if an_data[profile].wallet_type:
w_cfg['storage_type'] = an_data[profile].wallet_type
if an_data[profile].seed:
w_cfg['seed'] = an_data[profile].seed
if an_data[profile].did:
w_cfg['did'] = an_data[profile].did
if an_data[profile].wallet_create:
try:
await w_mgr.create(w_cfg, access=an_data[profile].wallet_access
)
except ExtantWallet:
pass
rv[profile] = w_mgr.get(w_cfg, access=an_data[profile].wallet_access)
return rv
|
set_wallets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _set_wallets(an_data: dict) ->dict:\n \"\"\"\n Set wallets as configured for setnym operation.\n\n :param an_data: dict mapping profiles to anchor data\n :return: dict mapping anchor names to wallet objects\n \"\"\"\n w_mgr = WalletManager()\n rv = {}\n for profile in an_data:\n w_cfg = {'id': an_data[profile].name}\n if an_data[profile].wallet_type:\n w_cfg['storage_type'] = an_data[profile].wallet_type\n if an_data[profile].seed:\n w_cfg['seed'] = an_data[profile].seed\n if an_data[profile].did:\n w_cfg['did'] = an_data[profile].did\n if an_data[profile].wallet_create:\n try:\n await w_mgr.create(w_cfg, access=an_data[profile].wallet_access\n )\n except ExtantWallet:\n pass\n rv[profile] = w_mgr.get(w_cfg, access=an_data[profile].wallet_access)\n return rv\n",
"role": "user"
},
{
"content": "set_wallets",
"role": "assistant"
}
] |
async def setnym(ini_path: str) ->int:
"""
Set configuration. Open pool, trustee anchor, and wallet of anchor whose nym to send.
Register exit hooks to close pool and trustee anchor.
Engage trustee anchor to send nym for VON anchor, if it differs on the ledger from configuration.
:param ini_path: path to configuration file
:return: 0 for OK, 1 for failure
"""
config = inis2dict(ini_path)
if config['Trustee Anchor']['name'] == config['VON Anchor']['name']:
raise ExtantWallet(
'Wallet names must differ between VON Anchor and Trustee Anchor')
cfg_van_role = config['VON Anchor'].get('role', None) or None
if not ok_role(cfg_van_role):
raise BadRole('Configured role {} is not valid'.format(cfg_van_role))
pool_data = NodePoolData(config['Node Pool']['name'], config[
'Node Pool'].get('genesis.txn.path', None) or None)
an_data = {'tan': AnchorData(Role.TRUSTEE, config['Trustee Anchor'][
'name'], config['Trustee Anchor'].get('seed', None) or None, config
['Trustee Anchor'].get('did', None) or None, config[
'Trustee Anchor'].get('wallet.create', '0').lower() in ['1', 'true',
'yes'], config['Trustee Anchor'].get('wallet.type', None) or None,
config['Trustee Anchor'].get('wallet.access', None) or None), 'van':
AnchorData(Role.get(cfg_van_role), config['VON Anchor']['name'],
config['VON Anchor'].get('seed', None) or None, config['VON Anchor'
].get('did', None) or None, config['VON Anchor'].get(
'wallet.create', '0').lower() in ['1', 'true', 'yes'], config[
'VON Anchor'].get('wallet.type', None) or None, config['VON Anchor'
].get('wallet.access', None) or None)}
an_wallet = await _set_wallets(an_data)
p_mgr = NodePoolManager()
if pool_data.name not in await p_mgr.list():
if pool_data.genesis_txn_path:
await p_mgr.add_config(pool_data.name, pool_data.genesis_txn_path)
else:
raise AbsentPool(
'Node pool {} has no ledger configuration, but {} specifies no genesis txn path'
.format(pool_data.name, ini_path))
async with an_wallet['tan'] as w_tan, an_wallet['van'] as w_van, p_mgr.get(
pool_data.name) as pool, TrusteeAnchor(w_tan, pool
) as tan, NominalAnchor(w_van, pool) as van:
send_verkey = van.verkey
try:
nym_role = await tan.get_nym_role(van.did)
if an_data['van'].role == nym_role:
return 0
send_verkey = None
if nym_role != Role.USER:
await tan.send_nym(van.did, send_verkey, van.wallet.name,
Role.ROLE_REMOVE)
except AbsentNym:
pass
await tan.send_nym(van.did, send_verkey, van.wallet.name, an_data[
'van'].role)
return 0
|
setnym
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def setnym(ini_path: str) ->int:\n \"\"\"\n Set configuration. Open pool, trustee anchor, and wallet of anchor whose nym to send.\n Register exit hooks to close pool and trustee anchor.\n\n Engage trustee anchor to send nym for VON anchor, if it differs on the ledger from configuration.\n\n :param ini_path: path to configuration file\n :return: 0 for OK, 1 for failure\n \"\"\"\n config = inis2dict(ini_path)\n if config['Trustee Anchor']['name'] == config['VON Anchor']['name']:\n raise ExtantWallet(\n 'Wallet names must differ between VON Anchor and Trustee Anchor')\n cfg_van_role = config['VON Anchor'].get('role', None) or None\n if not ok_role(cfg_van_role):\n raise BadRole('Configured role {} is not valid'.format(cfg_van_role))\n pool_data = NodePoolData(config['Node Pool']['name'], config[\n 'Node Pool'].get('genesis.txn.path', None) or None)\n an_data = {'tan': AnchorData(Role.TRUSTEE, config['Trustee Anchor'][\n 'name'], config['Trustee Anchor'].get('seed', None) or None, config\n ['Trustee Anchor'].get('did', None) or None, config[\n 'Trustee Anchor'].get('wallet.create', '0').lower() in ['1', 'true',\n 'yes'], config['Trustee Anchor'].get('wallet.type', None) or None, \n config['Trustee Anchor'].get('wallet.access', None) or None), 'van':\n AnchorData(Role.get(cfg_van_role), config['VON Anchor']['name'], \n config['VON Anchor'].get('seed', None) or None, config['VON Anchor'\n ].get('did', None) or None, config['VON Anchor'].get(\n 'wallet.create', '0').lower() in ['1', 'true', 'yes'], config[\n 'VON Anchor'].get('wallet.type', None) or None, config['VON Anchor'\n ].get('wallet.access', None) or None)}\n an_wallet = await _set_wallets(an_data)\n p_mgr = NodePoolManager()\n if pool_data.name not in await p_mgr.list():\n if pool_data.genesis_txn_path:\n await p_mgr.add_config(pool_data.name, pool_data.genesis_txn_path)\n else:\n raise AbsentPool(\n 'Node pool {} has no ledger configuration, but {} specifies no genesis txn path'\n .format(pool_data.name, ini_path))\n async with an_wallet['tan'] as w_tan, an_wallet['van'] as w_van, p_mgr.get(\n pool_data.name) as pool, TrusteeAnchor(w_tan, pool\n ) as tan, NominalAnchor(w_van, pool) as van:\n send_verkey = van.verkey\n try:\n nym_role = await tan.get_nym_role(van.did)\n if an_data['van'].role == nym_role:\n return 0\n send_verkey = None\n if nym_role != Role.USER:\n await tan.send_nym(van.did, send_verkey, van.wallet.name,\n Role.ROLE_REMOVE)\n except AbsentNym:\n pass\n await tan.send_nym(van.did, send_verkey, van.wallet.name, an_data[\n 'van'].role)\n return 0\n",
"role": "user"
},
{
"content": "setnym",
"role": "assistant"
}
] |
def x(args: Sequence[str]=None) ->int:
logging.basicConfig(level=logging.INFO, format=
'%(asctime)-15s | %(levelname)-8s | %(message)s', datefmt=
'%Y-%m-%d %H:%M:%S')
logging.getLogger('von_anchor').setLevel(logging.WARNING)
logging.getLogger('indy').setLevel(logging.ERROR)
if args is None:
args = sys.argv[1:]
    if len(args) == 1:
        try:
            return do_wait(setnym(args[0]))
except VonAnchorError as vax:
print(str(vax))
return 1
else:
usage()
return 1
|
main
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args: Sequence[str]=None) ->int:\n logging.basicConfig(level=logging.INFO, format=\n '%(asctime)-15s | %(levelname)-8s | %(message)s', datefmt=\n '%Y-%m-%d %H:%M:%S')\n logging.getLogger('von_anchor').setLevel(logging.WARNING)\n logging.getLogger('indy').setLevel(logging.ERROR)\n if args is None:\n args = sys.argv[1:]\n if len(sys.argv) == 2:\n try:\n return do_wait(setnym(sys.argv[1]))\n except VonAnchorError as vax:\n print(str(vax))\n return 1\n else:\n usage()\n return 1\n",
"role": "user"
},
{
"content": "main",
"role": "assistant"
}
] |
def x(origin_did: str, name: str, version: str) ->str:
return '{}:2:{}:{}'.format(origin_did, name, version)
|
schema_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(origin_did: str, name: str, version: str) ->str:\n return '{}:2:{}:{}'.format(origin_did, name, version)\n",
"role": "user"
},
{
"content": "schema_id",
"role": "assistant"
}
] |
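A quick usage sketch of the identifier format this row produces (the DID below is an illustrative base58 value, not taken from the source):

def schema_id(origin_did, name, version):  # restated from the row above
    return '{}:2:{}:{}'.format(origin_did, name, version)

print(schema_id('Vx4E82R17q5gKJT4eMWeTW', 'drinks-pref', '1.0'))
# Vx4E82R17q5gKJT4eMWeTW:2:drinks-pref:1.0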
def x(token: str) ->bool:
return bool(re.match('[{}]{{21,22}}$'.format(B58), token or ''))
|
ok_did
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(token: str) ->bool:\n return bool(re.match('[{}]{{21,22}}$'.format(B58), token or ''))\n",
"role": "user"
},
{
"content": "ok_did",
"role": "assistant"
}
] |
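A minimal, self-contained sketch of the check above; B58 is assumed here to be the standard base58 alphabet (its actual definition lives elsewhere in the source):

import re

B58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'  # assumption

def ok_did(token):
    return bool(re.match('[{}]{{21,22}}$'.format(B58), token or ''))

print(ok_did('Vx4E82R17q5gKJT4eMWeTW'))  # True: 22 base58 characters
print(ok_did('0x123'))                   # False: too short, and '0' is not base58
print(ok_did(None))                      # False: None falls back to ''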
def x(token: str) ->bool:
return bool(re.match('[{}]{{21,22}}:2:.+:[0-9.]+$'.format(B58), token or
''))
|
ok_schema_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(token: str) ->bool:\n return bool(re.match('[{}]{{21,22}}:2:.+:[0-9.]+$'.format(B58), token or\n ''))\n",
"role": "user"
},
{
"content": "ok_schema_id",
"role": "assistant"
}
] |
def x(s_id: str) ->SchemaKey:
s_key = s_id.split(':')
s_key.pop(1)
return SchemaKey(*s_key)
|
schema_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(s_id: str) ->SchemaKey:\n s_key = s_id.split(':')\n s_key.pop(1)\n return SchemaKey(*s_key)\n",
"role": "user"
},
{
"content": "schema_key",
"role": "assistant"
}
] |
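A round trip with schema_id, assuming SchemaKey is a namedtuple of origin DID, name, and version (a sketch; the real type is defined elsewhere in the source):

from collections import namedtuple

SchemaKey = namedtuple('SchemaKey', 'origin_did name version')  # assumed shape

def schema_key(s_id):
    s_key = s_id.split(':')
    s_key.pop(1)  # drop the '2' marker between origin DID and schema name
    return SchemaKey(*s_key)

print(schema_key('Vx4E82R17q5gKJT4eMWeTW:2:drinks-pref:1.0'))
# SchemaKey(origin_did='Vx4E82R17q5gKJT4eMWeTW', name='drinks-pref', version='1.0')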
def x(issuer_did: str, schema_seq_no: int, protocol: Protocol=None) ->str:
return (protocol or Protocol.DEFAULT).cred_def_id(issuer_did, schema_seq_no
)
|
cred_def_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(issuer_did: str, schema_seq_no: int, protocol: Protocol=None) ->str:\n return (protocol or Protocol.DEFAULT).cred_def_id(issuer_did, schema_seq_no\n )\n",
"role": "user"
},
{
"content": "cred_def_id",
"role": "assistant"
}
] |
def x(token: str, issuer_did: str=None) ->bool:
cd_id_m = re.match('([{}]{{21,22}}):3:CL:[1-9][0-9]*(:.+)?$'.format(B58
), token or '')
return bool(cd_id_m) and (not issuer_did or cd_id_m.group(1) == issuer_did)
|
ok_cred_def_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(token: str, issuer_did: str=None) ->bool:\n cd_id_m = re.match('([{}]{{21,22}}):3:CL:[1-9][0-9]*(:.+)?$'.format(B58\n ), token or '')\n return bool(cd_id_m) and (not issuer_did or cd_id_m.group(1) == issuer_did)\n",
"role": "user"
},
{
"content": "ok_cred_def_id",
"role": "assistant"
}
] |
def x(cd_id: str) ->int:
if ok_cred_def_id(cd_id):
return int(cd_id.split(':')[3])
raise BadIdentifier('Bad credential definition identifier {}'.format(cd_id)
)
|
cred_def_id2seq_no
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cd_id: str) ->int:\n if ok_cred_def_id(cd_id):\n return int(cd_id.split(':')[3])\n raise BadIdentifier('Bad credential definition identifier {}'.format(cd_id)\n )\n",
"role": "user"
},
{
"content": "cred_def_id2seq_no",
"role": "assistant"
}
] |
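For reference, the layout the parser above relies on: a cred def id reads '<issuer-did>:3:CL:<schema-seq-no>' with an optional ':<tag>' suffix (per the regex in ok_cred_def_id), so the sequence number is always token 3. An illustrative split (the id is made up):

cd_id = 'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag'
print(cd_id.split(':'))          # ['Vx4E82R17q5gKJT4eMWeTW', '3', 'CL', '16', 'tag']
print(int(cd_id.split(':')[3]))  # 16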
def x(cd_id: str, tag: Union[str, int]) ->str:
return '{}:4:{}:CL_ACCUM:{}'.format(cd_id.split(':', 1)[0], cd_id, tag)
|
rev_reg_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cd_id: str, tag: Union[str, int]) ->str:\n return '{}:4:{}:CL_ACCUM:{}'.format(cd_id.split(':', 1)[0], cd_id, tag)\n",
"role": "user"
},
{
"content": "rev_reg_id",
"role": "assistant"
}
] |
def x(token: str, issuer_did: str=None) ->bool:
rr_id_m = re.match(
'([{0}]{{21,22}}):4:([{0}]{{21,22}}):3:CL:[1-9][0-9]*(:.+)?:CL_ACCUM:.+$'
.format(B58), token or '')
return bool(rr_id_m) and (not issuer_did or rr_id_m.group(1) ==
issuer_did and rr_id_m.group(2) == issuer_did)
|
ok_rev_reg_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(token: str, issuer_did: str=None) ->bool:\n rr_id_m = re.match(\n '([{0}]{{21,22}}):4:([{0}]{{21,22}}):3:CL:[1-9][0-9]*(:.+)?:CL_ACCUM:.+$'\n .format(B58), token or '')\n return bool(rr_id_m) and (not issuer_did or rr_id_m.group(1) ==\n issuer_did and rr_id_m.group(2) == issuer_did)\n",
"role": "user"
},
{
"content": "ok_rev_reg_id",
"role": "assistant"
}
] |
def x(rr_id: str) ->str:
if ok_rev_reg_id(rr_id):
return ':'.join(rr_id.split(':')[2:-2])
raise BadIdentifier('Bad revocation registry identifier {}'.format(rr_id))
|
rev_reg_id2cred_def_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(rr_id: str) ->str:\n if ok_rev_reg_id(rr_id):\n return ':'.join(rr_id.split(':')[2:-2])\n raise BadIdentifier('Bad revocation registry identifier {}'.format(rr_id))\n",
"role": "user"
},
{
"content": "rev_reg_id2cred_def_id",
"role": "assistant"
}
] |
def x(rr_id: str) ->(str, str):
if ok_rev_reg_id(rr_id):
return ':'.join(rr_id.split(':')[2:-2]), str(rr_id.split(':')[-1])
raise BadIdentifier('Bad revocation registry identifier {}'.format(rr_id))
|
rev_reg_id2cred_def_id_tag
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(rr_id: str) ->(str, str):\n if ok_rev_reg_id(rr_id):\n return ':'.join(rr_id.split(':')[2:-2]), str(rr_id.split(':')[-1])\n raise BadIdentifier('Bad revocation registry identifier {}'.format(rr_id))\n",
"role": "user"
},
{
"content": "rev_reg_id2cred_def_id_tag",
"role": "assistant"
}
] |
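The three functions above compose: rev_reg_id embeds the whole cred def id, and the two parsers recover it and the tag by slicing around the fixed ':CL_ACCUM:' framing. A self-contained round trip (ids are illustrative):

def rev_reg_id(cd_id, tag):  # restated from its row above
    return '{}:4:{}:CL_ACCUM:{}'.format(cd_id.split(':', 1)[0], cd_id, tag)

cd_id = 'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag'
rr_id = rev_reg_id(cd_id, 0)
print(rr_id)
# Vx4E82R17q5gKJT4eMWeTW:4:Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag:CL_ACCUM:0
print(':'.join(rr_id.split(':')[2:-2]) == cd_id)  # True: cred def id recovered
print(rr_id.split(':')[-1])                       # '0': tag recovered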
def x(briefs: Union[dict, Sequence[dict]]) ->tuple:
if isinstance(briefs, dict):
if all(ok_wallet_reft(k) for k in briefs):
return tuple(briefs.values())
return briefs,
return tuple(briefs)
|
iter_briefs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(briefs: Union[dict, Sequence[dict]]) ->tuple:\n if isinstance(briefs, dict):\n if all(ok_wallet_reft(k) for k in briefs):\n return tuple(briefs.values())\n return briefs,\n return tuple(briefs)\n",
"role": "user"
},
{
"content": "iter_briefs",
"role": "assistant"
}
] |
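A sketch of the three input shapes iter_briefs normalizes. Here ok_wallet_reft is stubbed purely for illustration; the real referent check is defined elsewhere in the source:

def ok_wallet_reft(token):  # stub: real implementation checks wallet referent format
    return isinstance(token, str) and token.startswith('claim::')

def iter_briefs(briefs):  # restated from the row above
    if isinstance(briefs, dict):
        if all(ok_wallet_reft(k) for k in briefs):
            return tuple(briefs.values())
        return (briefs,)
    return tuple(briefs)

brief = {'cred_info': {'referent': 'claim::b1'}}
print(iter_briefs([brief]))               # sequence of briefs -> tuple as-is
print(iter_briefs({'claim::b1': brief}))  # dict keyed on referents -> its values
print(iter_briefs(brief))                 # single brief dict -> 1-tuple around it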
def x(briefs: Union[dict, Sequence[dict]], cred_ids: Union[Sequence[str],
str]=None) ->dict:
rv = {}
for brief in iter_briefs(briefs):
cred_info = brief.get('cred_info', {}) or brief
cred_id = cred_info['referent']
if cred_id not in rv and (not cred_ids or cred_id in [cred_ids, [
cred_ids]][isinstance(cred_ids, str)]):
rv[cred_id] = {'schema_id': cred_info['schema_id'],
'cred_def_id': cred_info['cred_def_id'], 'rev_reg_id':
cred_info['rev_reg_id']}
return rv
|
box_ids
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(briefs: Union[dict, Sequence[dict]], cred_ids: Union[Sequence[str],\n str]=None) ->dict:\n rv = {}\n for brief in iter_briefs(briefs):\n cred_info = brief.get('cred_info', {}) or brief\n cred_id = cred_info['referent']\n if cred_id not in rv and (not cred_ids or cred_id in [cred_ids, [\n cred_ids]][isinstance(cred_ids, str)]):\n rv[cred_id] = {'schema_id': cred_info['schema_id'],\n 'cred_def_id': cred_info['cred_def_id'], 'rev_reg_id':\n cred_info['rev_reg_id']}\n return rv\n",
"role": "user"
},
{
"content": "box_ids",
"role": "assistant"
}
] |
def x(creds: dict, cred_ids: set) ->str:
rv = deepcopy(creds)
for key in ('attrs', 'predicates'):
for attr_uuid, creds_by_uuid in rv[key].items():
rv[key][attr_uuid] = [cred for cred in creds_by_uuid if cred[
'cred_info']['referent'] in cred_ids]
empties = [attr_uuid for attr_uuid in rv[key] if not rv[key][attr_uuid]
]
for attr_uuid in empties:
del rv[key][attr_uuid]
return json.dumps(rv)
|
prune_creds_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(creds: dict, cred_ids: set) ->str:\n rv = deepcopy(creds)\n for key in ('attrs', 'predicates'):\n for attr_uuid, creds_by_uuid in rv[key].items():\n rv[key][attr_uuid] = [cred for cred in creds_by_uuid if cred[\n 'cred_info']['referent'] in cred_ids]\n empties = [attr_uuid for attr_uuid in rv[key] if not rv[key][attr_uuid]\n ]\n for attr_uuid in empties:\n del rv[key][attr_uuid]\n return json.dumps(rv)\n",
"role": "user"
},
{
"content": "prune_creds_json",
"role": "assistant"
}
] |
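A self-contained run of the pruning above on a synthetic creds structure (referent values are illustrative):

import json
from copy import deepcopy

def prune_creds_json(creds, cred_ids):  # restated from the row above
    rv = deepcopy(creds)
    for key in ('attrs', 'predicates'):
        for attr_uuid, creds_by_uuid in rv[key].items():
            rv[key][attr_uuid] = [cred for cred in creds_by_uuid
                if cred['cred_info']['referent'] in cred_ids]
        empties = [attr_uuid for attr_uuid in rv[key] if not rv[key][attr_uuid]]
        for attr_uuid in empties:
            del rv[key][attr_uuid]
    return json.dumps(rv)

creds = {'attrs': {'attr_uuid_0': [{'cred_info': {'referent': 'claim::keep'}},
                                   {'cred_info': {'referent': 'claim::drop'}}],
                   'attr_uuid_1': [{'cred_info': {'referent': 'claim::drop'}}]},
         'predicates': {}}
print(prune_creds_json(creds, {'claim::keep'}))
# attr_uuid_0 keeps only claim::keep; attr_uuid_1 empties out and is deleted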
def x(proof_req: dict, infos: Union[dict, Sequence[dict]]) ->list:
rv = []
refts = proof_req_attr_referents(proof_req)
for info in iter_briefs(infos):
if info['cred_def_id'] not in refts:
continue
brief = {'cred_info': info, 'interval': {}}
fro = None
to = None
for uuid in refts[info['cred_def_id']].values():
interval = proof_req['requested_attributes'][uuid].get(
'non_revoked', {})
if 'from' in interval:
fro = min(fro or interval['from'], interval['from'])
if 'to' in interval:
to = max(to or interval['to'], interval['to'])
if to:
brief['interval']['to'] = to
if fro:
brief['interval']['from'] = fro
if not brief['interval']:
brief['interval'] = None
rv.append(brief)
return rv
|
proof_req_infos2briefs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict, infos: Union[dict, Sequence[dict]]) ->list:\n rv = []\n refts = proof_req_attr_referents(proof_req)\n for info in iter_briefs(infos):\n if info['cred_def_id'] not in refts:\n continue\n brief = {'cred_info': info, 'interval': {}}\n fro = None\n to = None\n for uuid in refts[info['cred_def_id']].values():\n interval = proof_req['requested_attributes'][uuid].get(\n 'non_revoked', {})\n if 'from' in interval:\n fro = min(fro or interval['from'], interval['from'])\n if 'to' in interval:\n to = max(to or interval['to'], interval['to'])\n if to:\n brief['interval']['to'] = to\n if fro:\n brief['interval']['from'] = fro\n if not brief['interval']:\n brief['interval'] = None\n rv.append(brief)\n return rv\n",
"role": "user"
},
{
"content": "proof_req_infos2briefs",
"role": "assistant"
}
] |
def x(proof_req: dict, briefs: Union[dict, Sequence[dict]]) ->dict:
rv = {'self_attested_attributes': {}, 'requested_attributes': {},
'requested_predicates': {}}
attr_refts = proof_req_attr_referents(proof_req)
pred_refts = proof_req_pred_referents(proof_req)
for brief in iter_briefs(briefs):
cred_info = brief['cred_info']
timestamp = (brief['interval'] or {}).get('to', None)
for attr in cred_info['attrs']:
if attr in attr_refts.get(cred_info['cred_def_id'], {}):
req_attr = {'cred_id': cred_info['referent'], 'revealed':
attr not in pred_refts.get(cred_info['cred_def_id'], {}
), 'timestamp': timestamp}
if not timestamp:
req_attr.pop('timestamp')
rv['requested_attributes'][attr_refts[cred_info[
'cred_def_id']][attr]] = req_attr
if attr in pred_refts.get(cred_info['cred_def_id'], {}):
for uuid in pred_refts[cred_info['cred_def_id']][attr]:
req_pred = {'cred_id': cred_info['referent'],
'timestamp': timestamp}
if not timestamp:
req_pred.pop('timestamp')
rv['requested_predicates'][uuid] = req_pred
return rv
|
proof_req_briefs2req_creds
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict, briefs: Union[dict, Sequence[dict]]) ->dict:\n rv = {'self_attested_attributes': {}, 'requested_attributes': {},\n 'requested_predicates': {}}\n attr_refts = proof_req_attr_referents(proof_req)\n pred_refts = proof_req_pred_referents(proof_req)\n for brief in iter_briefs(briefs):\n cred_info = brief['cred_info']\n timestamp = (brief['interval'] or {}).get('to', None)\n for attr in cred_info['attrs']:\n if attr in attr_refts.get(cred_info['cred_def_id'], {}):\n req_attr = {'cred_id': cred_info['referent'], 'revealed': \n attr not in pred_refts.get(cred_info['cred_def_id'], {}\n ), 'timestamp': timestamp}\n if not timestamp:\n req_attr.pop('timestamp')\n rv['requested_attributes'][attr_refts[cred_info[\n 'cred_def_id']][attr]] = req_attr\n if attr in pred_refts.get(cred_info['cred_def_id'], {}):\n for uuid in pred_refts[cred_info['cred_def_id']][attr]:\n req_pred = {'cred_id': cred_info['referent'],\n 'timestamp': timestamp}\n if not timestamp:\n req_pred.pop('timestamp')\n rv['requested_predicates'][uuid] = req_pred\n return rv\n",
"role": "user"
},
{
"content": "proof_req_briefs2req_creds",
"role": "assistant"
}
] |
def x(creds: Union[dict, Sequence[dict]], filt: dict=None, filt_dflt_incl:
bool=False) ->dict:
def x(briefs):
nonlocal rv, filt
for brief in briefs:
cred_info = brief.get('cred_info', {}) or brief
if cred_info['referent'] in rv:
continue
cred_cd_id = cred_info['cred_def_id']
if not filt or filt_dflt_incl and cred_cd_id not in filt:
rv[cred_info['referent']] = cred_info
continue
if filt and cred_cd_id in filt:
if {k: str(filt[cred_cd_id][k]) for k in filt[cred_cd_id]
}.items() <= cred_info['attrs'].items():
rv[cred_info['referent']] = cred_info
rv = {}
if filt is None:
filt = {}
if isinstance(creds, dict):
if all(ok_wallet_reft(k) for k in creds):
_add(creds.values())
else:
for uuid2briefs in (creds.get('attrs', {}), creds.get(
'predicates', {})):
for briefs in uuid2briefs.values():
_add(briefs)
else:
_add(creds)
return rv
|
creds_display
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(creds: Union[dict, Sequence[dict]], filt: dict=None, filt_dflt_incl:\n bool=False) ->dict:\n\n def x(briefs):\n nonlocal rv, filt\n for brief in briefs:\n cred_info = brief.get('cred_info', {}) or brief\n if cred_info['referent'] in rv:\n continue\n cred_cd_id = cred_info['cred_def_id']\n if not filt or filt_dflt_incl and cred_cd_id not in filt:\n rv[cred_info['referent']] = cred_info\n continue\n if filt and cred_cd_id in filt:\n if {k: str(filt[cred_cd_id][k]) for k in filt[cred_cd_id]\n }.items() <= cred_info['attrs'].items():\n rv[cred_info['referent']] = cred_info\n rv = {}\n if filt is None:\n filt = {}\n if isinstance(creds, dict):\n if all(ok_wallet_reft(k) for k in creds):\n _add(creds.values())\n else:\n for uuid2briefs in (creds.get('attrs', {}), creds.get(\n 'predicates', {})):\n for briefs in uuid2briefs.values():\n _add(briefs)\n else:\n _add(creds)\n return rv\n",
"role": "user"
},
{
"content": "creds_display",
"role": "assistant"
}
] |
def x(proof_req: dict, x_cd_ids: Union[str, Sequence[str]]=None) ->dict:
rv = {}
attr_refts = proof_req_attr_referents(proof_req)
for cd_id in [k for k in attr_refts if k not in ([x_cd_ids] if
isinstance(x_cd_ids, str) else x_cd_ids or [])]:
rv[set(attr_refts[cd_id].values()).pop()] = {'cred_def_id': cd_id}
return rv
|
proof_req2wql_all
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict, x_cd_ids: Union[str, Sequence[str]]=None) ->dict:\n rv = {}\n attr_refts = proof_req_attr_referents(proof_req)\n for cd_id in [k for k in attr_refts if k not in ([x_cd_ids] if\n isinstance(x_cd_ids, str) else x_cd_ids or [])]:\n rv[set(attr_refts[cd_id].values()).pop()] = {'cred_def_id': cd_id}\n return rv\n",
"role": "user"
},
{
"content": "proof_req2wql_all",
"role": "assistant"
}
] |
def x(proof_req: dict) ->dict:
rv = {}
for uuid, spec in proof_req['requested_attributes'].items():
cd_id = None
for restriction in spec.get('restrictions', []):
cd_id = restriction.get('cred_def_id', None)
if cd_id:
break
if not cd_id:
continue
if cd_id not in rv:
rv[cd_id] = {}
rv[cd_id][spec['name']] = uuid
return rv
|
proof_req_attr_referents
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict) ->dict:\n rv = {}\n for uuid, spec in proof_req['requested_attributes'].items():\n cd_id = None\n for restriction in spec.get('restrictions', []):\n cd_id = restriction.get('cred_def_id', None)\n if cd_id:\n break\n if not cd_id:\n continue\n if cd_id not in rv:\n rv[cd_id] = {}\n rv[cd_id][spec['name']] = uuid\n return rv\n",
"role": "user"
},
{
"content": "proof_req_attr_referents",
"role": "assistant"
}
] |
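A small worked input for the mapping above; note that attributes without a cred_def_id restriction (e.g. self-attested ones) are skipped:

def proof_req_attr_referents(proof_req):  # condensed from the row above
    rv = {}
    for uuid, spec in proof_req['requested_attributes'].items():
        cd_id = None
        for restriction in spec.get('restrictions', []):
            cd_id = restriction.get('cred_def_id', None)
            if cd_id:
                break
        if not cd_id:
            continue
        rv.setdefault(cd_id, {})[spec['name']] = uuid
    return rv

proof_req = {'requested_attributes': {
    '16_name_uuid': {'name': 'name', 'restrictions': [
        {'cred_def_id': 'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag'}]},
    'self_email_uuid': {'name': 'email'}}}  # no restrictions: skipped
print(proof_req_attr_referents(proof_req))
# {'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag': {'name': '16_name_uuid'}}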
def x(proof_req: dict) ->dict:
rv = {}
for uuid, spec in proof_req['requested_predicates'].items():
cd_id = None
for restriction in spec.get('restrictions', []):
cd_id = restriction.get('cred_def_id', None)
if cd_id:
break
if not cd_id:
continue
if cd_id not in rv:
rv[cd_id] = {}
if spec['name'] not in rv[cd_id]:
rv[cd_id][spec['name']] = {}
rv[cd_id][spec['name']][uuid] = [spec['p_type'], Predicate.to_int(
spec['p_value'])]
return rv
|
proof_req_pred_referents
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict) ->dict:\n rv = {}\n for uuid, spec in proof_req['requested_predicates'].items():\n cd_id = None\n for restriction in spec.get('restrictions', []):\n cd_id = restriction.get('cred_def_id', None)\n if cd_id:\n break\n if not cd_id:\n continue\n if cd_id not in rv:\n rv[cd_id] = {}\n if spec['name'] not in rv[cd_id]:\n rv[cd_id][spec['name']] = {}\n rv[cd_id][spec['name']][uuid] = [spec['p_type'], Predicate.to_int(\n spec['p_value'])]\n return rv\n",
"role": "user"
},
{
"content": "proof_req_pred_referents",
"role": "assistant"
}
] |
def x(briefs: Union[dict, Sequence[dict]], filt: dict=None) ->dict:
rv = {}
for brief in iter_briefs(briefs):
cred_info = brief.get('cred_info', {}) or brief
rr_id, cr_id = cred_info['rev_reg_id'], cred_info['cred_rev_id']
if (rr_id, cr_id) in rv or rr_id is None or cr_id is None:
continue
if not filt:
rv[rr_id, cr_id] = cred_info['attrs']
continue
if {attr: str(filt[attr]) for attr in filt}.items() <= cred_info[
'attrs'].items():
rv[rr_id, cr_id] = cred_info['attrs']
return rv
|
revoc_info
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(briefs: Union[dict, Sequence[dict]], filt: dict=None) ->dict:\n rv = {}\n for brief in iter_briefs(briefs):\n cred_info = brief.get('cred_info', {}) or brief\n rr_id, cr_id = cred_info['rev_reg_id'], cred_info['cred_rev_id']\n if (rr_id, cr_id) in rv or rr_id is None or cr_id is None:\n continue\n if not filt:\n rv[rr_id, cr_id] = cred_info['attrs']\n continue\n if {attr: str(filt[attr]) for attr in filt}.items() <= cred_info[\n 'attrs'].items():\n rv[rr_id, cr_id] = cred_info['attrs']\n return rv\n",
"role": "user"
},
{
"content": "revoc_info",
"role": "assistant"
}
] |
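The filter in revoc_info (and in creds_display above) relies on dict-items subset comparison: dict.items() views support <= as a subset test, with filter values coerced to str first. A tiny demonstration:

attrs = {'name': 'Alice', 'score': '80'}
filt = {'score': 80}
print({k: str(v) for k, v in filt.items()}.items() <= attrs.items())  # True
print({'score': '81'}.items() <= attrs.items())                       # False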
def x(proof: dict) ->dict:
rv = {}
for sub_index in range(len(proof['identifiers'])):
cd_id = proof['identifiers'][sub_index]['cred_def_id']
rv[cd_id] = {'_'.join(uuid.split('_')[1:-1]): proof[
'requested_proof']['revealed_attrs'][uuid]['raw'] for uuid in
proof['requested_proof']['revealed_attrs'] if proof[
'requested_proof']['revealed_attrs'][uuid]['sub_proof_index'] ==
sub_index}
return rv
|
revealed_attrs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof: dict) ->dict:\n rv = {}\n for sub_index in range(len(proof['identifiers'])):\n cd_id = proof['identifiers'][sub_index]['cred_def_id']\n rv[cd_id] = {'_'.join(uuid.split('_')[1:-1]): proof[\n 'requested_proof']['revealed_attrs'][uuid]['raw'] for uuid in\n proof['requested_proof']['revealed_attrs'] if proof[\n 'requested_proof']['revealed_attrs'][uuid]['sub_proof_index'] ==\n sub_index}\n return rv\n",
"role": "user"
},
{
"content": "revealed_attrs",
"role": "assistant"
}
] |
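A skeletal, synthetic proof showing how revealed_attrs regroups revealed values per cred def id, stripping the '<seq_no>_' prefix and '_uuid' suffix from each attribute referent:

proof = {  # minimal synthetic structure, for illustration only
    'identifiers': [{'cred_def_id': 'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag'}],
    'requested_proof': {'revealed_attrs': {
        '16_name_uuid': {'raw': 'Alice', 'sub_proof_index': 0}}}}

rv = {}
for sub_index in range(len(proof['identifiers'])):
    cd_id = proof['identifiers'][sub_index]['cred_def_id']
    rv[cd_id] = {'_'.join(uuid.split('_')[1:-1]): spec['raw']
        for uuid, spec in proof['requested_proof']['revealed_attrs'].items()
        if spec['sub_proof_index'] == sub_index}
print(rv)  # {'Vx4E82R17q5gKJT4eMWeTW:3:CL:16:tag': {'name': 'Alice'}}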
def x(self, value: dict) ->None:
self._config = value or {}
validate_config('verifier', self._config)
|
config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, value: dict) ->None:\n self._config = value or {}\n validate_config('verifier', self._config)\n",
"role": "user"
},
{
"content": "config",
"role": "assistant"
}
] |
async def _build_rr_state_json(self, rr_id: str, timestamp: int) ->(str, int):
"""
Build rev reg state json at a given requested timestamp.
Return rev reg state json and its transaction time on the distributed ledger,
with upper bound at input timestamp of interest.
Raise AbsentRevReg if no revocation registry exists on input rev reg id,
or BadRevStateTime if requested timestamp predates revocation registry creation.
:param rr_id: rev reg id
:param timestamp: timestamp of interest (epoch seconds)
:return: rev reg state json and ledger timestamp (epoch seconds)
"""
LOGGER.debug('_Verifier._build_rr_state_json >>> rr_id: %s, timestamp: %s',
rr_id, timestamp)
if not ok_rev_reg_id(rr_id):
LOGGER.debug('Verifier._build_rr_state_json <!< Bad rev reg id %s',
rr_id)
raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
rr_json = None
ledger_timestamp = None
get_rr_req_json = await ledger.build_get_revoc_reg_request(self.did,
rr_id, timestamp)
resp_json = await self._submit(get_rr_req_json)
resp = json.loads(resp_json)
if resp.get('result', {}).get('data', None) and resp['result']['data'].get(
'value', None):
try:
_, rr_json, ledger_timestamp = (await ledger.
parse_get_revoc_reg_response(resp_json))
except IndyError:
LOGGER.debug(
'Verifier._build_rr_state_json <!< no rev reg exists on %s',
rr_id)
raise AbsentRevReg('No rev reg exists on {}'.format(rr_id))
else:
LOGGER.debug(
'_Verifier._build_rr_state_json <!< Rev reg %s created after asked-for time %s'
, rr_id, timestamp)
raise BadRevStateTime('Rev reg {} created after asked-for time {}'.
format(rr_id, timestamp))
rv = rr_json, ledger_timestamp
LOGGER.debug('_Verifier._build_rr_state_json <<< %s', rv)
return rv
|
build_rr_state_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _build_rr_state_json(self, rr_id: str, timestamp: int) ->(str, int):\n \"\"\"\n Build rev reg state json at a given requested timestamp.\n\n Return rev reg state json and its transaction time on the distributed ledger,\n with upper bound at input timestamp of interest.\n\n Raise AbsentRevReg if no revocation registry exists on input rev reg id,\n or BadRevStateTime if requested timestamp predates revocation registry creation.\n\n :param rr_id: rev reg id\n :param timestamp: timestamp of interest (epoch seconds)\n :return: rev reg state json and ledger timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug('_Verifier._build_rr_state_json >>> rr_id: %s, timestamp: %s',\n rr_id, timestamp)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Verifier._build_rr_state_json <!< Bad rev reg id %s',\n rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n rr_json = None\n ledger_timestamp = None\n get_rr_req_json = await ledger.build_get_revoc_reg_request(self.did,\n rr_id, timestamp)\n resp_json = await self._submit(get_rr_req_json)\n resp = json.loads(resp_json)\n if resp.get('result', {}).get('data', None) and resp['result']['data'].get(\n 'value', None):\n try:\n _, rr_json, ledger_timestamp = (await ledger.\n parse_get_revoc_reg_response(resp_json))\n except IndyError:\n LOGGER.debug(\n 'Verifier._build_rr_state_json <!< no rev reg exists on %s',\n rr_id)\n raise AbsentRevReg('No rev reg exists on {}'.format(rr_id))\n else:\n LOGGER.debug(\n '_Verifier._build_rr_state_json <!< Rev reg %s created after asked-for time %s'\n , rr_id, timestamp)\n raise BadRevStateTime('Rev reg {} created after asked-for time {}'.\n format(rr_id, timestamp))\n rv = rr_json, ledger_timestamp\n LOGGER.debug('_Verifier._build_rr_state_json <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "build_rr_state_json",
"role": "assistant"
}
] |
async def build_proof_req_json(self, cd_id2spec: dict) ->str:
"""
Build and return indy-sdk proof request for input attributes and non-revocation intervals by cred def id.
:param cd_id2spec: dict mapping cred def ids to:
- (optionally) 'attrs': lists of names of attributes of interest (omit for all, empty list or None for none)
- (optionally) '>=': (pred) inclusive int lower-bounds of interest (omit, empty list, or None for none)
- (optionally) '>': (pred) exclusive int lower-bounds of interest (omit, empty list, or None for none)
- (optionally) '<=': (pred) inclusive int upper-bounds of interest (omit, empty list, or None for none)
- (optionally) '<': (pred) exclusive int upper-bounds of interest (omit, empty list, or None for none)
- (optionally), 'interval': either
- (2-tuple) pair of epoch second counts marking 'from' and 'to' timestamps, or
- | single epoch second count to set 'from' and 'to' the same; default
| (now, now) for cred defs supporting revocation or None otherwise; e.g.,
::
{
'Vx4E82R17q...:3:CL:16:tag': {
'attrs': [ # request attrs 'name' and 'favouriteDrink' from this cred def's schema
'name',
'favouriteDrink'
],
'>=': { # request predicate score>=80 from this cred def
'score': 80
                    },
'<=': { # request ranking <=10 from this cred def
'ranking': 10
                    },
'interval': 1528116008 # same instant for all attrs and preds of corresponding schema
},
'R17v42T4pk...:3:CL:19:tag': None, # request all attrs, no preds, default intervals on all attrs
'e3vc5K168n...:3:CL:23:tag': {}, # request all attrs, no preds, default intervals on all attrs
'Z9ccax812j...:3:CL:27:tag': { # request all attrs, no preds, this interval on all attrs
'interval': (1528112408, 1528116008)
},
'9cHbp54C8n...:3:CL:37:tag': { # request no attrs and some predicates; specify interval
'attrs': [], # or equivalently, 'attrs': None
                    '>=': {
                        'employees': '50', # nicety: implementation converts to int for caller
                        'revenue': '10000000', # nicety: implementation converts to int for caller
                        'ebidta': 0
                    },
                    'interval': (1528029608, 1528116008)
},
'6caBcmLi33...:3:CL:41:tag': { # all attrs, one pred, default intervals to now on attrs & pred
'>': {
'regEpoch': 1514782800
}
},
...
}
:return: indy-sdk proof request json
"""
LOGGER.debug('Verifier.build_proof_req_json >>> cd_id2spec: %s', cd_id2spec
)
cd_id2schema = {}
now = int(time())
rv = {'nonce': str(int(time())), 'name': 'proof_req', 'version': '0.0',
'requested_attributes': {}, 'requested_predicates': {}}
for cd_id in cd_id2spec:
if not ok_cred_def_id(cd_id):
LOGGER.debug('Verifier.build_proof_req_json <!< Bad cred def id %s'
, cd_id)
raise BadIdentifier('Bad cred def id {}'.format(cd_id))
interval = None
cred_def = json.loads(await self.get_cred_def(cd_id))
seq_no = cred_def_id2seq_no(cd_id)
cd_id2schema[cd_id] = json.loads(await self.get_schema(seq_no))
if 'revocation' in cred_def['value']:
fro_to = cd_id2spec[cd_id].get('interval', (now, now)
) if cd_id2spec[cd_id] else (now, now)
interval = {'from': fro_to if isinstance(fro_to, int) else min(
fro_to), 'to': fro_to if isinstance(fro_to, int) else max(
fro_to)}
for attr in (cd_id2spec[cd_id].get('attrs', cd_id2schema[cd_id][
'attrNames']) or [] if cd_id2spec[cd_id] else cd_id2schema[
cd_id]['attrNames']):
attr_uuid = '{}_{}_uuid'.format(seq_no, canon(attr))
rv['requested_attributes'][attr_uuid] = {'name': attr,
'restrictions': [{'cred_def_id': cd_id}]}
if interval:
rv['requested_attributes'][attr_uuid]['non_revoked'] = interval
for pred in Predicate:
for attr in (cd_id2spec[cd_id].get(pred.value.math, {}) or {} if
cd_id2spec[cd_id] else {}):
pred_uuid = '{}_{}_{}_uuid'.format(seq_no, canon(attr),
pred.value.fortran)
try:
rv['requested_predicates'][pred_uuid] = {'name': attr,
'p_type': pred.value.math, 'p_value': Predicate.
to_int(cd_id2spec[cd_id][pred.value.math][attr]),
'restrictions': [{'cred_def_id': cd_id}]}
except ValueError:
LOGGER.info(
'cannot build %s predicate on non-int bound %s for %s',
pred.value.fortran, cd_id2spec[cd_id][pred.value.
math][attr], attr)
continue
if interval:
rv['requested_predicates'][pred_uuid]['non_revoked'
] = interval
LOGGER.debug('Verifier.build_proof_req_json <<< %s', json.dumps(rv))
return json.dumps(rv)
|
build_proof_req_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def build_proof_req_json(self, cd_id2spec: dict) ->str:\n \"\"\"\n Build and return indy-sdk proof request for input attributes and non-revocation intervals by cred def id.\n\n :param cd_id2spec: dict mapping cred def ids to:\n\n - (optionally) 'attrs': lists of names of attributes of interest (omit for all, empty list or None for none)\n - (optionally) '>=': (pred) inclusive int lower-bounds of interest (omit, empty list, or None for none)\n - (optionally) '>': (pred) exclusive int lower-bounds of interest (omit, empty list, or None for none)\n - (optionally) '<=': (pred) inclusive int upper-bounds of interest (omit, empty list, or None for none)\n - (optionally) '<': (pred) exclusive int upper-bounds of interest (omit, empty list, or None for none)\n - (optionally), 'interval': either\n - (2-tuple) pair of epoch second counts marking 'from' and 'to' timestamps, or\n - | single epoch second count to set 'from' and 'to' the same; default\n | (now, now) for cred defs supporting revocation or None otherwise; e.g.,\n\n ::\n\n {\n 'Vx4E82R17q...:3:CL:16:tag': {\n 'attrs': [ # request attrs 'name' and 'favouriteDrink' from this cred def's schema\n 'name',\n 'favouriteDrink'\n ],\n '>=': { # request predicate score>=80 from this cred def\n 'score': 80\n }\n '<=': { # request ranking <=10 from this cred def\n 'ranking': 10\n }\n 'interval': 1528116008 # same instant for all attrs and preds of corresponding schema\n },\n 'R17v42T4pk...:3:CL:19:tag': None, # request all attrs, no preds, default intervals on all attrs\n 'e3vc5K168n...:3:CL:23:tag': {}, # request all attrs, no preds, default intervals on all attrs\n 'Z9ccax812j...:3:CL:27:tag': { # request all attrs, no preds, this interval on all attrs\n 'interval': (1528112408, 1528116008)\n },\n '9cHbp54C8n...:3:CL:37:tag': { # request no attrs and some predicates; specify interval\n 'attrs': [], # or equivalently, 'attrs': None\n '>=': {\n 'employees': '50' # nicety: implementation converts to int for caller\n },\n '>=': {\n 'revenue': '10000000' # nicety: implementation converts to int for caller\n 'ebidta': 0\n }\n 'interval': (1528029608, 1528116008)\n },\n '6caBcmLi33...:3:CL:41:tag': { # all attrs, one pred, default intervals to now on attrs & pred\n '>': {\n 'regEpoch': 1514782800\n }\n },\n ...\n }\n\n :return: indy-sdk proof request json\n \"\"\"\n LOGGER.debug('Verifier.build_proof_req_json >>> cd_id2spec: %s', cd_id2spec\n )\n cd_id2schema = {}\n now = int(time())\n rv = {'nonce': str(int(time())), 'name': 'proof_req', 'version': '0.0',\n 'requested_attributes': {}, 'requested_predicates': {}}\n for cd_id in cd_id2spec:\n if not ok_cred_def_id(cd_id):\n LOGGER.debug('Verifier.build_proof_req_json <!< Bad cred def id %s'\n , cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n interval = None\n cred_def = json.loads(await self.get_cred_def(cd_id))\n seq_no = cred_def_id2seq_no(cd_id)\n cd_id2schema[cd_id] = json.loads(await self.get_schema(seq_no))\n if 'revocation' in cred_def['value']:\n fro_to = cd_id2spec[cd_id].get('interval', (now, now)\n ) if cd_id2spec[cd_id] else (now, now)\n interval = {'from': fro_to if isinstance(fro_to, int) else min(\n fro_to), 'to': fro_to if isinstance(fro_to, int) else max(\n fro_to)}\n for attr in (cd_id2spec[cd_id].get('attrs', cd_id2schema[cd_id][\n 'attrNames']) or [] if cd_id2spec[cd_id] else cd_id2schema[\n cd_id]['attrNames']):\n attr_uuid = '{}_{}_uuid'.format(seq_no, canon(attr))\n rv['requested_attributes'][attr_uuid] = {'name': attr,\n 'restrictions': 
[{'cred_def_id': cd_id}]}\n if interval:\n rv['requested_attributes'][attr_uuid]['non_revoked'] = interval\n for pred in Predicate:\n for attr in (cd_id2spec[cd_id].get(pred.value.math, {}) or {} if\n cd_id2spec[cd_id] else {}):\n pred_uuid = '{}_{}_{}_uuid'.format(seq_no, canon(attr),\n pred.value.fortran)\n try:\n rv['requested_predicates'][pred_uuid] = {'name': attr,\n 'p_type': pred.value.math, 'p_value': Predicate.\n to_int(cd_id2spec[cd_id][pred.value.math][attr]),\n 'restrictions': [{'cred_def_id': cd_id}]}\n except ValueError:\n LOGGER.info(\n 'cannot build %s predicate on non-int bound %s for %s',\n pred.value.fortran, cd_id2spec[cd_id][pred.value.\n math][attr], attr)\n continue\n if interval:\n rv['requested_predicates'][pred_uuid]['non_revoked'\n ] = interval\n LOGGER.debug('Verifier.build_proof_req_json <<< %s', json.dumps(rv))\n return json.dumps(rv)\n",
"role": "user"
},
{
"content": "build_proof_req_json",
"role": "assistant"
}
] |
async def load_cache_for_verification(self, archive: bool=False) ->int:
"""
Load schema, cred def, revocation caches; optionally archive enough to go
offline and be able to verify proof on content marked of interest in configuration.
Return timestamp (epoch seconds) of cache load event, also used as subdirectory
for cache archives.
:param archive: True to archive now or False to demur (subclasses may still
need to augment archivable caches further)
:return: cache load event timestamp (epoch seconds)
"""
LOGGER.debug('Verifier.load_cache_for_verification >>> archive: %s',
archive)
rv = int(time())
for s_id in self.config.get('archive-verifier-caches-on-close', {}).get(
'schema_id', {}):
if ok_schema_id(s_id):
with SCHEMA_CACHE.lock:
await self.get_schema(s_id)
else:
LOGGER.info('Not archiving schema for specified bad id %s', s_id)
for cd_id in self.config.get('archive-verifier-caches-on-close', {}).get(
'cred_def_id', {}):
if ok_cred_def_id(cd_id):
with CRED_DEF_CACHE.lock:
await self.get_cred_def(cd_id)
else:
LOGGER.info('Not archiving cred def for specified bad id %s', cd_id
)
for rr_id in self.config.get('archive-verifier-caches-on-close', {}).get(
'rev_reg_id', {}):
if ok_rev_reg_id(rr_id):
await self.get_rev_reg_def(rr_id)
with REVO_CACHE.lock:
revo_cache_entry = REVO_CACHE.get(rr_id, None)
if revo_cache_entry:
try:
await revo_cache_entry.get_state_json(self.
_build_rr_state_json, rv, rv)
except ClosedPool:
LOGGER.warning(
'Verifier %s is offline from pool %s, cannot update revo cache reg state for %s to %s'
, self.name, self.pool.name, rr_id, rv)
except AbsentPool:
LOGGER.warning(
'Verifier %s has no pool, cannot update revo cache reg state for %s to %s'
, self.name, rr_id, rv)
else:
LOGGER.info('Not archiving rev reg for specified bad id %s', rr_id)
if archive:
ArchivableCaches.archive(self.dir_cache)
LOGGER.debug('Verifier.load_cache_for_verification <<< %s', rv)
return rv
|
load_cache_for_verification
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def load_cache_for_verification(self, archive: bool=False) ->int:\n \"\"\"\n Load schema, cred def, revocation caches; optionally archive enough to go\n offline and be able to verify proof on content marked of interest in configuration.\n\n Return timestamp (epoch seconds) of cache load event, also used as subdirectory\n for cache archives.\n\n :param archive: True to archive now or False to demur (subclasses may still\n need to augment archivable caches further)\n :return: cache load event timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug('Verifier.load_cache_for_verification >>> archive: %s',\n archive)\n rv = int(time())\n for s_id in self.config.get('archive-verifier-caches-on-close', {}).get(\n 'schema_id', {}):\n if ok_schema_id(s_id):\n with SCHEMA_CACHE.lock:\n await self.get_schema(s_id)\n else:\n LOGGER.info('Not archiving schema for specified bad id %s', s_id)\n for cd_id in self.config.get('archive-verifier-caches-on-close', {}).get(\n 'cred_def_id', {}):\n if ok_cred_def_id(cd_id):\n with CRED_DEF_CACHE.lock:\n await self.get_cred_def(cd_id)\n else:\n LOGGER.info('Not archiving cred def for specified bad id %s', cd_id\n )\n for rr_id in self.config.get('archive-verifier-caches-on-close', {}).get(\n 'rev_reg_id', {}):\n if ok_rev_reg_id(rr_id):\n await self.get_rev_reg_def(rr_id)\n with REVO_CACHE.lock:\n revo_cache_entry = REVO_CACHE.get(rr_id, None)\n if revo_cache_entry:\n try:\n await revo_cache_entry.get_state_json(self.\n _build_rr_state_json, rv, rv)\n except ClosedPool:\n LOGGER.warning(\n 'Verifier %s is offline from pool %s, cannot update revo cache reg state for %s to %s'\n , self.name, self.pool.name, rr_id, rv)\n except AbsentPool:\n LOGGER.warning(\n 'Verifier %s has no pool, cannot update revo cache reg state for %s to %s'\n , self.name, rr_id, rv)\n else:\n LOGGER.info('Not archiving rev reg for specified bad id %s', rr_id)\n if archive:\n ArchivableCaches.archive(self.dir_cache)\n LOGGER.debug('Verifier.load_cache_for_verification <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "load_cache_for_verification",
"role": "assistant"
}
] |
async def open(self) ->'Verifier':
"""
Explicit entry. Perform ancestor opening operations,
then parse cache from archive if so configured, and
synchronize revocation registry to tails tree content.
:return: current object
"""
LOGGER.debug('Verifier.open >>>')
await super().open()
if self.config.get('parse-caches-on-open', False):
ArchivableCaches.parse(self.dir_cache)
LOGGER.debug('Verifier.open <<<')
return self
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def open(self) ->'Verifier':\n \"\"\"\n Explicit entry. Perform ancestor opening operations,\n then parse cache from archive if so configured, and\n synchronize revocation registry to tails tree content.\n\n :return: current object\n \"\"\"\n LOGGER.debug('Verifier.open >>>')\n await super().open()\n if self.config.get('parse-caches-on-open', False):\n ArchivableCaches.parse(self.dir_cache)\n LOGGER.debug('Verifier.open <<<')\n return self\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
async def close(self) ->None:
"""
Explicit exit. If so configured, populate cache to prove for any creds on schemata,
cred defs, and rev regs marked of interest in configuration at initialization,
archive cache, and purge prior cache archives.
    :return: None
"""
LOGGER.debug('Verifier.close >>>')
if self.config.get('archive-verifier-caches-on-close', {}):
await self.load_cache_for_verification(True)
ArchivableCaches.purge_archives(self.dir_cache, True)
await BaseAnchor.close(self)
LOGGER.debug('Verifier.close <<<')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def close(self) ->None:\n \"\"\"\n Explicit exit. If so configured, populate cache to prove for any creds on schemata,\n cred defs, and rev regs marked of interest in configuration at initialization,\n archive cache, and purge prior cache archives.\n\n :return: current object\n \"\"\"\n LOGGER.debug('Verifier.close >>>')\n if self.config.get('archive-verifier-caches-on-close', {}):\n await self.load_cache_for_verification(True)\n ArchivableCaches.purge_archives(self.dir_cache, True)\n await BaseAnchor.close(self)\n LOGGER.debug('Verifier.close <<<')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
def x(proof_req: dict, proof: dict) ->bool:
    LOGGER.debug('Verifier.check_encoding >>> proof_req: %s, proof: %s',
proof_req, proof)
cd_id2proof_id = {}
p_preds = {}
for idx in range(len(proof['identifiers'])):
cd_id = proof['identifiers'][idx]['cred_def_id']
cd_id2proof_id[cd_id] = idx
p_preds[cd_id] = {ge_proof['predicate']['attr_name']: ge_proof[
'predicate']['value'] for ge_proof in proof['proof']['proofs'][
idx]['primary_proof']['ge_proofs']}
for uuid, req_attr in proof_req['requested_attributes'].items():
canon_attr = canon(req_attr['name'])
proof_ident_idx = cd_id2proof_id[req_attr['restrictions'][0][
'cred_def_id']]
enco = proof['proof']['proofs'][proof_ident_idx]['primary_proof'][
'eq_proof']['revealed_attrs'].get(canon_attr)
if not enco:
continue
if enco != proof['requested_proof']['revealed_attrs'][uuid]['encoded']:
LOGGER.debug('Verifier.check_proof_encoding <<< False')
return False
if enco != encode(proof['requested_proof']['revealed_attrs'][uuid][
'raw']):
LOGGER.debug('Verifier.check_proof_encoding <<< False')
return False
for uuid, req_pred in proof_req['requested_predicates'].items():
canon_attr = canon(req_pred['name'])
if p_preds[req_pred['restrictions'][0]['cred_def_id']].get(canon_attr
) != req_pred['p_value']:
LOGGER.debug('Verifier.check_proof_encoding <<< False')
return False
LOGGER.debug('Verifier.check_proof_encoding <<< True')
return True
|
check_encoding
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(proof_req: dict, proof: dict) ->bool:\n LOGGER.debug('Verifier.check_encoding <<< proof_req: %s, proof: %s',\n proof_req, proof)\n cd_id2proof_id = {}\n p_preds = {}\n for idx in range(len(proof['identifiers'])):\n cd_id = proof['identifiers'][idx]['cred_def_id']\n cd_id2proof_id[cd_id] = idx\n p_preds[cd_id] = {ge_proof['predicate']['attr_name']: ge_proof[\n 'predicate']['value'] for ge_proof in proof['proof']['proofs'][\n idx]['primary_proof']['ge_proofs']}\n for uuid, req_attr in proof_req['requested_attributes'].items():\n canon_attr = canon(req_attr['name'])\n proof_ident_idx = cd_id2proof_id[req_attr['restrictions'][0][\n 'cred_def_id']]\n enco = proof['proof']['proofs'][proof_ident_idx]['primary_proof'][\n 'eq_proof']['revealed_attrs'].get(canon_attr)\n if not enco:\n continue\n if enco != proof['requested_proof']['revealed_attrs'][uuid]['encoded']:\n LOGGER.debug('Verifier.check_proof_encoding <<< False')\n return False\n if enco != encode(proof['requested_proof']['revealed_attrs'][uuid][\n 'raw']):\n LOGGER.debug('Verifier.check_proof_encoding <<< False')\n return False\n for uuid, req_pred in proof_req['requested_predicates'].items():\n canon_attr = canon(req_pred['name'])\n if p_preds[req_pred['restrictions'][0]['cred_def_id']].get(canon_attr\n ) != req_pred['p_value']:\n LOGGER.debug('Verifier.check_proof_encoding <<< False')\n return False\n LOGGER.debug('Verifier.check_proof_encoding <<< True')\n return True\n",
"role": "user"
},
{
"content": "check_encoding",
"role": "assistant"
}
] |
async def verify_proof(self, proof_req: dict, proof: dict) ->str:
    """
    Verify proof as Verifier. Raise AbsentRevReg if a proof cites a revocation registry
    that does not exist on the distributed ledger.

    :param proof_req: proof request as Verifier creates, as per proof_req_json above
    :param proof: proof as HolderProver creates
    :return: json encoded True if proof is valid; False if not
    """
    LOGGER.debug('Verifier.verify_proof >>> proof_req: %s, proof: %s',
        proof_req, proof)
    if not Verifier.check_encoding(proof_req, proof):
        LOGGER.info(
            'Proof encoding does not cross-reference with proof request %s: failing verification'
            , proof_req.get('nonce', '(missing nonce)'))
        LOGGER.debug('Verifier.verify_proof <<< "False"')
        return json.dumps(False)

    async def _set_schema(s_id: str) ->None:
        nonlocal s_id2schema
        if not ok_schema_id(s_id):
            LOGGER.debug('Verifier.verify_proof <!< Bad schema id %s', s_id)
            raise BadIdentifier('Bad schema id {}'.format(s_id))
        if s_id not in s_id2schema:
            schema = json.loads(await self.get_schema(s_id))
            if not schema:
                LOGGER.debug(
                    'Verifier.verify_proof <!< absent schema %s, proof req may be for another ledger'
                    , s_id)
                raise AbsentSchema(
                    'Absent schema {}, proof req may be for another ledger'
                    .format(s_id))
            s_id2schema[s_id] = schema

    async def _set_cred_def(cd_id: str) ->None:
        nonlocal cd_id2cred_def
        if not ok_cred_def_id(cd_id):
            LOGGER.debug('Verifier.verify_proof <!< Bad cred def id %s', cd_id)
            raise BadIdentifier('Bad cred def id {}'.format(cd_id))
        if cd_id not in cd_id2cred_def:
            cd_id2cred_def[cd_id] = json.loads(await self.get_cred_def(cd_id))

    async def _set_rev_reg_def(rr_id: str) ->bool:
        """
        Return true to continue to timestamp setting, false to short-circuit
        """
        nonlocal rr_id2rr_def
        if not rr_id:
            return False
        if not ok_rev_reg_id(rr_id):
            LOGGER.debug('Verifier.verify_proof <!< Bad rev reg id %s', rr_id)
            raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
        if rr_id not in rr_id2rr_def:
            rr_id2rr_def[rr_id] = json.loads(await self.get_rev_reg_def(rr_id))
        return True

    async def _set_timestamp(rr_id: str, timestamp: int) ->None:
        nonlocal rr_id2rr
        with REVO_CACHE.lock:
            revo_cache_entry = REVO_CACHE.get(rr_id, None)
            rr_json, _ = await revo_cache_entry.get_state_json(self.
                _build_rr_state_json, timestamp, timestamp)
        if rr_id not in rr_id2rr:
            rr_id2rr[rr_id] = {}
        rr_id2rr[rr_id][timestamp] = json.loads(rr_json)
    s_id2schema = {}
    cd_id2cred_def = {}
    rr_id2rr_def = {}
    rr_id2rr = {}
    proof_ids = proof['identifiers']
    for proof_id in proof_ids:
        await _set_schema(proof_id['schema_id'])
        await _set_cred_def(proof_id['cred_def_id'])
        rr_id = proof_id['rev_reg_id']
        if await _set_rev_reg_def(rr_id):
            await _set_timestamp(rr_id, proof_id['timestamp'])
    rv = json.dumps(await anoncreds.verifier_verify_proof(json.dumps(
        proof_req), json.dumps(proof), json.dumps(s_id2schema), json.dumps(
        cd_id2cred_def), json.dumps(rr_id2rr_def), json.dumps(rr_id2rr)))
    LOGGER.debug('Verifier.verify_proof <<< %s', rv)
    return rv
|
verify_proof
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def verify_proof(self, proof_req: dict, proof: dict) ->str:\n \"\"\"\n Verify proof as Verifier. Raise AbsentRevReg if a proof cites a revocation registry\n that does not exist on the distributed ledger.\n\n :param proof_req: proof request as Verifier creates, as per proof_req_json above\n :param proof: proof as HolderProver creates\n :return: json encoded True if proof is valid; False if not\n \"\"\"\n LOGGER.debug('Verifier.verify_proof >>> proof_req: %s, proof: %s',\n proof_req, proof)\n if not Verifier.check_encoding(proof_req, proof):\n LOGGER.info(\n 'Proof encoding does not cross-reference with proof request %s: failing verification'\n , proof_req.get('nonce', '(missing nonce)'))\n LOGGER.debug('Verifier.verify_proof <<< \"False\"')\n return json.dumps(False)\n\n async def _set_schema(s_id: str) ->None:\n nonlocal s_id2schema\n if not ok_schema_id(s_id):\n LOGGER.debug('Verifier.verify_proof <!< Bad schema id %s', s_id)\n raise BadIdentifier('Bad schema id {}'.format(s_id))\n if s_id not in s_id2schema:\n schema = json.loads(await self.get_schema(s_id))\n if not schema:\n LOGGER.debug(\n 'Verifier.verify_proof <!< absent schema %s, proof req may be for another ledger'\n , s_id)\n raise AbsentSchema(\n 'Absent schema {}, proof req may be for another ledger'\n .format(s_id))\n s_id2schema[s_id] = schema\n\n async def _set_cred_def(cd_id: str) ->None:\n nonlocal cd_id2cred_def\n if not ok_cred_def_id(cd_id):\n LOGGER.debug('Verifier.verify_proof <!< Bad cred def id %s', cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n if cd_id not in cd_id2cred_def:\n cd_id2cred_def[cd_id] = json.loads(await self.get_cred_def(cd_id))\n\n async def _set_rev_reg_def(rr_id: str) ->bool:\n \"\"\"\n Return true to continue to timestamp setting, false to short-circuit\n \"\"\"\n nonlocal rr_id2rr_def\n if not rr_id:\n return False\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Verifier.verify_proof <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n if rr_id not in rr_id2rr_def:\n rr_id2rr_def[rr_id] = json.loads(await self.get_rev_reg_def(rr_id))\n return True\n\n async def _set_timestamp(rr_id: str, timestamp: int) ->None:\n nonlocal rr_id2rr\n with REVO_CACHE.lock:\n revo_cache_entry = REVO_CACHE.get(rr_id, None)\n rr_json, _ = await revo_cache_entry.get_state_json(self.\n _build_rr_state_json, timestamp, timestamp)\n if rr_id not in rr_id2rr:\n rr_id2rr[rr_id] = {}\n rr_id2rr[rr_id][timestamp] = json.loads(rr_json)\n s_id2schema = {}\n cd_id2cred_def = {}\n rr_id2rr_def = {}\n rr_id2rr = {}\n proof_ids = proof['identifiers']\n for proof_id in proof_ids:\n await _set_schema(proof_id['schema_id'])\n await _set_cred_def(proof_id['cred_def_id'])\n rr_id = proof_id['rev_reg_id']\n if await _set_rev_reg_def(rr_id):\n await _set_timestamp(rr_id, proof_id['timestamp'])\n rv = json.dumps(await anoncreds.verifier_verify_proof(json.dumps(\n proof_req), json.dumps(proof), json.dumps(s_id2schema), json.dumps(\n cd_id2cred_def), json.dumps(rr_id2rr_def), json.dumps(rr_id2rr)))\n LOGGER.debug('Verifier.verify_proof <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "verify_proof",
"role": "assistant"
}
] |
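A hedged usage sketch: the anchor returns a JSON-encoded boolean string, so callers decode it before branching. The verifier variable and the wrapping coroutine are illustrative.

import json

async def accept_presentation(verifier, proof_req: dict, proof: dict) -> bool:
    # verify_proof() returns 'true' or 'false' as a JSON string,
    # never a Python bool directly.
    return json.loads(await verifier.verify_proof(proof_req, proof))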
def x(value: str) ->'Protocol':
    for pktype in PublicKeyType:
        if value in (pktype.ver_type, pktype.authn_type):
            return pktype
    return None
|
get
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(value: str) ->'Protocol':\n for pktype in PublicKeyType:\n if value in (pktype.ver_type, pktype.authn_type):\n return pktype\n return None\n",
"role": "user"
},
{
"content": "get",
"role": "assistant"
}
] |
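The reverse lookup above scans an enum whose members expose both a verification type and an authentication type. A self-contained sketch of that shape, with a single illustrative member standing in for the real PublicKeyType:

from enum import Enum

class KeyType(Enum):  # illustrative stand-in for PublicKeyType
    ED25519_SIG_2018 = ('Ed25519VerificationKey2018',
        'Ed25519SignatureAuthentication2018')

    @property
    def ver_type(self) -> str:
        return self.value[0]

    @property
    def authn_type(self) -> str:
        return self.value[1]

    @staticmethod
    def get(value: str) -> 'KeyType':
        # Match either type string back to its enum member, as above.
        for pktype in KeyType:
            if value in (pktype.ver_type, pktype.authn_type):
                return pktype
        return None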
def x(self):
    return {'id': self.id, 'type': str(self.type.ver_type), 'controller':
        canon_ref(self.did, self.controller), **self.type.specification(
        self.value)}
|
to_dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n return {'id': self.id, 'type': str(self.type.ver_type), 'controller':\n canon_ref(self.did, self.controller), **self.type.specification(\n self.value)}\n",
"role": "user"
},
{
"content": "to_dict",
"role": "assistant"
}
] |
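For orientation, the dict this builds is one verification-method entry of a DID document. All values below are illustrative, and the trailing key-material field comes from the key type's specification() helper, so its name varies by type:

{
    'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1',
    'type': 'Ed25519VerificationKey2018',
    'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
    'publicKeyBase58': '6MkHaXU2...'  # illustrative field name and value
}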
async def main(wallet_name: str) ->None:
    """
    Main line for revocation registry builder operating in external process on behalf of issuer agent.

    :param wallet_name: wallet name - must match that of issuer with existing wallet
    """
    logging.basicConfig(level=logging.WARN, format=
        '%(levelname)-8s | %(name)-12s | %(message)s')
    logging.getLogger('indy').setLevel(logging.ERROR)
    path_start = join(RevRegBuilder.dir_tails_sentinel(wallet_name), '.start')
    with open(path_start, 'r') as fh_start:
        start_data = json.loads(fh_start.read())
    remove(path_start)
    logging.getLogger(__name__).setLevel(start_data['logging']['level'])
    for path_log in start_data['logging']['paths']:
        logging.getLogger(__name__).addHandler(logging.FileHandler(path_log))
    wallet = WalletManager().get({'id': wallet_name, 'storage_type':
        start_data['wallet']['storage_type'], **start_data['wallet'][
        'config']}, access=start_data['wallet']['access_creds'].get('key',
        None))
    async with wallet, RevRegBuilder(wallet, rrbx=True) as rrban:
        await rrban.serve()
|
main
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def main(wallet_name: str) ->None:\n \"\"\"\n Main line for revocation registry builder operating in external process on behalf of issuer agent.\n\n :param wallet_name: wallet name - must match that of issuer with existing wallet\n \"\"\"\n logging.basicConfig(level=logging.WARN, format=\n '%(levelname)-8s | %(name)-12s | %(message)s')\n logging.getLogger('indy').setLevel(logging.ERROR)\n path_start = join(RevRegBuilder.dir_tails_sentinel(wallet_name), '.start')\n with open(path_start, 'r') as fh_start:\n start_data = json.loads(fh_start.read())\n remove(path_start)\n logging.getLogger(__name__).setLevel(start_data['logging']['level'])\n for path_log in start_data['logging']['paths']:\n logging.getLogger(__name__).addHandler(logging.FileHandler(path_log))\n wallet = WalletManager().get({'id': wallet_name, 'storage_type':\n start_data['wallet']['storage_type'], **start_data['wallet'][\n 'config']}, access=start_data['wallet']['access_creds'].get('key',\n None))\n async with wallet, RevRegBuilder(wallet, rrbx=True) as rrban:\n await rrban.serve()\n",
"role": "user"
},
{
"content": "main",
"role": "assistant"
}
] |
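main() expects to run as its own process. A typical entry-point stub, as a sketch assuming the wallet name arrives as the sole command-line argument:

import asyncio
import sys

if __name__ == '__main__':
    # Run the builder's main line to completion in this process.
    asyncio.get_event_loop().run_until_complete(main(sys.argv[1]))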
def x(self) ->str:
    rv = {'logging': {'paths': []}, 'wallet': {}}
    logger = LOGGER
    while not logger.level:
        logger = logger.parent
        if logger is None:
            break
    rv['logging']['level'] = logger.level
    logger = LOGGER
    log_paths = [realpath(h.baseFilename) for h in logger.handlers if
        hasattr(h, 'baseFilename')]
    while not log_paths:
        logger = logger.parent
        if logger is None:
            break
        log_paths = [realpath(h.baseFilename) for h in logger.handlers if
            hasattr(h, 'baseFilename')]
    for log_path in log_paths:
        rv['logging']['paths'].append(log_path)
    rv['wallet']['storage_type'] = self.wallet.storage_type
    rv['wallet']['config'] = self.wallet.config
    rv['wallet']['access_creds'] = self.wallet.access_creds
    return json.dumps(rv)
|
start_data_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self) ->str:\n rv = {'logging': {'paths': []}, 'wallet': {}}\n logger = LOGGER\n while not logger.level:\n logger = logger.parent\n if logger is None:\n break\n rv['logging']['level'] = logger.level\n logger = LOGGER\n log_paths = [realpath(h.baseFilename) for h in logger.handlers if\n hasattr(h, 'baseFilename')]\n while not log_paths:\n logger = logger.parent\n if logger is None:\n break\n log_paths = [realpath(h.baseFilename) for h in logger.handlers if\n hasattr(h, 'baseFilename')]\n for log_path in log_paths:\n rv['logging']['paths'].append(log_path)\n rv['wallet']['storage_type'] = self.wallet.storage_type\n rv['wallet']['config'] = self.wallet.config\n rv['wallet']['access_creds'] = self.wallet.access_creds\n return json.dumps(rv)\n",
"role": "user"
},
{
"content": "start_data_json",
"role": "assistant"
}
] |
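The JSON built here is the other half of the handoff that main() above consumes: the issuer-side process writes it to the .start sentinel before spawning the builder, which reads and deletes it. A sketch of that write, with write_start_sentinel as a hypothetical helper name:

from os.path import join

def write_start_sentinel(rrb, wallet_name: str) -> None:
    # rrb is assumed to expose start_data_json() plus the
    # dir_tails_sentinel() class method used elsewhere in this module.
    path_start = join(rrb.dir_tails_sentinel(wallet_name), '.start')
    with open(path_start, 'w') as fh_start:
        fh_start.write(rrb.start_data_json())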
def x(wallet_name: str) ->_STATE:
    dir_sentinel = RevRegBuilder.dir_tails_sentinel(wallet_name)
    file_pid = join(dir_sentinel, '.pid')
    file_start = join(dir_sentinel, '.start')
    file_stop = join(dir_sentinel, '.stop')
    if isfile(file_stop):
        return _STATE.STOPPING
    if isfile(file_start) or isfile(file_pid):
        return _STATE.RUNNING
    return _STATE.ABSENT
|
get_state
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(wallet_name: str) ->_STATE:\n dir_sentinel = RevRegBuilder.dir_tails_sentinel(wallet_name)\n file_pid = join(dir_sentinel, '.pid')\n file_start = join(dir_sentinel, '.start')\n file_stop = join(dir_sentinel, '.stop')\n if isfile(file_stop):\n return _STATE.STOPPING\n if isfile(file_start) or isfile(file_pid):\n return _STATE.RUNNING\n return _STATE.ABSENT\n",
"role": "user"
},
{
"content": "get_state",
"role": "assistant"
}
] |
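A usage sketch, assuming get_state is exposed as a static method on RevRegBuilder (as its use of dir_tails_sentinel suggests) and that _STATE is importable from the same module: launch logic can branch on the sentinel files without touching the builder process itself.

def safe_to_launch(wallet_name: str) -> bool:
    # Only start a fresh external builder when no sentinel files remain.
    return RevRegBuilder.get_state(wallet_name) is _STATE.ABSENT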