| source_code (string, lengths 45–7.24k) | function_name (string, lengths 1–111) | messages (list) |
|---|---|---|
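Each row pairs an anonymized function body (its name replaced by `x`) with the ground-truth name, and the `messages` column restates the same pair in chat format. A minimal sketch of how one such record could be assembled; the helper name `make_record` is hypothetical, and only the fixed system-prompt string and the three-message layout are taken from the rows below:

```python
SYSTEM_PROMPT = ("Given the source code of a python function, "
                 "suggest a fitting name for the function.")

def make_record(source_code: str, function_name: str) -> dict:
    # Illustrative helper (not part of the dataset tooling): builds one row
    # in the schema shown above from a code string and its true name.
    return {
        "source_code": source_code,
        "function_name": function_name,
        "messages": [
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": source_code},
            {"role": "assistant", "content": function_name},
        ],
    }
```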
def x(name, mode=None, driver=None, libver=None, userblock_size=None, **kwargs
):
import sys
import os
from h5py import h5p
from h5py._hl import files
try:
name = name.encode(sys.getfilesystemencoding())
except (UnicodeError, LookupError):
pass
exists = os.path.exists(name)
try:
fcpl = h5p.create(h5p.FILE_CREATE)
fcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED | h5p.
CRT_ORDER_INDEXED)
except AttributeError:
fp = files.File(name, mode=mode, driver=driver, libver=libver, **kwargs
)
else:
fapl = files.make_fapl(driver, libver, **kwargs)
fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))
if not exists and fp.mode == 'r+':
set_attributes(fp, arf_library='python', arf_library_version=
__version__, arf_version=spec_version)
return fp
|
open_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(name, mode=None, driver=None, libver=None, userblock_size=None, **kwargs\n ):\n import sys\n import os\n from h5py import h5p\n from h5py._hl import files\n try:\n name = name.encode(sys.getfilesystemencoding())\n except (UnicodeError, LookupError):\n pass\n exists = os.path.exists(name)\n try:\n fcpl = h5p.create(h5p.FILE_CREATE)\n fcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED | h5p.\n CRT_ORDER_INDEXED)\n except AttributeError:\n fp = files.File(name, mode=mode, driver=driver, libver=libver, **kwargs\n )\n else:\n fapl = files.make_fapl(driver, libver, **kwargs)\n fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))\n if not exists and fp.mode == 'r+':\n set_attributes(fp, arf_library='python', arf_library_version=\n __version__, arf_version=spec_version)\n return fp\n",
"role": "user"
},
{
"content": "open_file",
"role": "assistant"
}
] |
def x(group, name, timestamp, **attributes):
from h5py import h5p, h5g, _hl
try:
gcpl = h5p.create(h5p.GROUP_CREATE)
gcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED | h5p.
CRT_ORDER_INDEXED)
except AttributeError:
grp = group.create_group(name)
else:
name, lcpl = group._e(name, lcpl=True)
grp = _hl.group.Group(h5g.create(group.id, name, lcpl=lcpl, gcpl=gcpl))
set_uuid(grp, attributes.pop('uuid', None))
set_attributes(grp, timestamp=convert_timestamp(timestamp), **attributes)
return grp
|
create_entry
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(group, name, timestamp, **attributes):\n from h5py import h5p, h5g, _hl\n try:\n gcpl = h5p.create(h5p.GROUP_CREATE)\n gcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED | h5p.\n CRT_ORDER_INDEXED)\n except AttributeError:\n grp = group.create_group(name)\n else:\n name, lcpl = group._e(name, lcpl=True)\n grp = _hl.group.Group(h5g.create(group.id, name, lcpl=lcpl, gcpl=gcpl))\n set_uuid(grp, attributes.pop('uuid', None))\n set_attributes(grp, timestamp=convert_timestamp(timestamp), **attributes)\n return grp\n",
"role": "user"
},
{
"content": "create_entry",
"role": "assistant"
}
] |
def x(group, name, data, units='', datatype=DataTypes.UNDEFINED, chunks=
True, maxshape=None, compression=None, **attributes):
from numpy import asarray
srate = attributes.get('sampling_rate', None)
if not hasattr(data, 'dtype'):
data = asarray(data)
if data.dtype.kind in ('S', 'O', 'U'):
raise ValueError(
'data must be in array with numeric or compound type')
if data.dtype.kind == 'V':
if 'start' not in data.dtype.names:
raise ValueError("complex event data requires 'start' field")
if not isinstance(units, (list, tuple)):
raise ValueError('complex event data requires sequence of units')
if not len(units) == len(data.dtype.names):
raise ValueError("number of units doesn't match number of fields")
if units == '':
if srate is None or not srate > 0:
raise ValueError(
'unitless data assumed time series and requires sampling_rate attribute'
)
elif units == 'samples':
if srate is None or not srate > 0:
raise ValueError(
"data with units of 'samples' requires sampling_rate attribute"
)
dset = group.create_dataset(name, data=data, maxshape=maxshape, chunks=
chunks, compression=compression)
set_attributes(dset, units=units, datatype=datatype, **attributes)
return dset
|
create_dataset
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(group, name, data, units='', datatype=DataTypes.UNDEFINED, chunks=\n True, maxshape=None, compression=None, **attributes):\n from numpy import asarray\n srate = attributes.get('sampling_rate', None)\n if not hasattr(data, 'dtype'):\n data = asarray(data)\n if data.dtype.kind in ('S', 'O', 'U'):\n raise ValueError(\n 'data must be in array with numeric or compound type')\n if data.dtype.kind == 'V':\n if 'start' not in data.dtype.names:\n raise ValueError(\"complex event data requires 'start' field\")\n if not isinstance(units, (list, tuple)):\n raise ValueError('complex event data requires sequence of units')\n if not len(units) == len(data.dtype.names):\n raise ValueError(\"number of units doesn't match number of fields\")\n if units == '':\n if srate is None or not srate > 0:\n raise ValueError(\n 'unitless data assumed time series and requires sampling_rate attribute'\n )\n elif units == 'samples':\n if srate is None or not srate > 0:\n raise ValueError(\n \"data with units of 'samples' requires sampling_rate attribute\"\n )\n dset = group.create_dataset(name, data=data, maxshape=maxshape, chunks=\n chunks, compression=compression)\n set_attributes(dset, units=units, datatype=datatype, **attributes)\n return dset\n",
"role": "user"
},
{
"content": "create_dataset",
"role": "assistant"
}
] |
def x(group, name, dtype, **attributes):
dset = group.create_dataset(name, shape=(0,), dtype=dtype, maxshape=(None,)
)
set_attributes(dset, **attributes)
return dset
|
create_table
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(group, name, dtype, **attributes):\n dset = group.create_dataset(name, shape=(0,), dtype=dtype, maxshape=(None,)\n )\n set_attributes(dset, **attributes)\n return dset\n",
"role": "user"
},
{
"content": "create_table",
"role": "assistant"
}
] |
def x(dset, data):
N = data.shape[0] if hasattr(data, 'shape') else 1
if N == 0:
return
oldlen = dset.shape[0]
newlen = oldlen + N
dset.resize(newlen, axis=0)
dset[oldlen:] = data
|
append_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(dset, data):\n N = data.shape[0] if hasattr(data, 'shape') else 1\n if N == 0:\n return\n oldlen = dset.shape[0]\n newlen = oldlen + N\n dset.resize(newlen, axis=0)\n dset[oldlen:] = data\n",
"role": "user"
},
{
"content": "append_data",
"role": "assistant"
}
] |
def x(file):
from distutils.version import StrictVersion as Version
try:
ver = file.attrs.get('arf_version', None)
if ver is None:
ver = file.attrs['arf_library_version']
except KeyError:
raise UserWarning(
'Unable to determine ARF version for {0.filename};created by another program?'
.format(file))
try:
ver = ver.decode('ascii')
except (LookupError, AttributeError):
pass
file_version = Version(ver)
if file_version < Version('1.1'):
raise DeprecationWarning(
'ARF library {} may have trouble reading file version {} (< 1.1)'
.format(version, file_version))
elif file_version >= Version('3.0'):
raise FutureWarning(
'ARF library {} may be incompatible with file version {} (>= 3.0)'
.format(version, file_version))
return file_version
|
check_file_version
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file):\n from distutils.version import StrictVersion as Version\n try:\n ver = file.attrs.get('arf_version', None)\n if ver is None:\n ver = file.attrs['arf_library_version']\n except KeyError:\n raise UserWarning(\n 'Unable to determine ARF version for {0.filename};created by another program?'\n .format(file))\n try:\n ver = ver.decode('ascii')\n except (LookupError, AttributeError):\n pass\n file_version = Version(ver)\n if file_version < Version('1.1'):\n raise DeprecationWarning(\n 'ARF library {} may have trouble reading file version {} (< 1.1)'\n .format(version, file_version))\n elif file_version >= Version('3.0'):\n raise FutureWarning(\n 'ARF library {} may be incompatible with file version {} (>= 3.0)'\n .format(version, file_version))\n return file_version\n",
"role": "user"
},
{
"content": "check_file_version",
"role": "assistant"
}
] |
def x(node, overwrite=True, **attributes):
aset = node.attrs
for k, v in attributes.items():
if not overwrite and k in aset:
pass
elif v is None:
if k in aset:
del aset[k]
else:
aset[k] = v
|
set_attributes
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, overwrite=True, **attributes):\n aset = node.attrs\n for k, v in attributes.items():\n if not overwrite and k in aset:\n pass\n elif v is None:\n if k in aset:\n del aset[k]\n else:\n aset[k] = v\n",
"role": "user"
},
{
"content": "set_attributes",
"role": "assistant"
}
] |
def x(group):
from h5py import h5
out = []
try:
group._id.links.iterate(out.append, idx_type=h5.INDEX_CRT_ORDER,
order=h5.ITER_INC)
except (AttributeError, RuntimeError):
def x(name):
if name.find(b'/', 1) == -1:
out.append(name)
group._id.links.visit(f, idx_type=h5.INDEX_CRT_ORDER, order=h5.ITER_INC
)
return map(group._d, out)
|
keys_by_creation
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(group):\n from h5py import h5\n out = []\n try:\n group._id.links.iterate(out.append, idx_type=h5.INDEX_CRT_ORDER,\n order=h5.ITER_INC)\n except (AttributeError, RuntimeError):\n\n def x(name):\n if name.find(b'/', 1) == -1:\n out.append(name)\n group._id.links.visit(f, idx_type=h5.INDEX_CRT_ORDER, order=h5.ITER_INC\n )\n return map(group._d, out)\n",
"role": "user"
},
{
"content": "keys_by_creation",
"role": "assistant"
}
] |
def x(obj):
import numbers
from datetime import datetime
from time import mktime, struct_time
from numpy import zeros
out = zeros(2, dtype='int64')
if isinstance(obj, datetime):
out[0] = mktime(obj.timetuple())
out[1] = obj.microsecond
elif isinstance(obj, struct_time):
out[0] = mktime(obj)
elif isinstance(obj, numbers.Integral):
out[0] = obj
elif isinstance(obj, numbers.Real):
out[0] = obj
out[1] = (obj - out[0]) * 1000000.0
else:
try:
out[:2] = obj[:2]
except:
raise TypeError('unable to convert %s to timestamp' % obj)
return out
|
convert_timestamp
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj):\n import numbers\n from datetime import datetime\n from time import mktime, struct_time\n from numpy import zeros\n out = zeros(2, dtype='int64')\n if isinstance(obj, datetime):\n out[0] = mktime(obj.timetuple())\n out[1] = obj.microsecond\n elif isinstance(obj, struct_time):\n out[0] = mktime(obj)\n elif isinstance(obj, numbers.Integral):\n out[0] = obj\n elif isinstance(obj, numbers.Real):\n out[0] = obj\n out[1] = (obj - out[0]) * 1000000.0\n else:\n try:\n out[:2] = obj[:2]\n except:\n raise TypeError('unable to convert %s to timestamp' % obj)\n return out\n",
"role": "user"
},
{
"content": "convert_timestamp",
"role": "assistant"
}
] |
def x(timestamp):
from datetime import datetime, timedelta
obj = datetime.fromtimestamp(timestamp[0])
return obj + timedelta(microseconds=int(timestamp[1]))
|
timestamp_to_datetime
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(timestamp):\n from datetime import datetime, timedelta\n obj = datetime.fromtimestamp(timestamp[0])\n return obj + timedelta(microseconds=int(timestamp[1]))\n",
"role": "user"
},
{
"content": "timestamp_to_datetime",
"role": "assistant"
}
] |
def x(obj, uuid=None):
from uuid import uuid4, UUID
if uuid is None:
uuid = uuid4()
elif isinstance(uuid, bytes):
if len(uuid) == 16:
uuid = UUID(bytes=uuid)
else:
uuid = UUID(hex=uuid)
if 'uuid' in obj.attrs:
del obj.attrs['uuid']
obj.attrs.create('uuid', str(uuid).encode('ascii'), dtype='|S36')
|
set_uuid
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj, uuid=None):\n from uuid import uuid4, UUID\n if uuid is None:\n uuid = uuid4()\n elif isinstance(uuid, bytes):\n if len(uuid) == 16:\n uuid = UUID(bytes=uuid)\n else:\n uuid = UUID(hex=uuid)\n if 'uuid' in obj.attrs:\n del obj.attrs['uuid']\n obj.attrs.create('uuid', str(uuid).encode('ascii'), dtype='|S36')\n",
"role": "user"
},
{
"content": "set_uuid",
"role": "assistant"
}
] |
def x(obj):
from uuid import UUID
try:
uuid = obj.attrs['uuid']
except KeyError:
return UUID(int=0)
try:
uuid = uuid.decode('ascii')
except (LookupError, AttributeError):
pass
return UUID(uuid)
|
get_uuid
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj):\n from uuid import UUID\n try:\n uuid = obj.attrs['uuid']\n except KeyError:\n return UUID(int=0)\n try:\n uuid = uuid.decode('ascii')\n except (LookupError, AttributeError):\n pass\n return UUID(uuid)\n",
"role": "user"
},
{
"content": "get_uuid",
"role": "assistant"
}
] |
def x(obj, type=None):
if type is None:
return len(obj)
else:
return sum(1 for x in obj if obj.get(x, getclass=True) is type)
|
count_children
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj, type=None):\n if type is None:\n return len(obj)\n else:\n return sum(1 for x in obj if obj.get(x, getclass=True) is type)\n",
"role": "user"
},
{
"content": "count_children",
"role": "assistant"
}
] |
def x(cls):
return dict((getattr(cls, attr), attr) for attr in dir(cls) if not attr
.startswith('_'))
|
todict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls):\n return dict((getattr(cls, attr), attr) for attr in dir(cls) if not attr\n .startswith('_'))\n",
"role": "user"
},
{
"content": "todict",
"role": "assistant"
}
] |
def x(template_name, fields=None):
template = None
if not template_name.endswith('.html'):
template_name = '%s.html' % template_name
here = '%s/cli/app/templates' % get_installdir()
template_path = '%s/%s' % (here, template_name)
if os.path.exists(template_path):
template = ''.join(read_file(template_path))
if fields is not None:
for tag, sub in fields.items():
template = template.replace(tag, sub)
return template
|
get_template
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(template_name, fields=None):\n template = None\n if not template_name.endswith('.html'):\n template_name = '%s.html' % template_name\n here = '%s/cli/app/templates' % get_installdir()\n template_path = '%s/%s' % (here, template_name)\n if os.path.exists(template_path):\n template = ''.join(read_file(template_path))\n if fields is not None:\n for tag, sub in fields.items():\n template = template.replace(tag, sub)\n return template\n",
"role": "user"
},
{
"content": "get_template",
"role": "assistant"
}
] |
def x(container1=None, packages_set=None, by=None):
if by == None:
by = ['files.txt']
if not isinstance(by, list):
by = [by]
if not isinstance(packages_set, list):
packages_set = [packages_set]
comparisons = dict()
for b in by:
bot.debug('Starting comparisons for %s' % b)
df = pandas.DataFrame(columns=packages_set)
for package2 in packages_set:
sim = calculate_similarity(container1=container1,
image_package2=package2, by=b)[b]
name1 = os.path.basename(package2).replace('.img.zip', '')
bot.debug('container vs. %s: %s' % (name1, sim))
df.loc['container', package2] = sim
df.columns = [os.path.basename(x).replace('.img.zip', '') for x in
df.columns.tolist()]
comparisons[b] = df
return comparisons
|
container_similarity_vector
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container1=None, packages_set=None, by=None):\n if by == None:\n by = ['files.txt']\n if not isinstance(by, list):\n by = [by]\n if not isinstance(packages_set, list):\n packages_set = [packages_set]\n comparisons = dict()\n for b in by:\n bot.debug('Starting comparisons for %s' % b)\n df = pandas.DataFrame(columns=packages_set)\n for package2 in packages_set:\n sim = calculate_similarity(container1=container1,\n image_package2=package2, by=b)[b]\n name1 = os.path.basename(package2).replace('.img.zip', '')\n bot.debug('container vs. %s: %s' % (name1, sim))\n df.loc['container', package2] = sim\n df.columns = [os.path.basename(x).replace('.img.zip', '') for x in\n df.columns.tolist()]\n comparisons[b] = df\n return comparisons\n",
"role": "user"
},
{
"content": "container_similarity_vector",
"role": "assistant"
}
] |
def x(image_paths1, image_paths2=None):
repeat = False
if image_paths2 is None:
image_paths2 = image_paths1
repeat = True
if not isinstance(image_paths1, list):
image_paths1 = [image_paths1]
if not isinstance(image_paths2, list):
image_paths2 = [image_paths2]
dfs = pandas.DataFrame(index=image_paths1, columns=image_paths2)
comparisons_done = []
for image1 in image_paths1:
fileobj1, tar1 = get_image_tar(image1)
members1 = [x.name for x in tar1]
for image2 in image_paths2:
comparison_id = [image1, image2]
comparison_id.sort()
comparison_id = ''.join(comparison_id)
if comparison_id not in comparisons_done:
if image1 == image2:
sim = 1.0
else:
fileobj2, tar2 = get_image_tar(image2)
members2 = [x.name for x in tar2]
c = compare_lists(members1, members2)
sim = information_coefficient(c['total1'], c['total2'],
c['intersect'])
delete_image_tar(fileobj2, tar2)
dfs.loc[image1, image2] = sim
if repeat:
dfs.loc[image2, image1] = sim
comparisons_done.append(comparison_id)
delete_image_tar(fileobj1, tar1)
return dfs
|
compare_singularity_images
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_paths1, image_paths2=None):\n repeat = False\n if image_paths2 is None:\n image_paths2 = image_paths1\n repeat = True\n if not isinstance(image_paths1, list):\n image_paths1 = [image_paths1]\n if not isinstance(image_paths2, list):\n image_paths2 = [image_paths2]\n dfs = pandas.DataFrame(index=image_paths1, columns=image_paths2)\n comparisons_done = []\n for image1 in image_paths1:\n fileobj1, tar1 = get_image_tar(image1)\n members1 = [x.name for x in tar1]\n for image2 in image_paths2:\n comparison_id = [image1, image2]\n comparison_id.sort()\n comparison_id = ''.join(comparison_id)\n if comparison_id not in comparisons_done:\n if image1 == image2:\n sim = 1.0\n else:\n fileobj2, tar2 = get_image_tar(image2)\n members2 = [x.name for x in tar2]\n c = compare_lists(members1, members2)\n sim = information_coefficient(c['total1'], c['total2'],\n c['intersect'])\n delete_image_tar(fileobj2, tar2)\n dfs.loc[image1, image2] = sim\n if repeat:\n dfs.loc[image2, image1] = sim\n comparisons_done.append(comparison_id)\n delete_image_tar(fileobj1, tar1)\n return dfs\n",
"role": "user"
},
{
"content": "compare_singularity_images",
"role": "assistant"
}
] |
def x(container1=None, container2=None):
container1_guts = get_container_contents(split_delim='\n', container=
container1)['all']
container2_guts = get_container_contents(split_delim='\n', container=
container2)['all']
return compare_lists(container1_guts, container2_guts)
|
compare_containers
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container1=None, container2=None):\n container1_guts = get_container_contents(split_delim='\\n', container=\n container1)['all']\n container2_guts = get_container_contents(split_delim='\\n', container=\n container2)['all']\n return compare_lists(container1_guts, container2_guts)\n",
"role": "user"
},
{
"content": "compare_containers",
"role": "assistant"
}
] |
def x(list1, list2):
intersect = list(set(list1).intersection(list2))
unique1 = list(set(list1).difference(list2))
unique2 = list(set(list2).difference(list1))
comparison = {'intersect': intersect, 'unique1': unique1, 'unique2':
unique2, 'total1': len(list1), 'total2': len(list2)}
return comparison
|
compare_lists
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(list1, list2):\n intersect = list(set(list1).intersection(list2))\n unique1 = list(set(list1).difference(list2))\n unique2 = list(set(list2).difference(list1))\n comparison = {'intersect': intersect, 'unique1': unique1, 'unique2':\n unique2, 'total1': len(list1), 'total2': len(list2)}\n return comparison\n",
"role": "user"
},
{
"content": "compare_lists",
"role": "assistant"
}
] |
def x(container1=None, container2=None, comparison=None, metric=None):
if metric is None:
metric = information_coefficient
if comparison == None:
comparison = compare_containers(container1=container1, container2=
container2)
return metric(total1=comparison['total1'], total2=comparison['total2'],
intersect=comparison['intersect'])
|
calculate_similarity
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container1=None, container2=None, comparison=None, metric=None):\n if metric is None:\n metric = information_coefficient\n if comparison == None:\n comparison = compare_containers(container1=container1, container2=\n container2)\n return metric(total1=comparison['total1'], total2=comparison['total2'],\n intersect=comparison['intersect'])\n",
"role": "user"
},
{
"content": "calculate_similarity",
"role": "assistant"
}
] |
def x(root=None, name=None):
if name is None:
name = platform.node()
if root is None:
root = '/'
tmpdir = tempfile.mkdtemp()
image = '%s/%s.tgz' % (tmpdir, name)
print('Preparing to package root %s into %s' % (root, name))
cmd = ['tar', '--one-file-system', '-czvSf', image, root, '--exclude',
image]
output = run_command(cmd)
return image
|
package_node
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(root=None, name=None):\n if name is None:\n name = platform.node()\n if root is None:\n root = '/'\n tmpdir = tempfile.mkdtemp()\n image = '%s/%s.tgz' % (tmpdir, name)\n print('Preparing to package root %s into %s' % (root, name))\n cmd = ['tar', '--one-file-system', '-czvSf', image, root, '--exclude',\n image]\n output = run_command(cmd)\n return image\n",
"role": "user"
},
{
"content": "package_node",
"role": "assistant"
}
] |
def x(image_path, name=None, output_folder=None, size=None):
if not image_path.endswith('.tgz'):
bot.error(
'The image_path should end with .tgz. Did you create with package_node?'
)
sys.exit(1)
if output_folder is None:
output_folder = os.path.dirname(os.path.abspath(image_path))
image_name = os.path.basename(image_path)
if name is None:
name = image_name.replace('.tgz', '.img')
if not name.endswith('.img'):
name = '%s.img' % name
bot.debug('Preparing to unpack %s to %s.' % (image_name, name))
unpacked_image = '%s/%s' % (output_folder, name)
if not os.path.exists(unpacked_image):
os.mkdir(unpacked_image)
cmd = ['gunzip', '-dc', image_path, '|', 'sudo', 'singularity',
'import', unpacked_image]
output = run_command(cmd)
return unpacked_image
|
unpack_node
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, name=None, output_folder=None, size=None):\n if not image_path.endswith('.tgz'):\n bot.error(\n 'The image_path should end with .tgz. Did you create with package_node?'\n )\n sys.exit(1)\n if output_folder is None:\n output_folder = os.path.dirname(os.path.abspath(image_path))\n image_name = os.path.basename(image_path)\n if name is None:\n name = image_name.replace('.tgz', '.img')\n if not name.endswith('.img'):\n name = '%s.img' % name\n bot.debug('Preparing to unpack %s to %s.' % (image_name, name))\n unpacked_image = '%s/%s' % (output_folder, name)\n if not os.path.exists(unpacked_image):\n os.mkdir(unpacked_image)\n cmd = ['gunzip', '-dc', image_path, '|', 'sudo', 'singularity',\n 'import', unpacked_image]\n output = run_command(cmd)\n return unpacked_image\n",
"role": "user"
},
{
"content": "unpack_node",
"role": "assistant"
}
] |
def x(template_name, params=None, to_file=None):
base = get_installdir()
template_folder = '%s/build/scripts' % base
template_file = '%s/%s' % (template_folder, template_name)
if os.path.exists(template_file):
bot.debug('Found template %s' % template_file)
if to_file is not None:
shutil.copyfile(template_file, to_file)
bot.debug('Template file saved to %s' % to_file)
return to_file
content = ''.join(read_file(template_file))
return content
else:
bot.warning('Template %s not found.' % template_file)
|
get_build_template
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(template_name, params=None, to_file=None):\n base = get_installdir()\n template_folder = '%s/build/scripts' % base\n template_file = '%s/%s' % (template_folder, template_name)\n if os.path.exists(template_file):\n bot.debug('Found template %s' % template_file)\n if to_file is not None:\n shutil.copyfile(template_file, to_file)\n bot.debug('Template file saved to %s' % to_file)\n return to_file\n content = ''.join(read_file(template_file))\n return content\n else:\n bot.warning('Template %s not found.' % template_file)\n",
"role": "user"
},
{
"content": "get_build_template",
"role": "assistant"
}
] |
def x(file_path, verbose=True):
mime_types = {'xls': 'application/vnd.ms-excel', 'xlsx':
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'xml': 'text/xml', 'ods':
'application/vnd.oasis.opendocument.spreadsheet', 'csv':
'text/plain', 'tmpl': 'text/plain', 'pdf': 'application/pdf', 'php':
'application/x-httpd-php', 'jpg': 'image/jpeg', 'png': 'image/png',
'gif': 'image/gif', 'bmp': 'image/bmp', 'txt': 'text/plain', 'doc':
'application/msword', 'js': 'text/js', 'swf':
'application/x-shockwave-flash', 'mp3': 'audio/mpeg', 'zip':
'application/zip', 'simg': 'application/zip', 'rar':
'application/rar', 'tar': 'application/tar', 'arj':
'application/arj', 'cab': 'application/cab', 'html': 'text/html',
'htm': 'text/html', 'default': 'application/octet-stream', 'folder':
'application/vnd.google-apps.folder', 'img': 'application/octet-stream'
}
ext = os.path.basename(file_path).split('.')[-1]
mime_type = mime_types.get(ext, None)
if mime_type == None:
mime_type = mime_types['txt']
if verbose == True:
bot.info('%s --> %s' % (file_path, mime_type))
return mime_type
|
sniff_extension
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, verbose=True):\n mime_types = {'xls': 'application/vnd.ms-excel', 'xlsx':\n 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',\n 'xml': 'text/xml', 'ods':\n 'application/vnd.oasis.opendocument.spreadsheet', 'csv':\n 'text/plain', 'tmpl': 'text/plain', 'pdf': 'application/pdf', 'php':\n 'application/x-httpd-php', 'jpg': 'image/jpeg', 'png': 'image/png',\n 'gif': 'image/gif', 'bmp': 'image/bmp', 'txt': 'text/plain', 'doc':\n 'application/msword', 'js': 'text/js', 'swf':\n 'application/x-shockwave-flash', 'mp3': 'audio/mpeg', 'zip':\n 'application/zip', 'simg': 'application/zip', 'rar':\n 'application/rar', 'tar': 'application/tar', 'arj':\n 'application/arj', 'cab': 'application/cab', 'html': 'text/html',\n 'htm': 'text/html', 'default': 'application/octet-stream', 'folder':\n 'application/vnd.google-apps.folder', 'img': 'application/octet-stream'\n }\n ext = os.path.basename(file_path).split('.')[-1]\n mime_type = mime_types.get(ext, None)\n if mime_type == None:\n mime_type = mime_types['txt']\n if verbose == True:\n bot.info('%s --> %s' % (file_path, mime_type))\n return mime_type\n",
"role": "user"
},
{
"content": "sniff_extension",
"role": "assistant"
}
] |
def x(script_name):
install_dir = get_installdir()
script_path = '%s/build/scripts/%s' % (install_dir, script_name)
if os.path.exists(script_path):
return script_path
else:
bot.error('Script %s is not included in singularity-python!' %
script_path)
return None
|
get_script
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(script_name):\n install_dir = get_installdir()\n script_path = '%s/build/scripts/%s' % (install_dir, script_name)\n if os.path.exists(script_path):\n return script_path\n else:\n bot.error('Script %s is not included in singularity-python!' %\n script_path)\n return None\n",
"role": "user"
},
{
"content": "get_script",
"role": "assistant"
}
] |
def x(file_list, zip_name, output_folder=None):
tmpdir = tempfile.mkdtemp()
output_zip = '%s/%s' % (tmpdir, zip_name)
zf = zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True
)
for filename, content in file_list.items():
bot.debug('Adding %s to package...' % filename)
if filename.lower() == 'files':
if not isinstance(content, list):
content = [content]
for copyfile in content:
zf.write(copyfile, os.path.basename(copyfile))
os.remove(copyfile)
else:
output_file = '%s/%s' % (tmpdir, filename)
if isinstance(content, list):
write_file(output_file, '\n'.join(content))
elif isinstance(content, dict):
write_json(content, output_file)
elif isinstance(content, bytes):
write_file(output_file, content.decode('utf-8'))
else:
output_file = write_file(output_file, content)
if os.path.exists(output_file):
zf.write(output_file, filename)
os.remove(output_file)
zf.close()
if output_folder is not None:
shutil.copyfile(output_zip, '%s/%s' % (output_folder, zip_name))
shutil.rmtree(tmpdir)
output_zip = '%s/%s' % (output_folder, zip_name)
return output_zip
|
zip_up
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_list, zip_name, output_folder=None):\n tmpdir = tempfile.mkdtemp()\n output_zip = '%s/%s' % (tmpdir, zip_name)\n zf = zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True\n )\n for filename, content in file_list.items():\n bot.debug('Adding %s to package...' % filename)\n if filename.lower() == 'files':\n if not isinstance(content, list):\n content = [content]\n for copyfile in content:\n zf.write(copyfile, os.path.basename(copyfile))\n os.remove(copyfile)\n else:\n output_file = '%s/%s' % (tmpdir, filename)\n if isinstance(content, list):\n write_file(output_file, '\\n'.join(content))\n elif isinstance(content, dict):\n write_json(content, output_file)\n elif isinstance(content, bytes):\n write_file(output_file, content.decode('utf-8'))\n else:\n output_file = write_file(output_file, content)\n if os.path.exists(output_file):\n zf.write(output_file, filename)\n os.remove(output_file)\n zf.close()\n if output_folder is not None:\n shutil.copyfile(output_zip, '%s/%s' % (output_folder, zip_name))\n shutil.rmtree(tmpdir)\n output_zip = '%s/%s' % (output_folder, zip_name)\n return output_zip\n",
"role": "user"
},
{
"content": "zip_up",
"role": "assistant"
}
] |
def x(container, split_delim=None):
guts = dict()
SINGULARITY_HUB = os.environ.get('SINGULARITY_HUB', 'False')
if SINGULARITY_HUB == 'False':
file_obj, tar = get_image_tar(container)
guts = extract_guts(image_path=container, tar=tar)
delete_image_tar(file_obj, tar)
elif container == None:
guts = load_package(image_package, get=gets)
else:
for sfile in container.files:
for gut_key in gets:
if os.path.basename(sfile['name']) == gut_key:
if split_delim == None:
guts[gut_key] = requests.get(sfile['mediaLink']).text
else:
guts[gut_key] = requests.get(sfile['mediaLink']
).text.split(split_delim)
return guts
|
get_container_contents
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container, split_delim=None):\n guts = dict()\n SINGULARITY_HUB = os.environ.get('SINGULARITY_HUB', 'False')\n if SINGULARITY_HUB == 'False':\n file_obj, tar = get_image_tar(container)\n guts = extract_guts(image_path=container, tar=tar)\n delete_image_tar(file_obj, tar)\n elif container == None:\n guts = load_package(image_package, get=gets)\n else:\n for sfile in container.files:\n for gut_key in gets:\n if os.path.basename(sfile['name']) == gut_key:\n if split_delim == None:\n guts[gut_key] = requests.get(sfile['mediaLink']).text\n else:\n guts[gut_key] = requests.get(sfile['mediaLink']\n ).text.split(split_delim)\n return guts\n",
"role": "user"
},
{
"content": "get_container_contents",
"role": "assistant"
}
] |
def x(image_path, version=None, levels=None):
if levels is None:
levels = get_levels(version=version)
hashes = dict()
for level_name, level_filter in levels.items():
hashes[level_name] = get_image_hash(image_path, level_filter=
level_filter)
return hashes
|
get_image_hashes
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, version=None, levels=None):\n if levels is None:\n levels = get_levels(version=version)\n hashes = dict()\n for level_name, level_filter in levels.items():\n hashes[level_name] = get_image_hash(image_path, level_filter=\n level_filter)\n return hashes\n",
"role": "user"
},
{
"content": "get_image_hashes",
"role": "assistant"
}
] |
def x(image_path, level=None, level_filter=None, include_files=None,
skip_files=None, version=None):
if level_filter is not None:
file_filter = level_filter
elif level is None:
file_filter = get_level('RECIPE', version=version, include_files=
include_files, skip_files=skip_files)
else:
file_filter = get_level(level, version=version, skip_files=
skip_files, include_files=include_files)
file_obj, tar = get_image_tar(image_path)
hasher = hashlib.md5()
for member in tar:
member_name = member.name.replace('.', '', 1)
if member.isdir() or member.issym():
continue
elif assess_content(member, file_filter):
content = extract_content(image_path, member.name)
hasher.update(content)
elif include_file(member, file_filter):
buf = member.tobuf()
hasher.update(buf)
digest = hasher.hexdigest()
try:
file_obj.close()
except:
tar.close()
if os.path.exists(file_obj):
os.remove(file_obj)
return digest
|
get_image_hash
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, level=None, level_filter=None, include_files=None,\n skip_files=None, version=None):\n if level_filter is not None:\n file_filter = level_filter\n elif level is None:\n file_filter = get_level('RECIPE', version=version, include_files=\n include_files, skip_files=skip_files)\n else:\n file_filter = get_level(level, version=version, skip_files=\n skip_files, include_files=include_files)\n file_obj, tar = get_image_tar(image_path)\n hasher = hashlib.md5()\n for member in tar:\n member_name = member.name.replace('.', '', 1)\n if member.isdir() or member.issym():\n continue\n elif assess_content(member, file_filter):\n content = extract_content(image_path, member.name)\n hasher.update(content)\n elif include_file(member, file_filter):\n buf = member.tobuf()\n hasher.update(buf)\n digest = hasher.hexdigest()\n try:\n file_obj.close()\n except:\n tar.close()\n if os.path.exists(file_obj):\n os.remove(file_obj)\n return digest\n",
"role": "user"
},
{
"content": "get_image_hash",
"role": "assistant"
}
] |
def x(image_path, level=None, regexp=None, include_files=None, tag_root=
True, level_filter=None, skip_files=None, version=None, include_sizes=True
):
if level_filter is not None:
file_filter = level_filter
elif level is None:
file_filter = get_level('REPLICATE', version=version, skip_files=
skip_files, include_files=include_files)
else:
file_filter = get_level(level, version=version, skip_files=
skip_files, include_files=include_files)
file_obj, tar = get_image_tar(image_path)
results = extract_guts(image_path=image_path, tar=tar, file_filter=
file_filter, tag_root=tag_root, include_sizes=include_sizes)
delete_image_tar(file_obj, tar)
return results
|
get_content_hashes
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, level=None, regexp=None, include_files=None, tag_root=\n True, level_filter=None, skip_files=None, version=None, include_sizes=True\n ):\n if level_filter is not None:\n file_filter = level_filter\n elif level is None:\n file_filter = get_level('REPLICATE', version=version, skip_files=\n skip_files, include_files=include_files)\n else:\n file_filter = get_level(level, version=version, skip_files=\n skip_files, include_files=include_files)\n file_obj, tar = get_image_tar(image_path)\n results = extract_guts(image_path=image_path, tar=tar, file_filter=\n file_filter, tag_root=tag_root, include_sizes=include_sizes)\n delete_image_tar(file_obj, tar)\n return results\n",
"role": "user"
},
{
"content": "get_content_hashes",
"role": "assistant"
}
] |
def x(image_path):
hasher = hashlib.md5()
with open(image_path, 'rb') as f:
for chunk in iter(lambda : f.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
get_image_file_hash
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path):\n hasher = hashlib.md5()\n with open(image_path, 'rb') as f:\n for chunk in iter(lambda : f.read(4096), b''):\n hasher.update(chunk)\n return hasher.hexdigest()\n",
"role": "user"
},
{
"content": "get_image_file_hash",
"role": "assistant"
}
] |
def x(container=None, container_subtract=None, image_package=None,
image_package_subtract=None, comparison=None):
if comparison == None:
comparison = compare_containers(container1=container, container2=
container_subtract, image_package1=image_package,
image_package2=image_package_subtract, by=['files.txt',
'folders.txt'])
files = comparison['files.txt']['unique1']
folders = comparison['folders.txt']['unique1']
tree = make_container_tree(folders=folders, files=files)
return tree
|
container_difference
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container=None, container_subtract=None, image_package=None,\n image_package_subtract=None, comparison=None):\n if comparison == None:\n comparison = compare_containers(container1=container, container2=\n container_subtract, image_package1=image_package,\n image_package2=image_package_subtract, by=['files.txt',\n 'folders.txt'])\n files = comparison['files.txt']['unique1']\n folders = comparison['folders.txt']['unique1']\n tree = make_container_tree(folders=folders, files=files)\n return tree\n",
"role": "user"
},
{
"content": "container_difference",
"role": "assistant"
}
] |
def x(container1=None, container2=None, image_package1=None, image_package2
=None, comparison=None):
if comparison == None:
comparison = compare_containers(container1=container1, container2=
container2, image_package1=image_package1, image_package2=
image_package2, by=['files.txt', 'folders.txt'])
files = comparison['files.txt']['intersect']
folders = comparison['folders.txt']['intersect']
tree = make_container_tree(folders=folders, files=files)
return tree
|
container_similarity
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container1=None, container2=None, image_package1=None, image_package2\n =None, comparison=None):\n if comparison == None:\n comparison = compare_containers(container1=container1, container2=\n container2, image_package1=image_package1, image_package2=\n image_package2, by=['files.txt', 'folders.txt'])\n files = comparison['files.txt']['intersect']\n folders = comparison['folders.txt']['intersect']\n tree = make_container_tree(folders=folders, files=files)\n return tree\n",
"role": "user"
},
{
"content": "container_similarity",
"role": "assistant"
}
] |
def x(container=None, image_package=None):
guts = get_container_contents(container=container, image_package=
image_package, split_delim='\n')
tree = make_container_tree(folders=guts['folders.txt'], files=guts[
'files.txt'])
return tree
|
container_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container=None, image_package=None):\n guts = get_container_contents(container=container, image_package=\n image_package, split_delim='\\n')\n tree = make_container_tree(folders=guts['folders.txt'], files=guts[\n 'files.txt'])\n return tree\n",
"role": "user"
},
{
"content": "container_tree",
"role": "assistant"
}
] |
def x(folders, files, path_delim='/', parse_files=True):
nodes = {}
lookup = {}
count = 1
max_depth = 0
for folder in folders:
if folder != '.':
folder = re.sub('^[.]/', '', folder)
path_components = folder.split(path_delim)
for p in range(len(path_components)):
path_component = path_components[p]
fullpath = path_delim.join(path_components[0:p + 1])
if fullpath not in lookup:
lookup[fullpath] = count
node = {'id': count, 'name': path_component, 'path':
fullpath, 'level': p, 'children': []}
count += 1
if p > max_depth:
max_depth = p
if p == 0:
parent_id = 0
else:
parent_path = path_delim.join(path_components[0:p])
parent_id = lookup[parent_path]
node['parent'] = parent_id
nodes[node['id']] = node
seen = []
graph = []
iters = list(range(max_depth + 1))
iters.reverse()
iters.pop()
for level in iters:
children = {x: y for x, y in nodes.items() if y['level'] == level}
seen = seen + [y['id'] for x, y in children.items()]
nodes = {x: y for x, y in nodes.items() if y['id'] not in seen}
for node_id, child_node in children.items():
if node_id == 0:
graph[node_id] = child_node
else:
parent_id = child_node['parent']
nodes[parent_id]['children'].append(child_node)
for parent, parent_info in nodes.items():
graph.append(parent_info)
graph = {'name': 'base', 'children': graph}
result = {'graph': graph, 'lookup': lookup, 'depth': max_depth + 1}
if parse_files == True:
file_lookup = {}
for filey in files:
filey = re.sub('^[.]/', '', filey)
filepath, filename = os.path.split(filey)
if filepath in lookup:
folder_id = lookup[filepath]
if folder_id in file_lookup:
file_lookup[folder_id].append(filename)
else:
file_lookup[folder_id] = [filename]
elif filepath == '':
if 0 in file_lookup:
file_lookup[0].append(filename)
else:
file_lookup[0] = [filename]
result['files'] = file_lookup
return result
|
make_container_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(folders, files, path_delim='/', parse_files=True):\n nodes = {}\n lookup = {}\n count = 1\n max_depth = 0\n for folder in folders:\n if folder != '.':\n folder = re.sub('^[.]/', '', folder)\n path_components = folder.split(path_delim)\n for p in range(len(path_components)):\n path_component = path_components[p]\n fullpath = path_delim.join(path_components[0:p + 1])\n if fullpath not in lookup:\n lookup[fullpath] = count\n node = {'id': count, 'name': path_component, 'path':\n fullpath, 'level': p, 'children': []}\n count += 1\n if p > max_depth:\n max_depth = p\n if p == 0:\n parent_id = 0\n else:\n parent_path = path_delim.join(path_components[0:p])\n parent_id = lookup[parent_path]\n node['parent'] = parent_id\n nodes[node['id']] = node\n seen = []\n graph = []\n iters = list(range(max_depth + 1))\n iters.reverse()\n iters.pop()\n for level in iters:\n children = {x: y for x, y in nodes.items() if y['level'] == level}\n seen = seen + [y['id'] for x, y in children.items()]\n nodes = {x: y for x, y in nodes.items() if y['id'] not in seen}\n for node_id, child_node in children.items():\n if node_id == 0:\n graph[node_id] = child_node\n else:\n parent_id = child_node['parent']\n nodes[parent_id]['children'].append(child_node)\n for parent, parent_info in nodes.items():\n graph.append(parent_info)\n graph = {'name': 'base', 'children': graph}\n result = {'graph': graph, 'lookup': lookup, 'depth': max_depth + 1}\n if parse_files == True:\n file_lookup = {}\n for filey in files:\n filey = re.sub('^[.]/', '', filey)\n filepath, filename = os.path.split(filey)\n if filepath in lookup:\n folder_id = lookup[filepath]\n if folder_id in file_lookup:\n file_lookup[folder_id].append(filename)\n else:\n file_lookup[folder_id] = [filename]\n elif filepath == '':\n if 0 in file_lookup:\n file_lookup[0].append(filename)\n else:\n file_lookup[0] = [filename]\n result['files'] = file_lookup\n return result\n",
"role": "user"
},
{
"content": "make_container_tree",
"role": "assistant"
}
] |
def x(matrix=None, labels=None, width=25, height=10, title=None, font_size=None
):
from matplotlib import pyplot as plt
from scipy.cluster.hierarchy import dendrogram, linkage
if font_size is None:
font_size = 8.0
from scipy.cluster.hierarchy import cophenet
from scipy.spatial.distance import pdist
if not isinstance(matrix, pandas.DataFrame):
bot.info(
'No pandas DataFrame (matrix) of similarities defined, will use default.'
)
matrix = compare_packages()['files.txt']
title = 'Docker Library Similarity to Base OS'
Z = linkage(matrix, 'ward')
c, coph_dists = cophenet(Z, pdist(matrix))
if labels == None:
labels = matrix.index.tolist()
plt.figure(figsize=(width, height))
if title != None:
plt.title(title)
plt.xlabel('image index')
plt.ylabel('distance')
dendrogram(Z, leaf_rotation=90.0, leaf_font_size=font_size, labels=labels)
return plt
|
make_package_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(matrix=None, labels=None, width=25, height=10, title=None, font_size=None\n ):\n from matplotlib import pyplot as plt\n from scipy.cluster.hierarchy import dendrogram, linkage\n if font_size is None:\n font_size = 8.0\n from scipy.cluster.hierarchy import cophenet\n from scipy.spatial.distance import pdist\n if not isinstance(matrix, pandas.DataFrame):\n bot.info(\n 'No pandas DataFrame (matrix) of similarities defined, will use default.'\n )\n matrix = compare_packages()['files.txt']\n title = 'Docker Library Similarity to Base OS'\n Z = linkage(matrix, 'ward')\n c, coph_dists = cophenet(Z, pdist(matrix))\n if labels == None:\n labels = matrix.index.tolist()\n plt.figure(figsize=(width, height))\n if title != None:\n plt.title(title)\n plt.xlabel('image index')\n plt.ylabel('distance')\n dendrogram(Z, leaf_rotation=90.0, leaf_font_size=font_size, labels=labels)\n return plt\n",
"role": "user"
},
{
"content": "make_package_tree",
"role": "assistant"
}
] |
def x(matrix=None, labels=None):
from scipy.cluster.hierarchy import dendrogram, linkage, to_tree
d3 = None
from scipy.cluster.hierarchy import cophenet
from scipy.spatial.distance import pdist
if isinstance(matrix, pandas.DataFrame):
Z = linkage(matrix, 'ward')
T = to_tree(Z, rd=False)
if labels == None:
labels = matrix.index.tolist()
lookup = dict(zip(range(len(labels)), labels))
dend = dendrogram(Z, no_plot=True, orientation='right',
leaf_rotation=90.0, leaf_font_size=8.0, labels=labels)
d3 = dict(children=[], name='root')
add_node(T, d3)
label_tree(d3['children'][0], lookup)
else:
bot.warning('Please provide data as pandas Data Frame.')
return d3
|
make_interactive_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(matrix=None, labels=None):\n from scipy.cluster.hierarchy import dendrogram, linkage, to_tree\n d3 = None\n from scipy.cluster.hierarchy import cophenet\n from scipy.spatial.distance import pdist\n if isinstance(matrix, pandas.DataFrame):\n Z = linkage(matrix, 'ward')\n T = to_tree(Z, rd=False)\n if labels == None:\n labels = matrix.index.tolist()\n lookup = dict(zip(range(len(labels)), labels))\n dend = dendrogram(Z, no_plot=True, orientation='right',\n leaf_rotation=90.0, leaf_font_size=8.0, labels=labels)\n d3 = dict(children=[], name='root')\n add_node(T, d3)\n label_tree(d3['children'][0], lookup)\n else:\n bot.warning('Please provide data as pandas Data Frame.')\n return d3\n",
"role": "user"
},
{
"content": "make_interactive_tree",
"role": "assistant"
}
] |
def x(node, parent):
newNode = dict(node_id=node.id, children=[])
parent['children'].append(newNode)
if node.left:
add_node(node.left, newNode)
if node.right:
add_node(node.right, newNode)
|
add_node
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, parent):\n newNode = dict(node_id=node.id, children=[])\n parent['children'].append(newNode)\n if node.left:\n add_node(node.left, newNode)\n if node.right:\n add_node(node.right, newNode)\n",
"role": "user"
},
{
"content": "add_node",
"role": "assistant"
}
] |
def x(n, lookup):
if len(n['children']) == 0:
leaves = [lookup[n['node_id']]]
else:
leaves = reduce(lambda ls, c: ls + label_tree(c, lookup), n[
'children'], [])
del n['node_id']
n['name'] = name = '|||'.join(sorted(map(str, leaves)))
return leaves
|
label_tree
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(n, lookup):\n if len(n['children']) == 0:\n leaves = [lookup[n['node_id']]]\n else:\n leaves = reduce(lambda ls, c: ls + label_tree(c, lookup), n[\n 'children'], [])\n del n['node_id']\n n['name'] = name = '|||'.join(sorted(map(str, leaves)))\n return leaves\n",
"role": "user"
},
{
"content": "label_tree",
"role": "assistant"
}
] |
def x(image, app_names):
apps = dict()
if isinstance(app_names, tuple):
app_names = list(app_names)
if not isinstance(app_names, list):
app_names = [app_names]
if len(app_names) == 0:
return apps
for app_name in app_names:
metadata = dict()
try:
inspection = json.loads(Client.inspect(image, app=app_name))
del inspection['data']['attributes']['deffile']
metadata['inspect'] = inspection
except:
pass
apps[app_name] = metadata
return apps
|
extract_apps
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image, app_names):\n apps = dict()\n if isinstance(app_names, tuple):\n app_names = list(app_names)\n if not isinstance(app_names, list):\n app_names = [app_names]\n if len(app_names) == 0:\n return apps\n for app_name in app_names:\n metadata = dict()\n try:\n inspection = json.loads(Client.inspect(image, app=app_name))\n del inspection['data']['attributes']['deffile']\n metadata['inspect'] = inspection\n except:\n pass\n apps[app_name] = metadata\n return apps\n",
"role": "user"
},
{
"content": "extract_apps",
"role": "assistant"
}
] |
def x(cmd, sudo=False):
if sudo is True:
cmd = ['sudo'] + cmd
output = Popen(cmd, stderr=STDOUT, stdout=PIPE)
t = output.communicate()[0], output.returncode
output = {'message': t[0], 'return_code': t[1]}
return output
|
run_command
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cmd, sudo=False):\n if sudo is True:\n cmd = ['sudo'] + cmd\n output = Popen(cmd, stderr=STDOUT, stdout=PIPE)\n t = output.communicate()[0], output.returncode\n output = {'message': t[0], 'return_code': t[1]}\n return output\n",
"role": "user"
},
{
"content": "run_command",
"role": "assistant"
}
] |
def x(repo_url, destination, commit=None):
command = 'git clone %s %s' % (repo_url, destination)
os.system(command)
return destination
|
download_repo
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(repo_url, destination, commit=None):\n command = 'git clone %s %s' % (repo_url, destination)\n os.system(command)\n return destination\n",
"role": "user"
},
{
"content": "download_repo",
"role": "assistant"
}
] |
def x(container=None, search_folders=None, file_list=None, return_unique=True):
if file_list is None:
file_list = get_container_contents(container, split_delim='\n')['all']
if search_folders == None:
search_folders = 'bin'
if not isinstance(search_folders, list):
search_folders = [search_folders]
tags = []
for search_folder in search_folders:
for file_name in file_list:
if search_folder in file_name:
tags.append(file_name)
if return_unique == True:
tags = list(set(tags))
return tags
|
get_tags
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container=None, search_folders=None, file_list=None, return_unique=True):\n if file_list is None:\n file_list = get_container_contents(container, split_delim='\\n')['all']\n if search_folders == None:\n search_folders = 'bin'\n if not isinstance(search_folders, list):\n search_folders = [search_folders]\n tags = []\n for search_folder in search_folders:\n for file_name in file_list:\n if search_folder in file_name:\n tags.append(file_name)\n if return_unique == True:\n tags = list(set(tags))\n return tags\n",
"role": "user"
},
{
"content": "get_tags",
"role": "assistant"
}
] |
def x(container=None, patterns=None, image_package=None, file_list=None):
if file_list is None:
file_list = get_container_contents(container, split_delim='\n')['all']
if patterns == None:
patterns = 'readme'
if not isinstance(patterns, list):
patterns = [patterns]
count = 0
for pattern in patterns:
count += len([x for x in file_list if re.search(pattern.lower(), x.
lower())])
bot.info('Total files matching patterns is %s' % count)
return count
|
file_counts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container=None, patterns=None, image_package=None, file_list=None):\n if file_list is None:\n file_list = get_container_contents(container, split_delim='\\n')['all']\n if patterns == None:\n patterns = 'readme'\n if not isinstance(patterns, list):\n patterns = [patterns]\n count = 0\n for pattern in patterns:\n count += len([x for x in file_list if re.search(pattern.lower(), x.\n lower())])\n bot.info('Total files matching patterns is %s' % count)\n return count\n",
"role": "user"
},
{
"content": "file_counts",
"role": "assistant"
}
] |
def x(container=None, file_list=None, return_counts=True):
if file_list is None:
file_list = get_container_contents(container, split_delim='\n')['all']
extensions = dict()
for item in file_list:
filename, ext = os.path.splitext(item)
if ext == '':
if return_counts == False:
extensions = update_dict(extensions, 'no-extension', item)
else:
extensions = update_dict_sum(extensions, 'no-extension')
elif return_counts == False:
extensions = update_dict(extensions, ext, item)
else:
extensions = update_dict_sum(extensions, ext)
return extensions
|
extension_counts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(container=None, file_list=None, return_counts=True):\n if file_list is None:\n file_list = get_container_contents(container, split_delim='\\n')['all']\n extensions = dict()\n for item in file_list:\n filename, ext = os.path.splitext(item)\n if ext == '':\n if return_counts == False:\n extensions = update_dict(extensions, 'no-extension', item)\n else:\n extensions = update_dict_sum(extensions, 'no-extension')\n elif return_counts == False:\n extensions = update_dict(extensions, ext, item)\n else:\n extensions = update_dict_sum(extensions, ext)\n return extensions\n",
"role": "user"
},
{
"content": "extension_counts",
"role": "assistant"
}
] |
def x(image_file1, image_file2, levels=None, version=None, size_heuristic=
False, guts1=None, guts2=None):
if levels is None:
levels = get_levels(version=version)
reports = dict()
scores = dict()
for level_name, level_filter in levels.items():
contenders = []
different = []
setdiff = []
same = 0
if guts1 is None:
guts1 = get_content_hashes(image_path=image_file1, level_filter
=level_filter)
if guts2 is None:
guts2 = get_content_hashes(image_path=image_file2, level_filter
=level_filter)
print(level_name)
files = list(set(list(guts1['hashes'].keys()) + list(guts2['hashes'
].keys())))
for file_name in files:
if file_name not in guts1['hashes'] or file_name not in guts2[
'hashes']:
setdiff.append(file_name)
elif guts1['hashes'][file_name] == guts2['hashes'][file_name]:
same += 1
elif size_heuristic == True:
if guts1['root_owned'][file_name] or guts2['root_owned'][
file_name]:
if guts1['sizes'][file_name] == guts2['sizes'][file_name]:
same += 1
else:
different.append(file_name)
else:
contenders.append(file_name)
else:
contenders.append(file_name)
if level_name == 'IDENTICAL':
different = different + contenders
elif len(contenders) > 0:
for rogue in contenders:
hashy1 = extract_content(image_file1, rogue, return_hash=True)
hashy2 = extract_content(image_file2, rogue, return_hash=True)
if hashy1 is None or hashy2 is None:
different.append(file_name)
elif len(hashy1) == 0 or len(hashy2) == 0:
if guts1['sizes'][file_name] == guts2['sizes'][file_name]:
same += 1
else:
different.append(file_name)
elif hashy1 != hashy2:
different.append(rogue)
else:
same += 1
union = len(guts1['hashes']) + len(guts2['hashes'])
report = {'difference': setdiff, 'intersect_different': different,
'same': same, 'union': union}
if union == 0:
scores[level_name] = 0
else:
scores[level_name] = 2 * same / union
reports[level_name] = report
reports['scores'] = scores
return reports
|
assess_differences
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_file1, image_file2, levels=None, version=None, size_heuristic=\n False, guts1=None, guts2=None):\n if levels is None:\n levels = get_levels(version=version)\n reports = dict()\n scores = dict()\n for level_name, level_filter in levels.items():\n contenders = []\n different = []\n setdiff = []\n same = 0\n if guts1 is None:\n guts1 = get_content_hashes(image_path=image_file1, level_filter\n =level_filter)\n if guts2 is None:\n guts2 = get_content_hashes(image_path=image_file2, level_filter\n =level_filter)\n print(level_name)\n files = list(set(list(guts1['hashes'].keys()) + list(guts2['hashes'\n ].keys())))\n for file_name in files:\n if file_name not in guts1['hashes'] or file_name not in guts2[\n 'hashes']:\n setdiff.append(file_name)\n elif guts1['hashes'][file_name] == guts2['hashes'][file_name]:\n same += 1\n elif size_heuristic == True:\n if guts1['root_owned'][file_name] or guts2['root_owned'][\n file_name]:\n if guts1['sizes'][file_name] == guts2['sizes'][file_name]:\n same += 1\n else:\n different.append(file_name)\n else:\n contenders.append(file_name)\n else:\n contenders.append(file_name)\n if level_name == 'IDENTICAL':\n different = different + contenders\n elif len(contenders) > 0:\n for rogue in contenders:\n hashy1 = extract_content(image_file1, rogue, return_hash=True)\n hashy2 = extract_content(image_file2, rogue, return_hash=True)\n if hashy1 is None or hashy2 is None:\n different.append(file_name)\n elif len(hashy1) == 0 or len(hashy2) == 0:\n if guts1['sizes'][file_name] == guts2['sizes'][file_name]:\n same += 1\n else:\n different.append(file_name)\n elif hashy1 != hashy2:\n different.append(rogue)\n else:\n same += 1\n union = len(guts1['hashes']) + len(guts2['hashes'])\n report = {'difference': setdiff, 'intersect_different': different,\n 'same': same, 'union': union}\n if union == 0:\n scores[level_name] = 0\n else:\n scores[level_name] = 2 * same / union\n reports[level_name] = report\n reports['scores'] = scores\n return reports\n",
"role": "user"
},
{
"content": "assess_differences",
"role": "assistant"
}
] |
def x(member, file_filter):
member_path = member.name.replace('.', '', 1)
if len(member_path) == 0:
return False
if 'skip_files' in file_filter:
if member_path in file_filter['skip_files']:
return False
if 'include_files' in file_filter:
if member_path in file_filter['include_files']:
return True
if 'regexp' in file_filter:
if re.search(file_filter['regexp'], member_path):
return True
return False
|
include_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(member, file_filter):\n member_path = member.name.replace('.', '', 1)\n if len(member_path) == 0:\n return False\n if 'skip_files' in file_filter:\n if member_path in file_filter['skip_files']:\n return False\n if 'include_files' in file_filter:\n if member_path in file_filter['include_files']:\n return True\n if 'regexp' in file_filter:\n if re.search(file_filter['regexp'], member_path):\n return True\n return False\n",
"role": "user"
},
{
"content": "include_file",
"role": "assistant"
}
] |
def x(member):
if member.uid == 0 or member.gid == 0:
return True
elif member.uname == 'root' or member.gname == 'root':
return True
return False
|
is_root_owned
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(member):\n if member.uid == 0 or member.gid == 0:\n return True\n elif member.uname == 'root' or member.gname == 'root':\n return True\n return False\n",
"role": "user"
},
{
"content": "is_root_owned",
"role": "assistant"
}
] |
def x(member, file_filter):
member_path = member.name.replace('.', '', 1)
if len(member_path) == 0:
return False
if 'skip_files' in file_filter:
if member_path in file_filter['skip_files']:
return False
if 'assess_content' in file_filter:
if member_path in file_filter['assess_content']:
return True
return False
|
assess_content
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(member, file_filter):\n member_path = member.name.replace('.', '', 1)\n if len(member_path) == 0:\n return False\n if 'skip_files' in file_filter:\n if member_path in file_filter['skip_files']:\n return False\n if 'assess_content' in file_filter:\n if member_path in file_filter['assess_content']:\n return True\n return False\n",
"role": "user"
},
{
"content": "assess_content",
"role": "assistant"
}
] |
def x(regexp=None, description=None, skip_files=None, include_files=None):
if regexp == None:
regexp = '.'
if description is None:
description = 'This is a custom filter generated by the user.'
custom = {'description': description, 'regexp': regexp}
if include_files is not None:
if not isinstance(include_files, set):
include_files = set(include_files)
custom['include_files'] = include_files
if skip_files is not None:
if not isinstance(skip_files, set):
skip_files = set(skip_files)
custom['skip_files'] = skip_files
return custom
|
get_custom_level
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(regexp=None, description=None, skip_files=None, include_files=None):\n if regexp == None:\n regexp = '.'\n if description is None:\n description = 'This is a custom filter generated by the user.'\n custom = {'description': description, 'regexp': regexp}\n if include_files is not None:\n if not isinstance(include_files, set):\n include_files = set(include_files)\n custom['include_files'] = include_files\n if skip_files is not None:\n if not isinstance(skip_files, set):\n skip_files = set(skip_files)\n custom['skip_files'] = skip_files\n return custom\n",
"role": "user"
},
{
"content": "get_custom_level",
"role": "assistant"
}
] |
def x(level, version=None, include_files=None, skip_files=None):
levels = get_levels(version=version)
level_names = list(levels.keys())
if level.upper() in level_names:
level = levels[level]
else:
bot.warning('%s is not a valid level. Options are %s' % (level.
upper(), '\n'.join(levels)))
return None
if skip_files is not None:
level = modify_level(level, 'skip_files', skip_files)
if include_files is not None:
level = modify_level(level, 'include_files', include_files)
level = make_level_set(level)
return level
|
get_level
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(level, version=None, include_files=None, skip_files=None):\n levels = get_levels(version=version)\n level_names = list(levels.keys())\n if level.upper() in level_names:\n level = levels[level]\n else:\n bot.warning('%s is not a valid level. Options are %s' % (level.\n upper(), '\\n'.join(levels)))\n return None\n if skip_files is not None:\n level = modify_level(level, 'skip_files', skip_files)\n if include_files is not None:\n level = modify_level(level, 'include_files', include_files)\n level = make_level_set(level)\n return level\n",
"role": "user"
},
{
"content": "get_level",
"role": "assistant"
}
] |
def x(level, field, values, append=True):
field = field.lower()
valid_fields = ['regexp', 'skip_files', 'include_files']
if field not in valid_fields:
bot.warning('%s is not a valid field, skipping. Choices are %s' % (
field, ','.join(valid_fields)))
return level
if append:
if not isinstance(values, list):
values = [values]
if field in level:
level[field] = level[field] + values
else:
level[field] = values
else:
level[field] = values
level = make_level_set(level)
return level
|
modify_level
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(level, field, values, append=True):\n field = field.lower()\n valid_fields = ['regexp', 'skip_files', 'include_files']\n if field not in valid_fields:\n bot.warning('%s is not a valid field, skipping. Choices are %s' % (\n field, ','.join(valid_fields)))\n return level\n if append:\n if not isinstance(values, list):\n values = [values]\n if field in level:\n level[field] = level[field] + values\n else:\n level[field] = values\n else:\n level[field] = values\n level = make_level_set(level)\n return level\n",
"role": "user"
},
{
"content": "modify_level",
"role": "assistant"
}
] |
def x(version=None):
valid_versions = ['2.3', '2.2']
if version is None:
version = '2.3'
version = str(version)
if version not in valid_versions:
bot.error('Unsupported version %s, valid versions are %s' % (
version, ','.join(valid_versions)))
levels_file = os.path.abspath(os.path.join(get_installdir(), 'analysis',
'reproduce', 'data', 'reproduce_levels.json'))
levels = read_json(levels_file)
if version == '2.2':
del levels['LABELS']
levels = make_levels_set(levels)
return levels
|
get_levels
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(version=None):\n valid_versions = ['2.3', '2.2']\n if version is None:\n version = '2.3'\n version = str(version)\n if version not in valid_versions:\n bot.error('Unsupported version %s, valid versions are %s' % (\n version, ','.join(valid_versions)))\n levels_file = os.path.abspath(os.path.join(get_installdir(), 'analysis',\n 'reproduce', 'data', 'reproduce_levels.json'))\n levels = read_json(levels_file)\n if version == '2.2':\n del levels['LABELS']\n levels = make_levels_set(levels)\n return levels\n",
"role": "user"
},
{
"content": "get_levels",
"role": "assistant"
}
] |
def x(levels):
for level_key, level_filters in levels.items():
levels[level_key] = make_level_set(level_filters)
return levels
|
make_levels_set
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(levels):\n for level_key, level_filters in levels.items():\n levels[level_key] = make_level_set(level_filters)\n return levels\n",
"role": "user"
},
{
"content": "make_levels_set",
"role": "assistant"
}
] |
def x(level):
new_level = dict()
for key, value in level.items():
if isinstance(value, list):
new_level[key] = set(value)
else:
new_level[key] = value
return new_level
|
make_level_set
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(level):\n new_level = dict()\n for key, value in level.items():\n if isinstance(value, list):\n new_level[key] = set(value)\n else:\n new_level[key] = value\n return new_level\n",
"role": "user"
},
{
"content": "make_level_set",
"role": "assistant"
}
] |
def x(image_path, tar, file_filter=None, tag_root=True, include_sizes=True):
if file_filter is None:
file_filter = get_level('IDENTICAL')
results = dict()
digest = dict()
allfiles = []
if tag_root:
roots = dict()
if include_sizes:
sizes = dict()
for member in tar:
member_name = member.name.replace('.', '', 1)
allfiles.append(member_name)
included = False
if member.isdir() or member.issym():
continue
elif assess_content(member, file_filter):
digest[member_name] = extract_content(image_path, member.name,
return_hash=True)
included = True
elif include_file(member, file_filter):
hasher = hashlib.md5()
buf = member.tobuf()
hasher.update(buf)
digest[member_name] = hasher.hexdigest()
included = True
if included:
if include_sizes:
sizes[member_name] = member.size
if tag_root:
roots[member_name] = is_root_owned(member)
results['all'] = allfiles
results['hashes'] = digest
if include_sizes:
results['sizes'] = sizes
if tag_root:
results['root_owned'] = roots
return results
|
extract_guts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, tar, file_filter=None, tag_root=True, include_sizes=True):\n if file_filter is None:\n file_filter = get_level('IDENTICAL')\n results = dict()\n digest = dict()\n allfiles = []\n if tag_root:\n roots = dict()\n if include_sizes:\n sizes = dict()\n for member in tar:\n member_name = member.name.replace('.', '', 1)\n allfiles.append(member_name)\n included = False\n if member.isdir() or member.issym():\n continue\n elif assess_content(member, file_filter):\n digest[member_name] = extract_content(image_path, member.name,\n return_hash=True)\n included = True\n elif include_file(member, file_filter):\n hasher = hashlib.md5()\n buf = member.tobuf()\n hasher.update(buf)\n digest[member_name] = hasher.hexdigest()\n included = True\n if included:\n if include_sizes:\n sizes[member_name] = member.size\n if tag_root:\n roots[member_name] = is_root_owned(member)\n results['all'] = allfiles\n results['hashes'] = digest\n if include_sizes:\n results['sizes'] = sizes\n if tag_root:\n results['root_owned'] = roots\n return results\n",
"role": "user"
},
{
"content": "extract_guts",
"role": "assistant"
}
] |
def x(image_path):
byte_array = Client.image.export(image_path)
file_object = io.BytesIO(byte_array)
tar = tarfile.open(mode='r|*', fileobj=file_object)
return file_object, tar
|
get_memory_tar
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path):\n byte_array = Client.image.export(image_path)\n file_object = io.BytesIO(byte_array)\n tar = tarfile.open(mode='r|*', fileobj=file_object)\n return file_object, tar\n",
"role": "user"
},
{
"content": "get_memory_tar",
"role": "assistant"
}
] |
def x(image_path):
bot.debug('Generate file system tar...')
file_obj = Client.image.export(image_path=image_path)
if file_obj is None:
bot.error('Error generating tar, exiting.')
sys.exit(1)
tar = tarfile.open(file_obj)
return file_obj, tar
|
get_image_tar
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path):\n bot.debug('Generate file system tar...')\n file_obj = Client.image.export(image_path=image_path)\n if file_obj is None:\n bot.error('Error generating tar, exiting.')\n sys.exit(1)\n tar = tarfile.open(file_obj)\n return file_obj, tar\n",
"role": "user"
},
{
"content": "get_image_tar",
"role": "assistant"
}
] |
def x(file_obj, tar):
try:
file_obj.close()
except:
tar.close()
if os.path.exists(file_obj):
os.remove(file_obj)
deleted = True
bot.debug('Deleted temporary tar.')
return deleted
|
delete_image_tar
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_obj, tar):\n try:\n file_obj.close()\n except:\n tar.close()\n if os.path.exists(file_obj):\n os.remove(file_obj)\n deleted = True\n bot.debug('Deleted temporary tar.')\n return deleted\n",
"role": "user"
},
{
"content": "delete_image_tar",
"role": "assistant"
}
] |
def x(image_path, member_name, return_hash=False):
if member_name.startswith('./'):
member_name = member_name.replace('.', '', 1)
if return_hash:
hashy = hashlib.md5()
try:
content = Client.execute(image_path, 'cat %s' % member_name)
except:
return None
if not isinstance(content, bytes):
content = content.encode('utf-8')
content = bytes(content)
if len(content) == 0:
return None
if return_hash:
hashy.update(content)
return hashy.hexdigest()
return content
|
extract_content
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image_path, member_name, return_hash=False):\n if member_name.startswith('./'):\n member_name = member_name.replace('.', '', 1)\n if return_hash:\n hashy = hashlib.md5()\n try:\n content = Client.execute(image_path, 'cat %s' % member_name)\n except:\n return None\n if not isinstance(content, bytes):\n content = content.encode('utf-8')\n content = bytes(content)\n if len(content) == 0:\n return None\n if return_hash:\n hashy.update(content)\n return hashy.hexdigest()\n return content\n",
"role": "user"
},
{
"content": "extract_content",
"role": "assistant"
}
] |
def x(build_dir, params, verbose=True):
download_repo(repo_url=params['repo_url'], destination=build_dir)
os.chdir(build_dir)
if params['branch'] != None:
bot.info('Checking out branch %s' % params['branch'])
os.system('git checkout %s' % params['branch'])
else:
params['branch'] = 'master'
Client.debug = params['debug']
if params['commit'] not in [None, '']:
bot.info('Checking out commit %s' % params['commit'])
os.system('git checkout %s .' % params['commit'])
else:
params['commit'] = os.popen('git log -n 1 --pretty=format:"%H"').read()
bot.warning('commit not specified, setting to current %s' % params[
'commit'])
passing_params = '/tmp/params.pkl'
pickle.dump(params, open(passing_params, 'wb'))
if os.path.exists(params['spec_file']):
bot.info('Found spec file %s in repository' % params['spec_file'])
if os.path.islink(params['spec_file']):
bot.info('%s is a symbolic link.' % params['spec_file'])
params['spec_file'] = os.path.realpath(params['spec_file'])
start_time = datetime.now()
image = Client.build(recipe=params['spec_file'], build_folder=
build_dir, isolated=True)
version = get_image_file_hash(image)
params['version'] = version
pickle.dump(params, open(passing_params, 'wb'))
finished_image = '%s/%s.simg' % (os.path.dirname(image), version)
image = shutil.move(image, finished_image)
final_time = (datetime.now() - start_time).seconds
bot.info('Final time of build %s seconds.' % final_time)
test_result = test_container(image)
if test_result['return_code'] != 0:
bot.error('Image failed to build, cancelling.')
sys.exit(1)
singularity_version = Client.version()
Client.debug = False
inspect = Client.inspect(image)
Client.debug = params['debug']
Client.debug = False
app_names = Client.apps(image)
Client.debug = params['debug']
apps = extract_apps(image, app_names)
metrics = {'build_time_seconds': final_time, 'singularity_version':
singularity_version, 'singularity_python_version':
singularity_python_version, 'inspect': inspect, 'version':
version, 'apps': apps}
output = {'image': image, 'metadata': metrics, 'params': params}
return output
else:
present_files = glob('*')
bot.error('Build file %s not found in repository' % params['spec_file']
)
bot.info('Found files are %s' % '\n'.join(present_files))
sys.exit(1)
|
run_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(build_dir, params, verbose=True):\n download_repo(repo_url=params['repo_url'], destination=build_dir)\n os.chdir(build_dir)\n if params['branch'] != None:\n bot.info('Checking out branch %s' % params['branch'])\n os.system('git checkout %s' % params['branch'])\n else:\n params['branch'] = 'master'\n Client.debug = params['debug']\n if params['commit'] not in [None, '']:\n bot.info('Checking out commit %s' % params['commit'])\n os.system('git checkout %s .' % params['commit'])\n else:\n params['commit'] = os.popen('git log -n 1 --pretty=format:\"%H\"').read()\n bot.warning('commit not specified, setting to current %s' % params[\n 'commit'])\n passing_params = '/tmp/params.pkl'\n pickle.dump(params, open(passing_params, 'wb'))\n if os.path.exists(params['spec_file']):\n bot.info('Found spec file %s in repository' % params['spec_file'])\n if os.path.islink(params['spec_file']):\n bot.info('%s is a symbolic link.' % params['spec_file'])\n params['spec_file'] = os.path.realpath(params['spec_file'])\n start_time = datetime.now()\n image = Client.build(recipe=params['spec_file'], build_folder=\n build_dir, isolated=True)\n version = get_image_file_hash(image)\n params['version'] = version\n pickle.dump(params, open(passing_params, 'wb'))\n finished_image = '%s/%s.simg' % (os.path.dirname(image), version)\n image = shutil.move(image, finished_image)\n final_time = (datetime.now() - start_time).seconds\n bot.info('Final time of build %s seconds.' % final_time)\n test_result = test_container(image)\n if test_result['return_code'] != 0:\n bot.error('Image failed to build, cancelling.')\n sys.exit(1)\n singularity_version = Client.version()\n Client.debug = False\n inspect = Client.inspect(image)\n Client.debug = params['debug']\n Client.debug = False\n app_names = Client.apps(image)\n Client.debug = params['debug']\n apps = extract_apps(image, app_names)\n metrics = {'build_time_seconds': final_time, 'singularity_version':\n singularity_version, 'singularity_python_version':\n singularity_python_version, 'inspect': inspect, 'version':\n version, 'apps': apps}\n output = {'image': image, 'metadata': metrics, 'params': params}\n return output\n else:\n present_files = glob('*')\n bot.error('Build file %s not found in repository' % params['spec_file']\n )\n bot.info('Found files are %s' % '\\n'.join(present_files))\n sys.exit(1)\n",
"role": "user"
},
{
"content": "run_build",
"role": "assistant"
}
] |
def x(build_dir, data, secret, response_url=None, clean_up=True):
body = '%s|%s|%s|%s|%s' % (data['container_id'], data['commit'], data[
'branch'], data['token'], data['tag'])
signature = generate_header_signature(secret=secret, payload=body,
request_type='push')
headers = {'Authorization': signature}
if response_url is not None:
finish = requests.post(response_url, data=data, headers=headers)
bot.debug('RECEIVE POST TO SINGULARITY HUB ---------------------')
bot.debug(finish.status_code)
bot.debug(finish.reason)
else:
bot.warning('response_url set to None, skipping sending of build.')
if clean_up == True:
shutil.rmtree(build_dir)
time.sleep(20)
|
send_build_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(build_dir, data, secret, response_url=None, clean_up=True):\n body = '%s|%s|%s|%s|%s' % (data['container_id'], data['commit'], data[\n 'branch'], data['token'], data['tag'])\n signature = generate_header_signature(secret=secret, payload=body,\n request_type='push')\n headers = {'Authorization': signature}\n if response_url is not None:\n finish = requests.post(response_url, data=data, headers=headers)\n bot.debug('RECEIVE POST TO SINGULARITY HUB ---------------------')\n bot.debug(finish.status_code)\n bot.debug(finish.reason)\n else:\n bot.warning('response_url set to None, skipping sending of build.')\n if clean_up == True:\n shutil.rmtree(build_dir)\n time.sleep(20)\n",
"role": "user"
},
{
"content": "send_build_data",
"role": "assistant"
}
] |
def x(params, response_url):
response = {'log': json.dumps(params['log_file']), 'repo_url': params[
'repo_url'], 'logfile': params['logfile'], 'repo_id': params[
'repo_id'], 'container_id': params['container_id']}
body = '%s|%s|%s|%s|%s' % (params['container_id'], params['commit'],
params['branch'], params['token'], params['tag'])
signature = generate_header_signature(secret=params['token'], payload=
body, request_type='finish')
headers = {'Authorization': signature}
finish = requests.post(response_url, data=response, headers=headers)
bot.debug('FINISH POST TO SINGULARITY HUB ---------------------')
bot.debug(finish.status_code)
bot.debug(finish.reason)
return finish
|
send_build_close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(params, response_url):\n response = {'log': json.dumps(params['log_file']), 'repo_url': params[\n 'repo_url'], 'logfile': params['logfile'], 'repo_id': params[\n 'repo_id'], 'container_id': params['container_id']}\n body = '%s|%s|%s|%s|%s' % (params['container_id'], params['commit'],\n params['branch'], params['token'], params['tag'])\n signature = generate_header_signature(secret=params['token'], payload=\n body, request_type='finish')\n headers = {'Authorization': signature}\n finish = requests.post(response_url, data=response, headers=headers)\n bot.debug('FINISH POST TO SINGULARITY HUB ---------------------')\n bot.debug(finish.status_code)\n bot.debug(finish.reason)\n return finish\n",
"role": "user"
},
{
"content": "send_build_close",
"role": "assistant"
}
] |
def x(input_dict):
if isinstance(input_dict, collections.Mapping):
return dict(map(remove_unicode_dict, input_dict.iteritems()))
elif isinstance(input_dict, collections.Iterable):
return type(input_dict)(map(remove_unicode_dict, input_dict))
else:
return input_dict
|
remove_unicode_dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_dict):\n if isinstance(input_dict, collections.Mapping):\n return dict(map(remove_unicode_dict, input_dict.iteritems()))\n elif isinstance(input_dict, collections.Iterable):\n return type(input_dict)(map(remove_unicode_dict, input_dict))\n else:\n return input_dict\n",
"role": "user"
},
{
"content": "remove_unicode_dict",
"role": "assistant"
}
] |
def x(input_dict, key, value):
if key in input_dict:
input_dict[key].append(value)
else:
input_dict[key] = [value]
return input_dict
|
update_dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_dict, key, value):\n if key in input_dict:\n input_dict[key].append(value)\n else:\n input_dict[key] = [value]\n return input_dict\n",
"role": "user"
},
{
"content": "update_dict",
"role": "assistant"
}
] |
def x(input_dict, key, increment=None, initial_value=None):
if increment == None:
increment = 1
if initial_value == None:
initial_value = 0
if key in input_dict:
input_dict[key] += increment
else:
input_dict[key] = initial_value + increment
return input_dict
|
update_dict_sum
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_dict, key, increment=None, initial_value=None):\n if increment == None:\n increment = 1\n if initial_value == None:\n initial_value = 0\n if key in input_dict:\n input_dict[key] += increment\n else:\n input_dict[key] = initial_value + increment\n return input_dict\n",
"role": "user"
},
{
"content": "update_dict_sum",
"role": "assistant"
}
] |
def x(total1, total2, intersect):
total = total1 + total2
return 2.0 * len(intersect) / total
|
information_coefficient
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(total1, total2, intersect):\n total = total1 + total2\n return 2.0 * len(intersect) / total\n",
"role": "user"
},
{
"content": "information_coefficient",
"role": "assistant"
}
] |
def x(m1, m2):
from scipy.stats import pearsonr
import scipy.linalg
import numpy
vectorm1 = m1.mask(numpy.triu(numpy.ones(m1.shape)).astype(numpy.bool)
).values.flatten()
vectorm2 = m2.mask(numpy.triu(numpy.ones(m2.shape)).astype(numpy.bool)
).values.flatten()
m1defined = numpy.argwhere(~numpy.isnan(numpy.array(vectorm1, dtype=float))
)
m2defined = numpy.argwhere(~numpy.isnan(numpy.array(vectorm2, dtype=float))
)
idx = numpy.intersect1d(m1defined, m2defined)
return pearsonr(vectorm1[idx], vectorm2[idx])[0]
|
RSA
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(m1, m2):\n from scipy.stats import pearsonr\n import scipy.linalg\n import numpy\n vectorm1 = m1.mask(numpy.triu(numpy.ones(m1.shape)).astype(numpy.bool)\n ).values.flatten()\n vectorm2 = m2.mask(numpy.triu(numpy.ones(m2.shape)).astype(numpy.bool)\n ).values.flatten()\n m1defined = numpy.argwhere(~numpy.isnan(numpy.array(vectorm1, dtype=float))\n )\n m2defined = numpy.argwhere(~numpy.isnan(numpy.array(vectorm2, dtype=float))\n )\n idx = numpy.intersect1d(m1defined, m2defined)\n return pearsonr(vectorm1[idx], vectorm2[idx])[0]\n",
"role": "user"
},
{
"content": "RSA",
"role": "assistant"
}
] |
def x(service_type=None, version=None):
if service_type == None:
service_type = 'storage'
if version == None:
version = 'v1'
credentials = GoogleCredentials.get_application_default()
return build(service_type, version, credentials=credentials)
|
get_google_service
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(service_type=None, version=None):\n if service_type == None:\n service_type = 'storage'\n if version == None:\n version = 'v1'\n credentials = GoogleCredentials.get_application_default()\n return build(service_type, version, credentials=credentials)\n",
"role": "user"
},
{
"content": "get_google_service",
"role": "assistant"
}
] |
def x(storage_service, bucket, bucket_path, file_name, verbose=True):
upload_path = '%s/%s' % (bucket['id'], bucket_path)
if upload_path[-1] != '/':
upload_path = '%s/' % upload_path
upload_path = '%s%s' % (upload_path, os.path.basename(file_name))
body = {'name': upload_path}
if os.path.exists(file_name):
mimetype = sniff_extension(file_name, verbose=verbose)
media = http.MediaFileUpload(file_name, mimetype=mimetype,
resumable=True)
request = storage_service.objects().insert(bucket=bucket['id'],
body=body, predefinedAcl='publicRead', media_body=media)
result = request.execute()
return result
bot.warning('%s requested for upload does not exist, skipping' % file_name)
|
upload_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(storage_service, bucket, bucket_path, file_name, verbose=True):\n upload_path = '%s/%s' % (bucket['id'], bucket_path)\n if upload_path[-1] != '/':\n upload_path = '%s/' % upload_path\n upload_path = '%s%s' % (upload_path, os.path.basename(file_name))\n body = {'name': upload_path}\n if os.path.exists(file_name):\n mimetype = sniff_extension(file_name, verbose=verbose)\n media = http.MediaFileUpload(file_name, mimetype=mimetype,\n resumable=True)\n request = storage_service.objects().insert(bucket=bucket['id'],\n body=body, predefinedAcl='publicRead', media_body=media)\n result = request.execute()\n return result\n bot.warning('%s requested for upload does not exist, skipping' % file_name)\n",
"role": "user"
},
{
"content": "upload_file",
"role": "assistant"
}
] |
def x(repo_url, trailing_path):
repo_url = repo_url.split('@')[-1].strip()
if repo_url.endswith('.git'):
repo_url = repo_url[:-4]
return '%s/%s' % (re.sub('^http.+//www[.]', '', repo_url), trailing_path)
|
get_image_path
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(repo_url, trailing_path):\n repo_url = repo_url.split('@')[-1].strip()\n if repo_url.endswith('.git'):\n repo_url = repo_url[:-4]\n return '%s/%s' % (re.sub('^http.+//www[.]', '', repo_url), trailing_path)\n",
"role": "user"
},
{
"content": "get_image_path",
"role": "assistant"
}
] |
def x(logfile='/tmp/.shub-log'):
go = get_build_metadata(key='dobuild')
if go == None:
sys.exit(0)
debug = True
enable_debug = get_build_metadata(key='debug')
if enable_debug == None:
debug = False
bot.info('DEBUG %s' % debug)
build_dir = tempfile.mkdtemp()
metadata = [{'key': 'repo_url', 'value': None}, {'key': 'repo_id',
'value': None}, {'key': 'response_url', 'value': None}, {'key':
'bucket_name', 'value': None}, {'key': 'tag', 'value': None}, {
'key': 'container_id', 'value': None}, {'key': 'commit', 'value':
None}, {'key': 'token', 'value': None}, {'key': 'branch', 'value':
None}, {'key': 'spec_file', 'value': None}, {'key': 'logging_url',
'value': None}, {'key': 'logfile', 'value': logfile}]
bot.log('BUILD PARAMETERS:')
params = get_build_params(metadata)
params['debug'] = debug
if params['spec_file'] == None:
params['spec_file'] = 'Singularity'
if params['bucket_name'] == None:
params['bucket_name'] = 'singularityhub'
if params['tag'] == None:
params['tag'] = 'latest'
output = run_build_main(build_dir=build_dir, params=params)
finished_image = output['image']
metadata = output['metadata']
params = output['params']
if os.path.exists(finished_image):
bot.info('%s successfully built' % finished_image)
dest_dir = tempfile.mkdtemp(prefix='build')
trailing_path = '%s/%s' % (params['commit'], params['version'])
image_path = get_image_path(params['repo_url'], trailing_path)
build_files = [finished_image]
bot.info('Sending image to storage:')
bot.info('\n'.join(build_files))
storage_service = get_google_service()
bucket = get_bucket(storage_service, params['bucket_name'])
files = []
for build_file in build_files:
bot.info('Uploading %s to storage...' % build_file)
storage_file = upload_file(storage_service, bucket=bucket,
bucket_path=image_path, file_name=build_file)
files.append(storage_file)
response = {'files': json.dumps(files), 'repo_url': params[
'repo_url'], 'commit': params['commit'], 'repo_id': params[
'repo_id'], 'branch': params['branch'], 'tag': params['tag'],
'container_id': params['container_id'], 'spec_file': params[
'spec_file'], 'token': params['token'], 'metadata': json.dumps(
metadata)}
custom_logfile = get_build_metadata('logfile')
if custom_logfile is not None:
logfile = custom_logfile
response['logfile'] = logfile
send_build_data(build_dir=build_dir, response_url=params[
'response_url'], secret=params['token'], data=response)
passing_params = '/tmp/params.pkl'
pickle.dump(params, open(passing_params, 'wb'))
|
run_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(logfile='/tmp/.shub-log'):\n go = get_build_metadata(key='dobuild')\n if go == None:\n sys.exit(0)\n debug = True\n enable_debug = get_build_metadata(key='debug')\n if enable_debug == None:\n debug = False\n bot.info('DEBUG %s' % debug)\n build_dir = tempfile.mkdtemp()\n metadata = [{'key': 'repo_url', 'value': None}, {'key': 'repo_id',\n 'value': None}, {'key': 'response_url', 'value': None}, {'key':\n 'bucket_name', 'value': None}, {'key': 'tag', 'value': None}, {\n 'key': 'container_id', 'value': None}, {'key': 'commit', 'value':\n None}, {'key': 'token', 'value': None}, {'key': 'branch', 'value':\n None}, {'key': 'spec_file', 'value': None}, {'key': 'logging_url',\n 'value': None}, {'key': 'logfile', 'value': logfile}]\n bot.log('BUILD PARAMETERS:')\n params = get_build_params(metadata)\n params['debug'] = debug\n if params['spec_file'] == None:\n params['spec_file'] = 'Singularity'\n if params['bucket_name'] == None:\n params['bucket_name'] = 'singularityhub'\n if params['tag'] == None:\n params['tag'] = 'latest'\n output = run_build_main(build_dir=build_dir, params=params)\n finished_image = output['image']\n metadata = output['metadata']\n params = output['params']\n if os.path.exists(finished_image):\n bot.info('%s successfully built' % finished_image)\n dest_dir = tempfile.mkdtemp(prefix='build')\n trailing_path = '%s/%s' % (params['commit'], params['version'])\n image_path = get_image_path(params['repo_url'], trailing_path)\n build_files = [finished_image]\n bot.info('Sending image to storage:')\n bot.info('\\n'.join(build_files))\n storage_service = get_google_service()\n bucket = get_bucket(storage_service, params['bucket_name'])\n files = []\n for build_file in build_files:\n bot.info('Uploading %s to storage...' % build_file)\n storage_file = upload_file(storage_service, bucket=bucket,\n bucket_path=image_path, file_name=build_file)\n files.append(storage_file)\n response = {'files': json.dumps(files), 'repo_url': params[\n 'repo_url'], 'commit': params['commit'], 'repo_id': params[\n 'repo_id'], 'branch': params['branch'], 'tag': params['tag'],\n 'container_id': params['container_id'], 'spec_file': params[\n 'spec_file'], 'token': params['token'], 'metadata': json.dumps(\n metadata)}\n custom_logfile = get_build_metadata('logfile')\n if custom_logfile is not None:\n logfile = custom_logfile\n response['logfile'] = logfile\n send_build_data(build_dir=build_dir, response_url=params[\n 'response_url'], secret=params['token'], data=response)\n passing_params = '/tmp/params.pkl'\n pickle.dump(params, open(passing_params, 'wb'))\n",
"role": "user"
},
{
"content": "run_build",
"role": "assistant"
}
] |
def x(verbose=True):
go = get_build_metadata(key='dobuild')
if go == None:
sys.exit(0)
passing_params = '/tmp/params.pkl'
params = pickle.load(open(passing_params, 'rb'))
storage_service = get_google_service()
bucket = get_bucket(storage_service, params['bucket_name'])
version = 'error-%s' % str(uuid.uuid4())
if 'version' in params:
version = params['version']
trailing_path = '%s/%s' % (params['commit'], version)
image_path = get_image_path(params['repo_url'], trailing_path)
params['log_file'] = upload_file(storage_service, bucket=bucket,
bucket_path=image_path, file_name=params['logfile'])
send_build_close(params=params, response_url=params['logging_url'])
|
finish_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(verbose=True):\n go = get_build_metadata(key='dobuild')\n if go == None:\n sys.exit(0)\n passing_params = '/tmp/params.pkl'\n params = pickle.load(open(passing_params, 'rb'))\n storage_service = get_google_service()\n bucket = get_bucket(storage_service, params['bucket_name'])\n version = 'error-%s' % str(uuid.uuid4())\n if 'version' in params:\n version = params['version']\n trailing_path = '%s/%s' % (params['commit'], version)\n image_path = get_image_path(params['repo_url'], trailing_path)\n params['log_file'] = upload_file(storage_service, bucket=bucket,\n bucket_path=image_path, file_name=params['logfile'])\n send_build_close(params=params, response_url=params['logging_url'])\n",
"role": "user"
},
{
"content": "finish_build",
"role": "assistant"
}
] |
def x(key):
headers = {'Metadata-Flavor': 'Google'}
url = (
'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s'
% key)
response = requests.get(url=url, headers=headers)
if response.status_code == 200:
return response.text
return None
|
get_build_metadata
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(key):\n headers = {'Metadata-Flavor': 'Google'}\n url = (\n 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s'\n % key)\n response = requests.get(url=url, headers=headers)\n if response.status_code == 200:\n return response.text\n return None\n",
"role": "user"
},
{
"content": "get_build_metadata",
"role": "assistant"
}
] |
def x(metadata):
params = dict()
for item in metadata:
if item['value'] == None:
response = get_build_metadata(key=item['key'])
item['value'] = response
params[item['key']] = item['value']
if item['key'] not in ['token', 'secret', 'credential']:
bot.info('%s is set to %s' % (item['key'], item['value']))
return params
|
get_build_params
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(metadata):\n params = dict()\n for item in metadata:\n if item['value'] == None:\n response = get_build_metadata(key=item['key'])\n item['value'] = response\n params[item['key']] = item['value']\n if item['key'] not in ['token', 'secret', 'credential']:\n bot.info('%s is set to %s' % (item['key'], item['value']))\n return params\n",
"role": "user"
},
{
"content": "get_build_params",
"role": "assistant"
}
] |
def x(*args, **kwargs):
kwargs.setdefault('capture', False)
replacements = dict(host_string='{user}@{host}'.format(user=env.
instance.config.get('user', 'root'), host=env.instance.config.get(
'host', env.instance.config.get('ip', env.instance.uid))))
args = [x.format(**replacements) for x in args]
ssh_info = env.instance.init_ssh_key()
ssh_info.pop('host')
ssh_info.pop('user')
ssh_args = env.instance.ssh_args_from_info(ssh_info)
cmd_parts = ['rsync']
cmd_parts.extend(['-e', 'ssh %s' % shjoin(ssh_args)])
cmd_parts.extend(args)
cmd = shjoin(cmd_parts)
return local(cmd, **kwargs)
|
rsync
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(*args, **kwargs):\n kwargs.setdefault('capture', False)\n replacements = dict(host_string='{user}@{host}'.format(user=env.\n instance.config.get('user', 'root'), host=env.instance.config.get(\n 'host', env.instance.config.get('ip', env.instance.uid))))\n args = [x.format(**replacements) for x in args]\n ssh_info = env.instance.init_ssh_key()\n ssh_info.pop('host')\n ssh_info.pop('user')\n ssh_args = env.instance.ssh_args_from_info(ssh_info)\n cmd_parts = ['rsync']\n cmd_parts.extend(['-e', 'ssh %s' % shjoin(ssh_args)])\n cmd_parts.extend(args)\n cmd = shjoin(cmd_parts)\n return local(cmd, **kwargs)\n",
"role": "user"
},
{
"content": "rsync",
"role": "assistant"
}
] |
def x(**kwargs):
original_host = env.host_string
env.host_string = 'ec2-user@%s' % env.instance.uid
bootstrap_files = env.instance.config.get('bootstrap-files',
'bootstrap-files')
put('%s/authorized_keys' % bootstrap_files, '/tmp/authorized_keys')
put(join(bsdploy_path, 'enable_root_login_on_daemonology.sh'), '/tmp/',
mode='0775')
run("su root -c '/tmp/enable_root_login_on_daemonology.sh'")
env.host_string = original_host
sleep(2)
run('rm /tmp/enable_root_login_on_daemonology.sh')
env.instance.config.update(kwargs)
bu = BootstrapUtils()
bu.ssh_keys = None
bu.upload_authorized_keys = False
bu.bootstrap_files_yaml = 'daemonology-files.yml'
bu.print_bootstrap_files()
bu.create_bootstrap_directories()
bu.upload_bootstrap_files({})
bu.install_pkg('/', chroot=False, packages=['python27'])
|
bootstrap
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(**kwargs):\n original_host = env.host_string\n env.host_string = 'ec2-user@%s' % env.instance.uid\n bootstrap_files = env.instance.config.get('bootstrap-files',\n 'bootstrap-files')\n put('%s/authorized_keys' % bootstrap_files, '/tmp/authorized_keys')\n put(join(bsdploy_path, 'enable_root_login_on_daemonology.sh'), '/tmp/',\n mode='0775')\n run(\"su root -c '/tmp/enable_root_login_on_daemonology.sh'\")\n env.host_string = original_host\n sleep(2)\n run('rm /tmp/enable_root_login_on_daemonology.sh')\n env.instance.config.update(kwargs)\n bu = BootstrapUtils()\n bu.ssh_keys = None\n bu.upload_authorized_keys = False\n bu.bootstrap_files_yaml = 'daemonology-files.yml'\n bu.print_bootstrap_files()\n bu.create_bootstrap_directories()\n bu.upload_bootstrap_files({})\n bu.install_pkg('/', chroot=False, packages=['python27'])\n",
"role": "user"
},
{
"content": "bootstrap",
"role": "assistant"
}
] |
def x(**kwargs):
bu = BootstrapUtils()
original_host = env.host_string
env.host_string = 'freebsd@%s' % env.instance.uid
if bu.os_release.startswith('10'):
sudo('cat /etc/rc.digitalocean.d/droplet.conf > /etc/rc.conf')
sudo('sysrc zfs_enable=YES')
sudo('sysrc sshd_enable=YES')
sudo('sysrc pf_enable=YES')
sudo('sysrc -f /boot/loader.conf pfload=YES')
sudo('kldload pf', warn_only=True)
sudo("echo 'pass in all' > /etc/pf.conf")
sudo("echo 'pass out all' >> /etc/pf.conf")
sudo('chmod 644 /etc/pf.conf')
sudo('service pf start')
sudo("echo 'PermitRootLogin without-password' > /etc/ssh/sshd_config")
sudo('pw unlock root')
bootstrap_files = env.instance.config.get('bootstrap-files',
'bootstrap-files')
put(path.abspath(path.join(env['config_base'], bootstrap_files,
'authorized_keys')), '/tmp/authorized_keys', use_sudo=True)
sudo('mv /tmp/authorized_keys /root/.ssh/')
sudo('chown root:wheel /root/.ssh/authorized_keys')
sudo('service sshd fastreload')
env.host_string = original_host
sleep(2)
run('rm -f /etc/rc.d/digitalocean')
run('rm -rf /etc/rc.digitalocean.d')
run('rm -rf /usr/local/bsd-cloudinit/')
run('pkg remove -y avahi-autoipd || true')
env.instance.config.update(kwargs)
bu.ssh_keys = None
bu.upload_authorized_keys = False
|
bootstrap
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(**kwargs):\n bu = BootstrapUtils()\n original_host = env.host_string\n env.host_string = 'freebsd@%s' % env.instance.uid\n if bu.os_release.startswith('10'):\n sudo('cat /etc/rc.digitalocean.d/droplet.conf > /etc/rc.conf')\n sudo('sysrc zfs_enable=YES')\n sudo('sysrc sshd_enable=YES')\n sudo('sysrc pf_enable=YES')\n sudo('sysrc -f /boot/loader.conf pfload=YES')\n sudo('kldload pf', warn_only=True)\n sudo(\"echo 'pass in all' > /etc/pf.conf\")\n sudo(\"echo 'pass out all' >> /etc/pf.conf\")\n sudo('chmod 644 /etc/pf.conf')\n sudo('service pf start')\n sudo(\"echo 'PermitRootLogin without-password' > /etc/ssh/sshd_config\")\n sudo('pw unlock root')\n bootstrap_files = env.instance.config.get('bootstrap-files',\n 'bootstrap-files')\n put(path.abspath(path.join(env['config_base'], bootstrap_files,\n 'authorized_keys')), '/tmp/authorized_keys', use_sudo=True)\n sudo('mv /tmp/authorized_keys /root/.ssh/')\n sudo('chown root:wheel /root/.ssh/authorized_keys')\n sudo('service sshd fastreload')\n env.host_string = original_host\n sleep(2)\n run('rm -f /etc/rc.d/digitalocean')\n run('rm -rf /etc/rc.digitalocean.d')\n run('rm -rf /usr/local/bsd-cloudinit/')\n run('pkg remove -y avahi-autoipd || true')\n env.instance.config.update(kwargs)\n bu.ssh_keys = None\n bu.upload_authorized_keys = False\n",
"role": "user"
},
{
"content": "bootstrap",
"role": "assistant"
}
] |
def x(self):
install_devices = self.install_devices
if 'bootstrap-system-devices' in env.instance.config:
devices = set(env.instance.config['bootstrap-system-devices'].split())
else:
devices = set(self.sysctl_devices)
for sysctl_device in self.sysctl_devices:
for install_device in install_devices:
if install_device.startswith(sysctl_device):
devices.remove(sysctl_device)
return devices
|
devices
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n install_devices = self.install_devices\n if 'bootstrap-system-devices' in env.instance.config:\n devices = set(env.instance.config['bootstrap-system-devices'].split())\n else:\n devices = set(self.sysctl_devices)\n for sysctl_device in self.sysctl_devices:\n for install_device in install_devices:\n if install_device.startswith(sysctl_device):\n devices.remove(sysctl_device)\n return devices\n",
"role": "user"
},
{
"content": "devices",
"role": "assistant"
}
] |
def x(self):
packages = set(env.instance.config.get('bootstrap-packages', '').split())
packages.update(['python27'])
cmd = env.instance.config.get('bootstrap-local-download-cmd',
'wget -c -O "{0.local}" "{0.url}"')
items = sorted(self.bootstrap_files.items())
for filename, asset in items:
if asset.url:
if not exists(dirname(asset.local)):
os.makedirs(dirname(asset.local))
local(cmd.format(asset))
if filename == 'packagesite.txz':
items.extend(self._fetch_packages(asset.local, packages))
|
fetch_assets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n packages = set(env.instance.config.get('bootstrap-packages', '').split())\n packages.update(['python27'])\n cmd = env.instance.config.get('bootstrap-local-download-cmd',\n 'wget -c -O \"{0.local}\" \"{0.url}\"')\n items = sorted(self.bootstrap_files.items())\n for filename, asset in items:\n if asset.url:\n if not exists(dirname(asset.local)):\n os.makedirs(dirname(asset.local))\n local(cmd.format(asset))\n if filename == 'packagesite.txz':\n items.extend(self._fetch_packages(asset.local, packages))\n",
"role": "user"
},
{
"content": "fetch_assets",
"role": "assistant"
}
] |
def x(res):
if 'Authorization' in res.request.headers:
res.request.headers['Authorization'] = '*****'
return (
"""
####################################
url = %s
headers = %s
-------- data sent -----------------
%s
------------------------------------
@@@@@ response @@@@@@@@@@@@@@@@
headers = %s
code = %d
reason = %s
--------- data received ------------
%s
------------------------------------
####################################
"""
% (res.url, str(res.request.headers), OLD_REQ and res.request.data or
res.request.body, res.headers, res.status_code, res.reason, res.text))
|
res_to_str
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(res):\n if 'Authorization' in res.request.headers:\n res.request.headers['Authorization'] = '*****'\n return (\n \"\"\"\n####################################\nurl = %s\nheaders = %s\n-------- data sent -----------------\n%s\n------------------------------------\n@@@@@ response @@@@@@@@@@@@@@@@\nheaders = %s\ncode = %d\nreason = %s\n--------- data received ------------\n%s\n------------------------------------\n####################################\n\"\"\"\n % (res.url, str(res.request.headers), OLD_REQ and res.request.data or\n res.request.body, res.headers, res.status_code, res.reason, res.text))\n",
"role": "user"
},
{
"content": "res_to_str",
"role": "assistant"
}
] |
def x(resource_name, resource_dct):
new_dict = {'__module__': resource_dct.get('__module__', __name__),
'__doc__': resource_dct['full_description'], '_resource_name':
resource_name, '_own_methods': set(), '_conflicting_methods': []}
foreign_methods = {}
for method in resource_dct['methods']:
if not new_dict['__doc__'] and len(method['apis']) == 1:
new_dict['__doc__'] = method['apis'][0]['short_description']
for api in method['apis']:
api = MethodAPIDescription(resource_name, method, api)
if api.resource != resource_name:
functions = foreign_methods.setdefault(api.resource, {})
if api.name in functions:
old_api = functions.get(api.name).defs
log_method = logger.warning
if api.url == old_api.url:
log_method = logger.debug
log_method(
"""There is a conflict trying to redefine a method for a foreign resource (%s):
resource:
apipie_resource: %s
new_api: %s
new_url: %s
old_api: %s
old_url: %s"""
, api.name, resource_name, pprint.pformat(api), api
.url, pprint.pformat(old_api), old_api.url)
new_dict['_conflicting_methods'].append(api)
continue
functions[api.name] = api.generate_func()
else:
if api.name in new_dict['_own_methods']:
old_api = new_dict.get(api.name).defs
log_method = logger.warning
if api.url == old_api.url:
log_method = logger.debug
log_method(
"""There is a conflict trying to redefine method (%s):
apipie_resource: %s
new_api: %s
new_url: %s
old_api: %s
old_url: %s"""
, api.name, resource_name, pprint.pformat(api), api
.url, pprint.pformat(old_api), old_api.url)
new_dict['_conflicting_methods'].append(api)
continue
new_dict['_own_methods'].add(api.name)
new_dict[api.name] = api.generate_func()
return new_dict, foreign_methods
|
parse_resource_definition
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(resource_name, resource_dct):\n new_dict = {'__module__': resource_dct.get('__module__', __name__),\n '__doc__': resource_dct['full_description'], '_resource_name':\n resource_name, '_own_methods': set(), '_conflicting_methods': []}\n foreign_methods = {}\n for method in resource_dct['methods']:\n if not new_dict['__doc__'] and len(method['apis']) == 1:\n new_dict['__doc__'] = method['apis'][0]['short_description']\n for api in method['apis']:\n api = MethodAPIDescription(resource_name, method, api)\n if api.resource != resource_name:\n functions = foreign_methods.setdefault(api.resource, {})\n if api.name in functions:\n old_api = functions.get(api.name).defs\n log_method = logger.warning\n if api.url == old_api.url:\n log_method = logger.debug\n log_method(\n \"\"\"There is a conflict trying to redefine a method for a foreign resource (%s): \n\tresource:\n\tapipie_resource: %s\n\tnew_api: %s\n\tnew_url: %s\n\told_api: %s\n\told_url: %s\"\"\"\n , api.name, resource_name, pprint.pformat(api), api\n .url, pprint.pformat(old_api), old_api.url)\n new_dict['_conflicting_methods'].append(api)\n continue\n functions[api.name] = api.generate_func()\n else:\n if api.name in new_dict['_own_methods']:\n old_api = new_dict.get(api.name).defs\n log_method = logger.warning\n if api.url == old_api.url:\n log_method = logger.debug\n log_method(\n \"\"\"There is a conflict trying to redefine method (%s): \n\tapipie_resource: %s\n\tnew_api: %s\n\tnew_url: %s\n\told_api: %s\n\told_url: %s\"\"\"\n , api.name, resource_name, pprint.pformat(api), api\n .url, pprint.pformat(old_api), old_api.url)\n new_dict['_conflicting_methods'].append(api)\n continue\n new_dict['_own_methods'].add(api.name)\n new_dict[api.name] = api.generate_func()\n return new_dict, foreign_methods\n",
"role": "user"
},
{
"content": "parse_resource_definition",
"role": "assistant"
}
] |
def x(self, url):
if url == '/api':
return 'api'
elif url == '/katello':
return 'katello'
match = self.resource_pattern.match(url)
if match:
return match.groupdict().get('resource', None)
|
parse_resource_from_url
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, url):\n if url == '/api':\n return 'api'\n elif url == '/katello':\n return 'katello'\n match = self.resource_pattern.match(url)\n if match:\n return match.groupdict().get('resource', None)\n",
"role": "user"
},
{
"content": "parse_resource_from_url",
"role": "assistant"
}
] |
def x(self):
if self.url.count(':') > 1:
base_name = self.url.split('/', 3)[-1].replace('/', '_')[1:]
if base_name.startswith(':'):
base_name = base_name.split('_')[-1]
base_name = re.sub('_:[^/]+', '', base_name)
if base_name.endswith('_'):
base_name = base_name[:-1]
base_name = ('_' + self._method['name']).join(base_name.rsplit('_', 1))
else:
base_name = self._method['name']
if base_name == 'import':
base_name = 'import_'
if self._apipie_resource != self.resource:
return '%s_%s' % (self._apipie_resource, base_name)
else:
return base_name
|
get_name
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.url.count(':') > 1:\n base_name = self.url.split('/', 3)[-1].replace('/', '_')[1:]\n if base_name.startswith(':'):\n base_name = base_name.split('_')[-1]\n base_name = re.sub('_:[^/]+', '', base_name)\n if base_name.endswith('_'):\n base_name = base_name[:-1]\n base_name = ('_' + self._method['name']).join(base_name.rsplit('_', 1))\n else:\n base_name = self._method['name']\n if base_name == 'import':\n base_name = 'import_'\n if self._apipie_resource != self.resource:\n return '%s_%s' % (self._apipie_resource, base_name)\n else:\n return base_name\n",
"role": "user"
},
{
"content": "get_name",
"role": "assistant"
}
] |
def x(self, as_global=False):
keywords = []
params_def = []
params_doc = ''
original_names = {}
params = dict((param['name'], param) for param in self.params)
for param in self.url_params:
if param not in params:
param = {'name': param, 'required': True, 'description': '',
'validator': ''}
params[param['name']] = param
else:
params[param]['required'] = True
req_params = []
nonreq_params = []
for param in six.itervalues(params):
if param['required']:
req_params.append(param)
else:
nonreq_params.append(param)
for param in (req_params + nonreq_params):
params_doc += self.create_param_doc(param) + '\n'
local_name = param['name']
if param['name'] == 'except':
local_name = 'except_'
original_names[local_name] = param['name']
keywords.append(local_name)
if param['required']:
params_def.append('%s' % local_name)
else:
params_def.append('%s=None' % local_name)
func_head = 'def {0}(self, {1}):'.format(as_global and self.
get_global_method_name() or self.name, ', '.join(params_def))
code_body = """ _vars_ = locals()
_url = self._fill_url("{url}", _vars_, {url_params})
_original_names = {original_names}
_kwargs = dict((_original_names[k], _vars_[k])
for k in {keywords} if _vars_[k])
return self._foreman.do_{http_method}(_url, _kwargs)"""
code_body = code_body.format(http_method=self.http_method.lower(), url=
self.url, url_params=self.url_params, keywords=keywords,
original_names=original_names)
code = [func_head, ' """', self.short_desc, '', params_doc, ' """',
code_body]
code = '\n'.join(code)
six.exec_(code)
function = locals()[self.name]
setattr(function, 'defs', self)
return function
|
generate_func
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, as_global=False):\n keywords = []\n params_def = []\n params_doc = ''\n original_names = {}\n params = dict((param['name'], param) for param in self.params)\n for param in self.url_params:\n if param not in params:\n param = {'name': param, 'required': True, 'description': '',\n 'validator': ''}\n params[param['name']] = param\n else:\n params[param]['required'] = True\n req_params = []\n nonreq_params = []\n for param in six.itervalues(params):\n if param['required']:\n req_params.append(param)\n else:\n nonreq_params.append(param)\n for param in (req_params + nonreq_params):\n params_doc += self.create_param_doc(param) + '\\n'\n local_name = param['name']\n if param['name'] == 'except':\n local_name = 'except_'\n original_names[local_name] = param['name']\n keywords.append(local_name)\n if param['required']:\n params_def.append('%s' % local_name)\n else:\n params_def.append('%s=None' % local_name)\n func_head = 'def {0}(self, {1}):'.format(as_global and self.\n get_global_method_name() or self.name, ', '.join(params_def))\n code_body = \"\"\" _vars_ = locals()\n _url = self._fill_url(\"{url}\", _vars_, {url_params})\n _original_names = {original_names}\n _kwargs = dict((_original_names[k], _vars_[k])\n for k in {keywords} if _vars_[k])\n return self._foreman.do_{http_method}(_url, _kwargs)\"\"\"\n code_body = code_body.format(http_method=self.http_method.lower(), url=\n self.url, url_params=self.url_params, keywords=keywords,\n original_names=original_names)\n code = [func_head, ' \"\"\"', self.short_desc, '', params_doc, ' \"\"\"',\n code_body]\n code = '\\n'.join(code)\n six.exec_(code)\n function = locals()[self.name]\n setattr(function, 'defs', self)\n return function\n",
"role": "user"
},
{
"content": "generate_func",
"role": "assistant"
}
] |
def x(cls, param, prefix=None):
desc = cls.exclude_html_reg.sub('', param['description']).strip()
if not desc:
desc = '<no description>'
name = param['name']
if prefix:
name = '%s[%s]' % (prefix, name)
doc_ = ':param %s: %s; %s' % (name, desc, param['validator'])
if param['required']:
doc_ += ' (REQUIRED)'
else:
doc_ += ' (OPTIONAL)'
for param in param.get('params', []):
doc_ += '\n' + cls.create_param_doc(param, name)
return doc_
|
create_param_doc
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, param, prefix=None):\n desc = cls.exclude_html_reg.sub('', param['description']).strip()\n if not desc:\n desc = '<no description>'\n name = param['name']\n if prefix:\n name = '%s[%s]' % (prefix, name)\n doc_ = ':param %s: %s; %s' % (name, desc, param['validator'])\n if param['required']:\n doc_ += ' (REQUIRED)'\n else:\n doc_ += ' (OPTIONAL)'\n for param in param.get('params', []):\n doc_ += '\\n' + cls.create_param_doc(param, name)\n return doc_\n",
"role": "user"
},
{
"content": "create_param_doc",
"role": "assistant"
}
] |
def x(http_method, funcs):
methods = []
if http_method not in ('GET', 'PUT', 'POST', 'DELETE'):
logger.error('Plugin load failure, HTTP method %s unsupported.',
http_method)
return methods
for fname, params in six.iteritems(funcs):
method = {'apis': [{'short_description': 'no-doc'}], 'params': []}
method['apis'][0]['http_method'] = http_method
method['apis'][0]['api_url'] = '/api/' + fname
method['name'] = fname
for pname, pdef in six.iteritems(params):
param = {'name': pname, 'validator': 'Must be %s' % pdef[
'ptype'], 'description': '', 'required': pdef['required']}
method['params'].append(param)
methods.append(method)
return methods
|
convert_plugin_def
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(http_method, funcs):\n methods = []\n if http_method not in ('GET', 'PUT', 'POST', 'DELETE'):\n logger.error('Plugin load failure, HTTP method %s unsupported.',\n http_method)\n return methods\n for fname, params in six.iteritems(funcs):\n method = {'apis': [{'short_description': 'no-doc'}], 'params': []}\n method['apis'][0]['http_method'] = http_method\n method['apis'][0]['api_url'] = '/api/' + fname\n method['name'] = fname\n for pname, pdef in six.iteritems(params):\n param = {'name': pname, 'validator': 'Must be %s' % pdef[\n 'ptype'], 'description': '', 'required': pdef['required']}\n method['params'].append(param)\n methods.append(method)\n return methods\n",
"role": "user"
},
{
"content": "convert_plugin_def",
"role": "assistant"
}
] |
def x(repo_path):
repo = dulwich.repo.Repo(repo_path)
tags = get_tags(repo)
maj_version = 0
feat_version = 0
fix_version = 0
for commit_sha, children in reversed(get_children_per_first_parent(
repo_path).items()):
commit = get_repo_object(repo, commit_sha)
maj_version, feat_version, fix_version = get_version(commit=commit,
tags=tags, maj_version=maj_version, feat_version=feat_version,
fix_version=fix_version, children=children)
return '%s.%s.%s' % (maj_version, feat_version, fix_version)
|
get_current_version
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(repo_path):\n repo = dulwich.repo.Repo(repo_path)\n tags = get_tags(repo)\n maj_version = 0\n feat_version = 0\n fix_version = 0\n for commit_sha, children in reversed(get_children_per_first_parent(\n repo_path).items()):\n commit = get_repo_object(repo, commit_sha)\n maj_version, feat_version, fix_version = get_version(commit=commit,\n tags=tags, maj_version=maj_version, feat_version=feat_version,\n fix_version=fix_version, children=children)\n return '%s.%s.%s' % (maj_version, feat_version, fix_version)\n",
"role": "user"
},
{
"content": "get_current_version",
"role": "assistant"
}
] |
def x(repo_path, from_commit):
repo = dulwich.repo.Repo(repo_path)
refs = get_refs(repo)
start_including = False
authors = set()
if from_commit is None:
start_including = True
for commit_sha, children in reversed(get_children_per_first_parent(
repo_path).items()):
commit = get_repo_object(repo, commit_sha)
if start_including or commit_sha.startswith(from_commit
) or fuzzy_matches_refs(from_commit, refs.get(commit_sha, [])):
authors.add(commit.author.decode())
for child in children:
authors.add(child.author.decode())
start_including = True
return '\n'.join(sorted(authors))
|
get_authors
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(repo_path, from_commit):\n repo = dulwich.repo.Repo(repo_path)\n refs = get_refs(repo)\n start_including = False\n authors = set()\n if from_commit is None:\n start_including = True\n for commit_sha, children in reversed(get_children_per_first_parent(\n repo_path).items()):\n commit = get_repo_object(repo, commit_sha)\n if start_including or commit_sha.startswith(from_commit\n ) or fuzzy_matches_refs(from_commit, refs.get(commit_sha, [])):\n authors.add(commit.author.decode())\n for child in children:\n authors.add(child.author.decode())\n start_including = True\n return '\\n'.join(sorted(authors))\n",
"role": "user"
},
{
"content": "get_authors",
"role": "assistant"
}
] |
def x(self, tup, tup_id=None, stream=None, direct_task=None, need_task_ids=
False):
return super(Spout, self).emit(tup, tup_id=tup_id, stream=stream,
direct_task=direct_task, need_task_ids=need_task_ids)
|
emit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tup, tup_id=None, stream=None, direct_task=None, need_task_ids=\n False):\n return super(Spout, self).emit(tup, tup_id=tup_id, stream=stream,\n direct_task=direct_task, need_task_ids=need_task_ids)\n",
"role": "user"
},
{
"content": "emit",
"role": "assistant"
}
] |
def x(self):
cmd = self.read_command()
if cmd['command'] == 'next':
self.next_tuple()
elif cmd['command'] == 'ack':
self.ack(cmd['id'])
elif cmd['command'] == 'fail':
self.fail(cmd['id'])
elif cmd['command'] == 'activate':
self.activate()
elif cmd['command'] == 'deactivate':
self.deactivate()
else:
self.logger.error('Received invalid command from Storm: %r', cmd)
self.send_message({'command': 'sync'})
|
run
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n cmd = self.read_command()\n if cmd['command'] == 'next':\n self.next_tuple()\n elif cmd['command'] == 'ack':\n self.ack(cmd['id'])\n elif cmd['command'] == 'fail':\n self.fail(cmd['id'])\n elif cmd['command'] == 'activate':\n self.activate()\n elif cmd['command'] == 'deactivate':\n self.deactivate()\n else:\n self.logger.error('Received invalid command from Storm: %r', cmd)\n self.send_message({'command': 'sync'})\n",
"role": "user"
},
{
"content": "run",
"role": "assistant"
}
] |
def x(self, tup_id):
self.failed_tuples.pop(tup_id, None)
try:
del self.unacked_tuples[tup_id]
except KeyError:
self.logger.error('Received ack for unknown tuple ID: %r', tup_id)
|
ack
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tup_id):\n self.failed_tuples.pop(tup_id, None)\n try:\n del self.unacked_tuples[tup_id]\n except KeyError:\n self.logger.error('Received ack for unknown tuple ID: %r', tup_id)\n",
"role": "user"
},
{
"content": "ack",
"role": "assistant"
}
] |
def x(self, tup_id):
saved_args = self.unacked_tuples.get(tup_id)
if saved_args is None:
self.logger.error('Received fail for unknown tuple ID: %r', tup_id)
return
tup, stream, direct_task, need_task_ids = saved_args
if self.failed_tuples[tup_id] < self.max_fails:
self.emit(tup, tup_id=tup_id, stream=stream, direct_task=
direct_task, need_task_ids=need_task_ids)
self.failed_tuples[tup_id] += 1
else:
self.logger.info(
'Acking tuple ID %r after it exceeded retry limit (%r)', tup_id,
self.max_fails)
self.ack(tup_id)
|
fail
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tup_id):\n saved_args = self.unacked_tuples.get(tup_id)\n if saved_args is None:\n self.logger.error('Received fail for unknown tuple ID: %r', tup_id)\n return\n tup, stream, direct_task, need_task_ids = saved_args\n if self.failed_tuples[tup_id] < self.max_fails:\n self.emit(tup, tup_id=tup_id, stream=stream, direct_task=\n direct_task, need_task_ids=need_task_ids)\n self.failed_tuples[tup_id] += 1\n else:\n self.logger.info(\n 'Acking tuple ID %r after it exceeded retry limit (%r)', tup_id,\n self.max_fails)\n self.ack(tup_id)\n",
"role": "user"
},
{
"content": "fail",
"role": "assistant"
}
] |
def x(self, tup, tup_id=None, stream=None, direct_task=None, need_task_ids=
False):
if tup_id is None:
raise ValueError(
'You must provide a tuple ID when emitting with a ReliableSpout in order for the tuple to be tracked.'
)
args = tup, stream, direct_task, need_task_ids
self.unacked_tuples[tup_id] = args
return super(ReliableSpout, self).emit(tup, tup_id=tup_id, stream=
stream, direct_task=direct_task, need_task_ids=need_task_ids)
|
emit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tup, tup_id=None, stream=None, direct_task=None, need_task_ids=\n False):\n if tup_id is None:\n raise ValueError(\n 'You must provide a tuple ID when emitting with a ReliableSpout in order for the tuple to be tracked.'\n )\n args = tup, stream, direct_task, need_task_ids\n self.unacked_tuples[tup_id] = args\n return super(ReliableSpout, self).emit(tup, tup_id=tup_id, stream=\n stream, direct_task=direct_task, need_task_ids=need_task_ids)\n",
"role": "user"
},
{
"content": "emit",
"role": "assistant"
}
] |
def x(signum, frame):
try:
from remote_pdb import RemotePdb
rdb = RemotePdb(host='127.0.0.1', port=0)
rdb.set_trace(frame=frame)
except ImportError:
log.warning(
'remote_pdb unavailable. Please install remote_pdb to allow remote debugging.'
)
signal.signal(signum, remote_pdb_handler)
|
remote_pdb_handler
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(signum, frame):\n try:\n from remote_pdb import RemotePdb\n rdb = RemotePdb(host='127.0.0.1', port=0)\n rdb.set_trace(frame=frame)\n except ImportError:\n log.warning(\n 'remote_pdb unavailable. Please install remote_pdb to allow remote debugging.'\n )\n signal.signal(signum, remote_pdb_handler)\n",
"role": "user"
},
{
"content": "remote_pdb_handler",
"role": "assistant"
}
] |
def x(self, record):
try:
msg = self.format(record)
level = _STORM_LOG_LEVELS.get(record.levelname.lower(), _STORM_LOG_INFO
)
self.serializer.send_message({'command': 'log', 'msg': str(msg),
'level': level})
except Exception:
self.handleError(record)
|
emit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, record):\n try:\n msg = self.format(record)\n level = _STORM_LOG_LEVELS.get(record.levelname.lower(), _STORM_LOG_INFO\n )\n self.serializer.send_message({'command': 'log', 'msg': str(msg),\n 'level': level})\n except Exception:\n self.handleError(record)\n",
"role": "user"
},
{
"content": "emit",
"role": "assistant"
}
] |
def x(self, storm_conf, context):
self.topology_name = storm_conf.get('topology.name', '')
self.task_id = context.get('taskid', '')
self.component_name = context.get('componentid')
if self.component_name is None:
self.component_name = context.get('task->component', {}).get(str(
self.task_id), '')
self.debug = storm_conf.get('topology.debug', False)
self.storm_conf = storm_conf
self.context = context
self.logger = logging.getLogger('.'.join((__name__, self.component_name)))
log_path = self.storm_conf.get('pystorm.log.path')
log_file_name = self.storm_conf.get('pystorm.log.file',
'pystorm_{topology_name}_{component_name}_{task_id}_{pid}.log')
root_log = logging.getLogger()
log_level = self.storm_conf.get('pystorm.log.level', 'info')
if log_path:
max_bytes = self.storm_conf.get('pystorm.log.max_bytes', 1000000)
backup_count = self.storm_conf.get('pystorm.log.backup_count', 10)
log_file = join(log_path, log_file_name.format(topology_name=self.
topology_name, component_name=self.component_name, task_id=self
.task_id, pid=self.pid))
handler = RotatingFileHandler(log_file, maxBytes=max_bytes,
backupCount=backup_count)
log_format = self.storm_conf.get('pystorm.log.format',
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
else:
self.log(
'pystorm StormHandler logging enabled, so all messages at levels greater than "pystorm.log.level" ({}) will be sent to Storm.'
.format(log_level))
handler = StormHandler(self.serializer)
log_format = self.storm_conf.get('pystorm.log.format',
'%(asctime)s - %(name)s - %(message)s')
formatter = logging.Formatter(log_format)
log_level = _PYTHON_LOG_LEVELS.get(log_level, logging.INFO)
if self.debug:
log_level = logging.DEBUG
handler.setLevel(log_level)
handler.setFormatter(formatter)
root_log.addHandler(handler)
self.logger.setLevel(log_level)
logging.getLogger('pystorm').setLevel(log_level)
if self.serializer.output_stream == sys.stdout:
sys.stdout = LogStream(logging.getLogger('pystorm.stdout'))
|
setup_component
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, storm_conf, context):\n self.topology_name = storm_conf.get('topology.name', '')\n self.task_id = context.get('taskid', '')\n self.component_name = context.get('componentid')\n if self.component_name is None:\n self.component_name = context.get('task->component', {}).get(str(\n self.task_id), '')\n self.debug = storm_conf.get('topology.debug', False)\n self.storm_conf = storm_conf\n self.context = context\n self.logger = logging.getLogger('.'.join((__name__, self.component_name)))\n log_path = self.storm_conf.get('pystorm.log.path')\n log_file_name = self.storm_conf.get('pystorm.log.file',\n 'pystorm_{topology_name}_{component_name}_{task_id}_{pid}.log')\n root_log = logging.getLogger()\n log_level = self.storm_conf.get('pystorm.log.level', 'info')\n if log_path:\n max_bytes = self.storm_conf.get('pystorm.log.max_bytes', 1000000)\n backup_count = self.storm_conf.get('pystorm.log.backup_count', 10)\n log_file = join(log_path, log_file_name.format(topology_name=self.\n topology_name, component_name=self.component_name, task_id=self\n .task_id, pid=self.pid))\n handler = RotatingFileHandler(log_file, maxBytes=max_bytes,\n backupCount=backup_count)\n log_format = self.storm_conf.get('pystorm.log.format',\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n else:\n self.log(\n 'pystorm StormHandler logging enabled, so all messages at levels greater than \"pystorm.log.level\" ({}) will be sent to Storm.'\n .format(log_level))\n handler = StormHandler(self.serializer)\n log_format = self.storm_conf.get('pystorm.log.format',\n '%(asctime)s - %(name)s - %(message)s')\n formatter = logging.Formatter(log_format)\n log_level = _PYTHON_LOG_LEVELS.get(log_level, logging.INFO)\n if self.debug:\n log_level = logging.DEBUG\n handler.setLevel(log_level)\n handler.setFormatter(formatter)\n root_log.addHandler(handler)\n self.logger.setLevel(log_level)\n logging.getLogger('pystorm').setLevel(log_level)\n if self.serializer.output_stream == sys.stdout:\n sys.stdout = LogStream(logging.getLogger('pystorm.stdout'))\n",
"role": "user"
},
{
"content": "setup_component",
"role": "assistant"
}
] |
def x(self):
msg = self.read_message()
pid_dir, _conf, _context = msg['pidDir'], msg['conf'], msg['context']
open(join(pid_dir, str(self.pid)), 'w').close()
self.send_message({'pid': self.pid})
return _conf, _context
|
read_handshake
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n msg = self.read_message()\n pid_dir, _conf, _context = msg['pidDir'], msg['conf'], msg['context']\n open(join(pid_dir, str(self.pid)), 'w').close()\n self.send_message({'pid': self.pid})\n return _conf, _context\n",
"role": "user"
},
{
"content": "read_handshake",
"role": "assistant"
}
] |
def x(self, message):
if not isinstance(message, dict):
logger = self.logger if self.logger else log
logger.error('%s.%d attempted to send a non dict message to Storm: %r',
self.component_name, self.pid, message)
return
self.serializer.send_message(message)
|
send_message
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, message):\n if not isinstance(message, dict):\n logger = self.logger if self.logger else log\n logger.error('%s.%d attempted to send a non dict message to Storm: %r',\n self.component_name, self.pid, message)\n return\n self.serializer.send_message(message)\n",
"role": "user"
},
{
"content": "send_message",
"role": "assistant"
}
] |
def x(self, exception, tup=None):
if tup:
message = (
'Python {exception_name} raised while processing Tuple {tup!r}\n{traceback}'
)
else:
message = 'Python {exception_name} raised\n{traceback}'
message = message.format(exception_name=exception.__class__.__name__,
tup=tup, traceback=format_exc())
self.send_message({'command': 'error', 'msg': str(message)})
self.send_message({'command': 'sync'})
|
raise_exception
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, exception, tup=None):\n if tup:\n message = (\n 'Python {exception_name} raised while processing Tuple {tup!r}\\n{traceback}'\n )\n else:\n message = 'Python {exception_name} raised\\n{traceback}'\n message = message.format(exception_name=exception.__class__.__name__,\n tup=tup, traceback=format_exc())\n self.send_message({'command': 'error', 'msg': str(message)})\n self.send_message({'command': 'sync'})\n",
"role": "user"
},
{
"content": "raise_exception",
"role": "assistant"
}
] |
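The rows above each pair an anonymized Python function (its name replaced with x), the original function name, and a ready-made system/user/assistant chat transcript for the naming task. Purely as an illustration of how rows shaped like these could be consumed, the sketch below flattens the messages list into a prompt/target pair. The file name function_names.jsonl and the JSON-lines layout are assumptions for the example, not something stated in this dump.

import json

# Assumed local file holding rows shaped like the ones above:
# {"source_code": ..., "function_name": ..., "messages": [...]}
DATA_PATH = "function_names.jsonl"  # hypothetical name, not from the original dump

def rows(path):
    """Yield one parsed row per JSON line."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            if line.strip():
                yield json.loads(line)

def to_prompt(row):
    """Flatten the chat-style `messages` list into (prompt, target).

    The final message in these rows is the assistant turn carrying the
    true function name, so it is kept aside as the training target.
    """
    *context, answer = row["messages"]
    prompt = "\n\n".join(f"{m['role']}: {m['content']}" for m in context)
    return prompt, answer["content"]

if __name__ == "__main__":
    for row in rows(DATA_PATH):
        prompt, target = to_prompt(row)
        # e.g. target == "parse_resource_from_url" for the second row above
        print(target)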