source_code stringlengths 45 7.24k | function_name stringlengths 1 111 | messages list |
|---|---|---|
def x(self, test, err, capt=None, tbinfo=None):
    """Record a failed test (test-runner plugin hook).

    Delegates to the private result-insertion helper with FAILURE state;
    `capt` and `tbinfo` are accepted for hook compatibility but unused here.
    """
    self.__insert_test_result(constants.State.FAILURE, test, err)
| addFailure | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, test, err, capt=None, tbinfo=None):\n self.__insert_test_result(constants.State.FAILURE, test, err)\n",
"role": "user"
},
{
"content": "ad... |
def x(chrome_options, proxy_string, proxy_user, proxy_pass):
    """Attach the proxy-auth extension zip to `chrome_options` and return it.

    When not running under the '-c' (multi-process) flag, the zip is built
    directly; otherwise builders stagger randomly and only create the zip
    if another process hasn't already.
    """
    if ''.join(sys.argv) != '-c':
        proxy_helper.create_proxy_zip(proxy_string, proxy_user, proxy_pass)
    else:
        # Multithreaded/multiprocess run: jitter to reduce the chance of
        # two workers building the zip at the same moment.
        lock = threading.Lock()
        with lock:
            time.sleep(random.uniform(0.02, 0.15))
            if not os.path.exists(PROXY_ZIP_PATH):
                proxy_helper.create_proxy_zip(
                    proxy_string, proxy_user, proxy_pass)
            time.sleep(random.uniform(0.1, 0.2))
    # Fall back to the secondary path if the primary zip is absent.
    zip_path = PROXY_ZIP_PATH
    if not os.path.exists(zip_path):
        zip_path = PROXY_ZIP_PATH_2
    chrome_options.add_extension(zip_path)
    return chrome_options
| add_chrome_proxy_extension | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(chrome_options, proxy_string, proxy_user, proxy_pass):\n if not ''.join(sys.argv) == '-c':\n proxy_helper.create_proxy_zip(proxy_string, proxy_user, ... |
def x(browser_name, headless, proxy_string, proxy_auth, proxy_user,
    proxy_pass, user_agent, disable_csp):
    """Create and return a local Selenium WebDriver for `browser_name`.

    Resets the downloads folder, then dispatches on the browser constant.
    Firefox/Chrome have layered fallbacks: a failed configured launch falls
    back to a plain driver unless running headless (then the error is
    re-raised, since a visible-browser fallback can't work).
    Raises a generic Exception for unsupported browsers or wrong OS.
    """
    downloads_path = download_helper.get_downloads_folder()
    download_helper.reset_downloads_folder()
    if browser_name == constants.Browser.FIREFOX:
        try:
            try:
                # Preferred path: marionette-enabled Firefox with a custom
                # profile (downloads dir, proxy, user agent, CSP setting).
                profile = _create_firefox_profile(downloads_path,
                    proxy_string, user_agent, disable_csp)
                firefox_capabilities = DesiredCapabilities.FIREFOX.copy()
                firefox_capabilities['marionette'] = True
                options = webdriver.FirefoxOptions()
                if headless:
                    options.add_argument('-headless')
                if LOCAL_GECKODRIVER and os.path.exists(LOCAL_GECKODRIVER):
                    make_driver_executable_if_not(LOCAL_GECKODRIVER)
                    firefox_driver = webdriver.Firefox(firefox_profile=
                        profile, capabilities=firefox_capabilities, options
                        =options, executable_path=LOCAL_GECKODRIVER)
                else:
                    firefox_driver = webdriver.Firefox(firefox_profile=
                        profile, capabilities=firefox_capabilities, options
                        =options)
            except WebDriverException:
                # Retry once with marionette disabled (legacy driver mode).
                profile = _create_firefox_profile(downloads_path,
                    proxy_string, user_agent, disable_csp)
                firefox_capabilities = DesiredCapabilities.FIREFOX.copy()
                firefox_capabilities['marionette'] = False
                firefox_driver = webdriver.Firefox(firefox_profile=profile,
                    capabilities=firefox_capabilities)
            return firefox_driver
        except Exception as e:
            if headless:
                # No visible-browser fallback is possible when headless.
                raise Exception(e)
            return webdriver.Firefox()
    elif browser_name == constants.Browser.INTERNET_EXPLORER:
        if not IS_WINDOWS:
            raise Exception(
                'IE Browser is for Windows-based operating systems only!')
        from selenium.webdriver.ie.options import Options
        ie_options = Options()
        ie_options.ignore_protected_mode_settings = False
        ie_options.ignore_zoom_level = True
        ie_options.require_window_focus = False
        ie_options.native_events = True
        ie_options.full_page_screenshot = True
        ie_options.persistent_hover = True
        ie_capabilities = ie_options.to_capabilities()
        if LOCAL_IEDRIVER and os.path.exists(LOCAL_IEDRIVER):
            make_driver_executable_if_not(LOCAL_IEDRIVER)
            return webdriver.Ie(capabilities=ie_capabilities,
                executable_path=LOCAL_IEDRIVER)
        else:
            return webdriver.Ie(capabilities=ie_capabilities)
    elif browser_name == constants.Browser.EDGE:
        if not IS_WINDOWS:
            raise Exception(
                'Edge Browser is for Windows-based operating systems only!')
        edge_capabilities = DesiredCapabilities.EDGE.copy()
        if LOCAL_EDGEDRIVER and os.path.exists(LOCAL_EDGEDRIVER):
            make_driver_executable_if_not(LOCAL_EDGEDRIVER)
            return webdriver.Edge(capabilities=edge_capabilities,
                executable_path=LOCAL_EDGEDRIVER)
        else:
            return webdriver.Edge(capabilities=edge_capabilities)
    elif browser_name == constants.Browser.SAFARI:
        return webdriver.Safari()
    elif browser_name == constants.Browser.OPERA:
        if LOCAL_OPERADRIVER and os.path.exists(LOCAL_OPERADRIVER):
            make_driver_executable_if_not(LOCAL_OPERADRIVER)
            return webdriver.Opera(executable_path=LOCAL_OPERADRIVER)
        else:
            return webdriver.Opera()
    elif browser_name == constants.Browser.PHANTOM_JS:
        # PhantomJS is deprecated in Selenium; suppress its UserWarning.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', category=UserWarning)
            return webdriver.PhantomJS()
    elif browser_name == constants.Browser.GOOGLE_CHROME:
        try:
            chrome_options = _set_chrome_options(downloads_path, headless,
                proxy_string, proxy_auth, proxy_user, proxy_pass,
                user_agent, disable_csp)
            if headless:
                # --headless is skipped when proxy auth is in use (the auth
                # extension presumably needs a visible browser — see
                # _set_chrome_options for the authoritative reason).
                if not proxy_auth:
                    chrome_options.add_argument('--headless')
                chrome_options.add_argument('--disable-gpu')
                chrome_options.add_argument('--no-sandbox')
            if LOCAL_CHROMEDRIVER and os.path.exists(LOCAL_CHROMEDRIVER):
                make_driver_executable_if_not(LOCAL_CHROMEDRIVER)
                return webdriver.Chrome(executable_path=LOCAL_CHROMEDRIVER,
                    options=chrome_options)
            else:
                return webdriver.Chrome(options=chrome_options)
        except Exception as e:
            if headless:
                raise Exception(e)
            # Fallback: launch Chrome without any custom options.
            if LOCAL_CHROMEDRIVER and os.path.exists(LOCAL_CHROMEDRIVER):
                make_driver_executable_if_not(LOCAL_CHROMEDRIVER)
                return webdriver.Chrome(executable_path=LOCAL_CHROMEDRIVER)
            else:
                return webdriver.Chrome()
    else:
        raise Exception('%s is not a valid browser option for this system!' %
            browser_name)
| get_local_driver | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(browser_name, headless, proxy_string, proxy_auth, proxy_user,\n proxy_pass, user_agent, disable_csp):\n downloads_path = download_helper.get_downloads_fo... |
def x(self, options, conf):
    """Plugin configuration hook: run the base S3Logging setup and keep a
    reference to the parsed options for later use."""
    super(S3Logging, self).configure(options, conf)
    self.options = options
| configure | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, options, conf):\n super(S3Logging, self).configure(options, conf)\n self.options = options\n",
"role": "user"
},
{
"content": "configur... |
def x(self, *args, **kwargs):
    """Instantiate this view's serializer, injecting the request context.

    Any positional/keyword arguments are forwarded to the serializer class;
    'context' is always overwritten with the view-provided context.
    """
    kwargs['context'] = self.get_serializer_context()
    serializer_cls = self.get_serializer_class()
    return serializer_cls(*args, **kwargs)
| get_serializer | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n serializer_class = self.get_serializer_class()\n kwargs['context'] = self.get_serializer_context()\n return serializer_class... |
def x(self, payload):
    """Resolve a decoded JWT payload to an active user.

    Raises AuthenticationFailed when the payload has no username, the user
    does not exist, or the account is disabled.
    """
    user_model = get_user_model()
    username = jwt_get_username_from_payload(payload)
    if not username:
        raise exceptions.AuthenticationFailed(_('Invalid payload.'))
    try:
        user = user_model.objects.get_by_natural_key(username)
    except user_model.DoesNotExist:
        raise exceptions.AuthenticationFailed(_('Invalid signature.'))
    if not user.is_active:
        raise exceptions.AuthenticationFailed(_('User account is disabled.'))
    return user
| authenticate_credentials | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, payload):\n User = get_user_model()\n username = jwt_get_username_from_payload(payload)\n if not username:\n msg = _('Invalid payload.')\... |
def x(rowvalues, rowtypes):
    """Return half-open (start, end) index ranges of consecutive non-empty cells.

    A cell is "empty" when its type is 0 or 6, or its type is 1 and the
    value is an empty/whitespace-only string.
    """
    n = len(rowvalues)
    assert n == len(rowtypes)
    ranges = []
    open_start = None
    for i, (cval, ctype) in enumerate(zip(rowvalues, rowtypes)):
        empty = (ctype == 0 or ctype == 6 or
                 (ctype == 1 and (cval == '' or cval.isspace())))
        if empty:
            if open_start is not None:
                ranges.append((open_start, i))
                open_start = None
        elif open_start is None:
            open_start = i
    if open_start is not None:
        ranges.append((open_start, n))
    return ranges
| parse_row | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(rowvalues, rowtypes):\n n = len(rowvalues)\n assert n == len(rowtypes)\n if not n:\n return []\n range_start = None\n ranges = []\n fo... |
def x(ranges):
    """Merge per-row 1D column ranges into 2D rectangles.

    `ranges` is a list (one entry per row) of sorted (col0, col1) ranges.
    Returns a list of mutable [row0, row1, col0, col1] rectangles built by
    sweeping the rows top-down and extending/merging overlapping rectangles.
    `ja` walks the accumulated rectangles; `jb` walks the current row.
    """
    ranges2d = []
    for irow, rowranges in enumerate(ranges):
        ja = 0
        jb = 0
        while jb < len(rowranges):
            bcol0, bcol1 = rowranges[jb]
            if ja < len(ranges2d):
                _, arow1, acol0, acol1 = ranges2d[ja]
                if arow1 < irow:
                    # Rectangle ended before this row; skip past it.
                    ja += 1
                    continue
                assert arow1 == irow or arow1 == irow + 1
            else:
                # Sentinel columns so the comparisons below treat "no more
                # rectangles" as an infinitely-far-right rectangle.
                acol0 = acol1 = 1000000000
            if bcol0 == acol0 and bcol1 == acol1:
                # Exact column match: extend the rectangle down one row.
                ranges2d[ja][1] = irow + 1
                ja += 1
                jb += 1
            elif bcol1 <= acol0:
                # Current range lies entirely left: start a new rectangle.
                ranges2d.insert(ja, [irow, irow + 1, bcol0, bcol1])
                ja += 1
                jb += 1
            elif bcol0 >= acol1:
                # Current range lies entirely right: advance rectangles.
                ja += 1
            else:
                # Partial overlap: grow the rectangle to cover the range,
                # then collapse any rectangles that now touch it.
                assert ja < len(ranges2d)
                ranges2d[ja][1] = irow + 1
                if bcol0 < acol0:
                    ranges2d[ja][2] = bcol0
                if bcol1 > acol1:
                    ranges2d[ja][3] = acol1 = bcol1
                    ja = _collapse_ranges(ranges2d, ja)
                jb += 1
    return ranges2d
| combine_ranges | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ranges):\n ranges2d = []\n for irow, rowranges in enumerate(ranges):\n ja = 0\n jb = 0\n while jb < len(rowranges):\n bco... |
def x(ranges, ja):
    """Fold into ranges[ja] every other rectangle that overlaps or touches it.

    Rectangles are mutable [row0, row1, col0, col1] lists; merged entries are
    deleted in place. A mere corner contact (bcol0 == acol1 and
    brow1 == arow0) does not trigger a merge. Returns the (possibly shifted)
    index of the grown rectangle.
    """
    arow0, _, acol0, acol1 = ranges[ja]
    jb = 0
    while jb < len(ranges):
        if jb == ja:
            jb += 1
            continue
        brow0, brow1, bcol0, bcol1 = ranges[jb]
        touches = bcol0 <= acol1 and brow1 >= arow0
        corner_only = bcol0 == acol1 and brow1 == arow0
        if touches and not corner_only:
            arow0 = min(arow0, brow0)
            acol1 = max(acol1, bcol1)
            ranges[ja][0] = arow0
            ranges[ja][3] = acol1
            del ranges[jb]
            if jb < ja:
                # Deleting an earlier entry shifts our rectangle left by one.
                ja -= 1
        else:
            jb += 1
    return ja
| collapse_ranges | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ranges, ja):\n arow0, _, acol0, acol1 = ranges[ja]\n jb = 0\n while jb < len(ranges):\n if jb == ja:\n jb += 1\n continue... |
def x():
    """Resolve full paths for every required dynamic library.

    Looks in the LLVM `lib` directory first, then falls back to the system
    `locate` command. Missing libraries or a failing `locate` are reported
    via log.fatal. Returns the list of resolved paths.
    """
    with TaskContext('Find the required dynamic libraries') as log:
        llvm = get_llvm()
        libs = required_link_libraries()
        resolved = []
        for libname in libs:
            if llvm:
                fullpath = os.path.join(llvm, 'lib', libname)
                if os.path.isfile(fullpath):
                    resolved.append(fullpath)
                    log.info('Library `%s` found at %s' % (libname, fullpath))
                    continue
                else:
                    log.info('%s does not exist' % fullpath)
            # Fallback: ask the system locate database for the library.
            proc = subprocess.Popen(['locate', libname], stdout=subprocess.
                PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if proc.returncode == 0:
                results = stdout.decode().strip().split('\n')
                results = [r for r in results if r]
                if results:
                    # Prefer the shortest path among the matches.
                    results.sort(key=len)
                    fullpath = results[0]
                    assert os.path.isfile(fullpath), 'Invalid path: %r' % (
                        fullpath,)
                    resolved.append(fullpath)
                    log.info('Library `%s` found at %s' % (libname, fullpath))
                    continue
                else:
                    log.fatal('Cannot locate dynamic library `%s`' % libname)
            else:
                log.fatal(
                    '`locate` command returned the following error:\n%s' %
                    stderr.decode())
        return resolved
| find_linked_dynamic_libraries | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n with TaskContext('Find the required dynamic libraries') as log:\n llvm = get_llvm()\n libs = required_link_libraries()\n resolved ... |
def x(self, refresh_rate=1):
    """Generator yielding keypresses from the blessed terminal.

    Yields one key (possibly an empty timeout result) every `refresh_rate`
    seconds while in cbreak mode; yields nothing if keyboard input is
    disabled. Never terminates on its own — the consumer must stop iterating.
    """
    if not self._enable_keyboard:
        return
    with self._blessed_term.cbreak():
        while True:
            yield self._blessed_term.inkey(timeout=refresh_rate)
| wait_for_keypresses | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, refresh_rate=1):\n if not self._enable_keyboard:\n return\n with self._blessed_term.cbreak():\n while True:\n yield self._... |
def x(e, n):
    """Normalize slice `e` over a sequence of length `n`.

    Returns (start, count, step). Special cases: n == 0 yields (0, 0, 1);
    a zero-step slice is interpreted as (start, count=stop, step=0) for
    repeat-style indexing, and raises ValueError if start/stop are not
    non-negative-count ints.
    """
    if n == 0:
        return 0, 0, 1
    step = e.step
    if step is None:
        step = 1
    if step == 0:
        # step == 0 encodes "repeat element `start` `stop` times".
        start = e.start
        count = e.stop
        if isinstance(start, int) and isinstance(count, int) and count >= 0:
            if start < 0:
                start += n
            if start < 0:
                return 0, 0, 0
            return start, count, 0
        else:
            raise ValueError('Invalid slice %r' % e)
    assert isinstance(step, int) and step != 0
    if e.start is None:
        # Default start depends on direction of traversal.
        start = 0 if step > 0 else n - 1
    else:
        start = e.start
        if start < 0:
            start += n
        if start < 0 and step < 0 or start >= n and step > 0:
            # Start is beyond the sequence in the direction of travel.
            return 0, 0, 0
        start = min(max(0, start), n - 1)
    assert isinstance(start, int
        ) and 0 <= start < n, 'Invalid start: %r' % start
    if e.stop is None:
        if step > 0:
            count = (n - 1 - start) // step + 1
        else:
            count = start // -step + 1
    else:
        stop = e.stop
        if stop < 0:
            stop += n
        if step > 0:
            if stop > start:
                count = (min(n, stop) - 1 - start) // step + 1
            else:
                count = 0
        elif stop < start:
            count = (start - max(stop, -1) - 1) // -step + 1
        else:
            count = 0
    assert isinstance(count, int) and count >= 0
    assert count == 0 or 0 <= start + step * (count - 1
        ) < n, 'Wrong tuple: (%d, %d, %d)' % (start, count, step)
    return start, count, step
| normalize_slice | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(e, n):\n if n == 0:\n return 0, 0, 1\n step = e.step\n if step is None:\n step = 1\n if step == 0:\n start = e.start\n ... |
def x(e, n):
    """Normalize a range-like object (with start/stop/step) over length `n`.

    Returns (start, count, step) with 0 <= start < n and the final element
    also within bounds, or None when the range reaches outside the sequence.
    An empty range yields (0, 0, step).
    """
    step = e.step
    if step > 0:
        count = max(0, (e.stop - e.start - 1) // step + 1)
    else:
        count = max(0, (e.start - e.stop - 1) // -step + 1)
    if not count:
        return 0, 0, step
    first = e.start
    last = first + (count - 1) * step
    if first >= 0:
        if first >= n or last < 0 or last >= n:
            return None
    else:
        # Negative indices count from the end.
        first += n
        last += n
        if not (0 <= first < n and 0 <= last < n):
            return None
    assert count >= 0
    return first, count, step
| normalize_range | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(e, n):\n if e.step > 0:\n count = max(0, (e.stop - e.start - 1) // e.step + 1)\n else:\n count = max(0, (e.start - e.stop - 1) // -e.step +... |
def x(module):
    """Import and return the module named `module`.

    Raises TImportError with an installation hint when it is not installed.
    """
    try:
        return importlib.import_module(module)
    except ModuleNotFoundError:
        raise TImportError(
            'Module `%s` is not installed. It is required for running this function.'
            % module)
| load_module | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(module):\n try:\n m = importlib.import_module(module)\n return m\n except ModuleNotFoundError:\n raise TImportError(\n 'M... |
def x(size):
    """Format a byte count as a short human-readable string.

    0 -> '0', None -> '', otherwise e.g. '100B', '1KB', '1.500KB', '1TB'.
    The number of fractional digits shrinks as the leading value grows, and
    exact multiples of the unit are printed without a fraction.
    """
    if size == 0:
        return '0'
    if size is None:
        return ''
    assert size >= 0, '`size` cannot be negative, got %d' % size
    suffixes = 'TGMK'
    maxl = len(suffixes)
    for i in range(maxl + 1):
        shift = (maxl - i) * 10
        if size >> shift == 0:
            continue  # value too small for this unit; try the next one down
        ndigits = 0
        for nd in (3, 2, 1):
            if size >> (shift + 12 - nd * 3) == 0:
                ndigits = nd
                break
        if ndigits == 0 or size == (size >> shift) << shift:
            rendered = str(size >> shift)
        else:
            rendered = '%.*f' % (ndigits, size / (1 << shift))
        unit = suffixes[i] if i < maxl else ''
        return '%s%sB' % (rendered, unit)
| humanize_bytes | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(size):\n if size == 0:\n return '0'\n if size is None:\n return ''\n assert size >= 0, '`size` cannot be negative, got %d' % size\n s... |
def x(self):
    """Clamp the current viewport to the frame bounds and fetch its data.

    First re-clamps the viewport origin and extent against the connection's
    frame dimensions, then requests exactly that row/column window.
    """
    self._view_col0 = clamp(self._view_col0, 0, self._max_col0)
    self._view_row0 = clamp(self._view_row0, 0, self._max_row0)
    self._view_ncols = clamp(self._view_ncols, 0, self._conn.frame_ncols -
        self._view_col0)
    self._view_nrows = clamp(self._view_nrows, 0, self._conn.frame_nrows -
        self._view_row0)
    self._conn.fetch_data(self._view_row0, self._view_row0 + self.
        _view_nrows, self._view_col0, self._view_col0 + self._view_ncols)
| fetch_data | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._view_col0 = clamp(self._view_col0, 0, self._max_col0)\n self._view_row0 = clamp(self._view_row0, 0, self._max_row0)\n self._view_ncols ... |
def x():
    """Collect build inputs from the source tree.

    Returns (sources, headers): headers is seeded with the public
    datatable.h, then everything under the 'c' directory is classified as a
    header (.h/.inc) or a source (.c/.cc).
    """
    sources = []
    headers = ['datatable/include/datatable.h']
    assert os.path.isfile(headers[0])
    for dirpath, _, filenames in os.walk('c'):
        for name in filenames:
            path = os.path.join(dirpath, name)
            if name.endswith(('.h', '.inc')):
                headers.append(path)
            elif name.endswith(('.c', '.cc')):
                sources.append(path)
    return sources, headers
| get_files | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n sources = []\n headers = ['datatable/include/datatable.h']\n assert os.path.isfile(headers[0])\n for dirpath, _, filenames in os.walk('c'):\n ... |
def x(filename):
    """Return project-local headers included by a C/C++ file.

    Scans preprocessor lines for the `rx_include` pattern and returns the
    matched names prefixed with the 'c/' source root. Blank lines and '//'
    comments are skipped.
    """
    found = []
    with open(filename, 'r', encoding='utf-8') as inp:
        for raw in inp:
            line = raw.strip()
            if not line or line.startswith('//'):
                continue
            if not line.startswith('#'):
                continue
            mm = re.match(rx_include, line)
            if mm:
                found.append(os.path.join('c', mm.group(1)))
    return found
| find_includes | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(filename):\n includes = []\n with open(filename, 'r', encoding='utf-8') as inp:\n for line in inp:\n line = line.strip()\n i... |
def x(headers):
    """Map each header file to the set of headers it includes.

    Validates that every header lives under 'c/' or 'datatable/include/',
    that no header includes itself, and that all includes resolve to known
    headers (otherwise ValueError).
    """
    headermap = dict.fromkeys(headers)
    for hfile in headers:
        assert hfile.startswith(('c/', 'datatable/include/'))
        included = find_includes(hfile)
        for f in included:
            assert f != hfile, 'File %s includes itself?' % f
            assert f.startswith('c/')
            if f not in headers:
                raise ValueError('Unknown header "%s" included from %s' % (
                    f, hfile))
        headermap[hfile] = set(included)
    return headermap
| build_headermap | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(headers):\n headermap = {}\n for hfile in headers:\n headermap[hfile] = None\n for hfile in headers:\n assert hfile.startswith('c/') or ... |
def x(sources):
    """Map each source file to the set of headers it includes."""
    return {sfile: set(find_includes(sfile)) for sfile in sources}
| build_sourcemap | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(sources):\n sourcemap = {}\n for sfile in sources:\n inc = find_includes(sfile)\n sourcemap[sfile] = set(inc)\n return sourcemap\n",
... |
def x(folder, include_headers=False):
    """Recursively list C/C++ source files under `folder`.

    'types.cc' files are always placed at the front of the result (they must
    compile first); headers are included only when `include_headers` is True.
    """
    wanted = {'.c', '.C', '.cc', '.cpp', '.cxx', '.c++'}
    if include_headers:
        wanted |= {'.h', '.hpp'}
    found = []
    for root, _, files in os.walk(folder):
        for name in files:
            path = os.path.join(root, name)
            if name == 'types.cc':
                found.insert(0, path)
            elif os.path.splitext(name)[1] in wanted:
                found.append(path)
    return found
| get_c_sources | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(folder, include_headers=False):\n allowed_extensions = ['.c', '.C', '.cc', '.cpp', '.cxx', '.c++']\n if include_headers:\n allowed_extensions += [... |
def x(self, estimated_size):
    """Decide where a Frame of `estimated_size` bytes should be stored.

    Returns None to load into memory, a directory path to spill to disk, or
    raises RuntimeError when neither RAM nor disk has enough room. Uses
    psutil (imported lazily, once) to probe available memory/disk.

    NOTE(review): `psutil` is a function-local name bound only inside the
    one-time import guard; on a second call with estimated_size >= 1024 the
    `psutil is None` check would hit an unbound local — confirm whether a
    module-level binding exists elsewhere in the file.
    """
    global _psutil_load_attempted
    if not _psutil_load_attempted:
        _psutil_load_attempted = True
        try:
            import psutil
        except ImportError:
            psutil = None
    if self.verbose and estimated_size > 1:
        self.logger.debug('The Frame is estimated to require %s bytes' %
            humanize_bytes(estimated_size))
    # Tiny frames, or no psutil to measure with: just use memory.
    if estimated_size < 1024 or psutil is None:
        return None
    vm = psutil.virtual_memory()
    if self.verbose:
        self.logger.debug('Memory available = %s (out of %s)' % (
            humanize_bytes(vm.available), humanize_bytes(vm.total)))
    if (estimated_size < vm.available and self._save_to is None or self.
        _save_to == 'memory'):
        if self.verbose:
            self.logger.debug('Frame will be loaded into memory')
        return None
    else:
        # Spill to disk: either the user-requested directory or a temp dir.
        if self._save_to:
            tmpdir = self._save_to
            os.makedirs(tmpdir)
        else:
            tmpdir = tempfile.mkdtemp()
        du = psutil.disk_usage(tmpdir)
        if self.verbose:
            self.logger.debug('Free disk space on drive %s = %s' % (os.path
                .splitdrive(tmpdir)[0] or '/', humanize_bytes(du.free)))
        # An explicit _save_to overrides the free-space check.
        if du.free > estimated_size or self._save_to:
            if self.verbose:
                self.logger.debug('Frame will be stored in %s' % tmpdir)
            return tmpdir
    raise RuntimeError(
        "The Frame is estimated to require at lest %s of memory, and you don't have that much available either in RAM or on a hard drive."
        % humanize_bytes(estimated_size))
| get_destination | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, estimated_size):\n global _psutil_load_attempted\n if not _psutil_load_attempted:\n _psutil_load_attempted = True\n try:\n ... |
def x(self, dest, _strategy='auto'):
    """Save this Frame to directory `dest` in NFF2 format.

    Writes a '_meta.nff' CSV manifest (filename, stype, meta, colname, min,
    max) plus one binary file per column. Columns of stype obj64 are skipped
    with a warning. `_strategy` selects the write mode ('auto'/'write'/
    'mmap'); anything else raises TValueError.
    """
    if _strategy not in ('auto', 'write', 'mmap'):
        raise TValueError(
            "Invalid parameter _strategy: only 'write' / 'mmap' / 'auto' are allowed"
            )
    dest = os.path.expanduser(dest)
    if not os.path.exists(dest):
        os.makedirs(dest)
    self.materialize()
    mins = self.min().to_list()
    maxs = self.max().to_list()
    metafile = os.path.join(dest, '_meta.nff')
    with _builtin_open(metafile, 'w', encoding='utf-8') as out:
        out.write('# NFF2\n')
        out.write('# nrows = %d\n' % self.nrows)
        out.write('filename,stype,meta,colname,min,max\n')
        # Zero-pad column file names so they sort lexicographically.
        l = len(str(self.ncols))
        for i in range(self.ncols):
            filename = 'c%0*d' % (l, i + 1)
            # Escape embedded quotes for the CSV manifest.
            colname = self.names[i].replace('"', '""')
            stype = self.stypes[i]
            if stype == dt.stype.obj64:
                dtwarn('Column %r of type obj64 was not saved' % self.names[i])
                continue
            smin = _stringify(mins[i][0])
            smax = _stringify(maxs[i][0])
            out.write('%s,%s,,"%s",%s,%s\n' % (filename, stype.code,
                colname, smin, smax))
            filename = os.path.join(dest, filename)
            core._column_save_to_disk(self, i, filename, _strategy)
| save_nff | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, dest, _strategy='auto'):\n if _strategy not in ('auto', 'write', 'mmap'):\n raise TValueError(\n \"Invalid parameter _strategy: only... |
def x(title, text, alert_type=None, aggregation_key=None, source_type_name=
    None, date_happened=None, priority=None, tags=None, hostname=None):
    """Interface signature for emitting an event — no implementation body is
    visible in this chunk (presumably provided by a subclass or backend)."""
| event | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(title, text, alert_type=None, aggregation_key=None, source_type_name=\n None, date_happened=None, priority=None, tags=None, hostname=None):\n",
"role": ... |
def x(request):
    """Return the projects the requesting user owns, and those solely owned.

    'projects_owned': every project where the user has the 'Owner' role.
    'projects_sole_owned': the subset where the user is the only owner
    (grouped by project, HAVING exactly one Owner row). Both lists are
    ordered by project name.
    """
    projects_owned = request.db.query(Project.id).join(Role.project).filter(
        Role.role_name == 'Owner', Role.user == request.user).subquery()
    with_sole_owner = request.db.query(Role.project_id).join(projects_owned
        ).filter(Role.role_name == 'Owner').group_by(Role.project_id).having(
        func.count(Role.project_id) == 1).subquery()
    return {'projects_owned': request.db.query(Project).join(projects_owned,
        Project.id == projects_owned.c.id).order_by(Project.name).all(),
        'projects_sole_owned': request.db.query(Project).join(
        with_sole_owner).order_by(Project.name).all()}
| user_projects | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n projects_owned = request.db.query(Project.id).join(Role.project).filter(\n Role.role_name == 'Owner', Role.user == request.user).subquery... |
def x(config, revisions, **kwargs):
    """Run `alembic merge` for the given revisions under the migration lock."""
    with alembic_lock(config.registry['sqlalchemy.engine'], config.
        alembic_config()) as alembic_config:
        alembic.command.merge(alembic_config, revisions, **kwargs)
| merge | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, revisions, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n alemb... |
def x(session, column, windowsize):
    """Yield WHERE clauses that partition `column` into ~windowsize chunks.

    Numbers the distinct column values with row_number(), keeps every
    windowsize-th value as an interval boundary, and yields half-open
    [start, end) clauses (the last interval is unbounded above).
    """

    def x(start_id, end_id):
        # Build the clause for one window; end_id None means "to the end".
        if end_id:
            return and_(column >= start_id, column < end_id)
        else:
            return column >= start_id
    q = session.query(column, func.row_number().over(order_by=column).label
        ('rownum')).from_self(column)
    if windowsize > 1:
        q = q.filter(text('rownum %% %d=1' % windowsize))
    intervals = [row[0] for row in q]
    while intervals:
        start = intervals.pop(0)
        if intervals:
            end = intervals[0]
        else:
            end = None
        yield int_for_range(start, end)
| column_windows | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(session, column, windowsize):\n\n def x(start_id, end_id):\n if end_id:\n return and_(column >= start_id, column < end_id)\n else:\... |
def x(q, column, windowsize):
    """Stream the rows of query `q` window-by-window, ordered by `column`.

    Each window applies one WHERE clause from column_windows, keeping any
    single result set to roughly `windowsize` rows.
    """
    for whereclause in column_windows(q.session, column, windowsize):
        window = q.filter(whereclause).order_by(column)
        yield from window
| windowed_query | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(q, column, windowsize):\n for whereclause in column_windows(q.session, column, windowsize):\n for row in q.filter(whereclause).order_by(column):\n ... |
def x(s):
matches = re.findall('(?:"([^"]*)")|([^"]*)', s)
result_quoted = [t[0].strip() for t in matches if t[0]]
result_unquoted = [t[1].strip() for t in matches if t[1]]
return result_quoted, result_unquoted
| filter_query | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(s):\n matches = re.findall('(?:\"([^\"]*)\")|([^\"]*)', s)\n result_quoted = [t[0].strip() for t in matches if t[0]]\n result_unquoted = [t[1].strip()... |
def x(query_type, query):
    """Build an Elasticsearch multi_match query over the search fields,
    appending '^boost' to any field listed in SEARCH_BOOSTS."""
    fields = []
    for field in SEARCH_FIELDS:
        if field in SEARCH_BOOSTS:
            fields.append(field + '^' + str(SEARCH_BOOSTS[field]))
        else:
            fields.append(field)
    return Q('multi_match', fields=fields, query=query, type=query_type)
| form_query | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(query_type, query):\n fields = [(field + '^' + str(SEARCH_BOOSTS[field]) if field in\n SEARCH_BOOSTS else field) for field in SEARCH_FIELDS]\n ret... |
def x():
    """Run Alembic migrations in 'offline' mode: configure the context from
    the sqlalchemy.url option only (no live connection) and emit SQL."""
    url = context.config.get_main_option('sqlalchemy.url')
    context.configure(url=url, compare_server_default=True)
    with context.begin_transaction():
        context.run_migrations()
| run_migrations_offline | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n url = context.config.get_main_option('sqlalchemy.url')\n context.configure(url=url, compare_server_default=True)\n with context.begin_transaction... |
def x():
    """Run Alembic migrations in 'online' mode against a live connection.

    Reuses a connection supplied via config.attributes when present
    (e.g. from tests); otherwise builds a NullPool engine from the ini
    section's url.
    """
    connectable = context.config.attributes.get('connection', None)
    if connectable is None:
        options = context.config.get_section(context.config.config_ini_section)
        url = options.pop('url')
        connectable = create_engine(url, poolclass=pool.NullPool)
    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=db.
            metadata, compare_server_default=True)
        with context.begin_transaction():
            context.run_migrations()
| run_migrations_online | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n connectable = context.config.attributes.get('connection', None)\n if connectable is None:\n options = context.config.get_section(context.conf... |
def x(config, **kwargs):
    """Run `alembic branches` under the migration lock."""
    with alembic_lock(config.registry['sqlalchemy.engine'], config.
        alembic_config()) as alembic_config:
        alembic.command.branches(alembic_config, **kwargs)
| branches | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n alembic.command.... |
def x(config, revision, **kwargs):
    """Run `alembic upgrade` to `revision` under the migration lock."""
    with alembic_lock(config.registry['sqlalchemy.engine'], config.
        alembic_config()) as alembic_config:
        alembic.command.upgrade(alembic_config, revision, **kwargs)
| upgrade | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, revision, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n alembi... |
def x(url, host=None):
    """Return True when `url` is safe to redirect to for the given `host`.

    Rejects empty urls, '///'-prefixed urls, urls with a scheme but no
    netloc, urls starting with a control character, foreign hosts, and
    non-http(s) schemes.
    """
    if url is not None:
        url = url.strip()
    if not url:
        return False
    # Browsers treat backslashes like forward slashes.
    url = url.replace('\\', '/')
    if url.startswith('///'):
        return False
    parts = urlparse(url)
    if parts.scheme and not parts.netloc:
        return False
    # Reject leading control characters (Unicode category 'C*').
    if unicodedata.category(url[0])[0] == 'C':
        return False
    host_ok = not parts.netloc or parts.netloc == host
    scheme_ok = not parts.scheme or parts.scheme in {'http', 'https'}
    return host_ok and scheme_ok
| is_safe_url | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(url, host=None):\n if url is not None:\n url = url.strip()\n if not url:\n return False\n url = url.replace('\\\\', '/')\n if url.sta... |
def x(data):
    """Make a string safe for XML output.

    Non-ASCII characters become XML character references and characters
    matching _illegal_xml_chars_re are removed. Falsy input (None, '') is
    returned unchanged.
    """
    if not data:
        return data
    ascii_text = data.encode('ascii', 'xmlcharrefreplace').decode('ascii')
    return _illegal_xml_chars_re.sub('', ascii_text)
| clean_for_xml | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data):\n if data:\n data = data.encode('ascii', 'xmlcharrefreplace').decode('ascii')\n return _illegal_xml_chars_re.sub('', data)\n return ... |
def x(method=None):
    """Decorator factory: wrap an XML-RPC view so each call increments a
    counter and records its timing, tagged with the rpc method name.

    Bug fix: the inner functions were both named `x`, so `return wrapped`
    and `return decorator` referenced undefined names (NameError). They are
    now named to match the values they return.
    """

    def decorator(f):

        def wrapped(context, request):
            # Resolve the metrics service per-request and tag by method.
            metrics = request.find_service(IMetricsService, context=None)
            metrics.increment('warehouse.xmlrpc.call', tags=[
                f'rpc_method:{method}'])
            with metrics.timed('warehouse.xmlrpc.timing', tags=[
                f'rpc_method:{method}']):
                return f(context, request)
        return wrapped
    return decorator
| submit_xmlrpc_metrics | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(method=None):\n\n def x(f):\n\n def x(context, request):\n metrics = request.find_service(IMetricsService, context=None)\n metr... |
def x(**kwargs):
    """Decorator factory registering an XML-RPC view on the RPC2, pypi and
    pypi_slash endpoints, with CSRF disabled, POST-only access, metrics
    collection, and the typed-mapply view mapper.

    Bug fix: the inner function was named `x`, so `return decorator`
    referenced an undefined name (NameError). It is now named to match.
    """
    kwargs.update(require_csrf=False, require_methods=['POST'], decorator=(
        submit_xmlrpc_metrics(method=kwargs['method']),), mapper=
        TypedMapplyViewMapper)

    def decorator(f):
        # Stack one registration per endpoint onto the same view callable.
        rpc2 = _xmlrpc_method(endpoint='RPC2', **kwargs)
        pypi = _xmlrpc_method(endpoint='pypi', **kwargs)
        pypi_slash = _xmlrpc_method(endpoint='pypi_slash', **kwargs)
        return rpc2(pypi_slash(pypi(f)))
    return decorator
| xmlrpc_method | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(**kwargs):\n kwargs.update(require_csrf=False, require_methods=['POST'], decorator=(\n submit_xmlrpc_metrics(method=kwargs['method']),), mapper=\n ... |
def x(config, **kwargs):
    """Run `alembic current` under the migration lock."""
    with alembic_lock(config.registry['sqlalchemy.engine'], config.
        alembic_config()) as alembic_config:
        alembic.command.current(alembic_config, **kwargs)
| current | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n alembic.command.... |
def x(filename, filetype):
if zipfile.is_zipfile(filename):
with zipfile.ZipFile(filename) as zfp:
for zinfo in zfp.infolist():
if zinfo.compress_type not in {zipfile.ZIP_STORED, zipfile.
ZIP_DEFLATED}:
return False
if filename.endswith('.exe'):
if filetype != 'bdist_wininst':
return False
try:
with zipfile.ZipFile(filename, 'r') as zfp:
for zipname in zfp.namelist():
if not _safe_zipnames.match(zipname):
return False
except zipfile.BadZipFile:
return False
elif filename.endswith('.msi'):
if filetype != 'bdist_msi':
return False
with open(filename, 'rb') as fp:
if fp.read(8) != b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1':
return False
elif filename.endswith('.zip') or filename.endswith('.egg'):
try:
with zipfile.ZipFile(filename, 'r') as zfp:
for zipname in zfp.namelist():
parts = os.path.split(zipname)
if len(parts) == 2 and parts[1] == 'PKG-INFO':
break
else:
return False
except zipfile.BadZipFile:
return False
elif filename.endswith('.whl'):
try:
with zipfile.ZipFile(filename, 'r') as zfp:
for zipname in zfp.namelist():
parts = os.path.split(zipname)
if len(parts) == 2 and parts[1] == 'WHEEL':
break
else:
return False
except zipfile.BadZipFile:
return False
return True
| is_valid_dist_file | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(filename, filetype):\n if zipfile.is_zipfile(filename):\n with zipfile.ZipFile(filename) as zfp:\n for zinfo in zfp.infolist():\n ... |
def x(db_session, filename, hashes):
file_ = db_session.query(File).filter((File.filename == filename) | (
File.blake2_256_digest == hashes['blake2_256'])).first()
if file_ is not None:
return file_.filename == filename and file_.sha256_digest == hashes[
'sha256'] and file_.md5_digest == hashes['md5'
] and file_.blake2_256_digest == hashes['blake2_256']
return None
| is_duplicate_file | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(db_session, filename, hashes):\n file_ = db_session.query(File).filter((File.filename == filename) | (\n File.blake2_256_digest == hashes['blake2_256... |
def x(config, type_):
from warehouse.db import Session
if type_ is None:
type_ = autodetect()
runner = {'bpython': bpython, 'ipython': ipython, 'plain': plain}[type_]
session = Session(bind=config.registry['sqlalchemy.engine'])
try:
runner(config=config, db=session)
except ImportError:
raise click.ClickException('The {!r} shell is not available.'.
format(type_)) from None
| shell | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, type_):\n from warehouse.db import Session\n if type_ is None:\n type_ = autodetect()\n runner = {'bpython': bpython, 'ipython': ipytho... |
def x(name, *, allow_unverified=False):
def x(fn):
@functools.wraps(fn)
def x(request, user_or_users, **kwargs):
if isinstance(user_or_users, (list, set)):
recipients = user_or_users
else:
recipients = [user_or_users]
context = fn(request, user_or_users, **kwargs)
msg = EmailMessage.from_template(name, context, request=request)
for recipient in recipients:
if isinstance(recipient, tuple):
user, email = recipient
else:
user, email = recipient, None
_send_email_to_user(request, user, msg, email=email,
allow_unverified=allow_unverified)
return context
return wrapper
return inner
| email | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(name, *, allow_unverified=False):\n\n def x(fn):\n\n @functools.wraps(fn)\n def x(request, user_or_users, **kwargs):\n if isinstanc... |
def x(config, revision_range, **kwargs):
with alembic_lock(config.registry['sqlalchemy.engine'], config.
alembic_config()) as alembic_config:
alembic.command.history(alembic_config, revision_range, **kwargs)
| history | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, revision_range, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n ... |
def x(config):
request = config.task(_reindex).get_request()
config.task(_reindex).run(request)
| reindex | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config):\n request = config.task(_reindex).get_request()\n config.task(_reindex).run(request)\n",
"role": "user"
},
{
"content": "reindex",
... |
def x(config, **kwargs):
with alembic_lock(config.registry['sqlalchemy.engine'], config.
alembic_config()) as alembic_config:
alembic.command.heads(alembic_config, **kwargs)
| heads | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(config, **kwargs):\n with alembic_lock(config.registry['sqlalchemy.engine'], config.\n alembic_config()) as alembic_config:\n alembic.command.... |
def x(self, request):
r = redis.StrictRedis.from_url(request.registry.settings[
'celery.scheduler_url'])
try:
with SearchLock(r, timeout=30 * 60, blocking_timeout=30):
p = urllib.parse.urlparse(request.registry.settings[
'elasticsearch.url'])
client = elasticsearch.Elasticsearch([urllib.parse.urlunparse(p
[:2] + ('',) * 4)], verify_certs=True, ca_certs=certifi.
where(), timeout=30, retry_on_timeout=True, serializer=
serializer.serializer)
number_of_replicas = request.registry.get('elasticsearch.replicas',
0)
refresh_interval = request.registry.get('elasticsearch.interval',
'1s')
index_base = request.registry['elasticsearch.index']
random_token = binascii.hexlify(os.urandom(5)).decode('ascii')
new_index_name = '{}-{}'.format(index_base, random_token)
doc_types = request.registry.get('search.doc_types', set())
shards = request.registry.get('elasticsearch.shards', 1)
new_index = get_index(new_index_name, doc_types, using=client,
shards=shards, replicas=0, interval='-1')
new_index.create(wait_for_active_shards=shards)
try:
request.db.execute("SET statement_timeout = '600s'")
for _ in parallel_bulk(client, _project_docs(request.db),
index=new_index_name):
pass
except:
new_index.delete()
raise
finally:
request.db.rollback()
request.db.close()
client.indices.put_settings(index=new_index_name, body={'index':
{'number_of_replicas': number_of_replicas,
'refresh_interval': refresh_interval}})
if client.indices.exists_alias(name=index_base):
to_delete = set()
actions = []
for name in client.indices.get_alias(name=index_base):
to_delete.add(name)
actions.append({'remove': {'index': name, 'alias':
index_base}})
actions.append({'add': {'index': new_index_name, 'alias':
index_base}})
client.indices.update_aliases({'actions': actions})
client.indices.delete(','.join(to_delete))
else:
client.indices.put_alias(name=index_base, index=new_index_name)
except redis.exceptions.LockError as exc:
raise self.retry(countdown=60, exc=exc)
| reindex | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, request):\n r = redis.StrictRedis.from_url(request.registry.settings[\n 'celery.scheduler_url'])\n try:\n with SearchLock(r, timeout=... |
def x(self):
parser = reqparse.RequestParser()
parser.add_argument('public_key', type=parameters.valid_ed25519,
required=True)
parser.add_argument('spent', type=parameters.valid_bool)
args = parser.parse_args(strict=True)
pool = current_app.config['bigchain_pool']
with pool() as bigchain:
outputs = bigchain.get_outputs_filtered(args['public_key'], args[
'spent'])
return [{'transaction_id': output.txid, 'output_index': output.
output} for output in outputs]
| get | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n parser = reqparse.RequestParser()\n parser.add_argument('public_key', type=parameters.valid_ed25519,\n required=True)\n parser.add_arg... |
def x(args):
config = copy.deepcopy(bigchaindb.config)
del config['CONFIGURED']
print(json.dumps(config, indent=4, sort_keys=True))
| run_show_config | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args):\n config = copy.deepcopy(bigchaindb.config)\n del config['CONFIGURED']\n print(json.dumps(config, indent=4, sort_keys=True))\n",
"role": "u... |
def x(args):
config_path = args.config or bigchaindb.config_utils.CONFIG_DEFAULT_PATH
config_file_exists = False
if config_path != '-':
config_file_exists = os.path.exists(config_path)
if config_file_exists and not args.yes:
want = input_on_stderr(
'Config file `{}` exists, do you want to override it? (cannot be undone) [y/N]: '
.format(config_path))
if want != 'y':
return
conf = copy.deepcopy(bigchaindb.config)
print('Generating default configuration for backend {}'.format(args.
backend), file=sys.stderr)
database_keys = bigchaindb._database_keys_map[args.backend]
conf['database'] = bigchaindb._database_map[args.backend]
if not args.yes:
for key in ('bind',):
val = conf['server'][key]
conf['server'][key] = input_on_stderr(
'API Server {}? (default `{}`): '.format(key, val), val)
for key in ('scheme', 'host', 'port'):
val = conf['wsserver'][key]
conf['wsserver'][key] = input_on_stderr(
'WebSocket Server {}? (default `{}`): '.format(key, val), val)
for key in database_keys:
val = conf['database'][key]
conf['database'][key] = input_on_stderr(
'Database {}? (default `{}`): '.format(key, val), val)
for key in ('host', 'port'):
val = conf['tendermint'][key]
conf['tendermint'][key] = input_on_stderr(
'Tendermint {}? (default `{}`)'.format(key, val), val)
if config_path != '-':
bigchaindb.config_utils.write_config(conf, config_path)
else:
print(json.dumps(conf, indent=4, sort_keys=True))
print('Configuration written to {}'.format(config_path), file=sys.stderr)
print('Ready to go!', file=sys.stderr)
| run_configure | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args):\n config_path = args.config or bigchaindb.config_utils.CONFIG_DEFAULT_PATH\n config_file_exists = False\n if config_path != '-':\n confi... |
def x(args, bigchain):
new_validator = {'public_key': {'value': public_key_from_base64(args.
public_key), 'type': 'ed25519-base16'}, 'power': args.power,
'node_id': args.node_id}
return create_new_election(args.sk, bigchain, ValidatorElection,
new_validator)
| run_election_new_upsert_validator | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args, bigchain):\n new_validator = {'public_key': {'value': public_key_from_base64(args.\n public_key), 'type': 'ed25519-base16'}, 'power': args.powe... |
def x(args, bigchain):
key = load_node_key(args.sk)
tx = bigchain.get_transaction(args.election_id)
voting_powers = [v.amount for v in tx.outputs if key.public_key in v.
public_keys]
if len(voting_powers) > 0:
voting_power = voting_powers[0]
else:
logger.error(
'The key you provided does not match any of the eligible voters in this election.'
)
return False
inputs = [i for i in tx.to_inputs() if key.public_key in i.owners_before]
election_pub_key = ValidatorElection.to_public_key(tx.id)
approval = Vote.generate(inputs, [([election_pub_key], voting_power)],
tx.id).sign([key.private_key])
approval.validate(bigchain)
resp = bigchain.write_transaction(approval, 'broadcast_tx_commit')
if resp == (202, ''):
logger.info('[SUCCESS] Your vote has been submitted')
return approval.id
else:
logger.error('Failed to commit vote')
return False
| run_election_approve | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args, bigchain):\n key = load_node_key(args.sk)\n tx = bigchain.get_transaction(args.election_id)\n voting_powers = [v.amount for v in tx.outputs if k... |
def x(args, bigchain):
election = bigchain.get_transaction(args.election_id)
if not election:
logger.error(f'No election found with election_id {args.election_id}')
return
response = election.show_election(bigchain)
logger.info(response)
return response
| run_election_show | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args, bigchain):\n election = bigchain.get_transaction(args.election_id)\n if not election:\n logger.error(f'No election found with election_id {a... |
def x(args):
dbname = bigchaindb.config['database']['name']
if not args.yes:
response = input_on_stderr('Do you want to drop `{}` database? [y/n]: '
.format(dbname))
if response != 'y':
return
conn = backend.connect()
dbname = bigchaindb.config['database']['name']
try:
schema.drop_database(conn, dbname)
except DatabaseDoesNotExist:
print("Cannot drop '{name}'. The database does not exist.".format(
name=dbname), file=sys.stderr)
| run_drop | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args):\n dbname = bigchaindb.config['database']['name']\n if not args.yes:\n response = input_on_stderr('Do you want to drop `{}` database? [y/n]:... |
def x(args):
setup_logging()
logger.info('BigchainDB Version %s', bigchaindb.__version__)
run_recover(bigchaindb.lib.BigchainDB())
if not args.skip_initialize_database:
logger.info('Initializing database')
_run_init()
logger.info('Starting BigchainDB main process.')
from bigchaindb.start import start
start(args)
| run_start | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args):\n setup_logging()\n logger.info('BigchainDB Version %s', bigchaindb.__version__)\n run_recover(bigchaindb.lib.BigchainDB())\n if not args.sk... |
def x(args):
supported_tm_ver = {'description':
'BigchainDB supports the following Tendermint version(s)',
'tendermint': __tm_supported_versions__}
print(json.dumps(supported_tm_ver, indent=4, sort_keys=True))
| run_tendermint_version | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args):\n supported_tm_ver = {'description':\n 'BigchainDB supports the following Tendermint version(s)',\n 'tendermint': __tm_supported_versio... |
def x(conn, search, *, language='english', case_sensitive=False,
diacritic_sensitive=False, text_score=False, limit=0, table=None):
raise OperationError(
'This query is only supported when running BigchainDB with MongoDB as the backend.'
)
| text_search | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(conn, search, *, language='english', case_sensitive=False,\n diacritic_sensitive=False, text_score=False, limit=0, table=None):\n raise OperationError(\n... |
def x():
ctx = {}
def x(data):
return json.dumps(data, indent=2, sort_keys=True)
client = server.create_app().test_client()
host = 'example.com:9984'
res = client.get('/', environ_overrides={'HTTP_HOST': host})
res_data = json.loads(res.data.decode())
ctx['index'] = pretty_json(res_data)
res = client.get('/api/v1/', environ_overrides={'HTTP_HOST': host})
ctx['api_index'] = pretty_json(json.loads(res.data.decode()))
privkey = 'CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z'
pubkey = '4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD'
asset = {'msg': 'Hello BigchainDB!'}
tx = Transaction.create([pubkey], [([pubkey], 1)], asset=asset,
metadata={'sequence': 0})
tx = tx.sign([privkey])
ctx['tx'] = pretty_json(tx.to_dict())
ctx['public_keys'] = tx.outputs[0].public_keys[0]
ctx['txid'] = tx.id
privkey_transfer = '3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya'
pubkey_transfer = '3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9'
cid = 0
input_ = Input(fulfillment=tx.outputs[cid].fulfillment, fulfills=
TransactionLink(txid=tx.id, output=cid), owners_before=tx.outputs[
cid].public_keys)
tx_transfer = Transaction.transfer([input_], [([pubkey_transfer], 1)],
asset_id=tx.id, metadata={'sequence': 1})
tx_transfer = tx_transfer.sign([privkey])
ctx['tx_transfer'] = pretty_json(tx_transfer.to_dict())
ctx['public_keys_transfer'] = tx_transfer.outputs[0].public_keys[0]
ctx['tx_transfer_id'] = tx_transfer.id
pubkey_transfer_last = '3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm'
cid = 0
input_ = Input(fulfillment=tx_transfer.outputs[cid].fulfillment,
fulfills=TransactionLink(txid=tx_transfer.id, output=cid),
owners_before=tx_transfer.outputs[cid].public_keys)
tx_transfer_last = Transaction.transfer([input_], [([
pubkey_transfer_last], 1)], asset_id=tx.id, metadata={'sequence': 2})
tx_transfer_last = tx_transfer_last.sign([privkey_transfer])
ctx['tx_transfer_last'] = pretty_json(tx_transfer_last.to_dict())
ctx['tx_transfer_last_id'] = tx_transfer_last.id
ctx['public_keys_transfer_last'] = tx_transfer_last.outputs[0].public_keys[
0]
node_private = '5G2kE1zJAgTajkVSbPAQWo4c2izvtwqaNHYsaNpbbvxX'
node_public = 'DngBurxfeNVKZWCEcDnLj1eMPAS7focUZTE5FndFGuHT'
signature = (
'53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA'
)
app_hash = (
'f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056')
block = lib.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash)
block_dict = block._asdict()
block_dict.pop('app_hash')
ctx['block'] = pretty_json(block_dict)
ctx['blockid'] = block.height
block_list = [block.height]
ctx['block_list'] = pretty_json(block_list)
base_path = os.path.join(os.path.dirname(__file__), 'source/http-samples')
if not os.path.exists(base_path):
os.makedirs(base_path)
for name, tpl in TPLS.items():
path = os.path.join(base_path, name + '.http')
code = tpl % ctx
with open(path, 'w') as handle:
handle.write(code)
| main | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n ctx = {}\n\n def x(data):\n return json.dumps(data, indent=2, sort_keys=True)\n client = server.create_app().test_client()\n host = 'ex... |
def x(data):
return rapidjson.dumps(data, skipkeys=False, ensure_ascii=False,
sort_keys=True)
| serialize | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data):\n return rapidjson.dumps(data, skipkeys=False, ensure_ascii=False,\n sort_keys=True)\n",
"role": "user"
},
{
"content": "serialize... |
def x(obj_name, obj, key, validation_fun):
backend = bigchaindb.config['database']['backend']
if backend == 'localmongodb':
data = obj.get(key, {})
if isinstance(data, dict):
validate_all_keys_in_obj(obj_name, data, validation_fun)
elif isinstance(data, list):
validate_all_items_in_list(obj_name, data, validation_fun)
| validate_txn_obj | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj_name, obj, key, validation_fun):\n backend = bigchaindb.config['database']['backend']\n if backend == 'localmongodb':\n data = obj.get(key, {}... |
def x(obj_name, obj, validation_fun):
for key, value in obj.items():
validation_fun(obj_name, key)
if isinstance(value, dict):
validate_all_keys_in_obj(obj_name, value, validation_fun)
elif isinstance(value, list):
validate_all_items_in_list(obj_name, value, validation_fun)
| validate_all_keys_in_obj | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj_name, obj, validation_fun):\n for key, value in obj.items():\n validation_fun(obj_name, key)\n if isinstance(value, dict):\n va... |
def x(obj, key, validation_fun):
for vkey, value in obj.items():
if vkey == key:
validation_fun(value)
elif isinstance(value, dict):
validate_all_values_for_key_in_obj(value, key, validation_fun)
elif isinstance(value, list):
validate_all_values_for_key_in_list(value, key, validation_fun)
| validate_all_values_for_key_in_obj | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj, key, validation_fun):\n for vkey, value in obj.items():\n if vkey == key:\n validation_fun(value)\n elif isinstance(value, dic... |
def x(obj_name, key):
if re.search('^[$]|\\.|\\x00', key):
error_str = (
'Invalid key name "{}" in {} object. The key name cannot contain characters ".", "$" or null characters'
.format(key, obj_name))
raise ValidationError(error_str)
| validate_key | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj_name, key):\n if re.search('^[$]|\\\\.|\\\\x00', key):\n error_str = (\n 'Invalid key name \"{}\" in {} object. The key name cannot co... |
def x(*, debug=False, threads=1, bigchaindb_factory=None):
if not bigchaindb_factory:
bigchaindb_factory = BigchainDB
app = Flask(__name__)
app.wsgi_app = StripContentTypeMiddleware(app.wsgi_app)
CORS(app)
app.debug = debug
app.config['bigchain_pool'] = utils.pool(bigchaindb_factory, size=threads)
add_routes(app)
return app
| create_app | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(*, debug=False, threads=1, bigchaindb_factory=None):\n if not bigchaindb_factory:\n bigchaindb_factory = BigchainDB\n app = Flask(__name__)\n a... |
def x(settings, log_config=None, bigchaindb_factory=None):
settings = copy.deepcopy(settings)
if not settings.get('workers'):
settings['workers'] = multiprocessing.cpu_count() * 2 + 1
if not settings.get('threads'):
settings['threads'] = 1
settings['custom_log_config'] = log_config
app = create_app(debug=settings.get('debug', False), threads=settings[
'threads'], bigchaindb_factory=bigchaindb_factory)
standalone = StandaloneApplication(app, options=settings)
return standalone
| create_server | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(settings, log_config=None, bigchaindb_factory=None):\n settings = copy.deepcopy(settings)\n if not settings.get('workers'):\n settings['workers'] ... |
def x():
logging_configs = DEFAULT_LOGGING_CONFIG
new_logging_configs = bigchaindb.config['log']
if 'file' in new_logging_configs:
filename = new_logging_configs['file']
logging_configs['handlers']['file']['filename'] = filename
if 'error_file' in new_logging_configs:
error_filename = new_logging_configs['error_file']
logging_configs['handlers']['errors']['filename'] = error_filename
if 'level_console' in new_logging_configs:
level = _normalize_log_level(new_logging_configs['level_console'])
logging_configs['handlers']['console']['level'] = level
if 'level_logfile' in new_logging_configs:
level = _normalize_log_level(new_logging_configs['level_logfile'])
logging_configs['handlers']['file']['level'] = level
if 'fmt_console' in new_logging_configs:
fmt = new_logging_configs['fmt_console']
logging_configs['formatters']['console']['format'] = fmt
if 'fmt_logfile' in new_logging_configs:
fmt = new_logging_configs['fmt_logfile']
logging_configs['formatters']['file']['format'] = fmt
if 'datefmt_console' in new_logging_configs:
fmt = new_logging_configs['datefmt_console']
logging_configs['formatters']['console']['datefmt'] = fmt
if 'datefmt_logfile' in new_logging_configs:
fmt = new_logging_configs['datefmt_logfile']
logging_configs['formatters']['file']['datefmt'] = fmt
log_levels = new_logging_configs.get('granular_levels', {})
for logger_name, level in log_levels.items():
level = _normalize_log_level(level)
try:
logging_configs['loggers'][logger_name]['level'] = level
except KeyError:
logging_configs['loggers'][logger_name] = {'level': level}
set_logging_config(logging_configs)
| setup_logging | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n logging_configs = DEFAULT_LOGGING_CONFIG\n new_logging_configs = bigchaindb.config['log']\n if 'file' in new_logging_configs:\n filename =... |
def x(api_prefix):
websocket_root = base_ws_uri() + EVENTS_ENDPOINT
docs_url = ['https://docs.bigchaindb.com/projects/server/en/v', version
.__version__, '/http-client-server-api.html']
return {'docs': ''.join(docs_url), 'transactions': '{}transactions/'.
format(api_prefix), 'blocks': '{}blocks/'.format(api_prefix),
'assets': '{}assets/'.format(api_prefix), 'outputs': '{}outputs/'.
format(api_prefix), 'streams': websocket_root, 'metadata':
'{}metadata/'.format(api_prefix), 'validators': '{}validators'.
format(api_prefix)}
| get_api_v1_info | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(api_prefix):\n websocket_root = base_ws_uri() + EVENTS_ENDPOINT\n docs_url = ['https://docs.bigchaindb.com/projects/server/en/v', version\n .__ver... |
def x(command):
@functools.wraps(command)
def x(args):
config_from_cmdline = None
try:
if args.log_level is not None:
config_from_cmdline = {'log': {'level_console': args.
log_level, 'level_logfile': args.log_level}, 'server':
{'loglevel': args.log_level}}
except AttributeError:
pass
bigchaindb.config_utils.autoconfigure(filename=args.config, config=
config_from_cmdline, force=True)
command(args)
return configure
| configure_bigchaindb | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(command):\n\n @functools.wraps(command)\n def x(args):\n config_from_cmdline = None\n try:\n if args.log_level is not None:\n ... |
def x(prompt='', default=None, convert=None):
print(prompt, end='', file=sys.stderr)
value = builtins.input()
return _convert(value, default, convert)
| input_on_stderr | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(prompt='', default=None, convert=None):\n print(prompt, end='', file=sys.stderr)\n value = builtins.input()\n return _convert(value, default, convert)... |
def x(parser, argv, scope):
args = parser.parse_args(argv)
if not args.command:
parser.print_help()
raise SystemExit()
func = scope.get('run_' + args.command.replace('-', '_'))
if not func:
raise NotImplementedError('Command `{}` not yet implemented'.format
(args.command))
args.multiprocess = getattr(args, 'multiprocess', False)
if args.multiprocess is False:
args.multiprocess = 1
elif args.multiprocess is None:
args.multiprocess = mp.cpu_count()
return func(args)
| start | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(parser, argv, scope):\n args = parser.parse_args(argv)\n if not args.command:\n parser.print_help()\n raise SystemExit()\n func = scope.... |
def x(self):
parser = reqparse.RequestParser()
parser.add_argument('search', type=str, required=True)
parser.add_argument('limit', type=int)
args = parser.parse_args()
if not args['search']:
return make_error(400, 'text_search cannot be empty')
if not args['limit']:
del args['limit']
pool = current_app.config['bigchain_pool']
with pool() as bigchain:
assets = bigchain.text_search(**args)
try:
return list(assets)
except OperationError as e:
return make_error(400, '({}): {}'.format(type(e).__name__, e))
| get | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n parser = reqparse.RequestParser()\n parser.add_argument('search', type=str, required=True)\n parser.add_argument('limit', type=int)\n args... |
def x(self, genesis):
app_hash = ''
height = 0
known_chain = self.bigchaindb.get_latest_abci_chain()
if known_chain is not None:
chain_id = known_chain['chain_id']
if known_chain['is_synced']:
msg = (f'Got invalid InitChain ABCI request ({genesis}) - ' +
'the chain {chain_id} is already synced.')
logger.error(msg)
sys.exit(1)
if chain_id != genesis.chain_id:
validators = self.bigchaindb.get_validators()
self.log_abci_migration_error(chain_id, validators)
sys.exit(1)
block = self.bigchaindb.get_latest_block()
app_hash = '' if block is None else block['app_hash']
height = 0 if block is None else block['height'] + 1
known_validators = self.bigchaindb.get_validators()
validator_set = [vutils.decode_validator(v) for v in genesis.validators]
if known_validators and known_validators != validator_set:
self.log_abci_migration_error(known_chain['chain_id'], known_validators
)
sys.exit(1)
block = Block(app_hash=app_hash, height=height, transactions=[])
self.bigchaindb.store_block(block._asdict())
self.bigchaindb.store_validator_set(height + 1, validator_set)
abci_chain_height = 0 if known_chain is None else known_chain['height']
self.bigchaindb.store_abci_chain(abci_chain_height, genesis.chain_id, True)
self.chain = {'height': abci_chain_height, 'is_synced': True,
'chain_id': genesis.chain_id}
return ResponseInitChain()
| init_chain | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, genesis):\n app_hash = ''\n height = 0\n known_chain = self.bigchaindb.get_latest_abci_chain()\n if known_chain is not None:\n chain_i... |
def x(self, request):
self.abort_if_abci_chain_is_not_synced()
if not (hasattr(request, 'version') and
tendermint_version_is_compatible(request.version)):
logger.error(
f"Unsupported Tendermint version: {getattr(request, 'version', 'no version')}. Currently, BigchainDB only supports {__tm_supported_versions__}. Exiting!"
)
sys.exit(1)
logger.info(f'Tendermint version: {request.version}')
r = ResponseInfo()
block = self.bigchaindb.get_latest_block()
if block:
chain_shift = 0 if self.chain is None else self.chain['height']
r.last_block_height = block['height'] - chain_shift
r.last_block_app_hash = block['app_hash'].encode('utf-8')
else:
r.last_block_height = 0
r.last_block_app_hash = b''
return r
| info | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, request):\n self.abort_if_abci_chain_is_not_synced()\n if not (hasattr(request, 'version') and\n tendermint_version_is_compatible(request.ve... |
def x(self, raw_transaction):
self.abort_if_abci_chain_is_not_synced()
logger.debug('check_tx: %s', raw_transaction)
transaction = decode_transaction(raw_transaction)
if self.bigchaindb.is_valid_transaction(transaction):
logger.debug('check_tx: VALID')
return ResponseCheckTx(code=CodeTypeOk)
else:
logger.debug('check_tx: INVALID')
return ResponseCheckTx(code=CodeTypeError)
| check_tx | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, raw_transaction):\n self.abort_if_abci_chain_is_not_synced()\n logger.debug('check_tx: %s', raw_transaction)\n transaction = decode_transaction(... |
def x(self, req_begin_block):
self.abort_if_abci_chain_is_not_synced()
chain_shift = 0 if self.chain is None else self.chain['height']
logger.debug('BEGIN BLOCK, height:%s, num_txs:%s', req_begin_block.
header.height + chain_shift, req_begin_block.header.num_txs)
self.block_txn_ids = []
self.block_transactions = []
return ResponseBeginBlock()
| begin_block | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, req_begin_block):\n self.abort_if_abci_chain_is_not_synced()\n chain_shift = 0 if self.chain is None else self.chain['height']\n logger.debug('B... |
def x(self, raw_transaction):
    """Validate a transaction within the current block and stage it (ABCI DeliverTx)."""
    self.abort_if_abci_chain_is_not_synced()
    logger.debug('deliver_tx: %s', raw_transaction)
    decoded = decode_transaction(raw_transaction)
    # Validation also takes the transactions already staged for this
    # block into account, so intra-block double spends are rejected.
    transaction = self.bigchaindb.is_valid_transaction(decoded,
                                                       self.block_transactions)
    if not transaction:
        logger.debug('deliver_tx: INVALID')
        return ResponseDeliverTx(code=CodeTypeError)
    logger.debug('storing tx')
    self.block_txn_ids.append(transaction.id)
    self.block_transactions.append(transaction)
    return ResponseDeliverTx(code=CodeTypeOk)
| deliver_tx | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, raw_transaction):\n self.abort_if_abci_chain_is_not_synced()\n logger.debug('deliver_tx: %s', raw_transaction)\n transaction = self.bigchaindb.i... |
def x(self, request_end_block):
    """Finalize the block (ABCI EndBlock).

    Persists the pre-commit state, computes the new application hash
    from the staged transaction ids, and returns any validator-set
    updates produced by concluded elections.
    """
    self.abort_if_abci_chain_is_not_synced()
    # NOTE(review): chain['height'] looks like a height offset carried
    # over from a previous chain — confirm against migration logic.
    chain_shift = 0 if self.chain is None else self.chain['height']
    height = request_end_block.height + chain_shift
    self.new_height = height
    logger.debug(f'Updating pre-commit state: {self.new_height}')
    # Persist height + staged txn ids so a crash between EndBlock and
    # Commit can be recovered from.
    pre_commit_state = dict(height=self.new_height, transactions=self.
        block_txn_ids)
    self.bigchaindb.store_pre_commit_state(pre_commit_state)
    block_txn_hash = calculate_hash(self.block_txn_ids)
    block = self.bigchaindb.get_latest_block()
    if self.block_txn_ids:
        # Chain the previous app hash with this block's txn hash.
        self.block_txn_hash = calculate_hash([block['app_hash'],
            block_txn_hash])
    else:
        # Empty block: the app hash is unchanged.
        self.block_txn_hash = block['app_hash']
    # Let elections concluded in this block update the validator set.
    validator_update = Election.process_block(self.bigchaindb, self.
        new_height, self.block_transactions)
    return ResponseEndBlock(validator_updates=validator_update)
| end_block | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, request_end_block):\n self.abort_if_abci_chain_is_not_synced()\n chain_shift = 0 if self.chain is None else self.chain['height']\n height = requ... |
def x(self):
    """Persist the finalized block (ABCI Commit).

    Stores the staged transactions in bulk, records the block itself,
    optionally publishes a BLOCK_VALID event, and returns the new
    application hash to Tendermint.
    """
    self.abort_if_abci_chain_is_not_synced()
    # The app hash computed in end_block, as bytes for the response.
    data = self.block_txn_hash.encode('utf-8')
    if self.block_txn_ids:
        self.bigchaindb.store_bulk_transactions(self.block_transactions)
    block = Block(app_hash=self.block_txn_hash, height=self.new_height,
        transactions=self.block_txn_ids)
    self.bigchaindb.store_block(block._asdict())
    logger.debug(
        'Commit-ing new block with hash: apphash=%s ,height=%s, txn ids=%s',
        data, self.new_height, self.block_txn_ids)
    # Notify subscribers (e.g. the WebSocket API) if an event queue is wired up.
    if self.events_queue:
        event = Event(EventTypes.BLOCK_VALID, {'height': self.new_height,
            'transactions': self.block_transactions})
        self.events_queue.put(event)
    return ResponseCommit(data=data)
| commit | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.abort_if_abci_chain_is_not_synced()\n data = self.block_txn_hash.encode('utf-8')\n if self.block_txn_ids:\n self.bigchaindb.store... |
def x(self, bigchain, current_transactions=None):
    """Validate a validator-election transaction.

    Runs the generic election validation, then enforces that the
    proposed `power` change stays strictly below 1/3 of the current
    total voting power.

    Args:
        bigchain: the node/database handle used for validation.
        current_transactions (list): other transactions in the same
            block to validate against (defaults to an empty list).

    Returns:
        self, when valid.

    Raises:
        InvalidPowerChange: if the power change is >= 1/3 of total power.
    """
    # Fix: the original used a mutable default argument ([]), which is
    # shared across calls; use None as the sentinel instead.
    if current_transactions is None:
        current_transactions = []
    current_validators = self.get_validators(bigchain)
    super(ValidatorElection, self).validate(bigchain, current_transactions=
        current_transactions)
    # A change of >= 1/3 of total power could let a single validator
    # break consensus guarantees, so it is rejected.
    if self.asset['data']['power'] >= 1 / 3 * sum(current_validators.values()):
        raise InvalidPowerChange(
            '`power` change must be less than 1/3 of total power')
    return self
| validate | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, bigchain, current_transactions=[]):\n current_validators = self.get_validators(bigchain)\n super(ValidatorElection, self).validate(bigchain, curren... |
def x(self, event_types=None):
    """Create, register and return a queue for a new subscriber.

    Raises:
        RuntimeError: if the exchange has already started running —
            registering late would silently miss events.
    """
    try:
        self.started_queue.get(timeout=1)
    except Empty:
        # Nothing on the started queue: the exchange has not started yet.
        pass
    else:
        raise RuntimeError(
            'Cannot create a new subscriber queue while Exchange is running.')
    if event_types is None:
        event_types = EventTypes.ALL
    subscriber_queue = Queue()
    self.queues[event_types].append(subscriber_queue)
    return subscriber_queue
| get_subscriber_queue | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, event_types=None):\n try:\n self.started_queue.get(timeout=1)\n raise RuntimeError(\n 'Cannot create a new subscriber queue w... |
def x(self, event):
    """Fan an event out to every queue whose subscription matches its type."""
    for subscribed_types, subscriber_queues in self.queues.items():
        # Bitmask match: deliver when the event type overlaps the subscription.
        if not event.type & subscribed_types:
            continue
        for subscriber_queue in subscriber_queues:
            subscriber_queue.put(event)
| dispatch | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, event):\n for event_types, queues in self.queues.items():\n if event.type & event_types:\n for queue in queues:\n que... |
def x(self):
    """Signal readiness, then dispatch published events until the poison pill."""
    self.started_queue.put('STARTED')
    while True:
        event = self.publisher_queue.get()
        if event == POISON_PILL:
            # Sentinel value: shut the dispatch loop down cleanly.
            return
        self.dispatch(event)
| run | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.started_queue.put('STARTED')\n while True:\n event = self.publisher_queue.get()\n if event == POISON_PILL:\n retur... |
def x(connection=None, dbname=None):
    """Initialise the backend database: create it and its tables.

    Falls back to a fresh connection and the configured database name
    when the corresponding argument is falsy.
    """
    conn = connection or connect()
    name = dbname or bigchaindb.config['database']['name']
    create_database(conn, name)
    create_tables(conn, name)
| init_database | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(connection=None, dbname=None):\n connection = connection or connect()\n dbname = dbname or bigchaindb.config['database']['name']\n create_database(con... |
def x(obj, key):
    """Validate every 'language' value nested under obj[key].

    Only applies when the configured backend is localmongodb; other
    backends skip the check entirely.
    """
    if bigchaindb.config['database']['backend'] != 'localmongodb':
        return
    data = obj.get(key, {})
    if isinstance(data, dict):
        validate_all_values_for_key_in_obj(data, 'language', validate_language)
    elif isinstance(data, list):
        validate_all_values_for_key_in_list(data, 'language', validate_language)
| validate_language_key | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(obj, key):\n backend = bigchaindb.config['database']['backend']\n if backend == 'localmongodb':\n data = obj.get(key, {})\n if isinstance(d... |
def x(self, block_id):
    """Return the block with the given id, or a 404 error response."""
    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        block = bigchain.get_block(block_id=block_id)
    # Falsy result means no such block was found.
    return block if block else make_error(404)
| get | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, block_id):\n pool = current_app.config['bigchain_pool']\n with pool() as bigchain:\n block = bigchain.get_block(block_id=block_id)\n if n... |
def x(self):
    """Return the blocks containing a transaction.

    Reads the mandatory `transaction_id` query argument and looks up
    the blocks that include it.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('transaction_id', type=str, required=True)
    args = parser.parse_args(strict=True)
    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        return bigchain.get_block_containing_tx(args['transaction_id'])
| get | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n parser = reqparse.RequestParser()\n parser.add_argument('transaction_id', type=str, required=True)\n args = parser.parse_args(strict=True)\n ... |
def x(hex_private_key):
    """Build a base58-encoded CryptoKeypair from a hex Ed25519 private key.

    Only the first 32 bytes of the decoded hex string are used as the
    signing-key seed.
    """
    seed = bytes.fromhex(hex_private_key)[:32]
    signing_key = crypto.Ed25519SigningKey(seed, encoding='bytes')
    verifying_key = signing_key.get_verifying_key()
    return CryptoKeypair(
        private_key=signing_key.encode(encoding='base58').decode('utf-8'),
        public_key=verifying_key.encode(encoding='base58').decode('utf-8'))
| key_pair_from_ed25519_key | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hex_private_key):\n priv_key = crypto.Ed25519SigningKey(bytes.fromhex(hex_private_key)[:32],\n encoding='bytes')\n public_key = priv_key.get_verif... |
def x(hex_public_key):
    """Convert a hex-encoded Ed25519 public key to its base58 string form."""
    verifying_key = crypto.Ed25519VerifyingKey(
        bytes.fromhex(hex_public_key), encoding='bytes')
    return verifying_key.encode(encoding='base58').decode('utf-8')
| public_key_from_ed25519_key | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hex_public_key):\n public_key = crypto.Ed25519VerifyingKey(bytes.fromhex(hex_public_key),\n encoding='bytes')\n return public_key.encode(encoding=... |
def x(fulfillment):
    """Serialize a crypto-conditions fulfillment into a plain dict.

    Supports ed25519-sha-256 and (recursively) threshold-sha-256.

    Raises:
        UnsupportedTypeError: for any other fulfillment type.
    """
    type_name = fulfillment.type_name
    if type_name == 'ed25519-sha-256':
        return {
            'type': 'ed25519-sha-256',
            'public_key': base58.b58encode(fulfillment.public_key).decode(),
        }
    if type_name == 'threshold-sha-256':
        # Recursively serialize each subcondition's body.
        subconditions = [_fulfillment_to_details(sub['body'])
                         for sub in fulfillment.subconditions]
        return {
            'type': 'threshold-sha-256',
            'threshold': fulfillment.threshold,
            'subconditions': subconditions,
        }
    raise UnsupportedTypeError(type_name)
| fulfillment_to_details | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fulfillment):\n if fulfillment.type_name == 'ed25519-sha-256':\n return {'type': 'ed25519-sha-256', 'public_key': base58.b58encode(\n fulf... |
def x(data, _depth=0):
    """Reconstruct a crypto-conditions fulfillment from its dict 'details' form.

    Args:
        data (dict): details dictionary carrying a 'type' key.
        _depth (int): internal recursion counter for nested thresholds.

    Raises:
        ThresholdTooDeep: when nesting exceeds 100 levels.
        UnsupportedTypeError: for an unknown 'type' value.
    """
    if _depth == 100:
        raise ThresholdTooDeep()
    if data['type'] == 'ed25519-sha-256':
        public_key = base58.b58decode(data['public_key'])
        return Ed25519Sha256(public_key=public_key)
    if data['type'] == 'threshold-sha-256':
        threshold = ThresholdSha256(data['threshold'])
        for cond in data['subconditions']:
            # Rebuild each subcondition recursively, tracking depth.
            cond = _fulfillment_from_details(cond, _depth + 1)
            threshold.add_subfulfillment(cond)
        return threshold
    raise UnsupportedTypeError(data.get('type'))
| fulfillment_from_details | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, _depth=0):\n if _depth == 100:\n raise ThresholdTooDeep()\n if data['type'] == 'ed25519-sha-256':\n public_key = base58.b58decode(dat... |
def x(self):
    """Serialize this input to a dict.

    Falls back to the 'details' representation when the fulfillment
    cannot be serialized as a URI (e.g. it is not signed yet), and to
    None when there is no fulfilled output (self.fulfills is None).
    """
    try:
        serialized_fulfillment = self.fulfillment.serialize_uri()
    except (TypeError, AttributeError, ASN1EncodeError, ASN1DecodeError):
        serialized_fulfillment = _fulfillment_to_details(self.fulfillment)
    try:
        fulfills = self.fulfills.to_dict()
    except AttributeError:
        # self.fulfills is None — nothing to serialize.
        fulfills = None
    return {
        'owners_before': self.owners_before,
        'fulfills': fulfills,
        'fulfillment': serialized_fulfillment,
    }
| to_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n try:\n fulfillment = self.fulfillment.serialize_uri()\n except (TypeError, AttributeError, ASN1EncodeError, ASN1DecodeError):\n fu... |
def x(cls, data):
    """Deserialize an Input from a dict.

    Accepts the fulfillment as an already-built Fulfillment object (or
    None), a serialized URI string, or a 'details' dictionary.

    Raises:
        InvalidSignature: when a fulfillment URI cannot be parsed.
    """
    fulfillment = data['fulfillment']
    if not isinstance(fulfillment, (Fulfillment, type(None))):
        try:
            # Try the URI string form first.
            fulfillment = Fulfillment.from_uri(data['fulfillment'])
        except ASN1DecodeError:
            raise InvalidSignature("Fulfillment URI couldn't been parsed")
        except TypeError:
            # Not a string: treat it as a 'details' dictionary.
            fulfillment = _fulfillment_from_details(data['fulfillment'])
    fulfills = TransactionLink.from_dict(data['fulfills'])
    return cls(fulfillment, data['owners_before'], fulfills)
| from_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, data):\n fulfillment = data['fulfillment']\n if not isinstance(fulfillment, (Fulfillment, type(None))):\n try:\n fulfillment = Ful... |
def x(self):
    """Serialize this link to a dict; None when the link points nowhere."""
    if self.txid is None and self.output is None:
        return None
    return {'transaction_id': self.txid, 'output_index': self.output}
| to_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.txid is None and self.output is None:\n return None\n else:\n return {'transaction_id': self.txid, 'output_index': self.ou... |
def x(self):
    """Serialize this output to a dict.

    The condition carries a 'details' entry when the fulfillment object
    can be introspected, and always a 'uri'; when the fulfillment is a
    bare URI string, that string is used directly.
    """
    condition = {}
    try:
        condition['details'] = _fulfillment_to_details(self.fulfillment)
    except AttributeError:
        # Fulfillment is a bare URI string — no details to extract.
        pass
    try:
        condition['uri'] = self.fulfillment.condition_uri
    except AttributeError:
        condition['uri'] = self.fulfillment
    # NOTE(review): amount is serialized as a string — presumably to
    # avoid integer-precision issues in JSON clients; confirm.
    output = {'public_keys': self.public_keys, 'condition': condition,
        'amount': str(self.amount)}
    return output
| to_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n condition = {}\n try:\n condition['details'] = _fulfillment_to_details(self.fulfillment)\n except AttributeError:\n pass\n t... |
def x(cls, public_keys, amount):
    """Generate an output for the given owners and amount.

    A single public key yields a simple ed25519 condition; multiple keys
    yield an n-of-n threshold condition over all of them.

    Args:
        public_keys (list): owner public keys (strings, Fulfillments, or
            nested lists for sub-thresholds).
        amount (int): amount locked by this output; must be >= 1.

    Raises:
        TypeError: if `amount` is not an int or `public_keys` not a list.
        AmountError: if `amount` is below 1.
        ValueError: if `public_keys` is empty.
    """
    # Validate before touching public_keys, so a non-sized argument
    # produces the intended TypeError rather than one from len().
    if not isinstance(amount, int):
        raise TypeError('`amount` must be a int')
    if amount < 1:
        raise AmountError('`amount` needs to be greater than zero')
    if not isinstance(public_keys, list):
        raise TypeError('`public_keys` must be an instance of list')
    if len(public_keys) == 0:
        # Fix: original message had a typo ('oneowner').
        raise ValueError('`public_keys` needs to contain at least one owner')
    elif len(public_keys) == 1 and not isinstance(public_keys[0], list):
        if isinstance(public_keys[0], Fulfillment):
            ffill = public_keys[0]
        else:
            ffill = Ed25519Sha256(public_key=base58.b58decode(public_keys[0]))
        return cls(ffill, public_keys, amount=amount)
    else:
        # n-of-n threshold: every listed owner must sign.
        threshold = len(public_keys)
        initial_cond = ThresholdSha256(threshold=threshold)
        threshold_cond = reduce(cls._gen_condition, public_keys, initial_cond)
        return cls(threshold_cond, public_keys, amount=amount)
| generate | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, public_keys, amount):\n threshold = len(public_keys)\n if not isinstance(amount, int):\n raise TypeError('`amount` must be a int')\n if am... |
def x(cls, initial, new_public_keys):
    """Reducer used when generating outputs: fold one owner entry into
    the threshold condition `initial`.

    `new_public_keys` may be a nested list of at least two keys
    (producing a sub-threshold condition), a single-element list, a
    plain public-key string, or a prepared Fulfillment.

    Raises:
        ValueError: if a sublist contains only a single owner.
    """
    try:
        threshold = len(new_public_keys)
    except TypeError:
        # Not sized (e.g. a Fulfillment object) — no sub-threshold needed.
        threshold = None
    if isinstance(new_public_keys, list) and len(new_public_keys) > 1:
        # Nested list: build an n-of-n sub-threshold over its entries.
        ffill = ThresholdSha256(threshold=threshold)
        reduce(cls._gen_condition, new_public_keys, ffill)
    elif isinstance(new_public_keys, list) and len(new_public_keys) <= 1:
        raise ValueError('Sublist cannot contain single owner')
    else:
        try:
            # Single-element container: unwrap it.
            new_public_keys = new_public_keys.pop()
        except AttributeError:
            pass
        if isinstance(new_public_keys, Fulfillment):
            ffill = new_public_keys
        else:
            ffill = Ed25519Sha256(public_key=base58.b58decode(new_public_keys))
    initial.add_subfulfillment(ffill)
    return initial
| gen_condition | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, initial, new_public_keys):\n try:\n threshold = len(new_public_keys)\n except TypeError:\n threshold = None\n if isinstance(new_pub... |
def x(cls, data):
    """Deserialize an Output from a dict.

    Raises:
        AmountError: if the amount cannot be parsed as an integer.
    """
    condition = data['condition']
    try:
        fulfillment = _fulfillment_from_details(condition['details'])
    except KeyError:
        # No details available — keep the condition URI string as-is.
        fulfillment = condition['uri']
    try:
        amount = int(data['amount'])
    except ValueError:
        raise AmountError('Invalid amount: %s' % data['amount'])
    return cls(fulfillment, data['public_keys'], amount)
| from_dict | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, data):\n try:\n fulfillment = _fulfillment_from_details(data['condition']['details'])\n except KeyError:\n fulfillment = data['conditi... |
def x(self):
    """Yield the dict form of each output this transaction's inputs spend.

    Inputs without a `fulfills` link (e.g. CREATE inputs) are skipped.
    """
    for tx_input in self.inputs:
        if tx_input.fulfills:
            yield tx_input.fulfills.to_dict()
| spent_outputs | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n return (input_.fulfills.to_dict() for input_ in self.inputs if input_.\n fulfills)\n",
"role": "user"
},
{
"content": "spent_out... |
def x(cls, tx_signers, recipients, metadata=None, asset=None):
    """Build a CREATE transaction after validating its arguments."""
    inputs, outputs = cls.validate_create(tx_signers, recipients, asset,
                                          metadata)
    asset_payload = {'data': asset}
    return cls(cls.CREATE, asset_payload, inputs, outputs, metadata)
| create | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, tx_signers, recipients, metadata=None, asset=None):\n inputs, outputs = cls.validate_create(tx_signers, recipients, asset,\n metadata)\n retu... |
def x(cls, inputs, recipients, asset_id, metadata=None):
    """Build a TRANSFER transaction spending the given inputs."""
    inputs, outputs = cls.validate_transfer(inputs, recipients, asset_id,
                                            metadata)
    asset_payload = {'id': asset_id}
    return cls(cls.TRANSFER, asset_payload, inputs, outputs, metadata)
| transfer | [
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, inputs, recipients, asset_id, metadata=None):\n inputs, outputs = cls.validate_transfer(inputs, recipients, asset_id,\n metadata)\n return cl... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.