| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q13100
|
get_is_property
|
train
|
def get_is_property(value, is_bytes=False):
"""Get shortcut for `SC` or `Binary` property."""
if value.startswith('^'):
prefix = value[1:3]
temp = value[3:]
negate = '^'
else:
prefix = value[:2]
temp = value[2:]
negate = ''
if prefix != 'is':
raise ValueError("Does not start with 'is'!")
script_obj = unidata.ascii_script_extensions if is_bytes else unidata.unicode_script_extensions
bin_obj = unidata.ascii_binary if is_bytes else unidata.unicode_binary
value = negate + unidata.unicode_alias['script'].get(temp, temp)
if value not in script_obj:
value = negate + unidata.unicode_alias['binary'].get(temp, temp)
obj = bin_obj
else:
obj = script_obj
return obj[value]
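# Hypothetical usage sketch (argument values assumed, not taken from the
# original source): `is`-prefixed shortcuts resolve against the script
# extension tables first, then fall back to the binary property tables.
#
#     get_is_property('isgreek')   # script-extensions lookup for 'greek'
#     get_is_property('^isgreek')  # negated form, via the '^' prefix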
|
python
|
{
"resource": ""
}
|
q13101
|
get_in_property
|
train
|
def get_in_property(value, is_bytes=False):
"""Get shortcut for `Block` property."""
if value.startswith('^'):
prefix = value[1:3]
temp = value[3:]
negate = '^'
else:
prefix = value[:2]
temp = value[2:]
negate = ''
if prefix != 'in':
raise ValueError("Does not start with 'in'!")
value = negate + unidata.unicode_alias['block'].get(temp, temp)
obj = unidata.ascii_blocks if is_bytes else unidata.unicode_blocks
return obj[value]
|
python
|
{
"resource": ""
}
|
q13102
|
get_unicode_property
|
train
|
def get_unicode_property(value, prop=None, is_bytes=False):
"""Retrieve the Unicode category from the table."""
if prop is not None:
prop = unidata.unicode_alias['_'].get(prop, prop)
try:
if prop == 'generalcategory':
return get_gc_property(value, is_bytes)
elif prop == 'script':
return get_script_property(value, is_bytes)
elif prop == 'scriptextensions':
return get_script_extension_property(value, is_bytes)
elif prop == 'block':
return get_block_property(value, is_bytes)
elif prop == 'binary':
return get_binary_property(value, is_bytes)
elif prop == 'bidiclass':
return get_bidi_property(value, is_bytes)
elif prop == 'bidipairedbrackettype':
return get_bidi_paired_bracket_type_property(value, is_bytes)
elif prop == 'age':
return get_age_property(value, is_bytes)
elif prop == 'eastasianwidth':
return get_east_asian_width_property(value, is_bytes)
elif PY35 and prop == 'indicpositionalcategory':
return get_indic_positional_category_property(value, is_bytes)
elif not PY35 and prop == 'indicmatracategory':
return get_indic_positional_category_property(value, is_bytes)
elif prop == 'indicsyllabiccategory':
return get_indic_syllabic_category_property(value, is_bytes)
elif prop == 'hangulsyllabletype':
return get_hangul_syllable_type_property(value, is_bytes)
elif prop == 'decompositiontype':
return get_decomposition_type_property(value, is_bytes)
elif prop == 'canonicalcombiningclass':
return get_canonical_combining_class_property(value, is_bytes)
elif prop == 'numerictype':
return get_numeric_type_property(value, is_bytes)
elif prop == 'numericvalue':
return get_numeric_value_property(value, is_bytes)
elif prop == 'joiningtype':
return get_joining_type_property(value, is_bytes)
elif prop == 'joininggroup':
return get_joining_group_property(value, is_bytes)
elif prop == 'graphemeclusterbreak':
return get_grapheme_cluster_break_property(value, is_bytes)
elif prop == 'linebreak':
return get_line_break_property(value, is_bytes)
elif prop == 'sentencebreak':
return get_sentence_break_property(value, is_bytes)
elif prop == 'wordbreak':
return get_word_break_property(value, is_bytes)
elif prop == 'nfcquickcheck':
return get_nfc_quick_check_property(value, is_bytes)
elif prop == 'nfdquickcheck':
return get_nfd_quick_check_property(value, is_bytes)
elif prop == 'nfkcquickcheck':
return get_nfkc_quick_check_property(value, is_bytes)
elif prop == 'nfkdquickcheck':
return get_nfkd_quick_check_property(value, is_bytes)
elif PY37 and prop == 'verticalorientation':
return get_vertical_orientation_property(value, is_bytes)
else:
raise ValueError('Invalid Unicode property!')
except Exception:
raise ValueError('Invalid Unicode property!')
try:
return get_gc_property(value, is_bytes)
except Exception:
pass
try:
return get_script_extension_property(value, is_bytes)
except Exception:
pass
try:
return get_block_property(value, is_bytes)
except Exception:
pass
try:
return get_binary_property(value, is_bytes)
except Exception:
pass
try:
return get_is_property(value, is_bytes)
except Exception:
pass
try:
return get_in_property(value, is_bytes)
except Exception:
pass
raise ValueError('Invalid Unicode property!')
|
python
|
{
"resource": ""
}
|
q13103
|
compile_replace
|
train
|
def compile_replace(pattern, repl, flags=0):
"""Construct a method that can be used as a replace method for `sub`, `subn`, etc."""
call = None
if pattern is not None and isinstance(pattern, _RE_TYPE):
if isinstance(repl, (str, bytes)):
if not (pattern.flags & DEBUG):
call = _cached_replace_compile(pattern, repl, flags, type(repl))
else: # pragma: no cover
call = _bre_parse._ReplaceParser().parse(pattern, repl, bool(flags & FORMAT))
elif isinstance(repl, ReplaceTemplate):
if flags:
raise ValueError("Cannot process flags argument with a ReplaceTemplate!")
if repl.pattern_hash != hash(pattern):
raise ValueError("Pattern hash doesn't match hash in compiled replace!")
call = repl
else:
raise TypeError("Not a valid type!")
else:
raise TypeError("Pattern must be a compiled regular expression!")
return call
|
python
|
{
"resource": ""
}
|
q13104
|
findall
|
train
|
def findall(pattern, string, *args, **kwargs):
"""Apply `findall` after applying backrefs."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return _re.findall(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
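# Usage sketch (assumes this module is imported as `bre`, in the style of the
# backrefs library): the pattern is preprocessed for search back-references
# such as `\p{...}` before being handed to `re.findall`.
#
#     bre.findall(r'\p{Lu}\w+', 'Hello World')  # -> ['Hello', 'World']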
|
python
|
{
"resource": ""
}
|
q13105
|
sub
|
train
|
def sub(pattern, repl, string, *args, **kwargs):
"""Apply `sub` after applying backrefs."""
flags = args[4] if len(args) > 4 else kwargs.get('flags', 0)
is_replace = _is_replace(repl)
is_string = isinstance(repl, (str, bytes))
if is_replace and repl.use_format:
raise ValueError("Compiled replace cannot be a format object!")
pattern = compile_search(pattern, flags)
return _re.sub(
pattern, (compile_replace(pattern, repl) if is_replace or is_string else repl), string, *args, **kwargs
)
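# Usage sketch (replace-template syntax assumed to follow the backrefs
# conventions, e.g. `\C...\E` marking an uppercase span):
#
#     bre.sub(r'(\w+)', r'\C\1\E', 'hello')  # -> 'HELLO'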
|
python
|
{
"resource": ""
}
|
q13106
|
get_version
|
train
|
def get_version():
""" Extract the version number from the code. """
here = os.path.abspath(os.path.dirname(__file__))
jbxapi_file = os.path.join(here, "jbxapi.py")
with open(jbxapi_file) as f:
content = f.read()
match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M)
if not match:
raise RuntimeError("Unable to find version string.")
return match.group(1)
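# A minimal illustration of the version regex (file content hypothetical):
#
#     import re
#     content = '__version__ = "3.17.2"\n'
#     re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M).group(1)
#     # -> '3.17.2'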
|
python
|
{
"resource": ""
}
|
q13107
|
JoeSandbox.analysis_list
|
train
|
def analysis_list(self):
"""
Fetch a list of all analyses.
"""
response = self._post(self.apiurl + '/v2/analysis/list', data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13108
|
JoeSandbox.submit_sample
|
train
|
def submit_sample(self, sample, cookbook=None, params={}, _extra_params={}):
"""
Submit a sample and return the submission id.
Parameters:
sample: The sample to submit. Needs to be a file-like object or a tuple in
the shape (filename, file-like object).
cookbook: Uploads a cookbook together with the sample. Needs to be a file-like object or a
tuple in the shape (filename, file-like object)
params: Customize the sandbox parameters. They are described in more detail
in the default submission parameters.
Example:
import jbxapi
joe = jbxapi.JoeSandbox()
with open("sample.exe", "rb") as f:
joe.submit_sample(f, params={"systems": ["w7"]})
Example:
import io, jbxapi
joe = jbxapi.JoeSandbox()
cookbook = io.BytesIO(b"cookbook content")
with open("sample.exe", "rb") as f:
joe.submit_sample(f, cookbook=cookbook)
"""
self._check_user_parameters(params)
files = {'sample': sample}
if cookbook:
files['cookbook'] = cookbook
return self._submit(params, files, _extra_params=_extra_params)
|
python
|
{
"resource": ""
}
|
q13109
|
JoeSandbox.submit_sample_url
|
train
|
def submit_sample_url(self, url, params={}, _extra_params={}):
"""
Submit a sample at a given URL for analysis.
"""
self._check_user_parameters(params)
params = copy.copy(params)
params['sample-url'] = url
return self._submit(params, _extra_params=_extra_params)
|
python
|
{
"resource": ""
}
|
q13110
|
JoeSandbox.submit_url
|
train
|
def submit_url(self, url, params={}, _extra_params={}):
"""
Submit a website for analysis.
"""
self._check_user_parameters(params)
params = copy.copy(params)
params['url'] = url
return self._submit(params, _extra_params=_extra_params)
|
python
|
{
"resource": ""
}
|
q13111
|
JoeSandbox.submit_cookbook
|
train
|
def submit_cookbook(self, cookbook, params={}, _extra_params={}):
"""
Submit a cookbook.
"""
self._check_user_parameters(params)
files = {'cookbook': cookbook}
return self._submit(params, files, _extra_params=_extra_params)
|
python
|
{
"resource": ""
}
|
q13112
|
JoeSandbox.submission_delete
|
train
|
def submission_delete(self, submission_id):
"""
Delete a submission.
"""
response = self._post(self.apiurl + '/v2/submission/delete', data={'apikey': self.apikey, 'submission_id': submission_id})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13113
|
JoeSandbox.server_online
|
train
|
def server_online(self):
"""
Returns True if the Joe Sandbox servers are running or False if they are in maintenance mode.
"""
response = self._post(self.apiurl + '/v2/server/online', data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13114
|
JoeSandbox.analysis_info
|
train
|
def analysis_info(self, webid):
"""
Show the status and most important attributes of an analysis.
"""
response = self._post(self.apiurl + "/v2/analysis/info", data={'apikey': self.apikey, 'webid': webid})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13115
|
JoeSandbox.analysis_download
|
train
|
def analysis_download(self, webid, type, run=None, file=None):
"""
Download a resource for an analysis. E.g. the full report, binaries, screenshots.
The full list of resources can be found in our API documentation.
When `file` is given, the return value is the filename specified by the server,
otherwise it's a tuple of (filename, bytes).
Parameters:
webid: the webid of the analysis
type: the report type, e.g. 'html', 'bins'
run: specify the run. If it is None, let Joe Sandbox pick one
file: a writeable file-like object (when omitted, the method returns
the data as a bytes object.)
Example:
json_report, name = joe.analysis_download(123456, 'jsonfixed')
Example:
with open("full_report.html", "wb") as f:
name = joe.analysis_download(123456, "html", file=f)
"""
# when no file is specified, we create our own
if file is None:
_file = io.BytesIO()
else:
_file = file
data = {
'apikey': self.apikey,
'webid': webid,
'type': type,
'run': run,
}
response = self._post(self.apiurl + "/v2/analysis/download", data=data, stream=True)
try:
filename = response.headers["Content-Disposition"].split("filename=")[1][1:-2]
except Exception as e:
filename = type
# do standard error handling when encountering an error (i.e. throw an exception)
if not response.ok:
self._raise_or_extract(response)
raise RuntimeError("Unreachable because statement above should raise.")
try:
for chunk in response.iter_content(1024):
_file.write(chunk)
except requests.exceptions.RequestException as e:
raise ConnectionError(e)
# no user file means we return the content
if file is None:
return (filename, _file.getvalue())
else:
return filename
|
python
|
{
"resource": ""
}
|
q13116
|
JoeSandbox.analysis_search
|
train
|
def analysis_search(self, query):
"""
Lists the webids of the analyses that match the given query.
Searches in MD5, SHA1, SHA256, filename, cookbook name, comment, url and report id.
"""
response = self._post(self.apiurl + "/v2/analysis/search", data={'apikey': self.apikey, 'q': query})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13117
|
JoeSandbox.server_systems
|
train
|
def server_systems(self):
"""
Retrieve a list of available systems.
"""
response = self._post(self.apiurl + "/v2/server/systems", data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13118
|
JoeSandbox.account_info
|
train
|
def account_info(self):
"""
Only available on Joe Sandbox Cloud
Show information about the account.
"""
response = self._post(self.apiurl + "/v2/account/info", data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13119
|
JoeSandbox.server_info
|
train
|
def server_info(self):
"""
Query information about the server.
"""
response = self._post(self.apiurl + "/v2/server/info", data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13120
|
JoeSandbox.server_lia_countries
|
train
|
def server_lia_countries(self):
"""
Show the available localized internet anonymization countries.
"""
response = self._post(self.apiurl + "/v2/server/lia_countries", data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13121
|
JoeSandbox.server_languages_and_locales
|
train
|
def server_languages_and_locales(self):
"""
Show the available languages and locales
"""
response = self._post(self.apiurl + "/v2/server/languages_and_locales", data={'apikey': self.apikey})
return self._raise_or_extract(response)
|
python
|
{
"resource": ""
}
|
q13122
|
JoeSandbox._post
|
train
|
def _post(self, url, data=None, **kwargs):
"""
Wrapper around requests.post which
(a) always inserts a timeout
(b) converts errors to ConnectionError
(c) re-tries a few times
(d) converts file names to ASCII
"""
# Remove non-ASCII characters from filenames due to a limitation of the combination of
# urllib3 (via python-requests) and our server
# https://github.com/requests/requests/issues/2117
# Internal Ticket #3090
if "files" in kwargs and kwargs["files"] is not None:
acceptable_chars = "0123456789" + "abcdefghijklmnopqrstuvwxyz" + \
"ABCDEFGHIJKLMNOPQRSTUVWXYZ" + " _-.,()[]{}"
for param_name, fp in kwargs["files"].items():
if isinstance(fp, (tuple, list)):
filename, fp = fp
else:
filename = requests.utils.guess_filename(fp) or param_name
def encode(char):
try:
if char in acceptable_chars:
return char
except UnicodeDecodeError:
pass
return "x{:02x}".format(ord(char))
filename = "".join(encode(x) for x in filename)
kwargs["files"][param_name] = (filename, fp)
for i in itertools.count(1):
try:
return self.session.post(url, data=data, timeout=self.timeout, **kwargs)
except requests.exceptions.Timeout as e:
# exhausted all retries
if i >= self.retries:
raise ConnectionError(e)
except requests.exceptions.RequestException as e:
raise ConnectionError(e)
# exponential backoff
max_backoff = 4 ** i / 10 # .4, 1.6, 6.4, 25.6, ...
time.sleep(random.uniform(0, max_backoff))
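# Sketch of the filename sanitization above: characters outside the whitelist
# are replaced by 'x' plus two hex digits, so (for example):
#
#     encode('a')  # -> 'a'   (whitelisted, kept as-is)
#     encode('~')  # -> 'x7e' ('x' + '{:02x}'.format(ord('~')))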
|
python
|
{
"resource": ""
}
|
q13123
|
JoeSandbox._check_user_parameters
|
train
|
def _check_user_parameters(self, user_parameters):
"""
Verifies that the parameter dict given by the user only contains
known keys. This ensures that the user detects typos faster.
"""
if not user_parameters:
return
# sanity check against typos
for key in user_parameters:
if key not in submission_defaults:
raise ValueError("Unknown parameter {0}".format(key))
|
python
|
{
"resource": ""
}
|
q13124
|
JoeSandbox._raise_or_extract
|
train
|
def _raise_or_extract(self, response):
"""
Raises an exception if the response indicates an API error.
Otherwise returns the object at the 'data' key of the API response.
"""
try:
data = response.json()
except ValueError:
raise JoeException("The server responded with an unexpected format ({}). Is the API url correct?". format(response.status_code))
try:
if response.ok:
return data['data']
else:
error = data['errors'][0]
raise ApiError(error)
except (KeyError, TypeError):
raise JoeException("Unexpected data ({}). Is the API url correct?". format(response.status_code))
|
python
|
{
"resource": ""
}
|
q13125
|
LinuxDistribution._parse_os_release_content
|
train
|
def _parse_os_release_content(lines):
"""
Parse the lines of an os-release file.
Parameters:
* lines: Iterable through the lines in the os-release file.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.
"""
props = {}
lexer = shlex.shlex(lines, posix=True)
lexer.whitespace_split = True
# The shlex module defines its `wordchars` variable using literals,
# making it dependent on the encoding of the Python source file.
# In Python 2.6 and 2.7, the shlex source file is encoded in
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
# string. This causes a UnicodeDecodeError to be raised when the
# parsed content is a unicode object. The following fix resolves that
# (... but it should be fixed in shlex...):
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
tokens = list(lexer)
for token in tokens:
# At this point, all shell-like parsing has been done (i.e.
# comments processed, quotes and backslash escape sequences
# processed, multi-line values assembled, trailing newlines
# stripped, etc.), so the tokens are now either:
# * variable assignments: var=value
# * commands or their arguments (not allowed in os-release)
if '=' in token:
k, v = token.split('=', 1)
if isinstance(v, bytes):
v = v.decode('utf-8')
props[k.lower()] = v
else:
# Ignore any tokens that are not variable assignments
pass
if 'version_codename' in props:
# os-release added a version_codename field. Use that in
# preference to anything else. Note that some distros purposefully
# do not have code names. They should be setting
# version_codename=""
props['codename'] = props['version_codename']
elif 'ubuntu_codename' in props:
# Same as above but a non-standard field name used on older Ubuntus
props['codename'] = props['ubuntu_codename']
elif 'version' in props:
# If there is no version_codename, parse it from the version
codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version'])
if codename:
codename = codename.group()
codename = codename.strip('()')
codename = codename.strip(',')
codename = codename.strip()
# The codename usually appears within parentheses.
props['codename'] = codename
return props
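# Illustrative call (content abbreviated; `shlex.shlex` accepts a string or a
# file-like object, despite the parameter name `lines`):
#
#     _parse_os_release_content('NAME="Ubuntu"\nVERSION="16.04 LTS (Xenial Xerus)"\n')
#     # -> {'name': 'Ubuntu', 'version': '16.04 LTS (Xenial Xerus)',
#     #     'codename': 'Xenial Xerus'}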
|
python
|
{
"resource": ""
}
|
q13126
|
pretty_print
|
train
|
def pretty_print(n):
"""Pretty print function for very big integers"""
if type(n) != int:
return n
ret = []
n = str(n)
for i in range(len(n) - 1, -1, -1):
ret.append(n[i])
if (len(n) - i) % 3 == 0:
ret.append(',')
ret.reverse()
return ''.join(ret[1:]) if ret[0] == ',' else ''.join(ret)
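# Examples (deterministic given the digit grouping above):
#
#     pretty_print(1234567)   # -> '1,234,567'
#     pretty_print(1000)      # -> '1,000'
#     pretty_print('n/a')     # non-ints are returned unchanged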
|
python
|
{
"resource": ""
}
|
q13127
|
A10DeviceInstancePlugin.create_a10_device_instance
|
train
|
def create_a10_device_instance(self, context, a10_device_instance):
"""Attempt to create instance using neutron context"""
LOG.debug("A10DeviceInstancePlugin.create(): a10_device_instance=%s", a10_device_instance)
config = a10_config.A10Config()
vthunder_defaults = config.get_vthunder_config()
imgr = instance_manager.InstanceManager.from_config(config, context)
dev_instance = common_resources.remove_attributes_not_specified(
a10_device_instance.get(resources.RESOURCE))
# Create the instance with specified defaults.
vthunder_config = vthunder_defaults.copy()
vthunder_config.update(_convert(dev_instance, _API, _VTHUNDER_CONFIG))
instance = imgr.create_device_instance(vthunder_config, dev_instance.get("name"))
db_record = {}
db_record.update(_convert(vthunder_config, _VTHUNDER_CONFIG, _DB))
db_record.update(_convert(dev_instance, _API, _DB))
db_record.update(_convert(instance, _INSTANCE, _DB))
# If success, return the created DB record
# Else, raise an exception because that's what we would do anyway
db_instance = super(A10DeviceInstancePlugin, self).create_a10_device_instance(
context, {resources.RESOURCE: db_record})
return _make_api_dict(db_instance)
|
python
|
{
"resource": ""
}
|
q13128
|
VipHandler.vport_meta
|
train
|
def vport_meta(self, vip):
"""Get the vport meta, no matter which name was used"""
vport_meta = self.meta(vip, 'vport', None)
if vport_meta is None:
vport_meta = self.meta(vip, 'port', {})
return vport_meta
|
python
|
{
"resource": ""
}
|
q13129
|
apply_template
|
train
|
def apply_template(template, *args, **kw):
"""Applies every callable in any Mapping or Iterable"""
if six.callable(template):
return template(*args, **kw)
if isinstance(template, six.string_types):
return template
if isinstance(template, collections.Mapping):
return template.__class__((k, apply_template(v, *args, **kw)) for k, v in template.items())
if isinstance(template, collections.Iterable):
return template.__class__(apply_template(v, *args, **kw) for v in template)
return template
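# Worked example (standalone sketch): callables anywhere in the structure are
# invoked with the supplied arguments; containers are rebuilt recursively.
#
#     template = {'greeting': lambda name: 'hello ' + name, 'static': 'text'}
#     apply_template(template, 'world')
#     # -> {'greeting': 'hello world', 'static': 'text'}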
|
python
|
{
"resource": ""
}
|
q13130
|
initialize_vthunder
|
train
|
def initialize_vthunder(a10_cfg, device_cfg, client):
"""Perform initialization of system-wide settings"""
vth = a10_cfg.get_vthunder_config()
initialize_interfaces(vth, device_cfg, client)
initialize_dns(vth, device_cfg, client)
initialize_licensing(vth, device_cfg, client)
initialize_sflow(vth, device_cfg, client)
|
python
|
{
"resource": ""
}
|
q13131
|
magic_session
|
train
|
def magic_session(db_session=None, url=None):
"""Either does nothing with the session you already have or
makes one that commits and closes no matter what happens
"""
if db_session is not None:
yield db_session
else:
session = get_session(url, expire_on_commit=False)
try:
try:
yield session
finally:
session.commit()
finally:
session.close()
|
python
|
{
"resource": ""
}
|
q13132
|
InstanceManager._plumb_port
|
train
|
def _plumb_port(self, server, network_id, wrong_ips):
"""Look for an existing port on the network
Add one if it doesn't exist
"""
for attached_interface in server.interface_list():
if attached_interface.net_id == network_id:
if any(map(lambda x: x['ip_address'] in wrong_ips, attached_interface.fixed_ips)):
continue
return attached_interface
return server.interface_attach(None, network_id, None)
|
python
|
{
"resource": ""
}
|
q13133
|
NeutronDbWrapper.allocate_ip_for_subnet
|
train
|
def allocate_ip_for_subnet(self, subnet_id, mac, port_id):
"""Allocates an IP from the specified subnet and creates a port"""
# Get an available IP and mark it as used before someone else does
# If there's no IP, log it and return an error
# If we successfully get an IP, create a port with the specified MAC and device data
# If port creation fails, deallocate the IP
subnet = self.get_subnet(subnet_id)
ip, mask, port_id = self.a10_allocate_ip_from_dhcp_range(subnet, "vlan", mac, port_id)
return ip, mask, port_id
|
python
|
{
"resource": ""
}
|
q13134
|
NeutronDbWrapper.a10_allocate_ip_from_dhcp_range
|
train
|
def a10_allocate_ip_from_dhcp_range(self, subnet, interface_id, mac, port_id):
"""Search for an available IP.addr from unallocated nmodels.IPAllocationPool range.
If no addresses are available then an error is raised. Returns the address as a string.
This search is conducted by a difference of the nmodels.IPAllocationPool set_a
and the current IP allocations.
"""
subnet_id = subnet["id"]
network_id = subnet["network_id"]
iprange_result = self.get_ipallocationpool_by_subnet_id(subnet_id)
ip_in_use_list = [x.ip_address for x in self.get_ipallocations_by_subnet_id(subnet_id)]
range_begin, range_end = iprange_result.first_ip, iprange_result.last_ip
ip_address = IPHelpers.find_unused_ip(range_begin, range_end, ip_in_use_list)
if not ip_address:
msg = "Cannot allocate from subnet {0}".format(subnet)
LOG.error(msg)
# TODO(mdurrant) - Raise neutron exception
raise Exception
mark_in_use = {
"ip_address": ip_address,
"network_id": network_id,
"port_id": port_id,
"subnet_id": subnet["id"]
}
self.create_ipallocation(mark_in_use)
return ip_address, subnet["cidr"], mark_in_use["port_id"]
|
python
|
{
"resource": ""
}
|
q13135
|
HealthMonitorHandler._dissociate
|
train
|
def _dissociate(self, c, context, hm, pool_id):
"""Remove a pool association"""
pool_name = self._pool_name(context, pool_id)
c.client.slb.service_group.update(pool_name, health_monitor="",
health_check_disable=True)
|
python
|
{
"resource": ""
}
|
q13136
|
HealthMonitorHandler.dissociate
|
train
|
def dissociate(self, c, context, hm, pool_id):
"""Remove a pool association, and the healthmonitor if its the last one"""
self._dissociate(c, context, hm, pool_id)
pools = hm.get("pools", [])
if not any(p for p in pools if p.get("pool_id") != pool_id):
self._delete_unused(c, context, hm)
|
python
|
{
"resource": ""
}
|
q13137
|
HealthMonitorHandler._delete
|
train
|
def _delete(self, c, context, hm):
"""Delete a healthmonitor and ALL its pool associations"""
pools = hm.get("pools", [])
for pool in pools:
pool_id = pool.get("pool_id")
self._dissociate(c, context, hm, pool_id)
self._delete_unused(c, context, hm)
|
python
|
{
"resource": ""
}
|
q13138
|
Bartlett1932.add_node_to_network
|
train
|
def add_node_to_network(self, node, network):
"""Add node to the chain and receive transmissions."""
network.add_node(node)
parents = node.neighbors(direction="from")
if len(parents):
parent = parents[0]
parent.transmit()
node.receive()
|
python
|
{
"resource": ""
}
|
q13139
|
Bartlett1932.recruit
|
train
|
def recruit(self):
"""Recruit one participant at a time until all networks are full."""
if self.networks(full=False):
self.recruiter.recruit(n=1)
else:
self.recruiter.close_recruitment()
|
python
|
{
"resource": ""
}
|
q13140
|
Source.create_information
|
train
|
def create_information(self):
"""Create new infos on demand."""
info = self._info_type()(origin=self, contents=self._contents())
return info
|
python
|
{
"resource": ""
}
|
q13141
|
RogersExperiment.recruit
|
train
|
def recruit(self):
"""Recruit participants if necessary."""
num_approved = len(Participant.query.filter_by(status="approved").all())
end_of_generation = num_approved % self.generation_size == 0
complete = num_approved >= (self.generations * self.generation_size)
if complete:
self.log("All networks full: closing recruitment", "-----")
self.recruiter.close_recruitment()
elif end_of_generation:
self.log("generation finished, recruiting another")
self.recruiter.recruit(n=self.generation_size)
|
python
|
{
"resource": ""
}
|
q13142
|
size_on_copy
|
train
|
def size_on_copy(root="."):
"""Return the size of the experiment directory in bytes, excluding any
files and directories which would be excluded on copy.
"""
total_size = 0
exclusions = exclusion_policy()
for dirpath, dirnames, filenames in os.walk(root, topdown=True):
current_exclusions = exclusions(dirpath, os.listdir(dirpath))
# Modifying dirnames in-place will prune the subsequent files and
# directories visited by os.walk. This is only possible when
# topdown = True
dirnames[:] = [d for d in dirnames if d not in current_exclusions]
legit_files = [f for f in filenames if f not in current_exclusions]
for f in legit_files:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size
|
python
|
{
"resource": ""
}
|
q13143
|
setup_experiment
|
train
|
def setup_experiment(log, debug=True, verbose=False, app=None, exp_config=None):
"""Checks the experiment's python dependencies, then prepares a temp directory
with files merged from the custom experiment and Dallinger.
The resulting directory includes all the files necessary to deploy to
Heroku.
"""
# Verify that the Postgres server is running.
try:
db.check_connection()
except Exception:
log("There was a problem connecting to the Postgres database!")
raise
# Check that the demo-specific requirements are satisfied.
try:
with open("requirements.txt", "r") as f:
dependencies = [r for r in f.readlines() if r[:3] != "-e "]
except (OSError, IOError):
dependencies = []
pkg_resources.require(dependencies)
# Generate a unique id for this experiment.
from dallinger.experiment import Experiment
generated_uid = public_id = Experiment.make_uuid(app)
# If the user provided an app name, use it everywhere that's user-facing.
if app:
public_id = str(app)
log("Experiment id is " + public_id + "")
# Load and update the config
config = get_config()
if not config.ready:
config.load()
if exp_config:
config.extend(exp_config)
config.extend({"id": six.text_type(generated_uid)})
temp_dir = assemble_experiment_temp_dir(config)
log("Deployment temp directory: {}".format(temp_dir), chevrons=False)
# Zip up the temporary directory and place it in the cwd.
if not debug:
log("Freezing the experiment package...")
shutil.make_archive(
os.path.join(os.getcwd(), "snapshots", public_id + "-code"), "zip", temp_dir
)
return (public_id, temp_dir)
|
python
|
{
"resource": ""
}
|
q13144
|
HerokuLocalDeployment.notify
|
train
|
def notify(self, message):
"""Callback function which checks lines of output, tries to match
against regex defined in subclass's "dispatch" dict, and passes through
to a handler on match.
"""
for regex, handler in self.dispatch.items():
match = re.search(regex, message)
if match:
handler = getattr(self, handler)
return handler(match)
|
python
|
{
"resource": ""
}
|
q13145
|
DebugDeployment.recruitment_closed
|
train
|
def recruitment_closed(self, match):
"""Recruitment is closed.
Start a thread to check the experiment summary.
"""
if self.status_thread is None:
self.status_thread = threading.Thread(target=self.check_status)
self.status_thread.start()
|
python
|
{
"resource": ""
}
|
q13146
|
DebugDeployment.check_status
|
train
|
def check_status(self):
"""Check the output of the summary route until
the experiment is complete, then we can stop monitoring Heroku
subprocess output.
"""
self.out.log("Recruitment is complete. Waiting for experiment completion...")
base_url = get_base_url()
status_url = base_url + "/summary"
while not self.complete:
time.sleep(10)
try:
resp = requests.get(status_url)
exp_data = resp.json()
except (ValueError, requests.exceptions.RequestException):
self.out.error("Error fetching experiment status.")
else:
self.out.log("Experiment summary: {}".format(exp_data))
if exp_data.get("completed", False):
self.out.log("Experiment completed, all nodes filled.")
self.complete = True
self.heroku.stop()
|
python
|
{
"resource": ""
}
|
q13147
|
DebugDeployment.notify
|
train
|
def notify(self, message):
"""Monitor output from heroku process.
This overrides the base class's `notify`
to make sure that we stop if the status-monitoring thread
has determined that the experiment is complete.
"""
if self.complete:
return HerokuLocalWrapper.MONITOR_STOP
return super(DebugDeployment, self).notify(message)
|
python
|
{
"resource": ""
}
|
q13148
|
StandaloneServer.load
|
train
|
def load(self):
"""Return our application to be run."""
app = util.import_app("dallinger.experiment_server.sockets:app")
if self.options.get("mode") == "debug":
app.debug = True
return app
|
python
|
{
"resource": ""
}
|
q13149
|
AnimalInfo.perturbed_contents
|
train
|
def perturbed_contents(self):
"""Perturb the given animal."""
animal = json.loads(self.contents)
for prop, prop_range in self.properties.items():
range = prop_range[1] - prop_range[0]
jittered = animal[prop] + random.gauss(0, 0.1 * range)
animal[prop] = max(min(jittered, prop_range[1]), prop_range[0])
return json.dumps(animal)
|
python
|
{
"resource": ""
}
|
q13150
|
FullyConnected.add_node
|
train
|
def add_node(self, node):
"""Add a node, connecting it to everyone and back."""
other_nodes = [n for n in self.nodes() if n.id != node.id]
for n in other_nodes:
if isinstance(n, Source):
node.connect(direction="from", whom=n)
else:
node.connect(direction="both", whom=n)
|
python
|
{
"resource": ""
}
|
q13151
|
Empty.add_source
|
train
|
def add_source(self, source):
"""Connect the source to all existing other nodes."""
nodes = [n for n in self.nodes() if not isinstance(n, Source)]
source.connect(whom=nodes)
|
python
|
{
"resource": ""
}
|
q13152
|
Star.add_node
|
train
|
def add_node(self, node):
"""Add a node and connect it to the center."""
nodes = self.nodes()
if len(nodes) > 1:
first_node = min(nodes, key=attrgetter("creation_time"))
first_node.connect(direction="both", whom=node)
|
python
|
{
"resource": ""
}
|
q13153
|
DiscreteGenerational.add_node
|
train
|
def add_node(self, node):
"""Link to the agent from a parent based on the parent's fitness"""
num_agents = len(self.nodes(type=Agent))
curr_generation = int((num_agents - 1) / float(self.generation_size))
node.generation = curr_generation
if curr_generation == 0 and self.initial_source:
parent = self._select_oldest_source()
else:
parent = self._select_fit_node_from_generation(
node_type=type(node), generation=curr_generation - 1
)
if parent is not None:
parent.connect(whom=node)
parent.transmit(to_whom=node)
|
python
|
{
"resource": ""
}
|
q13154
|
CoordinationChatroom.create_network
|
train
|
def create_network(self):
"""Create a new network by reading the configuration file."""
class_ = getattr(networks, self.network_class)
return class_(max_size=self.quorum)
|
python
|
{
"resource": ""
}
|
q13155
|
CoordinationChatroom.info_post_request
|
train
|
def info_post_request(self, node, info):
"""Run when a request to create an info is complete."""
for agent in node.neighbors():
node.transmit(what=info, to_whom=agent)
|
python
|
{
"resource": ""
}
|
q13156
|
nocache
|
train
|
def nocache(func):
"""Stop caching for pages wrapped in nocache decorator."""
def new_func(*args, **kwargs):
"""No cache Wrapper."""
resp = make_response(func(*args, **kwargs))
resp.cache_control.no_cache = True
return resp
return update_wrapper(new_func, func)
|
python
|
{
"resource": ""
}
|
q13157
|
ValidatesBrowser.exclusions
|
train
|
def exclusions(self):
"""Return list of browser exclusion rules defined in the Configuration.
"""
exclusion_rules = [
r.strip()
for r in self.config.get("browser_exclude_rule", "").split(",")
if r.strip()
]
return exclusion_rules
|
python
|
{
"resource": ""
}
|
q13158
|
ValidatesBrowser.is_supported
|
train
|
def is_supported(self, user_agent_string):
"""Check user agent against configured exclusions.
"""
user_agent_obj = user_agents.parse(user_agent_string)
browser_ok = True
for rule in self.exclusions:
if rule in ["mobile", "tablet", "touchcapable", "pc", "bot"]:
if (
(rule == "mobile" and user_agent_obj.is_mobile)
or (rule == "tablet" and user_agent_obj.is_tablet)
or (rule == "touchcapable" and user_agent_obj.is_touch_capable)
or (rule == "pc" and user_agent_obj.is_pc)
or (rule == "bot" and user_agent_obj.is_bot)
):
browser_ok = False
elif rule in user_agent_string:
browser_ok = False
return browser_ok
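# Illustration (configuration value hypothetical): with
# browser_exclude_rule = "bot, MSIE", a crawler user agent fails the "bot"
# rule, and any UA string containing the literal substring "MSIE" fails the
# substring rule, so both return False.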
|
python
|
{
"resource": ""
}
|
q13159
|
MCMCP.create_node
|
train
|
def create_node(self, network, participant):
"""Create a node for a participant."""
return self.models.MCMCPAgent(network=network, participant=participant)
|
python
|
{
"resource": ""
}
|
q13160
|
MCMCP.data_check
|
train
|
def data_check(self, participant):
"""Make sure each trial contains exactly one chosen info."""
infos = participant.infos()
return len([info for info in infos if info.chosen]) * 2 == len(infos)
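# e.g. with 4 infos of which 2 are chosen, 2 * 2 == 4 and the check passes;
# any other ratio of chosen to unchosen infos fails it.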
|
python
|
{
"resource": ""
}
|
q13161
|
find_experiment_export
|
train
|
def find_experiment_export(app_id):
"""Attempt to find a zipped export of an experiment with the ID provided
and return its path. Returns None if not found.
Search order:
1. local "data" subdirectory
2. user S3 bucket
3. Dallinger S3 bucket
"""
# Check locally first
cwd = os.getcwd()
data_filename = "{}-data.zip".format(app_id)
path_to_data = os.path.join(cwd, "data", data_filename)
if os.path.exists(path_to_data):
try:
Data(path_to_data)
except IOError:
from dallinger import logger
logger.exception(
"Error reading local data file {}, checking remote.".format(
path_to_data
)
)
else:
return path_to_data
# Get remote file instead
path_to_data = os.path.join(tempfile.mkdtemp(), data_filename)
buckets = [user_s3_bucket(), dallinger_s3_bucket()]
for bucket in buckets:
try:
bucket.download_file(data_filename, path_to_data)
except botocore.exceptions.ClientError:
pass
else:
return path_to_data
|
python
|
{
"resource": ""
}
|
q13162
|
load
|
train
|
def load(app_id):
"""Load the data from wherever it is found."""
path_to_data = find_experiment_export(app_id)
if path_to_data is None:
raise IOError("Dataset {} could not be found.".format(app_id))
return Data(path_to_data)
|
python
|
{
"resource": ""
}
|
q13163
|
dump_database
|
train
|
def dump_database(id):
"""Dump the database to a temporary directory."""
tmp_dir = tempfile.mkdtemp()
current_dir = os.getcwd()
os.chdir(tmp_dir)
FNULL = open(os.devnull, "w")
heroku_app = HerokuApp(dallinger_uid=id, output=FNULL)
heroku_app.backup_capture()
heroku_app.backup_download()
for filename in os.listdir(tmp_dir):
if filename.startswith("latest.dump"):
os.rename(filename, "database.dump")
os.chdir(current_dir)
return os.path.join(tmp_dir, "database.dump")
|
python
|
{
"resource": ""
}
|
q13164
|
backup
|
train
|
def backup(id):
"""Backup the database to S3."""
filename = dump_database(id)
key = "{}.dump".format(id)
bucket = user_s3_bucket()
bucket.upload_file(filename, key)
return _generate_s3_url(bucket, key)
|
python
|
{
"resource": ""
}
|
q13165
|
register
|
train
|
def register(id, url=None):
"""Register a UUID key in the global S3 bucket."""
bucket = registration_s3_bucket()
key = registration_key(id)
obj = bucket.Object(key)
obj.put(Body=url or "missing")
return _generate_s3_url(bucket, key)
|
python
|
{
"resource": ""
}
|
q13166
|
is_registered
|
train
|
def is_registered(id):
"""Check if a UUID is already registered"""
bucket = registration_s3_bucket()
key = registration_key(id)
found_keys = set(obj.key for obj in bucket.objects.filter(Prefix=key))
return key in found_keys
|
python
|
{
"resource": ""
}
|
q13167
|
copy_heroku_to_local
|
train
|
def copy_heroku_to_local(id):
"""Copy a Heroku database locally."""
heroku_app = HerokuApp(dallinger_uid=id)
try:
subprocess.call(["dropdb", heroku_app.name])
except Exception:
pass
heroku_app.pg_pull()
|
python
|
{
"resource": ""
}
|
q13168
|
copy_db_to_csv
|
train
|
def copy_db_to_csv(dsn, path, scrub_pii=False):
"""Copy a local database to a set of CSV files."""
if "postgresql://" in dsn or "postgres://" in dsn:
conn = psycopg2.connect(dsn=dsn)
else:
conn = psycopg2.connect(database=dsn, user="dallinger")
cur = conn.cursor()
for table in table_names:
csv_path = os.path.join(path, "{}.csv".format(table))
with open(csv_path, "w") as f:
sql = "COPY {} TO STDOUT WITH CSV HEADER".format(table)
cur.copy_expert(sql, f)
conn.close()
if scrub_pii:
_scrub_participant_table(path)
|
python
|
{
"resource": ""
}
|
q13169
|
_scrub_participant_table
|
train
|
def _scrub_participant_table(path_to_data):
"""Scrub PII from the given participant table."""
path = os.path.join(path_to_data, "participant.csv")
with open_for_csv(path, "r") as input, open("{}.0".format(path), "w") as output:
reader = csv.reader(input)
writer = csv.writer(output)
headers = next(reader)
writer.writerow(headers)
for i, row in enumerate(reader):
row[headers.index("worker_id")] = row[headers.index("id")]
row[headers.index("unique_id")] = "{}:{}".format(
row[headers.index("id")], row[headers.index("assignment_id")]
)
writer.writerow(row)
os.rename("{}.0".format(path), path)
|
python
|
{
"resource": ""
}
|
q13170
|
export
|
train
|
def export(id, local=False, scrub_pii=False):
"""Export data from an experiment."""
print("Preparing to export the data...")
if local:
db_uri = db.db_url
else:
db_uri = HerokuApp(id).db_uri
# Create the data package if it doesn't already exist.
subdata_path = os.path.join("data", id, "data")
try:
os.makedirs(subdata_path)
except OSError as e:
if e.errno != errno.EEXIST or not os.path.isdir(subdata_path):
raise
# Copy in the data.
copy_db_to_csv(db_uri, subdata_path, scrub_pii=scrub_pii)
# Copy the experiment code into a code/ subdirectory.
try:
shutil.copyfile(
os.path.join("snapshots", id + "-code.zip"),
os.path.join("data", id, id + "-code.zip"),
)
except Exception:
pass
# Copy in the DATA readme.
# open(os.path.join(id, "README.txt"), "a").close()
# Save the experiment id.
with open(os.path.join("data", id, "experiment_id.md"), "a+") as file:
file.write(id)
# Zip data
src = os.path.join("data", id)
dst = os.path.join("data", id + "-data.zip")
archive_data(id, src, dst)
cwd = os.getcwd()
data_filename = "{}-data.zip".format(id)
path_to_data = os.path.join(cwd, "data", data_filename)
# Backup data on S3 unless run locally
if not local:
bucket = user_s3_bucket()
bucket.upload_file(path_to_data, data_filename)
url = _generate_s3_url(bucket, data_filename)
# Register experiment UUID with dallinger
register(id, url)
return path_to_data
|
python
|
{
"resource": ""
}
|
q13171
|
ingest_to_model
|
train
|
def ingest_to_model(file, model, engine=None):
"""Load data from a CSV file handle into storage for a
SQLAlchemy model class.
"""
if engine is None:
engine = db.engine
reader = csv.reader(file)
columns = tuple('"{}"'.format(n) for n in next(reader))
postgres_copy.copy_from(
file, model, engine, columns=columns, format="csv", HEADER=False
)
fix_autoincrement(model.__table__.name)
|
python
|
{
"resource": ""
}
|
q13172
|
_get_or_create_s3_bucket
|
train
|
def _get_or_create_s3_bucket(s3, name):
"""Get an S3 bucket resource after making sure it exists"""
exists = True
try:
s3.meta.client.head_bucket(Bucket=name)
except botocore.exceptions.ClientError as e:
error_code = int(e.response["Error"]["Code"])
if error_code == 404:
exists = False
else:
raise
if not exists:
s3.create_bucket(Bucket=name)
return s3.Bucket(name)
|
python
|
{
"resource": ""
}
|
q13173
|
user_s3_bucket
|
train
|
def user_s3_bucket(canonical_user_id=None):
"""Get the user's S3 bucket."""
s3 = _s3_resource()
if not canonical_user_id:
canonical_user_id = _get_canonical_aws_user_id(s3)
s3_bucket_name = "dallinger-{}".format(
hashlib.sha256(canonical_user_id.encode("utf8")).hexdigest()[0:8]
)
return _get_or_create_s3_bucket(s3, s3_bucket_name)
|
python
|
{
"resource": ""
}
|
q13174
|
_s3_resource
|
train
|
def _s3_resource(dallinger_region=False):
"""A boto3 S3 resource using the AWS keys in the config."""
config = get_config()
if not config.ready:
config.load()
region = "us-east-1" if dallinger_region else config.get("aws_region")
return boto3.resource(
"s3",
region_name=region,
aws_access_key_id=config.get("aws_access_key_id"),
aws_secret_access_key=config.get("aws_secret_access_key"),
)
|
python
|
{
"resource": ""
}
|
q13175
|
transmit_by_fitness
|
train
|
def transmit_by_fitness(from_whom, to_whom=None, what=None):
"""Choose a parent with probability proportional to their fitness."""
parents = from_whom
parent_fs = [p.fitness for p in parents]
parent_probs = [(f / (1.0 * sum(parent_fs))) for f in parent_fs]
rnd = random.random()
temp = 0.0
for i, probability in enumerate(parent_probs):
temp += probability
if temp > rnd:
parent = parents[i]
break
parent.transmit(what=what, to_whom=to_whom)
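# Worked example of the roulette-wheel selection above: with fitnesses
# [1.0, 3.0], parent_probs is [0.25, 0.75], so a uniform draw rnd < 0.25
# selects the first parent and any larger draw selects the second.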
|
python
|
{
"resource": ""
}
|
q13176
|
get_messenger
|
train
|
def get_messenger(config):
"""Return an appropriate Messenger.
If we're in debug mode, or email settings aren't set, return a debug
version which logs the message instead of attempting to send a real
email.
"""
email_settings = EmailConfig(config)
if config.get("mode") == "debug":
return DebugMessenger(email_settings)
problems = email_settings.validate()
if problems:
logger.info(problems + " Will log errors instead of emailing them.")
return DebugMessenger(email_settings)
return EmailingMessenger(email_settings)
|
python
|
{
"resource": ""
}
|
q13177
|
EmailConfig.validate
|
train
|
def validate(self):
"""Could this config be used to send a real email?"""
missing = []
for k, v in self._map.items():
attr = getattr(self, k, False)
if not attr or attr == CONFIG_PLACEHOLDER:
missing.append(v)
if missing:
return "Missing or invalid config values: {}".format(
", ".join(sorted(missing))
)
|
python
|
{
"resource": ""
}
|
q13178
|
scoped_session_decorator
|
train
|
def scoped_session_decorator(func):
"""Manage contexts and add debugging to db sessions."""
@wraps(func)
def wrapper(*args, **kwargs):
with sessions_scope(session):
# The session used in func comes from the funcs globals, but
# it will be a proxied thread local var from the session
# registry, and will therefore be identical to the one returned
# by the context manager above.
logger.debug("Running worker %s in scoped DB session", func.__name__)
return func(*args, **kwargs)
return wrapper
|
python
|
{
"resource": ""
}
|
q13179
|
serialized
|
train
|
def serialized(func):
"""Run a function within a db transaction using SERIALIZABLE isolation.
With this isolation level, committing will fail if this transaction
read data that was since modified by another transaction. So we need
to handle that case and retry the transaction.
"""
@wraps(func)
def wrapper(*args, **kw):
attempts = 100
session.remove()
while attempts > 0:
try:
session.connection(
execution_options={"isolation_level": "SERIALIZABLE"}
)
result = func(*args, **kw)
session.commit()
return result
except OperationalError as exc:
session.rollback()
if isinstance(exc.orig, TransactionRollbackError):
if attempts > 0:
attempts -= 1
else:
raise Exception(
"Could not commit serialized transaction "
"after 100 attempts."
)
else:
raise
finally:
session.remove()
time.sleep(random.expovariate(0.5))
return wrapper
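# Typical usage sketch (worker function name and body are hypothetical):
#
#     @serialized
#     def assign_condition(participant_id):
#         ...  # reads and writes that must commit as one serializable unit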
|
python
|
{
"resource": ""
}
|
q13180
|
chat
|
train
|
def chat(ws):
"""Relay chat messages to and from clients.
"""
lag_tolerance_secs = float(request.args.get("tolerance", 0.1))
client = Client(ws, lag_tolerance_secs=lag_tolerance_secs)
client.subscribe(request.args.get("channel"))
gevent.spawn(client.heartbeat)
client.publish()
|
python
|
{
"resource": ""
}
|
q13181
|
Channel.subscribe
|
train
|
def subscribe(self, client):
"""Subscribe a client to the channel."""
self.clients.append(client)
log("Subscribed client {} to channel {}".format(client, self.name))
|
python
|
{
"resource": ""
}
|
q13182
|
Channel.unsubscribe
|
train
|
def unsubscribe(self, client):
"""Unsubscribe a client from the channel."""
if client in self.clients:
self.clients.remove(client)
log("Unsubscribed client {} from channel {}".format(client, self.name))
|
python
|
{
"resource": ""
}
|
q13183
|
Channel.listen
|
train
|
def listen(self):
"""Relay messages from a redis pubsub to all subscribed clients.
This is run continuously in a separate greenlet.
"""
pubsub = redis_conn.pubsub()
name = self.name
if isinstance(name, six.text_type):
name = name.encode("utf-8")
try:
pubsub.subscribe([name])
except ConnectionError:
app.logger.exception("Could not connect to redis.")
log("Listening on channel {}".format(self.name))
for message in pubsub.listen():
data = message.get("data")
if message["type"] == "message" and data != "None":
channel = message["channel"]
payload = "{}:{}".format(channel.decode("utf-8"), data.decode("utf-8"))
for client in self.clients:
gevent.spawn(client.send, payload)
gevent.sleep(0.001)
|
python
|
{
"resource": ""
}
|
q13184
|
ChatBackend.subscribe
|
train
|
def subscribe(self, client, channel_name):
"""Register a new client to receive messages on a channel."""
if channel_name not in self.channels:
self.channels[channel_name] = channel = Channel(channel_name)
channel.start()
self.channels[channel_name].subscribe(client)
|
python
|
{
"resource": ""
}
|
q13185
|
ChatBackend.unsubscribe
|
train
|
def unsubscribe(self, client):
"""Unsubscribe a client from all channels."""
for channel in self.channels.values():
channel.unsubscribe(client)
|
python
|
{
"resource": ""
}
|
q13186
|
Client.send
|
train
|
def send(self, message):
"""Send a single message to the websocket."""
if isinstance(message, bytes):
message = message.decode("utf8")
with self.send_lock:
try:
self.ws.send(message)
except socket.error:
chat_backend.unsubscribe(self)
|
python
|
{
"resource": ""
}
|
q13187
|
Client.heartbeat
|
train
|
def heartbeat(self):
"""Send a ping to the websocket periodically.
This is needed so that Heroku won't close the connection
from inactivity.
"""
while not self.ws.closed:
gevent.sleep(HEARTBEAT_DELAY)
gevent.spawn(self.send, "ping")
|
python
|
{
"resource": ""
}
|
q13188
|
Client.publish
|
train
|
def publish(self):
"""Relay messages from client to redis."""
while not self.ws.closed:
# Sleep to prevent *constant* context-switches.
gevent.sleep(self.lag_tolerance_secs)
message = self.ws.receive()
if message is not None:
channel_name, data = message.split(":", 1)
redis_conn.publish(channel_name, data)
|
python
|
{
"resource": ""
}
|
q13189
|
BotBase.driver
|
train
|
def driver(self):
"""Returns a Selenium WebDriver instance of the type requested in the
configuration."""
from dallinger.config import get_config
config = get_config()
if not config.ready:
config.load()
driver_url = config.get("webdriver_url", None)
driver_type = config.get("webdriver_type")
driver = None
if driver_url:
capabilities = CAPABILITY_MAP.get(driver_type.lower())
if capabilities is None:
raise ValueError(
"Unsupported remote webdriver_type: {}".format(driver_type)
)
driver = webdriver.Remote(
desired_capabilities=capabilities, command_executor=driver_url
)
else:
driver_class = DRIVER_MAP.get(driver_type.lower())
if driver_class is not None:
driver = driver_class()
if driver is None:
raise ValueError("Unsupported webdriver_type: {}".format(driver_type))
driver.set_window_size(1024, 768)
logger.info("Created {} webdriver.".format(driver_type))
return driver
|
python
|
{
"resource": ""
}
|
q13190
|
BotBase.sign_up
|
train
|
def sign_up(self):
"""Accept HIT, give consent and start experiment.
This uses Selenium to click through buttons on the ad,
consent, and instruction pages.
"""
try:
self.driver.get(self.URL)
logger.info("Loaded ad page.")
begin = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.CLASS_NAME, "btn-primary"))
)
begin.click()
logger.info("Clicked begin experiment button.")
WebDriverWait(self.driver, 10).until(lambda d: len(d.window_handles) == 2)
self.driver.switch_to_window(self.driver.window_handles[-1])
self.driver.set_window_size(1024, 768)
logger.info("Switched to experiment popup.")
consent = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.ID, "consent"))
)
consent.click()
logger.info("Clicked consent button.")
participate = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.CLASS_NAME, "btn-success"))
)
participate.click()
logger.info("Clicked start button.")
return True
except TimeoutException:
logger.error("Error during experiment sign up.")
return False
|
python
|
{
"resource": ""
}
|
q13191
|
BotBase.sign_off
|
train
|
def sign_off(self):
"""Submit questionnaire and finish.
This uses Selenium to click the submit button on the questionnaire
and return to the original window.
"""
try:
logger.info("Bot player signing off.")
feedback = WebDriverWait(self.driver, 20).until(
EC.presence_of_element_located((By.ID, "submit-questionnaire"))
)
self.complete_questionnaire()
feedback.click()
logger.info("Clicked submit questionnaire button.")
self.driver.switch_to_window(self.driver.window_handles[0])
self.driver.set_window_size(1024, 768)
logger.info("Switched back to initial window.")
return True
except TimeoutException:
logger.error("Error during experiment sign off.")
return False
|
python
|
{
"resource": ""
}
|
q13192
|
BotBase.run_experiment
|
train
|
def run_experiment(self):
"""Sign up, run the ``participate`` method, then sign off and close
the driver."""
try:
self.sign_up()
self.participate()
if self.sign_off():
self.complete_experiment("worker_complete")
else:
self.complete_experiment("worker_failed")
finally:
self.driver.quit()
|
python
|
{
"resource": ""
}
|
q13193
|
HighPerformanceBotBase.run_experiment
|
train
|
def run_experiment(self):
"""Runs the phases of interacting with the experiment
including signup, participation, signoff, and recording completion.
"""
self.sign_up()
self.participate()
if self.sign_off():
self.complete_experiment("worker_complete")
else:
self.complete_experiment("worker_failed")
|
python
|
{
"resource": ""
}
|
q13194
|
HighPerformanceBotBase.sign_up
|
train
|
def sign_up(self):
"""Signs up a participant for the experiment.
This is done using a POST request to the /participant/ endpoint.
"""
self.log("Bot player signing up.")
self.subscribe_to_quorum_channel()
while True:
url = (
"{host}/participant/{self.worker_id}/"
"{self.hit_id}/{self.assignment_id}/"
"debug?fingerprint_hash={hash}&recruiter=bots:{bot_name}".format(
host=self.host,
self=self,
hash=uuid.uuid4().hex,
bot_name=self.__class__.__name__,
)
)
try:
result = requests.post(url)
result.raise_for_status()
except RequestException:
self.stochastic_sleep()
continue
if result.json()["status"] == "error":
self.stochastic_sleep()
continue
self.on_signup(result.json())
return True
|
python
|
{
"resource": ""
}
|
q13195
|
HighPerformanceBotBase.complete_experiment
|
train
|
def complete_experiment(self, status):
"""Record worker completion status to the experiment server.
This is done using a GET request to the /worker_complete
or /worker_failed endpoints.
"""
self.log("Bot player completing experiment. Status: {}".format(status))
while True:
url = "{host}/{status}?participant_id={participant_id}".format(
host=self.host, participant_id=self.participant_id, status=status
)
try:
result = requests.get(url)
result.raise_for_status()
except RequestException:
self.stochastic_sleep()
continue
return result
|
python
|
{
"resource": ""
}
|
q13196
|
HighPerformanceBotBase.subscribe_to_quorum_channel
|
train
|
def subscribe_to_quorum_channel(self):
"""In case the experiment enforces a quorum, listen for notifications
before creating Participant objects.
"""
from dallinger.experiment_server.sockets import chat_backend
self.log("Bot subscribing to quorum channel.")
chat_backend.subscribe(self, "quorum")
|
python
|
{
"resource": ""
}
|
q13197
|
MTurkService.set_rest_notification
|
train
|
def set_rest_notification(self, url, hit_type_id):
"""Set a REST endpoint to recieve notifications about the HIT
The newer AWS MTurk API does not support this feature, which means we
cannot use boto3 here. Instead, we make the call manually after
assembling a properly signed request.
"""
ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
notification_version = "2006-05-05"
API_version = "2014-08-15"
data = {
"AWSAccessKeyId": self.aws_key,
"HITTypeId": hit_type_id,
"Notification.1.Active": "True",
"Notification.1.Destination": url,
"Notification.1.EventType.1": "AssignmentAccepted",
"Notification.1.EventType.2": "AssignmentAbandoned",
"Notification.1.EventType.3": "AssignmentReturned",
"Notification.1.EventType.4": "AssignmentSubmitted",
"Notification.1.EventType.5": "HITReviewable",
"Notification.1.EventType.6": "HITExpired",
"Notification.1.Transport": "REST",
"Notification.1.Version": notification_version,
"Operation": "SetHITTypeNotification",
"SignatureVersion": "1",
"Timestamp": time.strftime(ISO8601, time.gmtime()),
"Version": API_version,
}
query_string, signature = self._calc_old_api_signature(data)
body = query_string + "&Signature=" + urllib.parse.quote_plus(signature)
data["Signature"] = signature
headers = {
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Content-Length": str(len(body)),
"Host": self.legacy_host,
}
resp = requests.post("https://" + self.legacy_host, headers=headers, data=body)
return "<IsValid>True</IsValid>" in resp.text
|
python
|
{
"resource": ""
}
|
q13198
|
MTurkService.register_hit_type
|
train
|
def register_hit_type(
self, title, description, reward, duration_hours, keywords, qualifications
):
"""Register HIT Type for this HIT and return the type's ID, which
is required for creating a HIT.
"""
reward = str(reward)
duration_secs = int(datetime.timedelta(hours=duration_hours).total_seconds())
hit_type = self.mturk.create_hit_type(
Title=title,
Description=description,
Reward=reward,
AssignmentDurationInSeconds=duration_secs,
Keywords=",".join(keywords),
AutoApprovalDelayInSeconds=0,
QualificationRequirements=qualifications,
)
return hit_type["HITTypeId"]
|
python
|
{
"resource": ""
}
|
q13199
|
MTurkService.create_qualification_type
|
train
|
def create_qualification_type(self, name, description, status="Active"):
"""Create a new qualification Workers can be scored for.
"""
try:
response = self.mturk.create_qualification_type(
Name=name, Description=description, QualificationTypeStatus=status
)
except Exception as ex:
if "already created a QualificationType with this name" in str(ex):
raise DuplicateQualificationNameError(str(ex))
return self._translate_qtype(response["QualificationType"])
|
python
|
{
"resource": ""
}
|