| Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars) |
|---|---|---|---|
7,700
|
def query_filter(query):
"""Translate a query-style string to a 'filter'.
Query can be the following formats:
Case Insensitive
'value' OR '*= value' Contains
'value*' OR '^= value' Begins with value
'*value' OR '$= value' Ends with value
'*value*' OR '_= value' Contains value
Case Sensitive
'~ value' Contains
'!~ value' Does not contain
'> value' Greater than value
'< value' Less than value
'>= value' Greater than or equal to value
'<= value' Less than or equal to value
:param string query: query string
"""
try:
return {'operation': int(query)}
except __HOLE__:
pass
if isinstance(query, string_types):
query = query.strip()
for operation in KNOWN_OPERATIONS:
if query.startswith(operation):
query = "%s %s" % (operation, query[len(operation):].strip())
return {'operation': query}
if query.startswith('*') and query.endswith('*'):
query = "*= %s" % query.strip('*')
elif query.startswith('*'):
query = "$= %s" % query.strip('*')
elif query.endswith('*'):
query = "^= %s" % query.strip('*')
else:
query = "_= %s" % query
return {'operation': query}
|
ValueError
|
dataset/ETHPy150Open softlayer/softlayer-python/SoftLayer/utils.py/query_filter
|
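A minimal sketch of the fall-through at the top of query_filter (the helper name and values below are illustrative, not SoftLayer's): int() raises ValueError on non-numeric input, which is what the masked except clause swallows before the string handling runs.

def coerce_or_default(query):
    try:
        return {'operation': int(query)}
    except ValueError:
        # non-numeric queries fall through to the operator parsing
        return {'operation': '_= %s' % query.strip()}

assert coerce_or_default('42') == {'operation': 42}
assert coerce_or_default(' hello ') == {'operation': '_= hello'}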
7,701
|
def resolve_ids(identifier, resolvers):
"""Resolves IDs given a list of functions.
:param string identifier: identifier string
:param list resolvers: a list of functions
:returns list:
"""
# Before doing anything, let's see if this is an integer
try:
return [int(identifier)]
except __HOLE__:
pass # It was worth a shot
# This looks like a globalIdentifier (UUID)
if len(identifier) == 36 and UUID_RE.match(identifier):
return [identifier]
for resolver in resolvers:
ids = resolver(identifier)
if ids:
return ids
return []
|
ValueError
|
dataset/ETHPy150Open softlayer/softlayer-python/SoftLayer/utils.py/resolve_ids
|
7,702
|
def convert_resource(self, resource, view):
serializer_data = view.get_serializer(instance=None)
fields = serializer_data.fields
links = {}
if "links" in resource:
links = resource["links"]
del resource["links"]
for field_name, field in six.iteritems(fields):
if field_name not in links:
continue
related_field = get_related_field(field)
if isinstance(related_field, relations.HyperlinkedRelatedField):
if is_related_many(field):
pks = links[field_name]
model = related_field.queryset.model
resource[field_name] = []
for pk in pks:
obj = model(pk=pk)
try:
url = related_field.to_representation(obj)
except __HOLE__:
url = related_field.to_native(obj)
resource[field_name].append(url)
else:
pk = links[field_name]
model = related_field.queryset.model
obj = model(pk=pk)
try:
url = related_field.to_representation(obj)
except AttributeError:
url = related_field.to_native(obj)
resource[field_name] = url
else:
resource[field_name] = links[field_name]
return resource
|
AttributeError
|
dataset/ETHPy150Open kevin-brown/drf-json-api/rest_framework_json_api/parsers.py/JsonApiMixin.convert_resource
|
7,703
|
def __del__(self):
"Deletes this Geometry."
try:
capi.destroy_geom(self._ptr)
except (AttributeError, __HOLE__):
pass # Some part might already have been garbage collected
# Pickle routines
|
TypeError
|
dataset/ETHPy150Open django/django/django/contrib/gis/gdal/geometries.py/OGRGeometry.__del__
|
7,704
|
def parse_body(self):
if len(self.body) == 0 and not self.parse_zero_length_body:
return self.body
try:
try:
body = ET.XML(self.body)
except __HOLE__:
# lxml wants a bytes and tests are basically hard-coded to str
body = ET.XML(self.body.encode('utf-8'))
except:
raise MalformedResponseError("Failed to parse XML",
body=self.body,
driver=self.connection.driver)
return body
|
ValueError
|
dataset/ETHPy150Open apache/libcloud/libcloud/test/storage/test_s3.py/S3MockRawResponse.parse_body
|
7,705
|
def _remove_test_file(self):
file_path = os.path.abspath(__file__) + '.temp'
try:
os.unlink(file_path)
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open apache/libcloud/libcloud/test/storage/test_s3.py/S3Tests._remove_test_file
|
7,706
|
def test_upload_object_invalid_ex_storage_class(self):
# Invalid hash is detected on the amazon side and BAD_REQUEST is
# returned
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
try:
self.driver.upload_object(file_path=file_path, container=container,
object_name=object_name,
verify_hash=True,
ex_storage_class='invalid-class')
except __HOLE__:
e = sys.exc_info()[1]
self.assertTrue(str(e).lower().find('invalid storage class') != -1)
else:
self.fail('Exception was not thrown')
|
ValueError
|
dataset/ETHPy150Open apache/libcloud/libcloud/test/storage/test_s3.py/S3Tests.test_upload_object_invalid_ex_storage_class
|
7,707
|
def parse_color(text):
'''Parse a string to a kivy color. Supported formats:
* rgb(r, g, b)
* rgba(r, g, b, a)
* rgb
* rgba
* rrggbb
* rrggbbaa
For hexadecimal values, you can also use:
* #rgb
* #rgba
* #rrggbb
* #rrggbbaa
'''
value = [1, 1, 1, 1]
if text.startswith('rgb'):
res = re.match('rgba?\((.*)\)', text)
if res:
try:
# default r/g/b values to 1 if greater than 255 else x/255
value = [1 if int(x) > 255. else (int(x) / 255.)
for x in re.split(',\ ?', res.groups()[0])]
if len(value) < 3:
# in case of invalid input like rgb()/rgb(r)/rgb(r, g)
raise ValueError
except ValueError:
return color_error('ColorParser: Invalid color for %r' % text)
except __HOLE__:
return color_error('ColorParser: Invalid color for %r' % text)
else:
return color_error('ColorParser: Invalid color for %r' % text)
if len(value) == 3:
value.append(1.)
elif len(text):
res = text
if text[0] == '#':
res = text[1:]
lres = len(res)
if lres == 3 or lres == 4:
res = ''.join([x + x for x in res])
elif lres != 6 and lres != 8:
# raise ColorException('Invalid color format for %r' % text)
return color_error(
'ColorParser: Invalid color format for %r' % text)
try:
value = [int(res[i:i + 2], 16) / 255.
for i in range(0, len(res), 2)]
except ValueError:
return color_error('ColorParser: Invalid color for %r' % text)
if lres == 6 or lres == 3:
value.append(1.)
return value
|
AttributeError
|
dataset/ETHPy150Open kivy/kivy/kivy/parser.py/parse_color
|
7,708
|
def string_to_numeric(string, type=int):
try:
return type(string)
except __HOLE__:
return 0
|
ValueError
|
dataset/ETHPy150Open dimagi/commcare-hq/custom/m4change/user_calcs/__init__.py/string_to_numeric
|
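With the hole filled from the label column the snippet runs as-is; a quick self-contained check (input values chosen only for illustration):

def string_to_numeric(string, type=int):
    try:
        return type(string)
    except ValueError:
        return 0

assert string_to_numeric('42') == 42
assert string_to_numeric('3.5', type=float) == 3.5
assert string_to_numeric('not a number') == 0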
7,709
|
def run():
'''Run the command-line runner.'''
args = arg_parser()
settings = utils.Settings.read()
console.configure(settings)
retry_decryption = settings.get("retry_decryption", config.USER_SETTINGS["retry_decryption"])
def try_decrypt(remindme):
if not remindme.is_encrypted():
return remindme.get_content(), None
content = None
while 1:
password = None
try:
password = get_password()
except __HOLE__:
return content, None
content = remindme.get_content(password=password)
if content is None:
console.error("could not decrypt text")
else:
return content, password
if retry_decryption is False:
return content, None
def get_password(retry=False):
# determining whether to ask for a password based on need to encrypt
encryption_disabled = settings.get("disable_encryption", config.USER_SETTINGS["disable_encryption"])
encrypt_by_default = settings.get("encrypt_by_default", config.USER_SETTINGS["encrypt_by_default"])
retry_password = retry and settings.get("retry_password_match", config.USER_SETTINGS["retry_password_match"])
encryption_requested = args["encrypt"] or False
plaintext_requested = args["plain"] or False
password = None
# ensure encryption is not disabled
if encryption_disabled:
console.info("encryption is disabled")
return password
# if encryption has been requested
if encryption_requested:
password = console.get_password(retry=retry_password)
# if encryption is by default and plaintext has not been requested
elif encrypt_by_default and not plaintext_requested:
password = console.get_password(retry=retry_password)
# warn the user that no password was captured, if the case is so
if password is None:
console.info("NO password was captured. Storing as plain text.")
return password
def get_remindme(title):
if args['index']:
try:
return repository.find_at_index(title)
except ValueError:
console.error("index provided is not an integer")
return None
else:
return repository.find_by_title(title)
if args['list']:
if args['keywords']:
# searching using a phrase
phrase = ' '.join(args['keywords'])
remindmes = repository.find(lambda r: r.get_title().startswith(phrase))
else:
remindmes = repository.get_remindmes()
titles = repository.titles_in_order(remindmes)
num = len(titles)
console.success('Found {0} remindmes'.format(num))
if num == 0:
return
number = 0
display_content = ""
for title in titles:
number += 1
display_content = ''.join([display_content, '%-2d - %s\n' % (number, title)])
console.raw(display_content)
return
if args['add']:
title = ' '.join(args['add'])
results = repository.find_by_title(title)
if results:
console.error("A remindme already has that title")
return
# use editor if available, otherwise use console
if settings.get("editor", None):
try:
content = gui.editor(settings["editor"])
except Exception as err:
console.error("External editor (%s) exited with a non-zero status code" % (settings["editor"]))
console.error(str(err))
return
else:
message = "Enter what you remember now"
content = console.get_long_input(message)
if not content:
console.error("We have nothing to save!")
return
password = get_password(retry=True)
if repository.create_remindme(title, content, password=password):
console.success('Remindme will remind you next time.')
else:
console.error('Remindme failed to get that in memory.')
return
if args['edit']:
title = ' '.join(args['edit'])
remindme = get_remindme(title)
if not remindme:
console.error("no such remindme exists")
return
# we require an external editor for this
if not settings.get("editor", None):
console.error("you need to set an external editor for editing existing remindmes")
return
# editing encrypted content
content, password = try_decrypt(remindme)
if content is None:
return 1
content = gui.editor(settings["editor"], content=content)
# update content, only if we got some content
if content:
remindme.set_content(content, password=password)
if repository.update_remindme(remindme):
console.success('The remindme has been updated.')
else:
console.error('Remindme failed to save the remindme.')
return
if args['in']:
title = ' '.join(args['in'])
content = sys.stdin.read().strip()
if content is '':
console.error('Remindme got no data!')
else:
password = get_password()
if repository.create_remindme(title, content, password=password):
console.success('Remindme will remind you next time')
else:
console.error('Remindme failed to get that in memory.\n\
Maybe there is already another remindme with the same title.')
if args['remove']:
title = ' '.join(args['remove'])
remindme = get_remindme(title)
if remindme and remindme.delete():
console.success('remindme successfully removed')
else:
console.error('Remindme can not remove that. Check if the remindme \
really exists with me.')
if args['remove_all']:
confirm = console.get_input("remove All Remindmes(yes/NO)")
if confirm is None or confirm.strip().lower() != "yes":
return console.error("removal cancelled")
if repository.remove_remindmes():
console.success('removed all of them')
else:
console.error('failed to remove all')
if args['keywords']:
title = ' '.join(args['keywords'])
remindme = get_remindme(title)
if remindme:
console.success('Reminding you:')
content, __ = try_decrypt(remindme)
if content is None:
return 1
lines = content.split("\n")
number = 0
for line in lines:
number += 1
console.raw("%-2d %s\n" % (number, line))
else:
console.error('I too can\'t remember that')
return 0
|
KeyboardInterrupt
|
dataset/ETHPy150Open GochoMugo/remindme/remindme/cli.py/run
|
7,710
|
def init_notebook():
""" Initialize the Jupyter notebook by injecting the necessary CSS
and JS into the browser.
"""
from IPython.display import display, Javascript, HTML
# todo: ideally you don't want user interactions done this way:
# they result in spamming of JavaScript "objects" and when nbconverting,
# this will result in a huge number of output_javascript elements.
def my_send_command(command):
display(Javascript('flexx.command(%s);' % reprs(command)))
# Create default session and monkey-patch it
# Not very pretty, but this keeps notebook logic confined to this module/function.
session = manager.get_default_session()
if hasattr(session, '_original_send_command'):
display(HTML("<i>Flexx already loaded</i>"))
return # Don't inject twice
else:
session._original_send_command = session._send_command
session._send_command = my_send_command
try:
session.use_global_asset('phosphor-all.js')
session.use_global_asset('flexx-ui.css')
session.use_global_asset('flexx-ui.js')
except __HOLE__:
pass # Ok if it fails; assets can be loaded dynamically.
# Open server - we only use websocket for JS-to-Py communication
_server_open()
host, port = server.serving_at
asset_elements = session.get_assets_as_html()
# Make the JS that we inject not take any vertical space when nbconverted
extra_css = '.output_subarea.output_javascript { padding: 0px; }'
# Compose HTML to inject
url = 'ws://%s:%i/%s/ws' % (host, port, session.app_name)
t = "<i>Injecting Flexx JS and CSS</i>"
t += '\n\n'.join(asset_elements)
t += '\n\n<style>%s</style>\n' % extra_css
t += "<script>flexx.ws_url='%s'; " % url
t += "flexx.is_notebook=true; flexx.init();</script>"
display(HTML(t))
|
IndexError
|
dataset/ETHPy150Open zoofIO/flexx/flexx/app/funcs.py/init_notebook
|
7,711
|
def main():
try:
options = make_argument_parser().parse_args()
if options.debug:
logging.basicConfig()
stun.log.setLevel(logging.DEBUG)
nat_type, external_ip, external_port = stun.get_ip_info(
source_ip=options.source_ip,
source_port=options.source_port,
stun_host=options.stun_host,
stun_port=options.stun_port
)
print('NAT Type:', nat_type)
print('External IP:', external_ip)
print('External Port:', external_port)
except __HOLE__:
sys.exit()
|
KeyboardInterrupt
|
dataset/ETHPy150Open jtriley/pystun/stun/cli.py/main
|
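A stripped-down sketch of the same Ctrl-C pattern (the stun lookup and argument parsing are omitted; the sleep loop is just a stand-in): wrapping the body in except KeyboardInterrupt lets the CLI exit quietly instead of dumping a traceback.

import sys
import time

def main():
    try:
        while True:
            time.sleep(1)  # stand-in for the real STUN lookup
    except KeyboardInterrupt:
        sys.exit()  # exit cleanly on Ctrl-C

if __name__ == '__main__':
    main()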
7,712
|
def create_library(self, source_code=None, args=None,
wait_time=3, retries=10):
"""Creates a whizzml library from its source code. The `source_code`
parameter can be a:
{library ID}: the ID for an existing whizzml library
{path}: the path to a file containing the source code
{string} : the string containing the source code for the library
"""
create_args = {}
if args is not None:
create_args.update(args)
if source_code is None:
raise Exception('A valid code string'
' or a library id must be provided.')
resource_type = get_resource_type(source_code)
if resource_type == LIBRARY_PATH:
library_id = get_library_id(source_code)
if library_id:
check_resource(library_id,
query_string=TINY_RESOURCE,
wait_time=wait_time, retries=retries,
raise_on_error=True, api=self)
create_args.update({
"origin": library_id})
elif isinstance(source_code, basestring):
try:
if os.path.exists(source_code):
with open(source_code) as code_file:
source_code = code_file.read()
except __HOLE__:
raise IOError("Could not open the source code file %s." %
source_code)
create_args.update({
"source_code": source_code})
else:
raise Exception("A library id or a valid source code"
" is needed to create a"
" library. %s found." % resource_type)
body = json.dumps(create_args)
return self._create(self.library_url, body)
|
IOError
|
dataset/ETHPy150Open bigmlcom/python/bigml/libraryhandler.py/LibraryHandler.create_library
|
7,713
|
def setWeightMatrixSparse(self, W):
"""
Set the weight matrix of this graph. Requires as input a scipy sparse matrix with the
same dimensions as the current weight matrix. Edges are represented by
non-zero edges.
:param W: The weight matrix to use.
"""
if not isinstance(W, spmatrix.LLMatType) and not sparse.issparse(W):
raise ValueError("Input must be a sparse matrix, not " + str(type(W)))
if W.shape != (self.vList.getNumVertices(), self.vList.getNumVertices()):
raise ValueError("Weight matrix has wrong shape : " + str(W.shape))
try:
self.W = spmatrix.ll_mat(W.shape[0], W.shape[0], W.getnnz())
except __HOLE__:
self.W = spmatrix.ll_mat(W.shape[0], W.shape[0], W.nnz)
if isinstance(W, spmatrix.LLMatType):
#Warning: no check for symmetric matrix
#if self.undirected:
# raise ValueError("Weight matrix of undirected graph must be symmetric")
items = W.items()
for inds, val in items:
self.W[inds[0], inds[1]] = val
else:
if self.undirected and (W - W.transpose()).nonzero()[0].shape[0]:
raise ValueError("Weight matrix of undirected graph must be symmetric")
rowInds, colInds = W.nonzero()
for i in range(rowInds.shape[0]):
self.W[int(rowInds[i]), int(colInds[i])] = W[int(rowInds[i]), int(colInds[i])]
|
AttributeError
|
dataset/ETHPy150Open charanpald/APGL/apgl/graph/PySparseGraph.py/PySparseGraph.setWeightMatrixSparse
|
7,714
|
@staticmethod
def _bytes_to_cursor_pb(cursor):
try:
cursor_pb = datastore_pb.CompiledCursor(cursor)
except (__HOLE__, TypeError), e:
raise datastore_errors.BadValueError(
'Invalid cursor (%r). Details: %s' % (cursor, e))
except Exception, e:
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
raise datastore_errors.BadValueError(
'Invalid cursor %s. Details: %s' % (cursor, e))
else:
raise
return cursor_pb
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/datastore/datastore_query.py/Cursor._bytes_to_cursor_pb
|
7,715
|
@staticmethod
def _urlsafe_to_bytes(cursor):
if not isinstance(cursor, basestring):
raise datastore_errors.BadValueError(
'cursor argument should be str or unicode (%r)' % (cursor,))
try:
decoded_bytes = base64.b64decode(str(cursor).replace('-', '+').replace('_', '/'))
except (__HOLE__, TypeError), e:
raise datastore_errors.BadValueError(
'Invalid cursor %s. Details: %s' % (cursor, e))
return decoded_bytes
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/datastore/datastore_query.py/Cursor._urlsafe_to_bytes
|
7,716
|
def process_query_result_if_first(self, query_result):
if not self.__was_first_result_processed:
self.__was_first_result_processed = True
self.__keys_only = query_result.keys_only()
if query_result.has_compiled_query():
self.__compiled_query = query_result.compiled_query
else:
self.__compiled_query = None
try:
self.__index_list = [self.__conn.adapter.pb_to_index(index_pb)
for index_pb in query_result.index_list()]
except __HOLE__:
self.__index_list = None
|
NotImplementedError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/datastore/datastore_query.py/_BatchShared.process_query_result_if_first
|
7,717
|
@command("Set up Taskwarrior to sync issues with your Inthe.AM account.")
def setup(config, args, *extra, **kwargs):
parser = argparse.ArgumentParser()
parser.add_argument(
'--data-dir',
type=os.path.expanduser,
default='~/.tasks'
)
extra_args = parser.parse_args(extra)
twversion = TaskWarriorShellout.get_version()
if twversion < LooseVersion('2.3'):
raise IncompatibleVersionError(
"Only Taskwarrior versions 2.3 and above support "
"synchronization with a task server. Please upgrade "
"and try again."
)
api = get_api_connection(config)
twconfig = get_taskwarrior_config(args.taskrc)
# Make sure that none of these settings are already set.
necessary_settings = ['certificate', 'key', 'ca', 'trust']
if 'taskd' in twconfig:
for setting in necessary_settings:
if setting in twconfig['taskd'] and twconfig['taskd'][setting]:
raise ConfigurationError(
"Cannot configure! Setting taskd.%s is already "
"configured in your TaskRC file at %s." % (
setting,
args.taskrc
)
)
# Create the data directory if necessary
data_location = os.path.expanduser(
twconfig.get('data', {}).get('location', extra_args.data_dir)
)
try:
os.mkdir(data_location)
logger.info(
"Data directory %s created.",
data_location
)
except __HOLE__:
logger.warning(
"Data directory %s already exists.",
data_location
)
# Get user information
status = api.get('https://inthe.am/api/v1/user/status/').json()
# Write certificate files
files = {
'private.cert': '/api/v1/user/my-certificate/',
'private.key': '/api/v1/user/my-key/',
'ca.cert.pem': '/api/v1/user/ca-certificate/',
}
for filename, url in files.items():
full_path = os.path.join(data_location, filename)
with open(full_path, 'w') as out:
full_url = 'https://inthe.am%s' % url
content = api.get(full_url).content
out.write(content)
logger.info(
"File '%s' written to %s.",
filename,
full_path,
)
# Write configuration
taskrc_path = os.path.expanduser(args.taskrc)
with open(taskrc_path, 'a') as out:
lines = []
if twconfig.get('data', {}).get('location') is None:
lines.append(
'data.location=%s' % data_location
)
lines.extend([
'taskd.certificate=%s' % os.path.join(
data_location,
'private.cert',
),
'taskd.key=%s' % os.path.join(
data_location,
'private.key',
),
'taskd.ca=%s' % os.path.join(
data_location,
'ca.cert.pem',
),
'taskd.server=%s' % status['taskd_server'],
'taskd.credentials=%s' % status['taskd_credentials'],
])
if twversion >= LooseVersion('2.4'):
lines.append(
'taskd.trust=ignore hostname'
)
for line in lines:
out.write('%s\n' % line)
logger.info(
"Configuration written to %s.",
taskrc_path,
)
# Synchronizing with Inthe.AM
logger.info(
"Performing initial sync..."
)
warrior = TaskWarriorShellout(
config_filename=taskrc_path
)
warrior.sync()
logger.info(
"Taskwarrior has successfully been configured to synchronize with "
"Inthe.AM; In the future, just run `task sync` to synchronize."
)
|
OSError
|
dataset/ETHPy150Open coddingtonbear/taskwarrior-inthe.am/taskwarrior_inthe_am/commands.py/setup
|
7,718
|
def get_for_model(self, model):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = model._meta
key = (opts.app_label, opts.object_name.lower())
try:
ct = self.__class__._cache[key]
except __HOLE__:
# Load or create the ContentType entry. The smart_unicode() is
# needed around opts.verbose_name_raw because name_raw might be a
# django.utils.functional.__proxy__ object.
ct, created = self.get_or_create(
app_label = opts.app_label,
model = opts.object_name.lower(),
defaults = {'name': smart_unicode(opts.verbose_name_raw)},
)
self._add_to_cache(ct)
return ct
|
KeyError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/contenttypes/models.py/ContentTypeManager.get_for_model
|
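The lookup above is the usual try/except KeyError memoization idiom; a framework-free sketch (the cache dict and compute callback are illustrative, not Django's API):

_cache = {}

def get_cached(key, compute):
    try:
        return _cache[key]              # fast path: already cached
    except KeyError:
        _cache[key] = compute(key)      # slow path: build once, then cache
        return _cache[key]

assert get_cached(3, lambda k: k * k) == 9
assert get_cached(3, lambda k: 0) == 9  # second call hits the cache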
7,719
|
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self.__class__._cache[id]
except __HOLE__:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(ct)
return ct
|
KeyError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/contenttypes/models.py/ContentTypeManager.get_for_id
|
7,720
|
def __init__(self, **options):
"""
See the class docstring for explanation of options.
"""
if not pil_available:
raise PilNotAvailable(
'Python Imaging Library is required for this formatter')
Formatter.__init__(self, **options)
# Read the style
self.styles = dict(self.style)
if self.style.background_color is None:
self.background_color = '#fff'
else:
self.background_color = self.style.background_color
# Image options
self.image_format = get_choice_opt(
options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
self.default_image_format, normcase=True)
self.image_pad = get_int_opt(options, 'image_pad', 10)
self.line_pad = get_int_opt(options, 'line_pad', 2)
# The fonts
fontsize = get_int_opt(options, 'font_size', 14)
self.fonts = FontManager(options.get('font_name', ''), fontsize)
self.fontw, self.fonth = self.fonts.get_char_size()
# Line number options
self.line_number_fg = options.get('line_number_fg', '#886')
self.line_number_bg = options.get('line_number_bg', '#eed')
self.line_number_chars = get_int_opt(options,
'line_number_chars', 2)
self.line_number_bold = get_bool_opt(options,
'line_number_bold', False)
self.line_number_italic = get_bool_opt(options,
'line_number_italic', False)
self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
self.line_numbers = get_bool_opt(options, 'line_numbers', True)
self.line_number_separator = get_bool_opt(options,
'line_number_separator', True)
self.line_number_step = get_int_opt(options, 'line_number_step', 1)
self.line_number_start = get_int_opt(options, 'line_number_start', 1)
if self.line_numbers:
self.line_number_width = (self.fontw * self.line_number_chars +
self.line_number_pad * 2)
else:
self.line_number_width = 0
self.hl_lines = []
hl_lines_str = get_list_opt(options, 'hl_lines', [])
for line in hl_lines_str:
try:
self.hl_lines.append(int(line))
except __HOLE__:
pass
self.hl_color = options.get('hl_color',
self.style.highlight_color) or '#f90'
self.drawables = []
|
ValueError
|
dataset/ETHPy150Open adieu/allbuttonspressed/pygments/formatters/img.py/ImageFormatter.__init__
|
7,721
|
def get_shell_dialog():
global _shell_dialog
if _shell_dialog is not None:
return _shell_dialog
try:
deps = {'pip': 'ipython>=1.0',
'linux-ubuntu': 'ipython-qtconsole',
'linux-debian': 'ipython-qtconsole'}
IPython = py_import('IPython.qt.console.rich_ipython_widget', deps,
True)
RichIPythonWidget = \
IPython.qt.console.rich_ipython_widget.RichIPythonWidget
py_import('IPython.qt.inprocess', deps, True)
QtInProcessKernelManager = \
IPython.qt.inprocess.QtInProcessKernelManager
except __HOLE__:
return None
km = QtInProcessKernelManager()
km.start_kernel()
kernel = km.kernel
kernel.gui = 'qt4'
kernel_client = km.client()
kernel_client.start_channels()
class IPythonDialog(RichIPythonWidget, QVistrailsPaletteInterface):
"""This class incorporates an IPython shell into a dockable widget for use in the
VisTrails environment"""
def __init__(self, parent=None):
RichIPythonWidget.__init__(self, parent)
self.old_streams = None
self.running_workflow = False
self.kernel_manager = km
self.kernel_client = kernel_client
self.exit_requested.connect(self.stop)
self.setWindowTitle("Console")
self.vistrails_interpreter = get_default_interpreter()
def visibility_changed(self, visible):
QVistrailsPaletteInterface.visibility_changed(self, visible)
if visible:
self.show()
else:
self.hide()
def stop(self):
kernel_client.stop_channels()
km.shutdown_kernel()
def hide(self):
"""suspend() -> None
Called when hiding the parent window in order to recover the previous
state.
"""
#recovering the state
if self.old_streams is not None:
sys.stdout, sys.stderr, sys.stdin = self.old_streams
self.old_streams = None
RichIPythonWidget.hide(self)
def show(self):
"""show() -> None
Store previous state and starts capturing all interactive input and
output.
"""
# capture all interactive input/output
if self.old_streams is None:
self.old_streams = sys.stdout, sys.stderr, sys.stdin
sys.stdout = self
sys.stderr = self
sys.stdin = self
RichIPythonWidget.show(self)
def showEvent(self, e):
"""showEvent(e) -> None
Event handler called when the dialog acquires focus
"""
self.show()
def flush(self):
"""flush() -> None.
Simulate stdin, stdout, and stderr.
"""
pass
def isatty(self):
"""isatty() -> int
Simulate stdin, stdout, and stderr.
"""
return 1
def readline(self):
"""readline() -> str
Simulate stdin, stdout, and stderr.
"""
return ""
def write(self, text):
"""write(text: str) -> None
Simulate stdin, stdout, and stderr.
"""
self.input_buffer = ''
if not self.running_workflow:
self.running_workflow = True
# make text blue
self._append_plain_text("\n\x1b[34m<STANDARD OUTPUT>\x1b[0m\n", True)
self._append_plain_text(text, True)
self._prompt_pos = self._get_end_cursor().position()
self._control.ensureCursorVisible()
self._control.moveCursor(QtGui.QTextCursor.End)
def eventFilter(self, obj, event):
""" Reimplemented to ensure a console-like behavior in the underlying
text widgets.
"""
etype = event.type()
if etype == QtCore.QEvent.KeyPress:
self.running_workflow = False
return RichIPythonWidget.eventFilter(self, obj, event)
_shell_dialog = IPythonDialog
return IPythonDialog
|
ImportError
|
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/gui/shell.py/get_shell_dialog
|
7,722
|
def assertDestinationAdded(self, programPath):
"""
Assert that when running under the given program a new destination is
added by L{redirectLogsForTrial}.
@param programPath: A path to a program.
@type programPath: L{str}
"""
destination = _RedirectLogsForTrial(FakeSys([programPath], b""),
LogPublisher())()
# If this was not added as destination, removing it will raise an
# exception:
try:
removeDestination(destination)
except __HOLE__:
self.fail("Destination was not added.")
|
ValueError
|
dataset/ETHPy150Open ClusterHQ/eliot/eliot/tests/test_twisted.py/RedirectLogsForTrialTests.assertDestinationAdded
|
7,723
|
@cherrypy.expose
def repost_fragment(self, **kwargs):
response = self.rp.parse_authentication_response(cherrypy.session,
kwargs["url_fragment"])
html_page = self._load_HTML_page_from_file("htdocs/success_page.html")
# Support for hybrid flow
authz_code = None
try:
authz_code = response["code"]
except __HOLE__:
pass
access_token = None
try:
access_token = response["access_token"]
userinfo = self.rp.make_userinfo_request(cherrypy.session,
access_token)
except KeyError:
pass
return html_page.format(authz_code, access_token,
response["id_token"], userinfo)
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/oidc_example/simple_rp/src/rp.py/RPServer.repost_fragment
|
7,724
|
@port.setter
def port(self, port):
"""
Sets the port that WebDriver will be running on
"""
if not isinstance(port, int):
raise WebDriverException("Port needs to be an integer")
try:
port = int(port)
if port < 1 or port > 65535:
raise WebDriverException("Port number must be in the range 1..65535")
except (__HOLE__, TypeError) as e:
raise WebDriverException("Port needs to be an integer")
self._port = port
self.set_preference("webdriver_firefox_port", self._port)
|
ValueError
|
dataset/ETHPy150Open apiad/sublime-browser-integration/selenium/webdriver/firefox/firefox_profile.py/FirefoxProfile.port
|
7,725
|
def escape(m):
all, tail = m.group(0, 1)
assert all.startswith("\\")
esc = simple_escapes.get(tail)
if esc is not None:
return esc
if tail.startswith("x"):
hexes = tail[1:]
if len(hexes) < 2:
raise ValueError("invalid hex string escape ('\\%s')" % tail)
try:
i = int(hexes, 16)
except __HOLE__:
raise ValueError("invalid hex string escape ('\\%s')" % tail)
else:
try:
i = int(tail, 8)
except ValueError:
raise ValueError("invalid octal string escape ('\\%s')" % tail)
return chr(i)
|
ValueError
|
dataset/ETHPy150Open JT5D/Alfred-Popclip-Sublime/Sublime Text 2/Python PEP8 Autoformat/libs/lib2to3/pgen2/literals.py/escape
|
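A reduced sketch of the numeric-escape branch above (the helper name and inputs are made up): int() with an explicit base raises ValueError on digits that do not fit the base, which is what the masked clause catches for hex escapes.

def parse_numeric_escape(tail):
    # hex escape: \xNN; anything else is treated as an octal escape,
    # mirroring the escape() snippet above
    if tail.startswith('x'):
        return chr(int(tail[1:], 16))   # ValueError on bad hex digits
    return chr(int(tail, 8))            # ValueError on bad octal digits

assert parse_numeric_escape('x41') == 'A'
assert parse_numeric_escape('101') == 'A'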
7,726
|
def quantity(): # <1>
try:
quantity.counter += 1 # <2>
except __HOLE__:
quantity.counter = 0 # <3>
storage_name = '_{}:{}'.format('quantity', quantity.counter) # <4>
def qty_getter(instance): # <5>
return getattr(instance, storage_name)
def qty_setter(instance, value):
if value > 0:
setattr(instance, storage_name, value)
else:
raise ValueError('value must be > 0')
return property(qty_getter, qty_setter)
# END LINEITEM_V4_PROP
|
AttributeError
|
dataset/ETHPy150Open fluentpython/example-code/20-descriptor/bulkfood/bulkfood_v4prop.py/quantity
|
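The counter trick depends on the very first call raising AttributeError because the function attribute does not exist yet; a self-contained sketch of the same pattern (names are invented for illustration):

def next_index():
    try:
        next_index.counter += 1
    except AttributeError:
        next_index.counter = 0   # first call: attribute not created yet
    return next_index.counter

assert [next_index() for _ in range(3)] == [0, 1, 2]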
7,727
|
def render(self, name, value, attrs=None):
if not isinstance(value, list):
value = self.decompress(value)
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id')
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except __HOLE__:
widget_value = None
suffix = widget.suffix
if id_:
final_attrs = dict(final_attrs, id='%s_%s_%s' %
(id_, i, suffix))
output.append(widget.render('%s_%s_%s' % (name, i, suffix),
widget_value,
final_attrs))
return mark_safe(self.format_output(name, output))
|
IndexError
|
dataset/ETHPy150Open MongoEngine/django-mongoengine/django_mongoengine/forms/widgets.py/Dictionary.render
|
7,728
|
def __init__(self, sub_attrs, key_value=None, attrs=None, **kwargs):
widgets = [self.key_type()] if callable(self.key_type) else []
if self.value_type in [TextInput, HiddenInput]:
if sub_attrs:
try:
widgets = [self.key_type(attrs=sub_attrs['key']), self.value_type(attrs=sub_attrs['value'])]
except KeyError:
raise KeyError("improper synthax for sub_attrs parameter")
else:
widgets = [self.key_type(), self.value_type()]
elif self.value_type == Dictionary:
if sub_attrs:
try:
widgets = [self.key_type(attrs=sub_attrs['key']), self.value_type(attrs=sub_attrs['value'], **kwargs)]
except __HOLE__:
raise KeyError("improper synthax for sub_attrs parameter")
else:
widgets = [self.key_type(), self.value_type(**kwargs)]
self.sub_attrs = sub_attrs
#raise error here ?
self.key_value = key_value if key_value is not None else ''
super(Pair, self).__init__(widgets, attrs)
#this method should be overwritten by subclasses
|
KeyError
|
dataset/ETHPy150Open MongoEngine/django-mongoengine/django_mongoengine/forms/widgets.py/Pair.__init__
|
7,729
|
def render(self, name, value, attrs=None):
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
if not isinstance(value, list):
value = self.decompress(value)
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id')
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except __HOLE__:
widget_value = None
if id_:
final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
return mark_safe(self.format_output(output, name))
|
IndexError
|
dataset/ETHPy150Open MongoEngine/django-mongoengine/django_mongoengine/forms/widgets.py/Pair.render
|
7,730
|
def set_cdata_mode(self, tag):
try:
self.interesting = _html_parser.interesting_cdata
except __HOLE__:
self.interesting = re.compile(r'</\s*%s\s*>' % tag.lower(), re.I)
self.cdata_tag = tag.lower()
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/utils/html_parser.py/HTMLParser.set_cdata_mode
|
7,731
|
@content
def POST(self):
data = self.checked_data()
try:
if 'passwordCredentials' in data['auth']:
if not validate_password_credentials(
**data['auth']['passwordCredentials']):
raise self.http(401)
elif 'token' in data['auth']:
if not validate_token(data['auth']['token']['id']):
raise self.http(401)
else:
raise self.http(400)
except (KeyError, __HOLE__):
raise self.http(400)
token = generate_token()
return {
"access": {
"token": {
"issued_at": "2012-07-10T13:37:58.708765",
"expires": "2012-07-10T14:37:58Z",
"id": token,
"tenant": {
"description": None,
"enabled": True,
"id": "12345",
"name": "admin"
}
},
"serviceCatalog": [],
"user": {
"username": "admin",
"roles_links": [],
"id": "9876",
"roles": [{"name": "admin"}],
"name": "admin"
},
"metadata": {
"is_admin": 0,
"roles": ["4567"]
}
}
}
|
TypeError
|
dataset/ETHPy150Open openstack/fuel-web/nailgun/nailgun/fake_keystone/handlers.py/TokensHandler.POST
|
7,732
|
def __getattr__(self, name):
try:
return self[name]
except __HOLE__:
raise AttributeError(name)
|
KeyError
|
dataset/ETHPy150Open D-L/SimpleBookMarks/src/tornado/util.py/ObjectDict.__getattr__
|
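A runnable sketch of the idiom (only the getter from the row above is reproduced; the values are illustrative): translating KeyError into AttributeError keeps attribute access behaving the way callers expect.

class ObjectDict(dict):
    """Dict whose keys can also be read as attributes."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            # attribute access must raise AttributeError, not KeyError
            raise AttributeError(name)

conf = ObjectDict(host='localhost', port=8080)
assert conf.port == 8080
try:
    conf.missing
except AttributeError:
    pass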
7,733
|
def import_object(name):
"""Imports an object by name.
import_object('x') is equivalent to 'import x'.
import_object('x.y.z') is equivalent to 'from x.y import z'.
>>> import tornado.escape
>>> import_object('tornado.escape') is tornado.escape
True
>>> import_object('tornado.escape.utf8') is tornado.escape.utf8
True
>>> import_object('tornado') is tornado
True
>>> import_object('tornado.missing_module')
Traceback (most recent call last):
...
ImportError: No module named missing_module
"""
if name.count('.') == 0:
return __import__(name, None, None)
parts = name.split('.')
obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
try:
return getattr(obj, parts[-1])
except __HOLE__:
raise ImportError("No module named %s" % parts[-1])
# Fake unicode literal support: Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import
# unicode_literals" have other problems (see PEP 414). u() can be applied
# to ascii strings that include \u escapes (but they must not contain
# literal non-ascii characters).
|
AttributeError
|
dataset/ETHPy150Open D-L/SimpleBookMarks/src/tornado/util.py/import_object
|
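A condensed, self-contained version of the helper above with the hole filled from the label column, plus a small check against the standard library (the module names here are only examples):

def import_object(name):
    parts = name.split('.')
    if len(parts) == 1:
        return __import__(name)
    obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
    try:
        return getattr(obj, parts[-1])
    except AttributeError:
        # a missing final attribute is reported as an import failure
        raise ImportError("No module named %s" % parts[-1])

import os.path
assert import_object('os.path.join') is os.path.join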
7,734
|
def __init__(self, func, name):
self.name = name
try:
self.arg_pos = inspect.getargspec(func).args.index(self.name)
except __HOLE__:
# Not a positional parameter
self.arg_pos = None
|
ValueError
|
dataset/ETHPy150Open D-L/SimpleBookMarks/src/tornado/util.py/ArgReplacer.__init__
|
7,735
|
def run_detail(request, run_id):
test_run = get_object_or_404(TestRun, id=run_id)
sort = request.GET.get('sort')
if sort == 'response_time':
order_by = '-timer_total'
elif sort == 'sql_queries':
order_by = '-sql_num_queries'
elif sort == 'sql_time':
order_by = '-sql_time'
else:
order_by = '-timestamp'
test_run.set_aggregates()
p = Paginator(test_run.records.order_by(order_by), RECORDS_PER_PAGE)
try:
page_num = int(request.GET.get('p', 1))
except __HOLE__:
page_num = 1
page = p.page(page_num)
return render_to_response("debug_logging/run_detail.html", {
'page': page,
'test_run': test_run,
'all_test_runs': _get_all_test_runs(),
}, context_instance=RequestContext(request))
|
ValueError
|
dataset/ETHPy150Open lincolnloop/django-debug-logging/debug_logging/views.py/run_detail
|
7,736
|
def _status_query(query, hostname, enumerate=None, service=None):
'''
Send query along to Nagios.
'''
config = _config()
data = None
params = {
'hostname': hostname,
'query': query,
}
ret = {
'result': False
}
if enumerate:
params['formatoptions'] = 'enumerate'
if service:
params['servicedescription'] = service
if config['username'] and config['password'] is not None:
auth = (config['username'], config['password'],)
else:
auth = None
try:
result = salt.utils.http.query(
config['url'],
method='GET',
params=params,
decode=True,
data=data,
text=True,
status=True,
header_dict={},
auth=auth,
backend='requests',
opts=__opts__,
)
except ValueError:
ret['error'] = 'Please ensure Nagios is running.'
ret['result'] = False
return ret
if result.get('status', None) == salt.ext.six.moves.http_client.OK:
try:
ret['json_data'] = result['dict']
ret['result'] = True
except __HOLE__:
ret['error'] = 'Please ensure Nagios is running.'
elif result.get('status', None) == salt.ext.six.moves.http_client.UNAUTHORIZED:
ret['error'] = 'Authentication failed. Please check the configuration.'
elif result.get('status', None) == salt.ext.six.moves.http_client.NOT_FOUND:
ret['error'] = 'URL {0} was not found.'.format(config['url'])
else:
ret['error'] = 'Results: {0}'.format(result.text)
return ret
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/nagios_rpc.py/_status_query
|
7,737
|
def get_queryset(self, extra_filters=None):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (__HOLE__, KeyError):
kwargs = extra_filters if extra_filters else {}
return self.through.tags_for(self.model, self.instance, **kwargs)
|
AttributeError
|
dataset/ETHPy150Open alex/django-taggit/taggit/managers.py/_TaggableManager.get_queryset
|
7,738
|
def parse_yaml(yaml_file):
"""
Parses a yaml file, returning its contents as a dict.
"""
try:
import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
try:
with io.open(yaml_file, encoding='utf-8') as fname:
return yaml.load(fname)
except __HOLE__:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))
|
IOError
|
dataset/ETHPy150Open ktbyers/netmiko/tests/test_utils.py/parse_yaml
|
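A gentler sketch of the same guarded load without the sys.exit() calls (the file name is a placeholder, and safe_load is swapped in for the snippet's plain load): on Python 3, IOError is an alias of OSError, so the labeled exception still covers a missing or unreadable file.

import io

def load_yaml_or_none(path):
    try:
        import yaml
    except ImportError:
        return None
    try:
        with io.open(path, encoding='utf-8') as fh:
            return yaml.safe_load(fh)   # safer variant of yaml.load
    except IOError:                     # alias of OSError on Python 3
        return None

# load_yaml_or_none('devices.yml')      # placeholder file name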
7,739
|
def mk_test_name(name, value, index=0):
"""
Generate a new name for a test case.
It will take the original test name and append an ordinal index and a
string representation of the value, and convert the result into a valid
python identifier by replacing extraneous characters with ``_``.
If hash randomization is enabled (a feature available since 2.7.3/3.2.3
and enabled by default since 3.3) and a "non-trivial" value is passed
this will omit the name argument by default. Set `PYTHONHASHSEED`
to a fixed value before running tests in these cases to get the
names back consistently or use the `__name__` attribute on data values.
A "trivial" value is a plain scalar, or a tuple or list consisting
only of trivial values.
"""
# We avoid doing str(value) if all of the following hold:
#
# * Python version is 2.7.3 or newer (for 2 series) or 3.2.3 or
# newer (for 3 series). Also sys.flags.hash_randomization didn't
# exist before these.
# * sys.flags.hash_randomization is set to True
# * PYTHONHASHSEED is **not** defined in the environment
# * Given `value` argument is not a trivial scalar (None, str,
# int, float).
#
# Trivial scalar values are passed as is in all cases.
trivial_types = (type(None), bool, str, int, float)
try:
trivial_types += (unicode,)
except __HOLE__:
pass
def is_trivial(value):
if isinstance(value, trivial_types):
return True
if isinstance(value, (list, tuple)):
return all(map(is_trivial, value))
return False
if is_hash_randomized() and not is_trivial(value):
return "{0}_{1}".format(name, index + 1)
try:
value = str(value)
except UnicodeEncodeError:
# fallback for python2
value = value.encode('ascii', 'backslashreplace')
test_name = "{0}_{1}_{2}".format(name, index + 1, value)
return re.sub('\W|^(?=\d)', '_', test_name)
|
NameError
|
dataset/ETHPy150Open numba/numba/numba/testing/ddt.py/mk_test_name
|
7,740
|
def feed_data(func, new_name, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
wrapper.__name__ = new_name
# Try to call format on the docstring
if func.__doc__:
try:
wrapper.__doc__ = func.__doc__.format(*args, **kwargs)
except (__HOLE__, KeyError):
# Maybe the user has added some of the formatting strings
# unintentionally in the docstring. Do not raise an exception as it
# could be that he is not aware of the formatting feature.
pass
return wrapper
|
IndexError
|
dataset/ETHPy150Open numba/numba/numba/testing/ddt.py/feed_data
|
7,741
|
def configure(config):
"""When using Lino's default method, the :setting:`LOGGING` setting in
your :xfile:`settings.py` must be a dictionary containing the
parameters you want to set. Available parameters are:
:param logfile: the full path of the lino `system.log` file.
If absent or `None`, there will be no `system.log` file.
:param level: the overall verbosity level for both console and logfile.
:param mode: the opening mode for the logfile
:param encoding: the encoding for the logfile
:param tty: whether to install a default logger to the terminal
:param logger_names: A list or tuple of names of loggers to configure.
If this is a string, Lino converts it to a list
(expecting it to be a space-separated list of names).
Default value is 'lino'.
If there is a logfile, then console messages will never be more
verbose than INFO because too many messages on the screen are
disturbing, and if the level is DEBUG you will better analyze them in
the logfile.
Automatically adds an AdminEmailHandler with level ERROR to all
specified loggers *and* to the 'django' logger (even if 'django' is
not specified in `loggers`). Because that's rather necessary on a
production server with :setting:`DEBUG` False.
"""
if getattr(logging, "set_up_done", False):
msg = "Not changing the existing logging configuration."
# raise Exception(msg)
logging.info(msg)
return
logging.set_up_done = True
# if len(logging.getLogger().handlers) != 0:
# msg = "Not changing the existing logging configuration."
# # raise Exception(msg)
# logging.info(msg)
# return
#~ logger_names = config.get('logger_names','djangosite lino')
logger_names = config.get('logger_names', None)
#~ print 20130826, logger_names
if not logger_names:
#~ print 20130418, __file__, 'no logger names'
return # Django 1.5 calls this function twice (#20229)
#~ raise Exception("Missing keyword argument `logger_names` in %s." % config)
djangoLogger = logging.getLogger('django')
linoLogger = logging.getLogger('lino')
from django.utils.log import AdminEmailHandler
# print 20150623, config
# encoding = config.get('encoding', 'UTF-8')
logfile = config.get('filename', None)
rotate = config.get('rotate', True)
tty = config.get('tty', True)
level = getattr(logging, config.get('level', 'notset').upper())
if isinstance(logger_names, basestring):
logger_names = logger_names.split()
#~ print "20130418 configure loggers", logger_names, config
loggers = [logging.getLogger(n) for n in logger_names]
# for l in loggers:
# if len(l.handlers) != 0:
# msg = "Not configuring logging because already configured."
# l.info(msg)
# return
for l in loggers:
l.setLevel(level)
aeh = AdminEmailHandler(include_html=True)
aeh.setLevel(logging.ERROR)
for l in loggers:
l.addHandler(aeh)
if 'django' not in logger_names:
djangoLogger.addHandler(aeh)
if tty:
try:
if sys.stdout.isatty():
h = logging.StreamHandler()
if logfile is not None:
h.setLevel(logging.INFO)
#~ print "20130826 tty", h, loggers
fmt = logging.Formatter(fmt='%(levelname)s %(message)s')
h.setFormatter(fmt)
for l in loggers:
l.addHandler(h)
except IOError:
# happens under mod_wsgi
linoLogger.info("mod_wsgi mode (no sys.stdout)")
if logfile is not None:
try:
kw = {}
for k in ('mode', 'encoding'):
if k in config:
kw[k] = config[k]
h = file_handler(logfile, rotate, **kw)
#~ h.setLevel(level)
for l in loggers:
l.addHandler(h)
except __HOLE__ as e:
raise Exception("Failed to create log file %s : %s" % (logfile, e))
# linoLogger.exception("Failed to create log file %s : %s", logfile, e)
# linoLogger.exception(e)
#~ linoLogger.info("20120408 linoLogger.handlers: %s", linoLogger.handlers)
|
IOError
|
dataset/ETHPy150Open lsaffre/lino/lino/utils/log.py/configure
|
7,742
|
def test_emit(self):
self.log.set_level(Level.DEBUG)
for l in Level.iterconstants():
_lname = l.name.lower()
try:
getattr(self.log, _lname)("Message", _satraceback=False)
except __HOLE__:
self.assertEqual(l, Level.FATAL)
self.assertEqual(self._lastlog['system'], "test" if l == Level.INFO
else "test#" + _lname)
self.assertEqual(self._lastlog['_salevel'], l)
|
SystemExit
|
dataset/ETHPy150Open smartanthill/smartanthill1_0/smartanthill/test/test_log.py/LogCase.test_emit
|
7,743
|
def _resolve_urlspec(self, site_name, url_spec):
if isinstance(url_spec, (list, tuple)):
url_spec = dict(zip(['pattern', 'handler', 'kwargs', 'name'], url_spec))
if not 'pattern' in url_spec or not 'handler' in url_spec:
msg = 'Missing attributes in handler configuration in site [{0}]: [{1}]'.format(site_name, url_spec)
hint = 'Both "pattern" and "handler" must be defined.'
raise InvalidHandlerError(msg, hint)
# string -> class specification
if isinstance(url_spec['handler'], basestring):
url_spec['handler'] = {'class':url_spec['handler']}
if 'provider' in url_spec['handler']:
handler = url_spec.pop('handler')
provider = handler['provider']
dependency = self.dependency_manager.add_dependency(self, handler)
url_spec.setdefault('kwargs', dict()).update(dependency=dependency)
if provider.startswith('pipeline.'):
url_spec['handler_class'] = handlers.PipelineRequestHandler
return web.url(**url_spec)
url_spec['handler_class'] = handlers.PipedRequestHandlerProxy(dependency)
return web.url(**url_spec)
if 'class' in url_spec['handler']:
handler = url_spec.pop('handler')
cls = reflect.namedAny(handler['class'])
if id(cls) not in self._configured_handler_factory_ids:
self._configured_handler_factory_ids.add(id(cls))
try:
cls.configure(self.runtime_environment)
except __HOLE__ as ae:
if "has no attribute 'configure'" not in ae.args[0]:
raise
url_spec['handler_class'] = cls
return web.url(**url_spec)
msg = 'Invalid handler configuration in site [{0}]: [{1}]'.format(site_name, url_spec)
raise InvalidHandlerError(msg)
|
AttributeError
|
dataset/ETHPy150Open foundit/Piped/contrib/cyclone/piped_cyclone/providers.py/CycloneProvider._resolve_urlspec
|
7,744
|
@query
def __contains__(self, member):
""":keyword:`in` operator. Tests whether the set contains
the given operand ``member``.
:param member: the value to test
:returns: ``True`` if the sorted set contains the given
operand ``member``
:rtype: :class:`bool`
.. note::
This method internally uses :redis:`ZSCORE` command.
"""
try:
element = self.value_type.encode(member)
except __HOLE__:
return False
return self.session.client.zscore(self.key, element) is not None
|
TypeError
|
dataset/ETHPy150Open dahlia/sider/sider/sortedset.py/SortedSet.__contains__
|
7,745
|
@query
def __eq__(self, operand):
if not isinstance(operand, collections.Sized):
return False
length = len(self)
if length != len(operand):
return False
zrange = self.session.client.zrange
operand_is_sortedset = isinstance(operand, SortedSet)
if operand_is_sortedset:
if length == 0:
return True
elif self.value_type != operand.value_type:
return False
pairs = zrange(self.key, 0, -1, withscores=True)
decode = self.value_type.decode
if operand_is_sortedset:
operand_pairs = zrange(operand.key, 0, -1, withscores=True)
return pairs == operand_pairs
elif isinstance(operand, collections.Mapping):
for element, score in pairs:
element = decode(element)
try:
s = operand[element]
except __HOLE__:
return False
else:
if s != score:
return False
return True
elif isinstance(operand, collections.Set):
for element, score in pairs:
if not (score == 1 and decode(element) in operand):
return False
return True
return False
|
KeyError
|
dataset/ETHPy150Open dahlia/sider/sider/sortedset.py/SortedSet.__eq__
|
7,746
|
def StartInstance(self, instance, block_devices, startup_paused):
"""Start an instance.
For the chroot manager, we try to mount the block device and
execute '/ganeti-chroot start'.
"""
root_dir = self._InstanceDir(instance.name)
if not os.path.exists(root_dir):
try:
os.mkdir(root_dir)
except __HOLE__, err:
raise HypervisorError("Failed to start instance %s: %s" %
(instance.name, err))
if not os.path.isdir(root_dir):
raise HypervisorError("Needed path %s is not a directory" % root_dir)
if not os.path.ismount(root_dir):
if not block_devices:
raise HypervisorError("The chroot manager needs at least one disk")
sda_dev_path = block_devices[0][1]
result = utils.RunCmd(["mount", sda_dev_path, root_dir])
if result.failed:
raise HypervisorError("Can't mount the chroot dir: %s" % result.output)
init_script = instance.hvparams[constants.HV_INIT_SCRIPT]
result = utils.RunCmd(["chroot", root_dir, init_script, "start"])
if result.failed:
raise HypervisorError("Can't run the chroot start script: %s" %
result.output)
|
IOError
|
dataset/ETHPy150Open ganeti/ganeti/lib/hypervisor/hv_chroot.py/ChrootManager.StartInstance
|
7,747
|
def facebook_callback(fun_or_app, route=None):
def handler(fun, *args, **kwargs):
error = None
access_token = None
if CSRF_TOKEN_REQUIRED:
if 'state' not in request.args:
error = FacebookError(message="`state` parameter is required. This request might have been initiated by an unauthorized third-party.", err="StateMissing")
elif session['facebook_state'] != request.args['state']:
error = FacebookError(message="`state` parameter does not match session value. This request might have been initiated by an unauthorized third-party.", err="StateMismatch")
elif 'error' in request.args:
message = request.args.get('error_description')
err = request.args.get('error')
reason = request.args.get('error_reason')
error = FacebookError(message=message, err=err, code=reason)
elif 'code' not in request.args:
error = FacebookError(message="`code` is a required parameter.", err="CodeMissing")
if error is None:
code = request.args.get('code')
facebook = Facebook()
response = facebook.oauth.access_token(code=code)
try:
access_token = response.access_token
except __HOLE__:
access_token = response['access_token']
from ecl_facebook.signals import post_facebook_auth
post_facebook_auth.send('ecl_facebook', token=access_token)
return fun(access_token, error)
if route is None:
fun = fun_or_app
inner = lambda *args, **kwargs: handler(fun, *args, **kwargs)
inner = wraps(fun)(inner)
return inner
else:
app = fun_or_app
def decorator(fun):
inner = lambda *args, **kwargs: handler(fun, *args, **kwargs)
inner = wraps(fun)(inner)
app.add_url_rule(route, None, inner)
return inner
return decorator
|
AttributeError
|
dataset/ETHPy150Open elmcitylabs/ECL-Facebook/ecl_facebook/flask_decorators.py/facebook_callback
|
7,748
|
@detail_route(methods=['put'])
def set_state(self, request, pk=None):
"""
Special view endpoint to set and reset a state of a motion.
Send PUT {'state': <state_id>} to set and just PUT {} to reset the
state. Only managers can use this view.
"""
# Retrieve motion and state.
motion = self.get_object()
state = request.data.get('state')
# Set or reset state.
if state is not None:
# Check data and set state.
try:
state_id = int(state)
except __HOLE__:
raise ValidationError({'detail': _('Invalid data. State must be an integer.')})
if state_id not in [item.id for item in motion.state.next_states.all()]:
raise ValidationError(
{'detail': _('You can not set the state to %(state_id)d.') % {'state_id': state_id}})
motion.set_state(state_id)
else:
# Reset state.
motion.reset_state()
# Save motion.
motion.save(update_fields=['state', 'identifier'])
message = _('The state of the motion was set to %s.') % motion.state.name
# Write the log message and initiate response.
motion.write_log(
message_list=[ugettext_noop('State set to'), ' ', motion.state.name],
person=request.user)
return Response({'detail': message})
|
ValueError
|
dataset/ETHPy150Open OpenSlides/OpenSlides/openslides/motions/views.py/MotionViewSet.set_state
|
7,749
|
def fetch(self):
counter = 0
while counter <= self.timeout and not self.done:
try:
self.dispatch(self.transport)
self.done = True
self.logger.info("Done processing.")
except urllib2.HTTPError, err:
#code = err.code
self.logger.error("File not found: %s" % err)
except __HOLE__, err:
# Connection error (socket)
self.logger.error("IOError: %s" % err)
except Exception, err:
self.logger.error("SynapseException: %s" % err)
self.done = True
finally:
if not self.done:
self.logger.info('Retrying in 2 seconds. '
'{0} seconds left'.format(self.timeout - counter))
time.sleep(2)
counter += 2
|
IOError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resourcefile.py/ResourceFile.fetch
|
7,750
|
def _get_http(self):
self.logger.info('Trying to open url %s' % self.url)
webfile = urllib2.urlopen(self.url)
try:
tasks = json.loads(webfile.read())
self.logger.info('Found %d task(s), processing...' % len(tasks))
except __HOLE__, err:
raise Exception('Error while loading json: {0}'.format(err))
finally:
webfile.close()
return tasks
|
ValueError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resourcefile.py/ResourceFile._get_http
|
7,751
|
def _get_fs(self):
self.logger.info('Trying to open file %s' % self.path)
with open(self.path, 'r') as fd:
try:
tasks = json.load(fd)
self.logger.info('Found %d task(s)' % len(tasks))
except __HOLE__, err:
raise Exception('Error while loading json: {0}'.format(err))
return tasks or []
|
ValueError
|
dataset/ETHPy150Open comodit/synapse-agent/synapse/resourcefile.py/ResourceFile._get_fs
|
7,752
|
def prepdir(coord):
'''
'''
path = 'tiles/%(zoom)d/%(column)d/%(row)d' % coord.__dict__
try:
makedirs(path)
except __HOLE__:
pass
return path
|
OSError
|
dataset/ETHPy150Open codeforamerica/US-Census-Area-API/data/index-tiles.py/prepdir
|
7,753
|
def next_node(self, condition=None,
include_self=0, descend=1, siblings=0, ascend=0):
"""
Return the first node in the iterable returned by traverse(),
or None if the iterable is empty.
Parameter list is the same as of traverse. Note that
include_self defaults to 0, though.
"""
iterable = self.traverse(condition=condition,
include_self=include_self, descend=descend,
siblings=siblings, ascend=ascend)
try:
return iterable[0]
except __HOLE__:
return None
|
IndexError
|
dataset/ETHPy150Open adieu/allbuttonspressed/docutils/nodes.py/Node.next_node
|
7,754
|
def replace_self(self, new):
"""
Replace `self` node with `new`, where `new` is a node or a
list of nodes.
"""
update = new
if not isinstance(new, Node):
# `new` is a list; update first child.
try:
update = new[0]
except __HOLE__:
update = None
if isinstance(update, Element):
update.update_basic_atts(self)
else:
# `update` is a Text node or `new` is an empty list.
# Assert that we aren't losing any attributes.
for att in ('ids', 'names', 'classes', 'dupnames'):
assert not self[att], \
'Losing "%s" attribute: %s' % (att, self[att])
self.parent.replace(self, new)
|
IndexError
|
dataset/ETHPy150Open adieu/allbuttonspressed/docutils/nodes.py/Element.replace_self
|
7,755
|
def parse_csv_date(date):
try:
return None if date == 'NULL' else datetime.strptime(date, '%m/%d/%y')
except __HOLE__:
print("Can't parse the following date: " + date)
|
ValueError
|
dataset/ETHPy150Open codeforamerica/comport/comport/utils.py/parse_csv_date
|
7,756
|
def __init__(self, *args, **kwargs):
super(ReturnsContract, self).__init__(*args, **kwargs)
assert len(self.args) in [1, 2, 3]
self.obj_name = self.args[0] or None
self.obj_type = self.objects[self.obj_name]
try:
self.min_bound = int(self.args[1])
except IndexError:
self.min_bound = 1
try:
self.max_bound = int(self.args[2])
except __HOLE__:
self.max_bound = float('inf')
|
IndexError
|
dataset/ETHPy150Open wcong/ants/ants/contracts/default.py/ReturnsContract.__init__
|
7,757
|
def __getitem__(self, key):
key = key.lower()
try:
return self.base_data_types_reverse[key]
except __HOLE__:
size = get_field_size(key)
if size is not None:
return ('CharField', {'max_length': size})
raise KeyError
|
KeyError
|
dataset/ETHPy150Open django/django/django/db/backends/sqlite3/introspection.py/FlexibleFieldLookupDict.__getitem__
|
7,758
|
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
# Dictionary of relations to return
relations = {}
# Schema for this table
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
try:
results = cursor.fetchone()[0].strip()
except __HOLE__:
# It might be a view, then no results will be returned
return relations
results = results[results.index('(') + 1:results.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
for field_desc in results.split(','):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search('references (\S*) ?\(["|]?(.*)["|]?\)', field_desc, re.I)
if not m:
continue
table, column = [s.strip('"') for s in m.groups()]
if field_desc.startswith("FOREIGN KEY"):
# Find name of the target FK field
m = re.match('FOREIGN KEY\(([^\)]*)\).*', field_desc, re.I)
field_name = m.groups()[0].strip('"')
else:
field_name = field_desc.split()[0].strip('"')
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
result = cursor.fetchall()[0]
other_table_results = result[0].strip()
li, ri = other_table_results.index('('), other_table_results.rindex(')')
other_table_results = other_table_results[li + 1:ri]
for other_desc in other_table_results.split(','):
other_desc = other_desc.strip()
if other_desc.startswith('UNIQUE'):
continue
other_name = other_desc.split(' ', 1)[0].strip('"')
if other_name == column:
relations[field_name] = (other_name, table)
break
return relations
|
TypeError
|
dataset/ETHPy150Open django/django/django/db/backends/sqlite3/introspection.py/DatabaseIntrospection.get_relations
|
7,759
|
def render(self, name, value, attrs=None):
if value is not None:
# Special handling for MarkupField value.
# This won't touch simple TextFields because they don't have
# 'raw' attribute.
try:
value = value.raw
except __HOLE__:
pass
return super(MarkupInput, self).render(name, value, attrs)
|
AttributeError
|
dataset/ETHPy150Open zsiciarz/django-markitup/markitup/widgets.py/MarkupInput.render
|
7,760
|
def close(self):
try:
return self.file.close()
except __HOLE__ as e:
if e.errno != 2:
# Means the file was moved or deleted before the tempfile
# could unlink it. Still sets self.file.close_called and
# calls self.file.file.close() before the exception
raise
|
OSError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/files/uploadedfile.py/TemporaryUploadedFile.close
|
7,761
|
def QuotedFileName(fname):
"""Given a filename, return a quoted version if necessary
"""
import regutil, string
try:
string.index(fname, " ") # Other chars forcing quote?
return '"%s"' % fname
except __HOLE__:
# No space in name.
return fname
|
ValueError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/win32/scripts/regsetup.py/QuotedFileName
|
7,762
|
def LocateFileName(fileNamesString, searchPaths):
"""Locate a file name, anywhere on the search path.
If the file can not be located, prompt the user to find it for us
(using a common OpenFile dialog)
Raises KeyboardInterrupt if the user cancels.
"""
import regutil, string, os
fileNames = string.split(fileNamesString,";")
for path in searchPaths:
for fileName in fileNames:
try:
retPath = os.path.join(path, fileName)
os.stat(retPath)
break
except os.error:
retPath = None
if retPath:
break
else:
fileName = fileNames[0]
try:
import win32ui, win32con
except __HOLE__:
raise error, "Need to locate the file %s, but the win32ui module is not available\nPlease run the program again, passing as a parameter the path to this file." % fileName
# Display a common dialog to locate the file.
flags=win32con.OFN_FILEMUSTEXIST
ext = os.path.splitext(fileName)[1]
filter = "Files of requested type (*%s)|*%s||" % (ext,ext)
dlg = win32ui.CreateFileDialog(1,None,fileName,flags,filter,None)
dlg.SetOFNTitle("Locate " + fileName)
if dlg.DoModal() <> win32con.IDOK:
raise KeyboardInterrupt, "User cancelled the process"
retPath = dlg.GetPathName()
return os.path.abspath(retPath)
|
ImportError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/win32/scripts/regsetup.py/LocateFileName
|
7,763
|
def LocateOptionalPath(fileName, searchPaths):
"""Like LocatePath, but returns None if the user cancels.
"""
try:
return LocatePath(fileName, searchPaths)
except __HOLE__:
return None
|
KeyboardInterrupt
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/win32/scripts/regsetup.py/LocateOptionalPath
|
7,764
|
def LocateOptionalFileName(fileName, searchPaths = None):
"""Like LocateFileName, but returns None if the user cancels.
"""
try:
return LocateFileName(fileName, searchPaths)
except __HOLE__:
return None
|
KeyboardInterrupt
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/win32/scripts/regsetup.py/LocateOptionalFileName
|
7,765
|
def send_click(self, click):
"""
Sends a click to the server if the previous click has been confirmed.
Args:
click (BaseClick): The click to send.
Returns:
the click's action ID if the click could be sent,
None if the previous click has not been received and confirmed yet.
"""
# only send if previous click got confirmed
if self.last_click:
return None
inv = self.inventory
packet = click.get_packet(inv)
try:
craft_result_slot = inv.window.craft_result_slot.slot_nr
if packet['slot'] == craft_result_slot:
# send wrong click to update inventory after crafting
packet['clicked_item'] = {'id': -1}
except __HOLE__:
pass # not crafting
self.action_id += 1
packet['window_id'] = inv.window.window_id
packet['action'] = self.action_id
self.last_click = click
self.net.push_packet('PLAY>Click Window', packet)
return self.action_id
|
AttributeError
|
dataset/ETHPy150Open SpockBotMC/SpockBot/spockbot/plugins/helpers/inventory.py/InventoryPlugin.send_click
|
7,766
|
def find_all_tests(root_suite):
"""Yields all the tests and their names from a given suite."""
suites = [root_suite]
while suites:
tsuite = suites.pop()
try:
# not that suite is iterable, thus every sub-suite from suite
# is appended to the suites list
suites.extend(tsuite)
except __HOLE__:
yield tsuite, '{}.{}.{}'.format(
tsuite.__class__.__module__,
tsuite.__class__.__name__,
tsuite._testMethodName ).lower()
|
TypeError
|
dataset/ETHPy150Open BasicWolf/kaylee/kaylee/testsuite/__init__.py/find_all_tests
|
7,767
|
def tests():
def assert_eq(x, y):
try:
expr = xpath(x)
assert expr == y, expr
except __HOLE__:
ast = Decompiler(x.gi_code).ast
print ast
print
raise
assert (div for div in DOM).gi_frame.f_locals['.0'] == DOM
assert_eq((div for div in DOM), '//div')
assert_eq((span for div in DOM for span in div), '//div//span')
assert_eq((span.cls for div in DOM for span in div), '//div//span/@class')
assert_eq((span.text for span in DOM), '//span/text()')
assert_eq((span for span in DOM if span.name == 'main'), "//span[@name='main']")
assert_eq((div for span in DOM if span.name == 'main' for div in span), "//span[@name='main']//div")
assert_eq((div for span in DOM for div in span if span.name == 'main'), "//span[@name='main']//div")
assert_eq((div for span in DOM if span.name == 'main' for div in span if div.cls == 'row'), "//span[@name='main']//div[@class='row']")
assert_eq((div for span in DOM for div in span if div.cls == 'row' and span.name == 'main'), "//span[@name='main']//div[@class='row']") # tricky case - need to dissect And
assert_eq((a for a in DOM if a.href == 'http://www.google.com' and a.name == 'goog'), "//a[@href='http://www.google.com' and @name='goog']")
assert_eq((a for a in DOM if '.com' in a.href), "//a[contains(@href, '.com')]")
assert_eq((a for a in DOM if '.com' not in a.href), "//a[not(contains(@href, '.com'))]")
assert_eq((a for a in DOM if not '.com' in a.href), "//a[not(contains(@href, '.com'))]")
assert_eq((div for div in DOM if div.id != 'main'), "//div[@id!='main']")
assert_eq((div for div in DOM if not div.id == 'main'), "//div[not(@id='main')]")
assert_eq((X for X in DOM if X.name == 'main'), "//*[@name='main']")
assert_eq((span for div in DOM for X in div.following_siblings for span in X.children), '//div/following-sibling::*/span')
assert_eq((a.href for a in DOM if any(p for p in a.following_siblings)), '//a[./following-sibling::p]/@href')
assert_eq((a.href for a in DOM if any(p for p in a.following_siblings if p.id)), '//a[./following-sibling::p[@id]]/@href')
assert_eq((X for X in DOM if any(p for p in DOM)), '//*[//p]')
assert_eq((span for div in DOM for span in div if div.id in ('main', 'other')), "//div[@id='main' or @id='other']//span")
assert_eq((X for X in DOM if X.name in ('a', 'b', 'c')), "//*[@name='a' or @name='b' or @name='c']")
allowed_values = 'a b c'.split()
assert_eq((X for X in DOM if X.name in allowed_values), "//*[@name='a' or @name='b' or @name='c']")
allowed_values = map(str, range(5))
assert_eq((X for X in DOM if X.value in allowed_values), "//*[@value='0' or @value='1' or @value='2' or @value='3' or @value='4']")
assert_eq((X for X in DOM if all(p for p in X if p.id == 'a')), "//*[not(.//p[not(@id='a')])]")
assert_eq((X for X in DOM if all(p for p in DOM if p.id == 'a')), "//*[not(//p[not(@id='a')])]")
assert_eq((X for X in DOM if any(p.id == 'a' for p in X)), "//*[.//p/@id='a']")
assert_eq((X for X in DOM if all(not p.id == 'a' for p in X)), "//*[not(.//p/@id!='a')]")
assert_eq((X for X in DOM if all(not p.id != 'a' for p in X)), "//*[not(.//p/@id='a')]")
assert_eq((X for X in DOM if len(td for td in X.following_siblings) == 0), "//*[count(./following-sibling::td)=0]")
assert_eq((td.text for td in DOM if td.cls == 'wideonly' and len(td for td in td.following_siblings) == 0), "//td[@class='wideonly' and count(./following-sibling::td)=0]/text()")
assert_eq((X for X in DOM if X.data-bind == 'a'), "//*[@data-bind='a']")
assert_eq((X.data-bind for X in DOM), "//*/@data-bind")
#assert_eq((form.action for form in DOM if all(input.name == 'a' for input in form.children)), "//form[not(./input/@name!='a')]/@action")
#assert_eq((X for X in DOM if all(p.id in ('a', 'b') for p in X)), "//*[not(.//p/@id='a' or .//p/@id='b')]")
#assert_eq((X for X in DOM if all('x' in p.id for p in X)), "//*[not(.//p[not(contains(@id, 'x'))])]") # Gives //*[not(.contains(@id, //p))]
#TODO: position (e.g. xpath(a for a in (a for a in DOM)[:20]) ???)
#TODO: position (e.g. xpath(a for X in DOM for a in X[20:]) ???)
tree = etree.fromstring('''
<html>
<div id='main' class='main'>
<a href='http://www.google.com'>Google</a>
<a href='http://www.chasestevens.com'>Not Google</a>
<p>Lorem ipsum</p>
<p id='123'>no numbers here</p>
<p id='numbers_only'>123</p>
</div>
<div id='123' class='secondary'>
<a href='http://www.google.org'>Google Charity</a>
<a href='http://www.chasestevens.org'>Broken link!</a>
</div>
</html>
''')
assert len(query(a for a in tree)) == 4
assert query(a for a in tree if 'Not Google' in a.text)[0].attrib.get('href') != 'http://www.google.com'
assert query(a for a in tree if 'Not Google' not in a.text)[0].attrib.get('href') == 'http://www.google.com'
import re
assert next(
node
for node in
query(
p
for p in
tree
if node.id
)
if re.match(r'\D+', node.attrib.get('id'))
).text == '123'
assert query( # switch between xpyth and regular comprehensions
a
for a in
next(
node
for node in
query(
div
for div in
tree
)
if re.match(r'\d+', node.attrib.get('id'))
)
if 'google' in a.href
)[0].text == 'Google Charity'
assert set(query(
a.href
for a in
tree
if any(
p
for p in
a.following_siblings
)
)) == {'http://www.google.com', 'http://www.chasestevens.com'}
assert set(query(
a.href
for a in
tree
if not any(
p
for p in
a.following_siblings
)
)) == {'http://www.google.org', 'http://www.chasestevens.org'}
assert set(query(
a.href
for a in
tree
if not any(
p
for p in
a.following_siblings
)
and any(
p
for p in
a.following_siblings
)
)) == set()
assert set(query(
a.href
for a in
tree
if any(
p
for p in
tree
)
)) == {'http://www.google.com', 'http://www.chasestevens.com', 'http://www.google.org', 'http://www.chasestevens.org'}
assert not query(
a.href
for a in
tree
if not any(
p
for p in
tree
)
)
|
AssertionError
|
dataset/ETHPy150Open hchasestevens/xpyth/tests.py/tests
|
7,768
|
def ingest ( self ):
"""Read the stack and ingest"""
with closing ( ocpcaproj.OCPCAProjectsDB() ) as projdb:
proj = projdb.loadProject ( self.token )
with closing ( ocpcadb.OCPCADB (proj) ) as db:
(startslice, endslice) = proj.datasetcfg.slicerange
(xcubedim, ycubedim, zcubedim) = cubedims = proj.datasetcfg.cubedim[self.resolution]
(ximagesz, yimagesz) = proj.datasetcfg.imagesz[self.resolution]
batchsz = zcubedim
# Ingest in database aligned slabs in the z dimension
for sl in range( startslice, endslice, batchsz ):
slab = np.zeros ( [zcubedim, yimagesz, ximagesz], dtype=np.uint8 )
# over each slice
for b in range( batchsz ):
#if we are at the end of the space, quit
if ( sl + b <= endslice ):
filename = '{}dyer15_3_maskimg_{:0>4}.tif'.format(self.path, sl+b)
#filename = '{}proj_2_{:0>5}.tif'.format(self.path, sl+b)
#filename = '{}{}_maskimg_{:0>4}.tif'.format(self.path, self.token, sl+b)
#filename = '{}xbrain_dyer15_slice{:0>4}.tif'.format(self.path, sl+b )
print filename
try:
img = Image.open(filename,'r')
slab [b,:,:] = np.asarray(img)
except __HOLE__, e:
print "Failed to open file %s" % (e)
img = np.zeros((yimagesz,ximagesz), dtype=np.uint8)
slab [b,:,:] = img
for y in range ( 0, yimagesz, ycubedim ):
for x in range ( 0, ximagesz, xcubedim ):
zidx = ocplib.XYZMorton ( [ x/xcubedim, y/ycubedim, (sl-startslice)/zcubedim] )
cubedata = np.zeros ( [zcubedim, ycubedim, xcubedim], dtype=np.uint8 )
xmin = x
ymin = y
xmax = ( min(ximagesz-1, x+xcubedim-1) ) + 1
ymax = ( min(yimagesz-1, y+ycubedim-1) ) + 1
zmin = 0
zmax = min(sl+zcubedim,endslice)
cubedata[0:zmax-zmin,0:ymax-ymin,0:xmax-xmin] = slab[zmin:zmax,ymin:ymax,xmin:xmax]
cube = imagecube.ImageCube16 ( cubedims )
cube.zeros()
cube.data = cubedata
if np.count_nonzero ( cube.data ) != 0:
print zidx, ocplib.MortonXYZ(zidx)
db.putCube ( zidx, self.resolution, cube )
print "Commiting at x=%s, y=%s, z=%s" % (x,y,sl)
db.conn.commit()
|
IOError
|
dataset/ETHPy150Open neurodata/ndstore/ingest/eva/eva15.py/CatmaidIngest.ingest
|
7,769
|
def POST(self):
i = web.input(email="", password="")
if not waltz.utils.valid_email(i.email):
return self.GET(msg="invalid email")
try:
u = User(i.email)
except __HOLE__:
return self.GET(msg="no such user")
if u.authenticate(i.password):
session().update({'logged': True,
'email': i.email})
raise web.seeother('/')
return self.GET(msg="invalid credentials")
|
AttributeError
|
dataset/ETHPy150Open mekarpeles/waltz/examples/simplelogin/routes/auth.py/Login.POST
|
7,770
|
def render_tag(self, context, runcaseversion, user, environment, varname):
"""Get/construct Result and place it in context under ``varname``"""
# check for any completed result states from other users for this
# same case/env combo.
include_kwargs = dict(
environment=environment,
runcaseversion=runcaseversion,
is_latest=True,
status__in=(model.Result.COMPLETED_STATES +
[model.Result.STATUS.skipped]),
)
exclude_kwargs = dict(
tester=user,
)
try:
result = model.Result.objects.only(
"id",
"status",
"tester",
"comment",
).filter(
**include_kwargs).exclude(**exclude_kwargs).order_by(
"-modified_on")[0]
except __HOLE__:
result = None
context[varname] = result
return u""
|
IndexError
|
dataset/ETHPy150Open mozilla/moztrap/moztrap/view/runtests/templatetags/execution.py/OtherResultFor.render_tag
|
7,771
|
def routerrule_list(request, **params):
if 'router_id' in params:
params['device_id'] = params['router_id']
if 'router' in request.META:
router = request.META['router']
else:
router = api.router_get(request, params['device_id'])
try:
rules = router.router_rules
except __HOLE__:
return (False, [])
return (True, rules)
|
AttributeError
|
dataset/ETHPy150Open CiscoSystems/avos/openstack_dashboard/dashboards/project/routers/extensions/routerrules/rulemanager.py/routerrule_list
|
7,772
|
def get_errno(e):
"""
Return the errno of an exception, or the first argument if errno is not available.
:param e: the exception object
"""
try:
return e.errno
except __HOLE__:
return e.args[0]
|
AttributeError
|
dataset/ETHPy150Open jasonrbriggs/stomp.py/stomp/backward.py/get_errno
|
7,773
|
def get_dihedrals_for_conformation(self, conformation):
coordsets = self.structure_ensemble.getCoordsets()
self.structure_ensemble.setCoords(coordsets[conformation])
dihedral_angles = []
for residue in self.structure_ensemble.iterResidues():
try:
dihedral_angles.append(calcPhi(residue, radian=False, dist=None))
except __HOLE__:
dihedral_angles.append(0)
try:
dihedral_angles.append(calcPsi(residue, radian=False, dist=None))
except ValueError:
dihedral_angles.append(0)
# 0 links with Nth residue and Nth with 0th. Those values are not needed anyway.
return dihedral_angles[1:-1]
|
ValueError
|
dataset/ETHPy150Open victor-gil-sepulveda/pyProCT/pyproct/data/handler/protein/proteinEnsembleData.py/ProteinEnsembleData.get_dihedrals_for_conformation
|
7,774
|
def _scan(self):
# We're only interested in file-based modules (not C extensions).
modules = [m.__file__ for m in sys.modules.values()
if m and getattr(m, '__file__', None)]
for filename in modules:
# We're only interested in the source .py files.
filename = _normalize_filename(filename)
# stat() the file. This might fail if the module is part of a
# bundle (.egg). We simply skip those modules because they're
# not really reloadable anyway.
try:
stat = os.stat(filename)
except __HOLE__:
continue
# Check the modification time. We need to adjust on Windows.
mtime = stat.st_mtime
if _win32:
mtime -= stat.st_ctime
# Check if we've seen this file before. We don't need to do
# anything for new files.
if filename in self.mtimes:
# If this file's mtime has changed, queue it for reload.
if mtime != self.mtimes[filename]:
self.queue.put(filename)
# Record this filename's current mtime.
self.mtimes[filename] = mtime
|
OSError
|
dataset/ETHPy150Open jparise/python-reloader/monitor.py/ModuleMonitor._scan
|
7,775
|
@expose(help="create project files from a template")
def create(self):
if not len(self.app.pargs.extra) >= 1:
raise boss_exc.BossArgumentError("Destination path required.")
if not self.app.pargs.template:
raise boss_exc.BossArgumentError("Template label required.")
sources = self.app.db.get('sources')
try:
tmpl_parts = self.app.pargs.template.split(':')
source = tmpl_parts[0]
template = tmpl_parts[1]
except __HOLE__ as e:
source = 'boss'
template = self.app.pargs.template
src = SourceManager(self.app)
src.create_from_template(source, template, self.app.pargs.extra[0])
|
IndexError
|
dataset/ETHPy150Open datafolklabs/boss/boss/cli/controllers/base.py/BossBaseController.create
|
7,776
|
def get_taskwarrior_config(path):
try:
return TaskRc(path)
except __HOLE__:
return {}
|
IOError
|
dataset/ETHPy150Open coddingtonbear/taskwarrior-inthe.am/taskwarrior_inthe_am/taskwarrior.py/get_taskwarrior_config
|
7,777
|
def continue_with(self, func):
try:
response = func()
except __HOLE__ as exception:
if self.parent:
self.parent.on_success(exception.args)
except BaseException as exception:
if isinstance(exception, TaskFailed):
exception.tasktrace.append(self)
if self.parent:
self.parent.on_error(exception)
else:
raise
else:
self.register(response)
|
StopIteration
|
dataset/ETHPy150Open SpockBotMC/SpockBot/spockbot/plugins/tools/task.py/Task.continue_with
|
7,778
|
def test_main():
for name in dir(_testcapi):
if name.startswith('test_'):
test = getattr(_testcapi, name)
if test_support.verbose:
print "internal", name
try:
test()
except _testcapi.error:
raise test_support.TestFailed, sys.exc_info()[1]
# some extra thread-state tests driven via _testcapi
def TestThreadState():
import thread
import time
if test_support.verbose:
print "auto-thread-state"
idents = []
def callback():
idents.append(thread.get_ident())
_testcapi._test_thread_state(callback)
a = b = callback
time.sleep(1)
# Check our main thread is in the list exactly 3 times.
if idents.count(thread.get_ident()) != 3:
raise test_support.TestFailed, \
"Couldn't find main thread correctly in the list"
try:
_testcapi._test_thread_state
have_thread_state = True
except __HOLE__:
have_thread_state = False
if have_thread_state:
TestThreadState()
import threading
t=threading.Thread(target=TestThreadState)
t.start()
t.join()
|
AttributeError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/test/test_capi.py/test_main
|
7,779
|
def run(port=5000):
host = '127.0.0.1'
try:
from werkzeug.serving import run_simple
except __HOLE__:
pass
else:
run_simple(host, port, app, use_reloader=True, use_debugger=True)
return
print('Could not import Werkzeug, running without the reloader '
'or debugger.')
from wsgiref.simple_server import make_server
server = make_server(host, port, app)
print('Listening on http://%s:%s/ ...' % (host, port))
server.serve_forever()
|
ImportError
|
dataset/ETHPy150Open Kozea/WeasyPrint/weasyprint/navigator.py/run
|
7,780
|
def _eval_derivative(self, s):
try:
res = 0
if self.args[0].has(s) or self.args[1].has(s):
for i, p in enumerate(self._diffargs):
m = self._diffargs[i].diff(s)
if m != 0:
res += self.fdiff((1, i))*m
return res + self.fdiff(3)*self.args[2].diff(s)
except (ArgumentIndexError, __HOLE__):
return Derivative(self, s)
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/functions/special/hyper.py/TupleParametersBase._eval_derivative
|
7,781
|
def _eval_evalf(self, prec):
# The default code is insufficient for polar arguments.
# mpmath provides an optional argument "r", which evaluates
# G(z**(1/r)). I am not sure what its intended use is, but we hijack it
# here in the following way: to evaluate at a number z of |argument|
# less than (say) n*pi, we put r=1/n, compute z' = root(z, n)
# (carefully so as not to loose the branch information), and evaluate
# G(z'**(1/r)) = G(z'**n) = G(z).
from sympy.functions import exp_polar, ceiling
from sympy import Expr
import mpmath
z = self.argument
znum = self.argument._eval_evalf(prec)
if znum.has(exp_polar):
znum, branch = znum.as_coeff_mul(exp_polar)
if len(branch) != 1:
return
branch = branch[0].args[0]/I
else:
branch = S(0)
n = ceiling(abs(branch/S.Pi)) + 1
znum = znum**(S(1)/n)*exp(I*branch / n)
# Convert all args to mpf or mpc
try:
[z, r, ap, bq] = [arg._to_mpmath(prec)
for arg in [znum, 1/n, self.args[0], self.args[1]]]
except __HOLE__:
return
with mpmath.workprec(prec):
v = mpmath.meijerg(ap, bq, z, r)
return Expr._from_mpmath(v, prec)
|
ValueError
|
dataset/ETHPy150Open sympy/sympy/sympy/functions/special/hyper.py/meijerg._eval_evalf
|
7,782
|
def success(self):
if self.status == 403:
raise InvalidCredsError('Invalid credentials', GoGridNodeDriver)
if self.status == 401:
raise InvalidCredsError('API Key has insufficient rights', GoGridNodeDriver)
if not self.body:
return None
try:
return json.loads(self.body)['status'] == 'success'
except __HOLE__:
raise MalformedResponseError('Malformed reply', body=self.body, driver=GoGridNodeDriver)
|
ValueError
|
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/gogrid.py/GoGridResponse.success
|
7,783
|
def parse_error(self):
try:
return json.loads(self.body)["list"][0]['message']
except __HOLE__:
return None
|
ValueError
|
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/gogrid.py/GoGridResponse.parse_error
|
7,784
|
def list_nodes(self):
passwords_map = {}
res = self._server_list()
try:
for password in self._password_list()['list']:
try:
passwords_map[password['server']['id']] = password['password']
except __HOLE__:
pass
except InvalidCredsError:
# some gogrid API keys don't have permission to access the password list.
pass
return [ self._to_node(el, passwords_map.get(el.get('id')))
for el
in res['list'] ]
|
KeyError
|
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/gogrid.py/GoGridNodeDriver.list_nodes
|
7,785
|
def _get_first_ip(self, location=None):
ips = self.ex_list_ips(public=True, assigned=False, location=location)
try:
return ips[0].ip
except __HOLE__:
raise LibcloudError('No public unassigned IPs left',
GoGridNodeDriver)
|
IndexError
|
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/gogrid.py/GoGridNodeDriver._get_first_ip
|
7,786
|
def ex_create_node_nowait(self, **kwargs):
"""Don't block until GoGrid allocates id for a node
but return right away with id == None.
The existance of this method is explained by the fact
that GoGrid assigns id to a node only few minutes after
creation."""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
try:
ip = kwargs['ex_ip']
except __HOLE__:
ip = self._get_first_ip(kwargs.get('location'))
params = {'name': name,
'image': image.id,
'description': kwargs.get('ex_description', ''),
'isSandbox': str(kwargs.get('ex_issandbox', False)).lower(),
'server.ram': size.id,
'ip': ip}
object = self.connection.request('/api/grid/server/add',
params=params, method='POST').object
node = self._to_node(object['list'][0])
return node
|
KeyError
|
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/gogrid.py/GoGridNodeDriver.ex_create_node_nowait
|
7,787
|
def has_key(self,key):
if self._is_hasheable(key):
return self.hasheable_instances.has_key(key)
else:
for i,j in self.not_hasheable_instances:
try:
if i == key:
return True
except __HOLE__:
return False
except:
return False
return False
|
TypeError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/mapper.py/_InstanceDictionary.has_key
|
7,788
|
def remove_unpickables(instance):
# This function tries to do the same as:
# load_from_dto(dto_generator(obj), skip_recoverables = True )
# in a far faster way
already_removed_hash = {} # For those that can be hashed
already_removed_nothash = {} # For those that can't be hashed:
# {
# type(instance) : {
# str(instance) : [instances]
# }
# }
def removing_unpickables(instance):
type_instance = type(instance)
if type_instance in _basic_normal_types:
return instance
if type_instance in _recoverable_exceptions:
return None
if issubclass(type_instance, _default_exceptions):
return None
if hasattr(instance, '__slots__') and not hasattr(instance, '__getstate__'):
return None
if type_instance in _convert_to_str:
return str(instance)
try:
if already_removed_hash.has_key(instance):
return instance
else:
was_hashable = True
already_removed_hash[instance] = None
except __HOLE__:
was_hashable = False # Not hashable
if not was_hashable:
type_dict = already_removed_nothash.get(type_instance)
str_instance = str(instance)
if type_dict is None:
already_removed_nothash[type_instance] = { str_instance : [instance] }
else:
inst_list = type_dict.get(str_instance)
if inst_list is None:
type_dict[str_instance] = [instance]
else:
for inst in inst_list:
try:
if instance == inst:
return instance
except:
pass
inst_list.append(instance)
if type_instance == tuple:
return tuple(( removing_unpickables(element) for element in instance ))
elif type_instance == list:
return [ removing_unpickables(element) for element in instance ]
elif type_instance == dict:
new_dict = {}
for key, value in instance.iteritems():
new_dict[removing_unpickables(key)] = removing_unpickables(value)
return new_dict
elif type_instance == _new.instance or hasattr(instance,'__reduce__') or hasattr(instance, '__reduce_ex__'):
attributes = ( attr for attr in dir(instance)
#if not attr.startswith('__') or not attr.endswith('__')
)
for attr in attributes:
if hasattr(instance.__class__, attr):
data_type = getattr(instance.__class__, attr)
if type(data_type) == property:
continue
continue
if isinstance(instance, Exception) and attr == 'message':
continue
attr_value = getattr(instance, attr)
setattr(instance, attr, removing_unpickables(attr_value))
return instance
else:
return None
result = removing_unpickables(instance)
return result
|
TypeError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/mapper.py/remove_unpickables
|
7,789
|
def load_from_dto(instance,exceptions = None,skip_recoverables=False):
if exceptions == None:
exceptions = _default_exceptions
else:
tmp_exceptions = ()
for i in exceptions:
tmp_exceptions = tmp_exceptions + (i,)
exceptions = tmp_exceptions
parsed_instances = _InstanceDictionary()
def fill_tree(current_node,name,current_instance):
new_node = current_node.append_child(name,current_instance)
if new_node.data_type == _Node.BASIC or new_node.data_type == _Node.IGNORABLE:
return # No information inside a basic data type
# Don't repeat twice the same instance
if parsed_instances.has_key(current_instance):
parsed_instances[current_instance].append(new_node)
for parsed_node in parsed_instances[current_instance]:
parsed_node.repeated = True
return
else:
parsed_instances[current_instance] = [new_node]
if new_node.data_type in (_Node.TUPLE, _Node.LIST):
# Tuples and lists have elements inside
# They name will be their number
for number,value in enumerate(current_instance):
fill_tree(new_node,number,value)
elif new_node.data_type == _Node.DICT:
# Their name will be their key
# Very important: in dictionaries, the values will be pairs of (key,value)
for i in current_instance:
fill_tree(new_node,i,(i,current_instance[i]))
elif new_node.data_type in (_Node.INSTANCE, _Node.OBJECT, _Node.EXCEPTION, _Node.BUILTIN_E):
# Elements are the elements which we will take
elements = [ i for i in dir(current_instance)
if not i.startswith('__') or not i.endswith('__')]
for i in elements:
if new_node.data_type == _Node.EXCEPTION and i == 'message':
continue # Deprecated
element = getattr(current_instance,i)
fill_tree(new_node,i,element)
else:
raise TypeError(
'Unrecognized type: %s. Configure it at voodoo.mapper.py'
% new_node.data_type
)
first_node = _Node(None,None,None,())
fill_tree(first_node,None,instance)
# At this point, instance_tree has been created
dto_parsed_instances = _InstanceDictionary()
# Let's create the dto structure
def load_dto_value(current_node):
if current_node.data_type == _Node.BASIC:
return current_node.element
# Check parsed_instances
if dto_parsed_instances.has_key(current_node.element):
return dto_parsed_instances[current_node.element]
if current_node.data_type == _Node.TUPLE:
new_tuple = ()
for i,element in current_node.children:
new_tuple = new_tuple + (load_dto_value(element),)
dto_parsed_instances[current_node.element] = new_tuple
return new_tuple
elif current_node.data_type == _Node.LIST:
new_list = []
dto_parsed_instances[current_node.element] = new_list
for i, element in current_node.children:
new_list.append(load_dto_value(element))
return new_list
elif current_node.data_type == _Node.DICT:
new_dict = {}
dto_parsed_instances[current_node.element] = new_dict
for i, element in current_node.children:
# A dictionary is a set of tuples, where
# the first element is the key and the
# second element is the value
this_tuple = load_dto_value(element)
key = this_tuple[0]
value = this_tuple[1]
new_dict[key] = value
return new_dict
elif current_node.data_type == _Node.BUILTIN_E:
dto_object = _DtoBuiltin()
for i, element in current_node.children:
setattr(dto_object,i,load_dto_value(element))
builtin_type = getattr(__builtin__, dto_object._old_name)
inst = builtin_type()
for i, element in current_node.children:
try:
setattr(inst,i,load_dto_value(element))
except __HOLE__:
pass
dto_parsed_instances[current_node.element] = inst
try:
del inst._old_module
del inst._old_name
except AttributeError:
pass
return inst
elif current_node.data_type in (_Node.INSTANCE, _Node.OBJECT, _Node.EXCEPTION):
if current_node.data_type == _Node.INSTANCE:
dto_object = _DtoClass()
elif current_node.data_type == _Node.OBJECT:
dto_object = _DtoObject()
else:
dto_object = _DtoError()
dto_parsed_instances[current_node.element] = dto_object
for i, element in current_node.children:
setattr(dto_object,i,load_dto_value(element))
old_module = dto_object._old_module
old_name = dto_object._old_name
__import__(old_module,globals(),locals(),[])
the_class = getattr(_sys.modules[old_module],old_name)
dto_object.__class__ = the_class
if hasattr(dto_object,'deserialize'):
dto_object.deserialize()
del dto_object._old_module
del dto_object._old_name
if isinstance(dto_object,_DtoMissing):
if skip_recoverables:
return None
# it is a DtoMissing
if dto_object.data_type == str(_thread.LockType):
l = _threading.Lock()
dto_parsed_instances[current_node.element] = l
return l
elif dto_object.data_type == str(_threading._Condition):
c = _threading.Condition()
dto_parsed_instances[current_node.element] = c
return c
else:
raise TypeError(
"""No handler for the missing value %s at %s.
Check that you don't have a value in _recoverable_exceptions,
and no handler in load_from_dto"""
% (i,dto_object)
)
return dto_object
else:
raise TypeError(
'Unrecognized type: %s. Configure it at voodoo.mapper.py, load_dto_value'
% current_node.data_type
)
return load_dto_value(first_node.children[0][1])
|
AttributeError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/mapper.py/load_from_dto
|
7,790
|
def jsonrpc_method(name,
authenticated=False,
authentication_arguments=['username', 'password'],
safe=False,
validate=False,
site=default_site):
"""
Wraps a function turns it into a json-rpc method. Adds several attributes
to the function specific to the JSON-RPC machinery and adds it to the default
jsonrpc_site if one isn't provided. You must import the module containing
these functions in your urls.py.
name
The name of your method. IE: `namespace.methodName` The method name
can include type information, like `ns.method(String, Array) -> Nil`.
authenticated=False
Adds `username` and `password` arguments to the beginning of your
method if the user hasn't already been authenticated. These will
be used to authenticate the user against `django.contrib.authenticate`
If you use HTTP auth or other authentication middleware, `username`
and `password` will not be added, and this method will only check
against `request.user.is_authenticated`.
You may pass a callable to replace `django.contrib.auth.authenticate`
as the authentication method. It must return either a User or `None`
and take the keyword arguments `username` and `password`.
safe=False
Designates whether or not your method may be accessed by HTTP GET.
By default this is turned off.
validate=False
Validates the arguments passed to your method based on type
information provided in the signature. Supply type information by
including types in your method declaration. Like so:
@jsonrpc_method('myapp.specialSauce(Array, String)', validate=True)
def special_sauce(self, ingredients, instructions):
return SpecialSauce(ingredients, instructions)
Calls to `myapp.specialSauce` will now check each arguments type
before calling `special_sauce`, throwing an `InvalidParamsError`
when it encounters a discrepancy. This can significantly reduce the
amount of code required to write JSON-RPC services.
site=default_site
Defines which site the jsonrpc method will be added to. Can be any
object that provides a `register(name, func)` method.
"""
def decorator(func):
arg_names = getargspec(func)[0][1:]
X = {'name': name, 'arg_names': arg_names}
if authenticated:
if authenticated is True or six.callable(authenticated):
# TODO: this is an assumption
X['arg_names'] = authentication_arguments + X['arg_names']
X['name'] = _inject_args(X['name'], ('String', 'String'))
from django.contrib.auth import authenticate as _authenticate
from django.contrib.auth.models import User
else:
authenticate = authenticated
@six.wraps(func)
def _func(request, *args, **kwargs):
user = getattr(request, 'user', None)
is_authenticated = getattr(user, 'is_authenticated',
lambda: False)
if ((user is not None and six.callable(is_authenticated) and
not is_authenticated()) or user is None):
user = None
try:
creds = args[:len(authentication_arguments)]
if len(creds) == 0:
raise IndexError
# Django's authenticate() method takes arguments as dict
user = _authenticate(username=creds[0],
password=creds[1], *creds[2:])
if user is not None:
args = args[len(authentication_arguments):]
except IndexError:
auth_kwargs = {}
try:
for auth_kwarg in authentication_arguments:
auth_kwargs[auth_kwarg] = kwargs[auth_kwarg]
except __HOLE__:
raise InvalidParamsError(
'Authenticated methods require at least '
'[%(arguments)s] or {%(arguments)s} arguments' %
{'arguments': ', '.join(authentication_arguments)})
user = _authenticate(**auth_kwargs)
if user is not None:
for auth_kwarg in authentication_arguments:
kwargs.pop(auth_kwarg)
if user is None:
raise InvalidCredentialsError
request.user = user
return func(request, *args, **kwargs)
else:
_func = func
@six.wraps(_func)
def exc_printer(*a, **kw):
try:
return _func(*a, **kw)
except Exception as e:
try:
print('JSONRPC SERVICE EXCEPTION')
import traceback
traceback.print_exc()
except:
pass
six.reraise(*sys.exc_info())
ret_func = exc_printer
method, arg_types, return_type = \
_parse_sig(X['name'], X['arg_names'], validate)
ret_func.json_args = X['arg_names']
ret_func.json_arg_types = arg_types
ret_func.json_return_type = return_type
ret_func.json_method = method
ret_func.json_safe = safe
ret_func.json_sig = X['name']
ret_func.json_validate = validate
site.register(method, ret_func)
return ret_func
return decorator
|
KeyError
|
dataset/ETHPy150Open samuraisam/django-json-rpc/jsonrpc/__init__.py/jsonrpc_method
|
7,791
|
def i2cReg(self,wr,addr=0x00,data=0x0000):
try :
if(wr == "w"):
tmp = (data&0x00FF)<<8 | (data&0xFF00)>>8
#print "W:0x%02X = 0x%04X" % (addr,data)
return bus.write_word_data(self.I2C_ADDR,addr,tmp)
elif(wr == "r"):
tmp = bus.read_word_data(self.I2C_ADDR,addr)
tmp = (tmp&0x00FF)<<8 | (tmp&0xFF00)>>8
#print "R:0x%02X = 0x%04X" % (addr,tmp)
return tmp
else :
return -1
except __HOLE__ as err:
print("No ACK!")
time.sleep(0.1)
self.i2cReg(wr,addr,data)
|
IOError
|
dataset/ETHPy150Open DexterInd/GrovePi/Software/Python/grove_i2c_temp_hum_hdc1000/grove_i2c_temp_hum_hdc1000.py/HDC1000.i2cReg
|
7,792
|
def Temperature(self):
try :
bus.write_byte(self.I2C_ADDR,0x00)
time.sleep(0.20)
d=[0]*2
# print self.i2c.read_block_data(I2C_ADDR,0x00)
d[0] = bus.read_byte(self.I2C_ADDR)
time.sleep(0.001)
d[1] = bus.read_byte(self.I2C_ADDR)
time.sleep(0.001)
#print "0x%02X :0x%02X" % (d[0],d[1])
raw = ( d[0]<<8 | d[1] )
#print (float(raw)/(2**16))*(165-40)
return float(raw)/65536.0*165.0-40.0
except __HOLE__ as err:
print("No ACK!")
time.sleep(0.1)
self.Temperature()
|
IOError
|
dataset/ETHPy150Open DexterInd/GrovePi/Software/Python/grove_i2c_temp_hum_hdc1000/grove_i2c_temp_hum_hdc1000.py/HDC1000.Temperature
|
7,793
|
def Humidity(self):
try :
bus.write_byte(self.I2C_ADDR,0x00)
time.sleep(0.10)
d=[0]*2
d[0] = bus.read_byte(self.I2C_ADDR)
time.sleep(0.001)
d[1] = bus.read_byte(self.I2C_ADDR)
time.sleep(0.001)
#print "0x%02X :0x%02X" % (d[0],d[1])
raw = ( d[0]<<8 | d[1] )
return float(raw)/65536.0*100.0
except __HOLE__ as err:
print("No ACK!")
time.sleep(0.1)
self.Humidity()
|
IOError
|
dataset/ETHPy150Open DexterInd/GrovePi/Software/Python/grove_i2c_temp_hum_hdc1000/grove_i2c_temp_hum_hdc1000.py/HDC1000.Humidity
|
7,794
|
def default_test_processes():
"""
Default number of test processes when using the --parallel option.
"""
# The current implementation of the parallel test runner requires
# multiprocessing to start subprocesses with fork().
# On Python 3.4+: if multiprocessing.get_start_method() != 'fork':
if not hasattr(os, 'fork'):
return 1
try:
return int(os.environ['DJANGO_TEST_PROCESSES'])
except __HOLE__:
return multiprocessing.cpu_count()
|
KeyError
|
dataset/ETHPy150Open django/django/django/test/runner.py/default_test_processes
|
7,795
|
def run(self, result):
"""
Distribute test cases across workers.
Return an identifier of each test case with its result in order to use
imap_unordered to show results as soon as they're available.
To minimize pickling errors when getting results from workers:
- pass back numeric indexes in self.subsuites instead of tests
- make tracebacks picklable with tblib, if available
Even with tblib, errors may still occur for dynamically created
exception classes such Model.DoesNotExist which cannot be unpickled.
"""
if tblib is not None:
tblib.pickling_support.install()
counter = multiprocessing.Value(ctypes.c_int, 0)
pool = multiprocessing.Pool(
processes=self.processes,
initializer=self.init_worker.__func__,
initargs=[counter])
args = [
(index, subsuite, self.failfast)
for index, subsuite in enumerate(self.subsuites)
]
test_results = pool.imap_unordered(self.run_subsuite.__func__, args)
while True:
if result.shouldStop:
pool.terminate()
break
try:
subsuite_index, events = test_results.next(timeout=0.1)
except multiprocessing.TimeoutError:
continue
except __HOLE__:
pool.close()
break
tests = list(self.subsuites[subsuite_index])
for event in events:
event_name = event[0]
handler = getattr(result, event_name, None)
if handler is None:
continue
test = tests[event[1]]
args = event[2:]
handler(test, *args)
pool.join()
return result
|
StopIteration
|
dataset/ETHPy150Open django/django/django/test/runner.py/ParallelTestSuite.run
|
7,796
|
def is_discoverable(label):
"""
Check if a test label points to a python package or file directory.
Relative labels like "." and ".." are seen as directories.
"""
try:
mod = import_module(label)
except (ImportError, __HOLE__):
pass
else:
return hasattr(mod, '__path__')
return os.path.isdir(os.path.abspath(label))
|
TypeError
|
dataset/ETHPy150Open django/django/django/test/runner.py/is_discoverable
|
7,797
|
def processmessage(self, tags, args):
if self.writer is not None and args:
lines = self.format_message(tags, args)
self.writer(''.join(lines))
try:
self._tag2proc[tags](tags, args)
except __HOLE__:
pass
|
KeyError
|
dataset/ETHPy150Open pytest-dev/pytest/_pytest/vendored_packages/pluggy.py/_TagTracer.processmessage
|
7,798
|
def _wrapped_call(wrap_controller, func):
""" Wrap calling to a function with a generator which needs to yield
exactly once. The yield point will trigger calling the wrapped function
and return its _CallOutcome to the yield point. The generator then needs
to finish (raise StopIteration) in order for the wrapped call to complete.
"""
try:
next(wrap_controller) # first yield
except StopIteration:
_raise_wrapfail(wrap_controller, "did not yield")
call_outcome = _CallOutcome(func)
try:
wrap_controller.send(call_outcome)
_raise_wrapfail(wrap_controller, "has second yield")
except __HOLE__:
pass
return call_outcome.get_result()
|
StopIteration
|
dataset/ETHPy150Open pytest-dev/pytest/_pytest/vendored_packages/pluggy.py/_wrapped_call
|
7,799
|
def varnames(func, startindex=None):
""" return argument name tuple for a function, method, class or callable.
In case of a class, its "__init__" method is considered.
For methods the "self" parameter is not included unless you are passing
an unbound method with Python3 (which has no supports for unbound methods)
"""
cache = getattr(func, "__dict__", {})
try:
return cache["_varnames"]
except KeyError:
pass
if inspect.isclass(func):
try:
func = func.__init__
except AttributeError:
return ()
startindex = 1
else:
if not inspect.isfunction(func) and not inspect.ismethod(func):
func = getattr(func, '__call__', func)
if startindex is None:
startindex = int(inspect.ismethod(func))
try:
rawcode = func.__code__
except AttributeError:
return ()
try:
x = rawcode.co_varnames[startindex:rawcode.co_argcount]
except AttributeError:
x = ()
else:
defaults = func.__defaults__
if defaults:
x = x[:-len(defaults)]
try:
cache["_varnames"] = x
except __HOLE__:
pass
return x
|
TypeError
|
dataset/ETHPy150Open pytest-dev/pytest/_pytest/vendored_packages/pluggy.py/varnames
|