_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q36100 | ID3Tags._copy | train | def _copy(self):
"""Creates a shallow copy of all tags"""
items = self.items()
subs = {}
for f in (self.getall("CHAP") + self.getall("CTOC")):
subs[f.HashKey] = f.sub_frames._copy()
return (items, subs) | python | {
"resource": ""
} |
q36101 | SignalHandler.block | train | def block(self):
"""While this context manager is active any signals for aborting
the process will be queued and exit the program once the context
is left.
"""
self._nosig = True
yield
self._nosig = False
if self._interrupted:
raise SystemExit("Aborted...") | python | {
"resource": ""
} |
q36102 | is_valid_key | train | def is_valid_key(key):
"""Return true if a string is a valid Vorbis comment key.
Valid Vorbis comment keys are printable ASCII between 0x20 (space)
and 0x7D ('}'), excluding '='.
Takes str/unicode in Python 2, unicode in Python 3
"""
if PY3 and isinstance(key, bytes):
raise TypeError("needs to be str not bytes")
for c in key:
if c < " " or c > "}" or c == "=":
return False
else:
return bool(key) | python | {
"resource": ""
} |
q36103 | VComment.load | train | def load(self, fileobj, errors='replace', framing=True):
"""Parse a Vorbis comment from a file-like object.
Arguments:
errors (str): 'strict', 'replace', or 'ignore'.
This affects Unicode decoding and how other malformed content
is interpreted.
framing (bool): if true, fail if a framing bit is not present
Framing bits are required by the Vorbis comment specification,
but are not used in FLAC Vorbis comment blocks.
"""
# Stream layout: [u32 vendor length][vendor utf-8][u32 count]
# then `count` entries of [u32 length]["KEY=value" utf-8],
# optionally followed by a framing byte with bit 0 set.
try:
vendor_length = cdata.uint_le(fileobj.read(4))
self.vendor = fileobj.read(vendor_length).decode('utf-8', errors)
count = cdata.uint_le(fileobj.read(4))
for i in xrange(count):
length = cdata.uint_le(fileobj.read(4))
try:
string = fileobj.read(length).decode('utf-8', errors)
except (OverflowError, MemoryError):
raise error("cannot read %d bytes, too large" % length)
try:
tag, value = string.split('=', 1)
except ValueError as err:
# Entry without '=': skip, fabricate a key, or re-raise
# depending on the requested error policy.
if errors == "ignore":
continue
elif errors == "replace":
tag, value = u"unknown%d" % i, string
else:
reraise(VorbisEncodingError, err, sys.exc_info()[2])
try:
# Keys must be ASCII; values stay unicode.
tag = tag.encode('ascii', errors)
except UnicodeEncodeError:
raise VorbisEncodingError("invalid tag name %r" % tag)
else:
# string keys in py3k
if PY3:
tag = tag.decode("ascii")
# Silently drop keys that fail the Vorbis key rules.
if is_valid_key(tag):
self.append((tag, value))
if framing and not bytearray(fileobj.read(1))[0] & 0x01:
raise VorbisUnsetFrameError("framing bit was unset")
except (cdata.error, TypeError):
raise error("file is not a valid Vorbis comment") | python | {
"resource": ""
} |
q36104 | VComment.validate | train | def validate(self):
"""Validate keys and values.
Check to make sure every key used is a valid Vorbis key, and
that every value used is a valid Unicode or UTF-8 string. If
any invalid keys or values are found, a ValueError is raised.
In Python 3 all keys and values have to be a string.
"""
# Vendor string: must be text; on Python 2 raw bytes are tolerated
# if they decode as UTF-8.
if not isinstance(self.vendor, text_type):
if PY3:
raise ValueError("vendor needs to be str")
try:
self.vendor.decode('utf-8')
except UnicodeDecodeError:
raise ValueError
# Each (key, value) pair follows the same text/bytes policy.
for key, value in self:
try:
if not is_valid_key(key):
raise ValueError("%r is not a valid key" % key)
except TypeError:
# is_valid_key raised (e.g. bytes key on py3): same outcome.
raise ValueError("%r is not a valid key" % key)
if not isinstance(value, text_type):
if PY3:
err = "%r needs to be str for key %r" % (value, key)
raise ValueError(err)
try:
value.decode("utf-8")
except Exception:
err = "%r is not a valid value for key %r" % (value, key)
raise ValueError(err)
return True | python | {
"resource": ""
} |
q36105 | ParseID3v1 | train | def ParseID3v1(data, v2_version=4, known_frames=None):
"""Parse an ID3v1 tag, returning a list of ID3v2 frames
Returns a {frame_name: frame} dict or None.
v2_version: Decides whether ID3v2.3 or ID3v2.4 tags
should be returned. Must be 3 or 4.
known_frames (Dict[`mutagen.text`, `Frame`]): dict mapping frame
IDs to Frame objects
"""
if v2_version not in (3, 4):
raise ValueError("Only 3 and 4 possible for v2_version")
try:
data = data[data.index(b"TAG"):]
except ValueError:
return None
if 128 < len(data) or len(data) < 124:
return None
# Issue #69 - Previous versions of Mutagen, when encountering
# out-of-spec TDRC and TYER frames of less than four characters,
# wrote only the characters available - e.g. "1" or "" - into the
# year field. To parse those, reduce the size of the year field.
# Amazingly, "0s" works as a struct format string.
unpack_fmt = "3s30s30s30s%ds29sBB" % (len(data) - 124)
try:
tag, title, artist, album, year, comment, track, genre = unpack(
unpack_fmt, data)
except StructError:
return None
if tag != b"TAG":
return None
def fix(data):
return data.split(b"\x00")[0].strip().decode('latin1')
title, artist, album, year, comment = map(
fix, [title, artist, album, year, comment])
frame_class = {
"TIT2": TIT2,
"TPE1": TPE1,
"TALB": TALB,
"TYER": TYER,
"TDRC": TDRC,
"COMM": COMM,
"TRCK": TRCK,
"TCON": TCON,
}
for key in frame_class:
if known_frames is not None:
if key in known_frames:
frame_class[key] = known_frames[key]
else:
frame_class[key] = None
frames = {}
if title and frame_class["TIT2"]:
frames["TIT2"] = frame_class["TIT2"](encoding=0, text=title)
if artist and frame_class["TPE1"]:
frames["TPE1"] = frame_class["TPE1"](encoding=0, text=[artist])
if album and frame_class["TALB"]:
frames["TALB"] = frame_class["TALB"](encoding=0, text=album)
if year:
if v2_version == 3 and frame_class["TYER"]:
frames["TYER"] = frame_class["TYER"](encoding=0, text=year)
elif frame_class["TDRC"]:
frames["TDRC"] = frame_class["TDRC"](encoding=0, text=year)
if comment and frame_class["COMM"]:
frames["COMM"] = frame_class["COMM"](
encoding=0, lang="eng", desc="ID3v1 Comment", text=comment)
# Don't read a track number if it looks like the comment was
# padded with spaces instead of nulls (thanks, WinAmp).
if (track and frame_class["TRCK"] and
((track != 32) or (data[-3] == b'\x00'[0]))):
frames["TRCK"] = TRCK(encoding=0, text=str(track))
if genre != 255 and frame_class["TCON"]:
frames["TCON"] = TCON(encoding=0, text=str(genre))
return frames | python | {
"resource": ""
} |
q36106 | MakeID3v1 | train | def MakeID3v1(id3):
"""Return an ID3v1.1 tag string from a dict of ID3v2.4 frames."""
# Fixed-width layout: "TAG" + title(30) + artist(30) + album(30)
# + year(4) + comment(29) + track(1) + genre(1) = 128 bytes.
v1 = {}
for v2id, name in {"TIT2": "title", "TPE1": "artist",
"TALB": "album"}.items():
if v2id in id3:
# latin-1 with replacement, truncated to the 30-byte field.
text = id3[v2id].text[0].encode('latin1', 'replace')[:30]
else:
text = b""
v1[name] = text + (b"\x00" * (30 - len(text)))
if "COMM" in id3:
cmnt = id3["COMM"].text[0].encode('latin1', 'replace')[:28]
else:
cmnt = b""
v1["comment"] = cmnt + (b"\x00" * (29 - len(cmnt)))
# ID3v1.1: the track number lives in the byte after the comment.
if "TRCK" in id3:
try:
v1["track"] = chr_(+id3["TRCK"])
except ValueError:
v1["track"] = b"\x00"
else:
v1["track"] = b"\x00"
if "TCON" in id3:
try:
genre = id3["TCON"].genres[0]
except IndexError:
pass
else:
if genre in TCON.GENRES:
v1["genre"] = chr_(TCON.GENRES.index(genre))
if "genre" not in v1:
# 0xFF marks "no genre" in ID3v1.
v1["genre"] = b"\xff"
if "TDRC" in id3:
year = text_type(id3["TDRC"]).encode('ascii')
elif "TYER" in id3:
year = text_type(id3["TYER"]).encode('ascii')
else:
year = b""
v1["year"] = (year + b"\x00\x00\x00\x00")[:4]
return (
b"TAG" +
v1["title"] +
v1["artist"] +
v1["album"] +
v1["year"] +
v1["comment"] +
v1["track"] +
v1["genre"]
) | python | {
"resource": ""
} |
q36107 | OggVorbisInfo._post_tags | train | def _post_tags(self, fileobj):
"""Raises ogg.error"""
page = OggPage.find_last(fileobj, self.serial, finishing=True)
if page is None:
raise OggVorbisHeaderError
self.length = page.position / float(self.sample_rate) | python | {
"resource": ""
} |
q36108 | IFFChunk.write | train | def write(self, data):
"""Write the chunk data"""
if len(data) > self.data_size:
raise ValueError
self.__fileobj.seek(self.data_offset)
self.__fileobj.write(data) | python | {
"resource": ""
} |
q36109 | IFFChunk.resize | train | def resize(self, new_data_size):
"""Resize the file and update the chunk sizes"""
# Grow/shrink the backing file in place at this chunk's data offset,
# then record the new size in the chunk header.
resize_bytes(
self.__fileobj, self.data_size, new_data_size, self.data_offset)
self._update_size(new_data_size) | python | {
"resource": ""
} |
q36110 | IFFFile.insert_chunk | train | def insert_chunk(self, id_):
"""Insert a new chunk at the end of the IFF file"""
assert_valid_chunk_id(id_)
self.__fileobj.seek(self.__next_offset)
# Write an empty chunk header: 4-byte ID (space padded) + 32-bit
# big-endian size of 0.
self.__fileobj.write(pack('>4si', id_.ljust(4).encode('ascii'), 0))
self.__fileobj.seek(self.__next_offset)
chunk = IFFChunk(self.__fileobj, self[u'FORM'])
# The enclosing FORM chunk grows by the full size of the new chunk.
self[u'FORM']._update_size(self[u'FORM'].data_size + chunk.size)
self.__chunks[id_] = chunk
self.__next_offset = chunk.offset + chunk.size | python | {
"resource": ""
} |
q36111 | _IFFID3.save | train | def save(self, filething=None, v2_version=4, v23_sep='/', padding=None):
"""Save ID3v2 data to the AIFF file"""
fileobj = filething.fileobj
iff_file = IFFFile(fileobj)
# Ensure an ID3 chunk exists before rendering into it.
if u'ID3' not in iff_file:
iff_file.insert_chunk(u'ID3')
chunk = iff_file[u'ID3']
try:
data = self._prepare_data(
fileobj, chunk.data_offset, chunk.data_size, v2_version,
v23_sep, padding)
except ID3Error as e:
reraise(error, e, sys.exc_info()[2])
# IFF chunks must have an even size; resize first, then null-pad the
# rendered tag data to match.
new_size = len(data)
new_size += new_size % 2  # pad byte
assert new_size % 2 == 0
chunk.resize(new_size)
data += (new_size - len(data)) * b'\x00'
assert new_size == len(data)
chunk.write(data) | python | {
"resource": ""
} |
q36112 | AIFF.load | train | def load(self, filething, **kwargs):
"""Load stream and tag information from a file."""
fileobj = filething.fileobj
try:
self.tags = _IFFID3(fileobj, **kwargs)
except ID3NoHeaderError:
self.tags = None
except ID3Error as e:
raise error(e)
else:
self.tags.filename = self.filename
fileobj.seek(0, 0)
self.info = AIFFInfo(fileobj) | python | {
"resource": ""
} |
q36113 | Engine.is_valid_image | train | def is_valid_image(self, raw_data):
'''
Wand library makes sure when opening any image that is fine, when
the image is corrupted raises an exception.
'''
try:
Image(blob=raw_data)
return True
except (exceptions.CorruptImageError, exceptions.MissingDelegateError):
return False | python | {
"resource": ""
} |
q36114 | parse_cropbox | train | def parse_cropbox(cropbox):
"""
Returns x, y, x2, y2 tuple for cropping.
"""
if isinstance(cropbox, six.text_type):
return tuple([int(x.strip()) for x in cropbox.split(',')])
else:
return tuple(cropbox) | python | {
"resource": ""
} |
q36115 | Engine.get_image | train | def get_image(self, source):
"""
Returns the backend image objects from a ImageFile instance
"""
with NamedTemporaryFile(mode='wb', delete=False) as fp:
fp.write(source.read())
return {'source': fp.name, 'options': OrderedDict(), 'size': None} | python | {
"resource": ""
} |
q36116 | Engine.is_valid_image | train | def is_valid_image(self, raw_data):
"""
This is not very good for imagemagick because it will say anything is
valid that it can use as input.
"""
with NamedTemporaryFile(mode='wb') as fp:
fp.write(raw_data)
fp.flush()
args = settings.THUMBNAIL_IDENTIFY.split(' ')
args.append(fp.name + '[0]')
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
retcode = p.wait()
return retcode == 0 | python | {
"resource": ""
} |
q36117 | Engine._crop | train | def _crop(self, image, width, height, x_offset, y_offset):
"""
Crops the image
"""
image['options']['crop'] = '%sx%s+%s+%s' % (width, height, x_offset, y_offset)
image['size'] = (width, height) # update image size
return image | python | {
"resource": ""
} |
q36118 | Engine._scale | train | def _scale(self, image, width, height):
"""
Does the resizing of the image
"""
image['options']['scale'] = '%sx%s!' % (width, height)
image['size'] = (width, height) # update image size
return image | python | {
"resource": ""
} |
q36119 | Engine._padding | train | def _padding(self, image, geometry, options):
"""
Pads the image
"""
# The order is important. The gravity option should come before extent.
image['options']['background'] = options.get('padding_color')
image['options']['gravity'] = 'center'
image['options']['extent'] = '%sx%s' % (geometry[0], geometry[1])
return image | python | {
"resource": ""
} |
q36120 | tokey | train | def tokey(*args):
"""
Computes a unique key from arguments given.
"""
salt = '||'.join([force_text(arg) for arg in args])
hash_ = hashlib.md5(encode(salt))
return hash_.hexdigest() | python | {
"resource": ""
} |
q36121 | get_module_class | train | def get_module_class(class_path):
"""
imports and returns module class from ``path.to.module.Class``
argument
"""
mod_name, cls_name = class_path.rsplit('.', 1)
try:
mod = import_module(mod_name)
except ImportError as e:
raise ImproperlyConfigured(('Error importing module %s: "%s"' % (mod_name, e)))
return getattr(mod, cls_name) | python | {
"resource": ""
} |
q36122 | get_thumbnail | train | def get_thumbnail(file_, geometry_string, **options):
"""
A shortcut for the Backend ``get_thumbnail`` method
"""
# Thin module-level convenience wrapper; all options are forwarded
# untouched to the configured backend.
return default.backend.get_thumbnail(file_, geometry_string, **options) | python | {
"resource": ""
} |
q36123 | safe_filter | train | def safe_filter(error_output=''):
"""
A safe filter decorator only raising errors when ``THUMBNAIL_DEBUG`` is
``True`` otherwise returning ``error_output``.
"""
def inner(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as err:
# In debug mode fail loudly; in production log and fall back
# to the caller-supplied placeholder output.
if sorl_settings.THUMBNAIL_DEBUG:
raise
logger.error('Thumbnail filter failed: %s' % str(err),
exc_info=sys.exc_info())
return error_output
return wrapper
return inner | python | {
"resource": ""
} |
q36124 | resolution | train | def resolution(file_, resolution_string):
"""
A filter to return the URL for the provided resolution of the thumbnail.
"""
if sorl_settings.THUMBNAIL_DUMMY:
# Dummy mode: rebuild the dummy URL with scaled width/height by
# turning the source template into a regex and re-filling it.
dummy_source = sorl_settings.THUMBNAIL_DUMMY_SOURCE
source = dummy_source.replace('%(width)s', '(?P<width>[0-9]+)')
source = source.replace('%(height)s', '(?P<height>[0-9]+)')
source = re.compile(source)
try:
# '2x' -> Decimal('2')
resolution = decimal.Decimal(resolution_string.strip('x'))
info = source.match(file_).groupdict()
info = {dimension: int(int(size) * resolution) for (dimension, size) in info.items()}
return dummy_source % info
except (AttributeError, TypeError, KeyError):
# If we can't manipulate the dummy we shouldn't change it at all
return file_
# Normal mode: insert '@<resolution>' before the file extension.
filename, extension = os.path.splitext(file_)
return '%s@%s%s' % (filename, resolution_string, extension) | python | {
"resource": ""
} |
q36125 | is_portrait | train | def is_portrait(file_):
"""
A very handy filter to determine if an image is portrait or landscape.
"""
if sorl_settings.THUMBNAIL_DUMMY:
return sorl_settings.THUMBNAIL_DUMMY_RATIO < 1
if not file_:
return False
image_file = default.kvstore.get_or_set(ImageFile(file_))
return image_file.is_portrait() | python | {
"resource": ""
} |
q36126 | margin | train | def margin(file_, geometry_string):
"""
Returns the calculated margin for an image and geometry
"""
if not file_ or (sorl_settings.THUMBNAIL_DUMMY or isinstance(file_, DummyImageFile)):
return 'auto'
# CSS order: [top, right, bottom, left]
margin = [0, 0, 0, 0]
image_file = default.kvstore.get_or_set(ImageFile(file_))
x, y = parse_geometry(geometry_string, image_file.ratio)
# Horizontal slack is split between left and right; the odd pixel
# goes to the right.
# NOTE(review): under Python 3 `/` yields floats here; the final
# '%d' formatting truncates them, which matches the old integer
# division for non-negative slack -- confirm for negative values.
ex = x - image_file.x
margin[3] = ex / 2
margin[1] = ex / 2
if ex % 2:
margin[1] += 1
# Vertical slack likewise, odd pixel to the bottom.
ey = y - image_file.y
margin[0] = ey / 2
margin[2] = ey / 2
if ey % 2:
margin[2] += 1
return ' '.join(['%dpx' % n for n in margin]) | python | {
"resource": ""
} |
q36127 | text_filter | train | def text_filter(regex_base, value):
"""
Helper method to regex replace images with captions in different markups
"""
# The caller supplies a markup-specific template with %(re_cap)s and
# %(re_img)s placeholders for caption and image-path patterns.
regex = regex_base % {
're_cap': r'[a-zA-Z0-9\.\,:;/_ \(\)\-\!\?"]+',
're_img': r'[a-zA-Z0-9\.:/_\-\% ]+'
}
images = re.findall(regex, value)
for i in images:
image = i[1]
# Strip MEDIA_URL so the path can be resolved by the storage.
if image.startswith(settings.MEDIA_URL):
image = image[len(settings.MEDIA_URL):]
im = get_thumbnail(image, str(sorl_settings.THUMBNAIL_FILTER_WIDTH))
value = value.replace(i[1], im.url)
return value | python | {
"resource": ""
} |
q36128 | ImageField.delete_file | train | def delete_file(self, instance, sender, **kwargs):
"""
Adds deletion of thumbnails and key value store references to the
parent class implementation. Only called in Django < 1.2.5
"""
file_ = getattr(instance, self.attname)
# If no other object of this type references the file, and it's not the
# default value for future objects, delete it from the backend.
query = Q(**{self.name: file_.name}) & ~Q(pk=instance.pk)
qs = sender._default_manager.filter(query)
if (file_ and file_.name != self.default and not qs):
# Backend delete also clears thumbnails and kvstore entries.
default.backend.delete(file_)
elif file_:
# Otherwise, just close the file, so it doesn't tie up resources.
file_.close() | python | {
"resource": ""
} |
q36129 | Engine.get_image_size | train | def get_image_size(self, image):
"""
Returns the image width and height as a tuple
"""
if image['size'] is None:
args = settings.THUMBNAIL_VIPSHEADER.split(' ')
args.append(image['source'])
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
m = size_re.match(str(p.stdout.read()))
image['size'] = int(m.group('x')), int(m.group('y'))
return image['size'] | python | {
"resource": ""
} |
q36130 | ThumbnailBackend.get_thumbnail | train | def get_thumbnail(self, file_, geometry_string, **options):
"""
Returns thumbnail as an ImageFile instance for file with geometry and
options given. First it will try to get it from the key value store,
secondly it will create it.
"""
logger.debug('Getting thumbnail for file [%s] at [%s]', file_, geometry_string)
if file_:
source = ImageFile(file_)
else:
raise ValueError('falsey file_ argument in get_thumbnail()')
# preserve image filetype
if settings.THUMBNAIL_PRESERVE_FORMAT:
options.setdefault('format', self._get_format(source))
for key, value in self.default_options.items():
options.setdefault(key, value)
# For the future I think it is better to add options only if they
# differ from the default settings as below. This will ensure the same
# filenames being generated for new options at default.
for key, attr in self.extra_options:
value = getattr(settings, attr)
if value != getattr(default_settings, attr):
options.setdefault(key, value)
# The filename is a hash of source key, geometry and options, so the
# kvstore lookup below is a pure cache hit check.
name = self._get_thumbnail_filename(source, geometry_string, options)
thumbnail = ImageFile(name, default.storage)
cached = default.kvstore.get(thumbnail)
if cached:
return cached
# We have to check exists() because the Storage backend does not
# overwrite in some implementations.
if settings.THUMBNAIL_FORCE_OVERWRITE or not thumbnail.exists():
try:
source_image = default.engine.get_image(source)
except IOError as e:
logger.exception(e)
if settings.THUMBNAIL_DUMMY:
return DummyImageFile(geometry_string)
else:
# if S3Storage says file doesn't exist remotely, don't try to
# create it and exit early.
# Will return working empty image type; 404'd image
logger.warning(
'Remote file [%s] at [%s] does not exist',
file_, geometry_string,
)
return thumbnail
# We might as well set the size since we have the image in memory
image_info = default.engine.get_image_info(source_image)
options['image_info'] = image_info
size = default.engine.get_image_size(source_image)
source.set_size(size)
try:
self._create_thumbnail(source_image, geometry_string, options,
thumbnail)
self._create_alternative_resolutions(source_image, geometry_string,
options, thumbnail.name)
finally:
# Always release engine resources (e.g. temp files).
default.engine.cleanup(source_image)
# If the thumbnail exists we don't create it, the other option is
# to delete and write but this could lead to race conditions so I
# will just leave that out for now.
default.kvstore.get_or_set(source)
default.kvstore.set(thumbnail, source)
return thumbnail | python | {
"resource": ""
} |
q36131 | ThumbnailBackend.delete | train | def delete(self, file_, delete_file=True):
"""
Deletes file_ references in Key Value store and optionally the file_
it self.
"""
image_file = ImageFile(file_)
if delete_file:
image_file.delete()
default.kvstore.delete(image_file) | python | {
"resource": ""
} |
q36132 | ThumbnailBackend._create_thumbnail | train | def _create_thumbnail(self, source_image, geometry_string, options,
thumbnail):
"""
Creates the thumbnail by using default.engine
"""
logger.debug('Creating thumbnail file [%s] at [%s] with [%s]',
thumbnail.name, geometry_string, options)
ratio = default.engine.get_image_ratio(source_image, options)
geometry = parse_geometry(geometry_string, ratio)
image = default.engine.create(source_image, geometry, options)
default.engine.write(image, options, thumbnail)
# It's much cheaper to set the size here
size = default.engine.get_image_size(image)
thumbnail.set_size(size) | python | {
"resource": ""
} |
q36133 | ThumbnailBackend._create_alternative_resolutions | train | def _create_alternative_resolutions(self, source_image, geometry_string,
options, name):
"""
Creates the thumbnail by using default.engine with multiple output
sizes. Appends @<ratio>x to the file name.
"""
ratio = default.engine.get_image_ratio(source_image, options)
geometry = parse_geometry(geometry_string, ratio)
file_name, dot_file_ext = os.path.splitext(name)
for resolution in settings.THUMBNAIL_ALTERNATIVE_RESOLUTIONS:
resolution_geometry = (int(geometry[0] * resolution), int(geometry[1] * resolution))
resolution_options = options.copy()
if 'crop' in options and isinstance(options['crop'], string_types):
crop = options['crop'].split(" ")
for i in range(len(crop)):
s = re.match(r"(\d+)px", crop[i])
if s:
crop[i] = "%spx" % int(int(s.group(1)) * resolution)
resolution_options['crop'] = " ".join(crop)
image = default.engine.create(source_image, resolution_geometry, options)
thumbnail_name = '%(file_name)s%(suffix)s%(file_ext)s' % {
'file_name': file_name,
'suffix': '@%sx' % resolution,
'file_ext': dot_file_ext
}
thumbnail = ImageFile(thumbnail_name, default.storage)
default.engine.write(image, resolution_options, thumbnail)
size = default.engine.get_image_size(image)
thumbnail.set_size(size) | python | {
"resource": ""
} |
q36134 | ThumbnailBackend._get_thumbnail_filename | train | def _get_thumbnail_filename(self, source, geometry_string, options):
"""
Computes the destination filename.
"""
key = tokey(source.key, geometry_string, serialize(options))
# make some subdirs
path = '%s/%s/%s' % (key[:2], key[2:4], key)
return '%s%s.%s' % (settings.THUMBNAIL_PREFIX, path, EXTENSIONS[options['format']]) | python | {
"resource": ""
} |
q36135 | round_corner | train | def round_corner(radius, fill):
"""Draw a round corner"""
corner = Image.new('L', (radius, radius), 0) # (0, 0, 0, 0))
draw = ImageDraw.Draw(corner)
draw.pieslice((0, 0, radius * 2, radius * 2), 180, 270, fill=fill)
return corner | python | {
"resource": ""
} |
q36136 | round_rectangle | train | def round_rectangle(size, radius, fill):
"""Draw a rounded rectangle"""
width, height = size
rectangle = Image.new('L', size, 255)  # fill
corner = round_corner(radius, 255)  # fill
# Paste the same corner mask into all four corners, rotating it
# 90 degrees each time (top-left, bottom-left, bottom-right, top-right).
rectangle.paste(corner, (0, 0))
rectangle.paste(corner.rotate(90),
(0, height - radius))  # Rotate the corner and paste it
rectangle.paste(corner.rotate(180), (width - radius, height - radius))
rectangle.paste(corner.rotate(270), (width - radius, 0))
return rectangle | python | {
"resource": ""
} |
q36137 | Engine._get_image_entropy | train | def _get_image_entropy(self, image):
"""calculate the entropy of an image"""
hist = image.histogram()
hist_size = sum(hist)
hist = [float(h) / hist_size for h in hist]
return -sum([p * math.log(p, 2) for p in hist if p != 0]) | python | {
"resource": ""
} |
q36138 | EngineBase.create | train | def create(self, image, geometry, options):
"""
Processing conductor, returns the thumbnail as an image engine instance
"""
# The pipeline order is deliberate: cropbox/orientation/colorspace
# adjust the source, then scale before crop, and decorative steps
# (rounded corners, blur, padding) run last.
image = self.cropbox(image, geometry, options)
image = self.orientation(image, geometry, options)
image = self.colorspace(image, geometry, options)
image = self.remove_border(image, options)
image = self.scale(image, geometry, options)
image = self.crop(image, geometry, options)
image = self.rounded(image, geometry, options)
image = self.blur(image, geometry, options)
image = self.padding(image, geometry, options)
return image | python | {
"resource": ""
} |
q36139 | EngineBase.cropbox | train | def cropbox(self, image, geometry, options):
"""
Wrapper for ``_cropbox``
"""
cropbox = options['cropbox']
if not cropbox:
return image
x, y, x2, y2 = parse_cropbox(cropbox)
return self._cropbox(image, x, y, x2, y2) | python | {
"resource": ""
} |
q36140 | EngineBase.orientation | train | def orientation(self, image, geometry, options):
"""
Wrapper for ``_orientation``
"""
if options.get('orientation', settings.THUMBNAIL_ORIENTATION):
return self._orientation(image)
# NOTE(review): ``reoriented`` is only set when orientation handling
# is disabled, never after the ``_orientation`` call above -- this
# looks inverted; confirm against the engine implementations before
# relying on the flag.
self.reoriented = True
return image | python | {
"resource": ""
} |
q36141 | EngineBase.colorspace | train | def colorspace(self, image, geometry, options):
"""
Wrapper for ``_colorspace``
"""
colorspace = options['colorspace']
return self._colorspace(image, colorspace) | python | {
"resource": ""
} |
q36142 | EngineBase.scale | train | def scale(self, image, geometry, options):
"""
Wrapper for ``_scale``
"""
upscale = options['upscale']
x_image, y_image = map(float, self.get_image_size(image))
factor = self._calculate_scaling_factor(x_image, y_image, geometry, options)
if factor < 1 or upscale:
width = toint(x_image * factor)
height = toint(y_image * factor)
image = self._scale(image, width, height)
return image | python | {
"resource": ""
} |
q36143 | EngineBase.crop | train | def crop(self, image, geometry, options):
"""
Wrapper for ``_crop``
"""
crop = options['crop']
x_image, y_image = self.get_image_size(image)
if not crop or crop == 'noop':
return image
elif crop == 'smart':
# Smart cropping is suitably different from regular cropping
# to warrant its own function
return self._entropy_crop(image, geometry[0], geometry[1], x_image, y_image)
# Handle any other crop option with the backend crop function.
# Clamp the requested geometry to the actual image dimensions first.
geometry = (min(x_image, geometry[0]), min(y_image, geometry[1]))
x_offset, y_offset = parse_crop(crop, (x_image, y_image), geometry)
return self._crop(image, geometry[0], geometry[1], x_offset, y_offset) | python | {
"resource": ""
} |
q36144 | EngineBase.rounded | train | def rounded(self, image, geometry, options):
"""
Wrapper for ``_rounded``
"""
r = options['rounded']
if not r:
return image
return self._rounded(image, int(r)) | python | {
"resource": ""
} |
q36145 | EngineBase.blur | train | def blur(self, image, geometry, options):
"""
Wrapper for ``_blur``
"""
if options.get('blur'):
return self._blur(image, int(options.get('blur')))
return image | python | {
"resource": ""
} |
q36146 | EngineBase.padding | train | def padding(self, image, geometry, options):
"""
Wrapper for ``_padding``
"""
if options.get('padding') and self.get_image_size(image) != geometry:
return self._padding(image, geometry, options)
return image | python | {
"resource": ""
} |
q36147 | EngineBase.get_image_ratio | train | def get_image_ratio(self, image, options):
"""
Calculates the image ratio. If cropbox option is used, the ratio
may have changed.
"""
cropbox = options['cropbox']
if cropbox:
x, y, x2, y2 = parse_cropbox(cropbox)
x = x2 - x
y = y2 - y
else:
x, y = self.get_image_size(image)
return float(x) / y | python | {
"resource": ""
} |
q36148 | KVStoreBase.set | train | def set(self, image_file, source=None):
"""
Updates store for the `image_file`. Makes sure the `image_file` has a
size set.
"""
image_file.set_size()  # make sure its got a size
self._set(image_file.key, image_file)
if source is not None:
if not self.get(source):
# make sure the source is in kvstore
raise ThumbnailError('Cannot add thumbnails for source: `%s` '
'that is not in kvstore.' % source.name)
# Update the list of thumbnails for source.
# Stored as a list but deduplicated via a set.
thumbnails = self._get(source.key, identity='thumbnails') or []
thumbnails = set(thumbnails)
thumbnails.add(image_file.key)
self._set(source.key, list(thumbnails), identity='thumbnails') | python | {
"resource": ""
} |
q36149 | KVStoreBase.delete | train | def delete(self, image_file, delete_thumbnails=True):
"""
Deletes the reference to the ``image_file`` and deletes the references
to thumbnails as well as thumbnail files if ``delete_thumbnails`` is
``True``. Does not delete the ``image_file`` itself.
"""
if delete_thumbnails:
self.delete_thumbnails(image_file)
self._delete(image_file.key) | python | {
"resource": ""
} |
q36150 | KVStoreBase.delete_thumbnails | train | def delete_thumbnails(self, image_file):
"""
Deletes references to thumbnails as well as thumbnail ``image_files``.
"""
thumbnail_keys = self._get(image_file.key, identity='thumbnails')
if thumbnail_keys:
# Delete all thumbnail keys from store and delete the
# thumbnail ImageFiles.
for key in thumbnail_keys:
thumbnail = self._get(key)
if thumbnail:
# delete_thumbnails=False here avoids recursing into
# thumbnails-of-thumbnails.
self.delete(thumbnail, False)
thumbnail.delete()  # delete the actual file
# Delete the thumbnails key from store
self._delete(image_file.key, identity='thumbnails') | python | {
"resource": ""
} |
q36151 | KVStoreBase.clear | train | def clear(self):
"""
Brutely clears the key value store for keys with THUMBNAIL_KEY_PREFIX
prefix. Use this in emergency situations. Normally you would probably
want to use the ``cleanup`` method instead.
"""
all_keys = self._find_keys_raw(settings.THUMBNAIL_KEY_PREFIX)
if all_keys:
self._delete_raw(*all_keys) | python | {
"resource": ""
} |
q36152 | KVStoreBase._get | train | def _get(self, key, identity='image'):
"""
Deserializing, prefix wrapper for _get_raw
"""
value = self._get_raw(add_prefix(key, identity))
if not value:
return None
if identity == 'image':
return deserialize_image_file(value)
return deserialize(value) | python | {
"resource": ""
} |
q36153 | KVStoreBase._set | train | def _set(self, key, value, identity='image'):
"""
Serializing, prefix wrapper for _set_raw
"""
if identity == 'image':
s = serialize_image_file(value)
else:
s = serialize(value)
self._set_raw(add_prefix(key, identity), s) | python | {
"resource": ""
} |
q36154 | KVStoreBase._find_keys | train | def _find_keys(self, identity='image'):
"""
Finds and returns all keys for identity,
"""
prefix = add_prefix('', identity)
raw_keys = self._find_keys_raw(prefix) or []
for raw_key in raw_keys:
yield del_prefix(raw_key) | python | {
"resource": ""
} |
q36155 | dtw | train | def dtw(x, y, dist=None):
''' return the distance between 2 time series without approximation
Parameters
----------
x : array_like
input array 1
y : array_like
input array 2
dist : function or int
The method for calculating the distance between x[i] and y[j]. If
dist is an int of value p > 0, then the p-norm will be used. If
dist is a function then dist(x[i], y[j]) will be used. If dist is
None then abs(x[i] - y[j]) will be used.
Returns
-------
distance : float
the approximate distance between the 2 time series
path : list
list of indexes for the inputs x and y
Examples
--------
>>> import numpy as np
>>> import fastdtw
>>> x = np.array([1, 2, 3, 4, 5], dtype='float')
>>> y = np.array([2, 3, 4], dtype='float')
>>> fastdtw.dtw(x, y)
(2.0, [(0, 0), (1, 0), (2, 1), (3, 2), (4, 2)])
'''
# Normalize the inputs/distance function, then run the full (exact)
# dynamic programming pass -- window=None means no search constraint.
x, y, dist = __prep_inputs(x, y, dist)
return __dtw(x, y, None, dist) | python | {
"resource": ""
} |
q36156 | get_next_url | train | def get_next_url(request, redirect_field_name):
"""Retrieves next url from request
Note: This verifies that the url is safe before returning it. If the url
is not safe, this returns None.
:arg HttpRequest request: the http request
:arg str redirect_field_name: the name of the field holding the next url
:returns: safe url or None
"""
next_url = request.GET.get(redirect_field_name)
if next_url:
kwargs = {
'url': next_url,
# Require https for the redirect target whenever the current
# request is secure (unless overridden in settings).
'require_https': import_from_settings(
'OIDC_REDIRECT_REQUIRE_HTTPS', request.is_secure())
}
# The current host is always an allowed redirect target.
hosts = list(import_from_settings('OIDC_REDIRECT_ALLOWED_HOSTS', []))
hosts.append(request.get_host())
kwargs['allowed_hosts'] = hosts
is_safe = is_safe_url(**kwargs)
if is_safe:
return next_url
return None | python | {
"resource": ""
} |
q36157 | OIDCAuthenticationCallbackView.get | train | def get(self, request):
"""Callback handler for OIDC authorization code flow"""
nonce = request.session.get('oidc_nonce')
if nonce:
# Make sure that nonce is not used twice
del request.session['oidc_nonce']
if request.GET.get('error'):
# Ouch! Something important failed.
# Make sure the user doesn't get to continue to be logged in
# otherwise the refresh middleware will force the user to
# redirect to authorize again if the session refresh has
# expired.
if is_authenticated(request.user):
auth.logout(request)
assert not is_authenticated(request.user)
elif 'code' in request.GET and 'state' in request.GET:
kwargs = {
'request': request,
'nonce': nonce,
}
if 'oidc_state' not in request.session:
# No state in session: cannot validate the callback.
return self.login_failure()
# CSRF protection: the returned state must match what we sent.
if request.GET['state'] != request.session['oidc_state']:
msg = 'Session `oidc_state` does not match the OIDC callback state'
raise SuspiciousOperation(msg)
self.user = auth.authenticate(**kwargs)
if self.user and self.user.is_active:
return self.login_success()
return self.login_failure() | python | {
"resource": ""
} |
def get(self, request):
    """OIDC client authentication initialization HTTP endpoint."""
    state = get_random_string(self.get_settings('OIDC_STATE_SIZE', 32))
    redirect_field_name = self.get_settings('OIDC_REDIRECT_FIELD_NAME', 'next')
    reverse_url = self.get_settings('OIDC_AUTHENTICATION_CALLBACK_URL',
                                    'oidc_authentication_callback')
    params = {
        'response_type': 'code',
        'scope': self.get_settings('OIDC_RP_SCOPES', 'openid email'),
        'client_id': self.OIDC_RP_CLIENT_ID,
        'redirect_uri': absolutify(request, reverse(reverse_url)),
        'state': state,
    }
    params.update(self.get_extra_params(request))
    if self.get_settings('OIDC_USE_NONCE', True):
        # Bind a fresh nonce to this session to defeat replayed id_tokens.
        nonce = get_random_string(self.get_settings('OIDC_NONCE_SIZE', 32))
        params['nonce'] = nonce
        request.session['oidc_nonce'] = nonce
    request.session['oidc_state'] = state
    request.session['oidc_login_next'] = get_next_url(request, redirect_field_name)
    redirect_url = '{url}?{query}'.format(url=self.OIDC_OP_AUTH_ENDPOINT,
                                          query=urlencode(params))
    return HttpResponseRedirect(redirect_url)
def get_oidc_backend():
    """Return the Django auth backend that uses OIDC.

    :raises: ImproperlyConfigured when no (or more than one) suitable
        backend can be determined.
    """
    # An explicit setting wins; handy when OIDC should only be used with
    # DRF and not be wired into the "normal" Django auth.
    backend_setting = import_from_settings('OIDC_DRF_AUTH_BACKEND', None)
    if backend_setting:
        backend = import_string(backend_setting)()
        if not isinstance(backend, OIDCAuthenticationBackend):
            msg = 'Class configured in OIDC_DRF_AUTH_BACKEND ' \
                  'does not extend OIDCAuthenticationBackend!'
            raise ImproperlyConfigured(msg)
        return backend
    # Otherwise scan the configured backends for exactly one OIDC backend.
    candidates = [b for b in get_backends()
                  if isinstance(b, OIDCAuthenticationBackend)]
    if not candidates:
        msg = 'No backends extending OIDCAuthenticationBackend found - ' \
              'add one to AUTHENTICATION_BACKENDS or set OIDC_DRF_AUTH_BACKEND!'
        raise ImproperlyConfigured(msg)
    if len(candidates) > 1:
        raise ImproperlyConfigured('More than one OIDCAuthenticationBackend found!')
    return candidates[0]
def get_access_token(self, request):
    """Get the access token based on a request.

    :arg request: the incoming DRF request
    :returns: the bearer token string, or None when no bearer
        authentication details were provided
    :raises AuthenticationFailed: when the bearer header is malformed
    """
    header = authentication.get_authorization_header(request)
    if not header:
        return None
    header = header.decode(authentication.HTTP_HEADER_ENCODING)
    auth = header.split()
    # Guard against a whitespace-only header: split() then yields an
    # empty list and auth[0] would raise IndexError.  Treat it the same
    # as a missing header.
    if not auth or auth[0].lower() != 'bearer':
        return None
    if len(auth) == 1:
        msg = 'Invalid "bearer" header: No credentials provided.'
        raise exceptions.AuthenticationFailed(msg)
    elif len(auth) > 2:
        msg = 'Invalid "bearer" header: Credentials string should not contain spaces.'
        raise exceptions.AuthenticationFailed(msg)
    return auth[1]
def import_from_settings(attr, *args):
    """Load an attribute from the django settings.

    An optional single positional argument is used as a default value.

    :raises: ImproperlyConfigured when the setting is missing and no
        default was supplied.
    """
    if args:
        # getattr with an explicit default never raises AttributeError.
        return getattr(settings, attr, args[0])
    try:
        return getattr(settings, attr)
    except AttributeError:
        raise ImproperlyConfigured('Setting {0} not found'.format(attr))
def default_username_algo(email):
    """Generate a username for the Django user.

    :arg str/unicode email: the email address to derive the username from
    :returns: str/unicode
    """
    # bluntly stolen from django-browserid
    # Store the username as a urlsafe-base64-encoded sha1 of the email
    # address.  This protects against data leakage because usernames are
    # often treated as public identifiers (so we can't use the email
    # address directly).
    digest = hashlib.sha1(force_bytes(email)).digest()
    username = base64.urlsafe_b64encode(digest).rstrip(b'=')
    return smart_text(username)
def filter_users_by_claims(self, claims):
    """Return a queryset of all users matching the claimed email.

    An empty queryset is returned when no email claim is present.
    """
    email = claims.get('email')
    if email:
        return self.UserModel.objects.filter(email__iexact=email)
    return self.UserModel.objects.none()
def verify_claims(self, claims):
    """Verify the provided claims to decide if authentication should be allowed."""
    # Under the default configuration we can at least require the email
    # claim to be present.
    scopes = self.get_settings('OIDC_RP_SCOPES', 'openid email')
    if 'email' in scopes.split():
        return 'email' in claims
    LOGGER.warning('Custom OIDC_RP_SCOPES defined. '
                   'You need to override `verify_claims` for custom claims verification.')
    return True
def create_user(self, claims):
    """Return object for a newly created user account."""
    username = self.get_username(claims)
    return self.UserModel.objects.create_user(username, claims.get('email'))
def get_username(self, claims):
    """Generate username based on claims.

    Uses the callable (or dotted path to one) configured via
    OIDC_USERNAME_ALGO, falling back to :func:`default_username_algo`.
    """
    # bluntly stolen from django-browserid
    # https://github.com/mozilla/django-browserid/blob/master/django_browserid/auth.py
    algo = self.get_settings('OIDC_USERNAME_ALGO', None)
    if not algo:
        return default_username_algo(claims.get('email'))
    if isinstance(algo, six.string_types):
        # A dotted path was configured; resolve it to the callable.
        algo = import_string(algo)
    return algo(claims.get('email'))
def _verify_jws(self, payload, key):
    """Verify a compact JWS *payload* against *key* and return its payload.

    :raises SuspiciousOperation: on a missing/mismatching алг -- missing
        or mismatching ``alg`` header, or a failed signature check.
    """
    jws = JWS.from_compact(payload)
    try:
        alg = jws.signature.combined.alg.name
    except KeyError:
        raise SuspiciousOperation('No alg value found in header')
    if alg != self.OIDC_RP_SIGN_ALGO:
        raise SuspiciousOperation(
            "The provider algorithm {!r} does not match the client's "
            "OIDC_RP_SIGN_ALGO.".format(alg))
    if isinstance(key, six.string_types):
        # Use smart_bytes here since the key string comes from settings.
        jwk = JWK.load(smart_bytes(key))
    else:
        # The key is a json returned from the IDP JWKS endpoint.
        jwk = JWK.from_json(key)
    if not jws.verify(jwk):
        raise SuspiciousOperation('JWS token verification failed.')
    return jws.payload
def retrieve_matching_jwk(self, token):
    """Get the signing key by exploring the JWKS endpoint of the OP."""
    response_jwks = requests.get(
        self.OIDC_OP_JWKS_ENDPOINT,
        verify=self.get_settings('OIDC_VERIFY_SSL', True)
    )
    response_jwks.raise_for_status()
    jwks = response_jwks.json()
    # Recover the protected header of the presented token so its kid/alg
    # can be matched against the published keys.
    json_header = JWS.from_compact(token).signature.protected
    header = Header.json_loads(json_header)
    key = None
    for jwk in jwks['keys']:
        if jwk['kid'] != smart_text(header.kid):
            continue
        if 'alg' in jwk and jwk['alg'] != smart_text(header.alg):
            raise SuspiciousOperation('alg values do not match.')
        # Keep scanning; the last key with a matching kid wins.
        key = jwk
    if key is None:
        raise SuspiciousOperation('Could not find a valid JWKS.')
    return key
def get_payload_data(self, token, key):
    """Helper method to get the payload of the JWT token."""
    if self.get_settings('OIDC_ALLOW_UNSECURED_JWT', False):
        header, payload_data, signature = token.split(b'.')
        header = json.loads(smart_text(b64decode(header)))
        # When unsecured JWTs are allowed, an alg of "none" in the header
        # short-circuits signature verification entirely.
        if header.get('alg') == 'none':
            return b64decode(payload_data)
    # By default fall back to verifying the JWS signature.
    return self._verify_jws(token, key)
def verify_token(self, token, **kwargs):
    """Validate the token signature (and nonce) and return its payload."""
    nonce = kwargs.get('nonce')
    token = force_bytes(token)
    if self.OIDC_RP_SIGN_ALGO.startswith('RS'):
        # RSA: use a statically configured IdP key when available,
        # otherwise look the key up on the JWKS endpoint.
        if self.OIDC_RP_IDP_SIGN_KEY is not None:
            key = self.OIDC_RP_IDP_SIGN_KEY
        else:
            key = self.retrieve_matching_jwk(token)
    else:
        # HMAC algorithms are keyed with the shared client secret.
        key = self.OIDC_RP_CLIENT_SECRET
    payload_data = self.get_payload_data(token, key)
    # payload_data is always bytes (base64.urlsafe_b64decode output);
    # decode explicitly so json.loads works on every supported Python.
    payload = json.loads(payload_data.decode('utf-8'))
    token_nonce = payload.get('nonce')
    if self.get_settings('OIDC_USE_NONCE', True) and nonce != token_nonce:
        raise SuspiciousOperation('JWT Nonce verification failed.')
    return payload
def get_token(self, payload):
    """Return token object as a dictionary."""
    auth = None
    if self.get_settings('OIDC_TOKEN_USE_BASIC_AUTH', False):
        # With HTTP Basic auth the secret travels in the Authorization
        # header, so it must be removed from the POST body.
        auth = HTTPBasicAuth(payload.get('client_id'),
                             payload.get('client_secret'))
        del payload['client_secret']
    response = requests.post(
        self.OIDC_OP_TOKEN_ENDPOINT,
        data=payload,
        auth=auth,
        verify=self.get_settings('OIDC_VERIFY_SSL', True))
    response.raise_for_status()
    return response.json()
def get_userinfo(self, access_token, id_token, payload):
    """Return user details dictionary.

    The id_token and payload are unused here, but are part of the
    signature so overriding implementations can take advantage of them.
    """
    response = requests.get(
        self.OIDC_OP_USER_ENDPOINT,
        headers={'Authorization': 'Bearer {0}'.format(access_token)},
        verify=self.get_settings('OIDC_VERIFY_SSL', True))
    response.raise_for_status()
    return response.json()
def authenticate(self, request, **kwargs):
    """Authenticates a user based on the OIDC code flow."""
    self.request = request
    if not self.request:
        return None
    state = self.request.GET.get('state')
    code = self.request.GET.get('code')
    nonce = kwargs.pop('nonce', None)
    if not code or not state:
        return None
    reverse_url = self.get_settings('OIDC_AUTHENTICATION_CALLBACK_URL',
                                    'oidc_authentication_callback')
    token_payload = {
        'client_id': self.OIDC_RP_CLIENT_ID,
        'client_secret': self.OIDC_RP_CLIENT_SECRET,
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': absolutify(self.request, reverse(reverse_url)),
    }
    # Exchange the authorization code for tokens.
    token_info = self.get_token(token_payload)
    id_token = token_info.get('id_token')
    access_token = token_info.get('access_token')
    # Validate the id_token (signature and, when enabled, nonce).
    payload = self.verify_token(id_token, nonce=nonce)
    if not payload:
        return None
    self.store_tokens(access_token, id_token)
    try:
        return self.get_or_create_user(access_token, id_token, payload)
    except SuspiciousOperation as exc:
        LOGGER.warning('failed to get or create user: %s', exc)
        return None
def store_tokens(self, access_token, id_token):
    """Store OIDC tokens on the session, when configured to do so."""
    sess = self.request.session
    if self.get_settings('OIDC_STORE_ACCESS_TOKEN', False):
        sess['oidc_access_token'] = access_token
    if self.get_settings('OIDC_STORE_ID_TOKEN', False):
        sess['oidc_id_token'] = id_token
def get_or_create_user(self, access_token, id_token, payload):
    """Returns a User instance if exactly one user is found.  Creates a
    user if none is found and configured to do so.  Returns None when
    login cannot proceed."""
    user_info = self.get_userinfo(access_token, id_token, payload)
    email = user_info.get('email')
    if not self.verify_claims(user_info):
        raise SuspiciousOperation('Claims verification failed')
    # email based filtering
    users = self.filter_users_by_claims(user_info)
    matched = len(users)
    if matched == 1:
        return self.update_user(users[0], user_info)
    if matched > 1:
        # In the rare case that two user accounts share an email address,
        # bail out: randomly selecting one seems really wrong.
        raise SuspiciousOperation('Multiple users returned')
    if self.get_settings('OIDC_CREATE_USER', True):
        return self.create_user(user_info)
    LOGGER.debug('Login failed: No user with email %s found, and '
                 'OIDC_CREATE_USER is False', email)
    return None
def exempt_urls(self):
    """Generate and return a set of url paths to exempt from SessionRefresh.

    This takes the value of ``settings.OIDC_EXEMPT_URLS`` and appends the
    three urls that mozilla-django-oidc itself uses.  The values can be
    view names or absolute url paths.

    :returns: set of url paths (for example "/oidc/callback/")
    """
    exempt_urls = list(self.get_settings('OIDC_EXEMPT_URLS', []))
    exempt_urls.extend([
        'oidc_authentication_init',
        'oidc_authentication_callback',
        'oidc_logout',
    ])
    # View names are resolved to paths; absolute paths pass through.
    return {url if url.startswith('/') else reverse(url)
            for url in exempt_urls}
def is_refreshable_url(self, request):
    """Takes a request and returns whether it triggers a refresh examination.

    :arg HttpRequest request:
    :returns: boolean
    """
    # Skip the refresh dance entirely when the session was not
    # authenticated through an OIDC backend.
    backend_session = request.session.get(BACKEND_SESSION_KEY)
    is_oidc_enabled = True
    if backend_session:
        auth_backend = import_string(backend_session)
        is_oidc_enabled = issubclass(auth_backend, OIDCAuthenticationBackend)
    return (request.method == 'GET' and
            is_authenticated(request.user) and
            is_oidc_enabled and
            request.path not in self.exempt_urls)
def _read(self):
    """Read and return one chunk ending at the current file position.

    Reads at most ``self._chunk_size`` characters backwards from the
    current position and leaves the file pointer at the start of the
    chunk that was read.

    :rtype : str
    """
    end = self._fp.tell()
    if end < 1:
        return ""
    start = max(end - self._chunk_size, 0)
    self._fp.seek(start)
    chunk = self._fp.read(end - start)
    self._fp.seek(start)
    return chunk
def find(self):
    """Return the position of the next occurrence of the needle.

    Searches backwards from the current file position; -1 is returned
    when the needle is not found.

    :rtype : int
    """
    carry = ""
    while self._fp.tell() > 0:
        chunk = self._read()
        hit = (chunk + carry).rfind(self._needle)
        if hit > -1:
            filepos = self._fp.tell() + hit
            self._fp.seek(filepos)
            return filepos
        # Keep a needle-sized prefix of this chunk so a match that is
        # split across two chunk boundaries is found next iteration.
        carry = chunk[:len(self._needle)]
    return -1
def search(self, text):
    """Find *text* in the log file, searching backwards from the
    current position.

    :arg str text: the needle to look for
    :returns: tuple of (absolute position, position in the result
        buffer, result buffer contents); (-1, -1, '') when not found.
    """
    key = hash(text)
    searcher = self._searchers.get(key)
    if not searcher:
        searcher = ReverseFileSearcher(self.filename, text)
        self._searchers[key] = searcher
    position = searcher.find()
    if position < 0:
        # Reset the searcher so the next search starts from the tail again.
        searcher.reset()
        return -1, -1, ''
    # Try to get some content from before and after the result's position.
    # Floor division is required: "/" yields a float on Python 3, which
    # would break the integer arithmetic and fp.seek() below (identical
    # to "/" for ints on Python 2).
    read_before = self.buffer_size // 2
    offset = max(position - read_before, 0)
    bufferpos = position if offset == 0 else read_before
    self.fp.seek(offset)
    return position, bufferpos, self.read()
def get_interface_addresses():
    """Get addresses of available network interfaces.

    See netifaces on pypi for details.

    :returns: list of dicts with keys ``name`` (interface name),
        ``family`` (address family constant) and ``ip`` (the address).
    """
    addresses = []
    for iface in netifaces.interfaces():
        addrs = netifaces.ifaddresses(iface)
        # Materialize as a list: dict.keys() is a non-mutable view on
        # Python 3 and would not support remove()/append() below.
        families = list(addrs.keys())
        # Put IPv4 at the end so it gets listed as the main iface address.
        if netifaces.AF_INET in families:
            families.remove(netifaces.AF_INET)
            families.append(netifaces.AF_INET)
        for family in families:
            for addr in addrs[family]:
                addresses.append({
                    'name': iface,
                    'family': family,
                    'ip': addr['addr'],
                })
    return addresses
def _get_net_io_counters(self):
    """Fetch io counters from psutil and transform them to dicts with
    the per-second rate attributes defaulted to zero.

    :returns: dict mapping nic name to its counters dict.
    """
    counters = psutil.net_io_counters(pernic=self.pernic)
    res = {}
    # items() instead of iteritems() keeps this working on Python 3
    # while behaving identically on Python 2.
    for name, io in counters.items():
        entry = io._asdict()
        entry.update({'tx_per_sec': 0, 'rx_per_sec': 0})
        res[name] = entry
    return res
def build(
    cls,
    *,
    scheme="",
    user="",
    password=None,
    host="",
    port=None,
    path="",
    query=None,
    query_string="",
    fragment="",
    encoded=False
):
    """Creates and returns a new URL from its component parts.

    ``query`` (a mapping) and ``query_string`` are mutually exclusive.
    With ``encoded=True`` all parts are taken verbatim, without quoting.
    """
    if scheme and not host:
        raise ValueError('Can\'t build URL with "scheme" but without "host".')
    if port and not host:
        raise ValueError('Can\'t build URL with "port" but without "host".')
    if query and query_string:
        raise ValueError('Only one of "query" or "query_string" should be passed')
    if path is None or query_string is None or fragment is None:
        raise TypeError('NoneType is illegal for "path", "query_string" and '
                        '"fragment" args, use string values instead.')
    has_authority = user or password or host or port
    if has_authority:
        netloc = cls._make_netloc(user, password, host, port, encode=not encoded)
    else:
        netloc = ""
    if not encoded:
        path = cls._PATH_QUOTER(path)
        if netloc:
            path = cls._normalize_path(path)
        cls._validate_authority_uri_abs_path(host=host, path=path)
        query_string = cls._QUERY_QUOTER(query_string)
        fragment = cls._FRAGMENT_QUOTER(fragment)
    url = cls(SplitResult(scheme, netloc, path, query_string, fragment),
              encoded=True)
    return url.with_query(query) if query else url
def is_default_port(self):
    """A check for default port.

    Return True if the port is the default one for the URL's scheme,
    e.g. 'http://python.org' or 'http://python.org:80'; False otherwise.
    """
    if self.port is None:
        return False
    # A scheme without a known default never matches (get() -> None).
    return DEFAULT_PORTS.get(self.scheme) == self.port
def origin(self):
    """Return an URL with scheme, host and port parts only.

    user, password, path, query and fragment are removed.
    """
    # TODO: add a keyword-only option for keeping user/pass maybe?
    if not self.is_absolute():
        raise ValueError("URL should be absolute")
    v = self._val
    if not v.scheme:
        raise ValueError("URL should have scheme")
    netloc = self._make_netloc(None, None, v.hostname, v.port, encode=False)
    stripped = v._replace(netloc=netloc, path="", query="", fragment="")
    return URL(stripped, encoded=True)
def relative(self):
    """Return a relative part of the URL.

    scheme, user, password, host and port are removed.
    """
    if not self.is_absolute():
        raise ValueError("URL should be absolute")
    stripped = self._val._replace(scheme="", netloc="")
    return URL(stripped, encoded=True)
def host(self):
    """Decoded host part of URL.

    None for relative URLs.
    """
    raw = self.raw_host
    if raw is None:
        return None
    if "%" in raw:
        # A '%' sign can only appear in a scoped IPv6 address such as
        # fe80::2%Проверка, so IDNA decoding would be pointless.
        return raw
    try:
        return idna.decode(raw.encode("ascii"))
    except UnicodeError:  # e.g. '::1'
        return raw.encode("ascii").decode("idna")
def port(self):
    """Port part of URL, with scheme-based fallback.

    None for relative URLs or URLs without explicit port and
    scheme without default port substitution.
    """
    explicit = self._val.port
    return explicit if explicit else DEFAULT_PORTS.get(self._val.scheme)
def raw_path(self):
    """Encoded path of URL.

    '/' for absolute URLs without path part.
    """
    path = self._val.path
    if path or not self.is_absolute():
        return path
    return "/"
def query(self):
    """A MultiDictProxy representing parsed query parameters in decoded
    representation.

    Empty value if URL has no query part.
    """
    pairs = parse_qsl(self.raw_query_string, keep_blank_values=True)
    return MultiDictProxy(MultiDict(pairs))
def path_qs(self):
    """Decoded path of URL with query."""
    qs = self.query_string
    return "{}?{}".format(self.path, qs) if qs else self.path
def raw_path_qs(self):
    """Encoded path of URL with query."""
    qs = self.raw_query_string
    return "{}?{}".format(self.raw_path, qs) if qs else self.raw_path
def parent(self):
    """A new URL with last part of path removed and cleaned up query and
    fragment.
    """
    path = self.raw_path
    if not path or path == "/":
        # Already at the root: only strip query/fragment (if any).
        if self.raw_fragment or self.raw_query_string:
            return URL(self._val._replace(query="", fragment=""), encoded=True)
        return self
    truncated = path.rsplit("/", 1)[0]
    val = self._val._replace(path=truncated, query="", fragment="")
    return URL(val, encoded=True)
def raw_name(self):
    """The last part of raw_parts."""
    parts = self.raw_parts
    if self.is_absolute():
        # Skip the leading '/' root part; an absolute URL with only the
        # root has an empty name.
        tail = parts[1:]
        return tail[-1] if tail else ""
    return parts[-1]
def _validate_authority_uri_abs_path(host, path):
    """Ensure that path in URL with authority starts with a leading slash.

    Raise ValueError if not.
    """
    if host and path and not path.startswith("/"):
        raise ValueError(
            "Path in a URL with authority should start with a slash ('/') if set"
        )
def with_scheme(self, scheme):
    """Return a new URL with scheme replaced.

    N.B.: query and fragment are not cleaned up.
    """
    if not isinstance(scheme, str):
        raise TypeError("Invalid scheme type")
    if not self.is_absolute():
        raise ValueError("scheme replacement is not allowed for relative URLs")
    new_val = self._val._replace(scheme=scheme.lower())
    return URL(new_val, encoded=True)
def with_user(self, user):
    """Return a new URL with user replaced.

    Autoencode user if needed; clear both user and password when user
    is None.  N.B.: query and fragment are not cleaned up.
    """
    val = self._val
    if user is None:
        password = None
    elif isinstance(user, str):
        user = self._QUOTER(user)
        password = val.password
    else:
        raise TypeError("Invalid user type")
    if not self.is_absolute():
        raise ValueError("user replacement is not allowed for relative URLs")
    netloc = self._make_netloc(user, password, val.hostname, val.port,
                               encode=False)
    return URL(self._val._replace(netloc=netloc), encoded=True)
def with_host(self, host):
    """Return a new URL with host replaced.

    Autoencode host if needed.  Changing the host of a relative URL is
    not allowed; use .join() instead.
    """
    if not isinstance(host, str):
        raise TypeError("Invalid host type")
    if not self.is_absolute():
        raise ValueError("host replacement is not allowed for relative URLs")
    if not host:
        raise ValueError("host removing is not allowed")
    encoded_host = self._encode_host(host)
    val = self._val
    netloc = self._make_netloc(val.username, val.password, encoded_host,
                               val.port, encode=False)
    return URL(self._val._replace(netloc=netloc), encoded=True)
def with_port(self, port):
    """Return a new URL with port replaced.

    Passing None clears the port back to the scheme default.
    """
    if port is not None and not isinstance(port, int):
        raise TypeError("port should be int or None, got {}".format(type(port)))
    if not self.is_absolute():
        raise ValueError("port replacement is not allowed for relative URLs")
    val = self._val
    netloc = self._make_netloc(val.username, val.password, val.hostname,
                               port, encode=False)
    return URL(self._val._replace(netloc=netloc), encoded=True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.