Dataset schema (one row per record below):
  _id               string (2-7 chars)
  title             string (1-88 chars)
  partition         string (3 classes)
  text              string (75-19.8k chars)
  language          string (1 class)
  meta_information  dict
q15900
observer
train
def observer(names_or_instance, names=None, func=None, change_only=False):
    """Specify a callback function that will fire on Property value change

    Observer functions on a HasProperties class fire after the observed
    Property or Properties have been changed (unlike validator functions
    that fire on set before the value is changed).

    You can use this method as a decorator inside a HasProperties class

    .. code::

        @properties.observer('variable_name')
        def callback_function(self, change):
            print(change)

    or you can use it to register a function to a single HasProperties
    instance

    .. code::

        properties.observer(my_has_props, 'variable_name', callback_function)

    The variable name must refer to a Property name on the HasProperties
    class. A list of Property names may also be used; the same callback
    function will fire when any of these Properties change. Also,
    :class:`properties.everything <properties.utils.Sentinel>` may be
    specified instead of the variable name. In that case, the callback
    function will fire when any Property changes.

    The callback function must take two arguments. The first is the
    HasProperties instance; the second is the change notification
    dictionary. This dictionary contains:

    * 'name' - the name of the changed Property
    * 'previous' - the value of the Property prior to change (this will
      be :code:`properties.undefined` if the value was not previously set)
    * 'value' - the new value of the Property (this will be
      :code:`properties.undefined` if the value is deleted)
    * 'mode' - the mode of the change; for observers, this is either
      'observe_set' or 'observe_change'

    Finally, the keyword argument **change_only** may be specified as a
    boolean. If False (the default), the callback function will fire any
    time the Property is set. If True, the callback function will only
    fire if the new value is different than the previous value,
    determined by the :code:`Property.equal` method.
    """
    mode = 'observe_change' if change_only else 'observe_set'
    if names is None and func is None:
        return Observer(names_or_instance, mode)
    obs = Observer(names, mode)(func)
    _set_listener(names_or_instance, obs)
    return obs
python
{ "resource": "" }
q15901
validator
train
def validator(names_or_instance, names=None, func=None):
    """Specify a callback function to fire on class validation OR property set

    This function has two modes of operation:

    1. Registering callback functions that validate Property values when
       they are set, before the change is saved to the HasProperties
       instance. This mode is very similar to the :code:`observer`
       function.

    2. Registering callback functions that fire only when the
       HasProperties :code:`validate` method is called. This allows for
       cross-validation of Properties that should only fire when all
       required Properties are set.

    **Mode 1:**

    Validator functions on a HasProperties class fire on set but before
    the observed Property or Properties have been changed (unlike
    observer functions that fire after the value has been changed).

    You can use this method as a decorator inside a HasProperties class

    .. code::

        @properties.validator('variable_name')
        def callback_function(self, change):
            print(change)

    or you can use it to register a function to a single HasProperties
    instance

    .. code::

        properties.validator(my_has_props, 'variable_name', callback_function)

    The variable name must refer to a Property name on the HasProperties
    class. A list of Property names may also be used; the same callback
    function will fire when any of these Properties change. Also,
    :class:`properties.everything <properties.utils.Sentinel>` may be
    specified instead of the variable name. In that case, the callback
    function will fire when any Property changes.

    The callback function must take two arguments. The first is the
    HasProperties instance; the second is the change notification
    dictionary. This dictionary contains:

    * 'name' - the name of the changed Property
    * 'previous' - the value of the Property prior to change (this will
      be :code:`properties.undefined` if the value was not previously set)
    * 'value' - the new value of the Property (this will be
      :code:`properties.undefined` if the value is deleted)
    * 'mode' - the mode of the change; for validators, this is 'validate'

    **Mode 2:**

    When used as a decorator without arguments (i.e. called directly on a
    HasProperties method), the decorated method is registered as a class
    validator. These methods execute only when :code:`validate()` is
    called on the HasProperties instance.

    .. code::

        @properties.validator
        def validation_method(self):
            print('validating instance of {}'.format(self.__class__))

    The decorated function must only take one argument, the HasProperties
    instance.
    """
    if names is None and func is None:
        if callable(names_or_instance):
            return ClassValidator(names_or_instance)
        return Observer(names_or_instance, 'validate')
    val = Observer(names, 'validate')(func)
    _set_listener(names_or_instance, val)
    return val
python
{ "resource": "" }
q15902
build_from_bases
train
def build_from_bases(bases, classdict, attr, attr_dict):
    """Helper function to build private HasProperties attributes"""
    output = OrderedDict()
    output_keys = set()
    all_bases = []
    # Go through the bases from furthest to nearest ancestor
    for base in reversed(bases):
        # Only keep the items that are still defined on the bases
        if base is not object and isinstance(base, PropertyMetaclass):
            output_keys = output_keys.union(getattr(base, attr))
        # Collect all bases so we ensure overridden items are assigned
        # in the correct order
        for item in reversed(base.__mro__):
            if item is object or not isinstance(item, PropertyMetaclass):
                continue
            if item not in all_bases:
                all_bases.append(item)
    # Update the items in reverse MRO order; only keep those that are
    # defined on the bases
    for base in all_bases:
        for key, val in iteritems(getattr(base, attr)):
            if key in base.__dict__ and key in output_keys:
                output.update({key: val})
    # Remove all items that were overridden by this class; this is
    # potentially a superset of the items added back in the next step.
    for key in classdict:
        if key in output:
            output.pop(key)
    # Update the items with those defined on this class
    output.update(attr_dict)
    return output
python
{ "resource": "" }
q15903
HasProperties._reset
train
def _reset(self, name=None):
    """Revert specified property to default value

    If no property is specified, all properties are returned to default.
    """
    if name is None:
        for key in self._props:
            if isinstance(self._props[key], basic.Property):
                self._reset(key)
        return
    if name not in self._props:
        raise AttributeError("Input name '{}' is not a known "
                             "property or attribute".format(name))
    if not isinstance(self._props[name], basic.Property):
        raise AttributeError("Cannot reset GettableProperty "
                             "'{}'".format(name))
    if name in self._defaults:
        val = self._defaults[name]
    else:
        val = self._props[name].default
    if callable(val):
        val = val()
    setattr(self, name, val)
python
{ "resource": "" }
q15904
HasProperties.validate
train
def validate(self):
    """Call all registered class validator methods

    These are all methods decorated with :code:`@properties.validator`.
    Validator methods are expected to raise a ValidationError if they
    fail.
    """
    if getattr(self, '_getting_validated', False):
        return True
    self._getting_validated = True
    self._validation_error_tuples = []
    self._non_validation_error = None
    try:
        for val in itervalues(self._class_validators):
            try:
                if isinstance(val.func, string_types):
                    valid = getattr(self, val.func)()
                else:
                    valid = val.func(self)
                if valid is False:
                    raise utils.ValidationError(
                        'Validation failed', None, None, self
                    )
            except utils.ValidationError as val_err:
                self._validation_error_tuples += val_err.error_tuples
            except GENERIC_ERRORS as err:
                if not self._non_validation_error:
                    self._non_validation_error = err
        if self._validation_error_tuples:
            self._error_hook(self._validation_error_tuples)
            msgs = ['Validation failed:']
            msgs += [val.message for val in self._validation_error_tuples]
            raise utils.ValidationError(
                message='\n- '.join(msgs),
                _error_tuples=self._validation_error_tuples,
            )
        if self._non_validation_error:
            raise self._non_validation_error  # pylint: disable=raising-bad-type
        return True
    finally:
        self._getting_validated = False
        self._validation_error_tuples = None
        self._non_validation_error = None
python
{ "resource": "" }
q15905
HasProperties._deserialize_class
train
def _deserialize_class(cls, input_cls_name, trusted, strict):
    """Returns the HasProperties class to use for deserialization"""
    if not input_cls_name or input_cls_name == cls.__name__:
        return cls
    if trusted and input_cls_name in cls._REGISTRY:
        return cls._REGISTRY[input_cls_name]
    if strict:
        raise ValueError(
            'Class name {} from deserialization input dictionary does '
            'not match input class {}'.format(input_cls_name, cls.__name__)
        )
    return cls
python
{ "resource": "" }
q15906
BaseTask.report_status
train
def report_status(self, status):
    """Hook for reporting the task status towards completion"""
    status = Instance('', TaskStatus).validate(None, status)
    print(r'{taskname} | {percent:>3}% | {message}'.format(
        taskname=self.__class__.__name__,
        percent=int(round(100 * status.progress)),
        message=status.message if status.message else '',
    ))
python
{ "resource": "" }
q15907
HasUID.serialize
train
def serialize(self, include_class=True, save_dynamic=False, **kwargs):
    """Serialize nested HasUID instances to a flat dictionary

    **Parameters**:

    * **include_class** - If True (the default), the name of the class
      will also be saved to the serialized dictionary under key
      :code:`'__class__'`
    * **save_dynamic** - If True, dynamic properties are written to the
      serialized dict (default: False).
    * You may also specify a **registry** - This is the flat dictionary
      where UID/HasUID pairs are stored. By default, no registry need be
      provided; a new dictionary will be created.
    * Any other keyword arguments will be passed through to the Property
      serializers.
    """
    registry = kwargs.pop('registry', None)
    if registry is None:
        registry = dict()
    if not registry:
        root = True
        registry.update({'__root__': self.uid})
    else:
        root = False
    key = self.uid
    if key not in registry:
        registry.update({key: None})
        registry.update({key: super(HasUID, self).serialize(
            registry=registry,
            include_class=include_class,
            save_dynamic=save_dynamic,
            **kwargs
        )})
    if root:
        return registry
    return key
python
{ "resource": "" }
q15908
HasUID.deserialize
train
def deserialize(cls, value, trusted=False, strict=False,
                assert_valid=False, **kwargs):
    """Deserialize nested HasUID instance from flat pointer dictionary

    **Parameters**

    * **value** - Flat pointer dictionary produced by :code:`serialize`
      with UID/HasUID key/value pairs. It also includes a
      :code:`__root__` key to specify the root HasUID instance.
    * **trusted** - If True (and if the input dictionaries have
      :code:`'__class__'` keyword and this class is in the registry), the
      new **HasProperties** class will come from the dictionary. If False
      (the default), only the **HasProperties** class this method is
      called on will be constructed.
    * **strict** - Requires :code:`'__class__'`, if present on the input
      dictionary, to match the deserialized instance's class. Also
      disallows unused properties in the input dictionary. Default is
      False.
    * **assert_valid** - Require deserialized instance to be valid.
      Default is False.
    * You may also specify an alternative **root** - This allows a
      different HasUID root instance to be specified. It overrides
      :code:`__root__` in the input dictionary.
    * Any other keyword arguments will be passed through to the Property
      deserializers.

    .. note::

        HasUID instances are constructed with no input arguments
        (i.e. :code:`cls()` is called). This means deserialization will
        fail if the init method has been overridden to require input
        parameters.
    """
    registry = kwargs.pop('registry', None)
    if registry is None:
        if not isinstance(value, dict):
            raise ValueError('HasUID must deserialize from dictionary')
        registry = value.copy()
        uid = kwargs.get('root', registry.get('__root__'))
    else:
        uid = value
    if uid in cls._INSTANCES and uid not in registry:
        return cls._INSTANCES[uid]
    if uid in cls._INSTANCES:
        raise ValueError('UID already used: {}'.format(uid))
    if uid not in registry:
        raise ValueError('Invalid UID: {}'.format(uid))
    value = registry[uid]
    if not isinstance(value, HasUID):
        try:
            input_class = value.get('__class__')
        except AttributeError:
            input_class = None
        new_cls = cls._deserialize_class(input_class, trusted, strict)
        new_inst = new_cls()
        registry.update({uid: new_inst})
        super(HasUID, cls).deserialize(
            value=value,
            trusted=trusted,
            strict=strict,
            registry=registry,
            _instance=new_inst,
            **kwargs
        )
    cls._INSTANCES[uid] = registry[uid]
    return registry[uid]
python
{ "resource": "" }
q15909
Pointer.deserialize
train
def deserialize(self, value, **kwargs):
    """Deserialize instance from JSON value

    If a deserializer is registered, that is used. Otherwise, if the
    instance_class is a HasProperties subclass, an instance can be
    deserialized from a dictionary.
    """
    kwargs.update({'trusted': kwargs.get('trusted', False)})
    if self.deserializer is not None:
        return self.deserializer(value, **kwargs)
    if value is None:
        return None
    if isinstance(value, string_types):
        return value
    if issubclass(self.instance_class, base.HasProperties):
        return self.instance_class.deserialize(value, **kwargs)
    return self.from_json(value, **kwargs)
python
{ "resource": "" }
q15910
ImagePNG.validate
train
def validate(self, instance, value):
    """Checks if value is an open PNG file, valid filename, or png.Image

    Returns an open bytestream of the image
    """
    # Pass if already validated
    if getattr(value, '__valid__', False):
        return value
    # Validate that value is PNG
    if isinstance(value, png.Image):
        pass
    else:
        value = super(ImagePNG, self).validate(instance, value)
        try:
            png.Reader(value).validate_signature()
        except png.FormatError:
            self.error(instance, value, extra='Open file is not PNG.')
        value.seek(0)
    # Write input to new bytestream
    output = BytesIO()
    output.name = self.filename
    output.__valid__ = True
    if isinstance(value, png.Image):
        value.save(output)
    else:
        fid = value
        fid.seek(0)
        output.write(fid.read())
        fid.close()
    output.seek(0)
    return output
python
{ "resource": "" }
q15911
ImagePNG.to_json
train
def to_json(value, **kwargs):
    """Convert a PNG Image to base64-encoded JSON

    to_json assumes that value has passed validation.
    """
    b64rep = base64.b64encode(value.read())
    value.seek(0)
    jsonrep = '{preamble}{b64}'.format(
        preamble=PNG_PREAMBLE,
        b64=b64rep.decode(),
    )
    return jsonrep
python
{ "resource": "" }
q15912
ImagePNG.from_json
train
def from_json(value, **kwargs):
    """Convert a PNG Image from base64-encoded JSON"""
    if not value.startswith(PNG_PREAMBLE):
        raise ValueError('Not a valid base64-encoded PNG image')
    infile = BytesIO()
    rep = base64.b64decode(value[len(PNG_PREAMBLE):].encode('utf-8'))
    infile.write(rep)
    infile.seek(0)
    return infile
python
{ "resource": "" }
q15913
validate
train
def validate(schema, value, noun='value'):
    """
    Checks the value against the schema, and raises ValidationError if
    validation fails.
    """
    errors = schema.errors(value)
    if errors:
        error_details = ''
        for error in errors:
            if error.pointer:
                error_details += ' - %s: %s\n' % (error.pointer, error.message)
            else:
                error_details += ' - %s\n' % error.message
        raise ValidationError('Invalid %s:\n%s' % (noun, error_details))
python
{ "resource": "" }
q15914
validate_call
train
def validate_call(kwargs, returns, is_method=False):
    """
    Decorator which runs validation on a callable's arguments and its
    return value. Pass a schema for the kwargs and for the return value.
    Positional arguments are not supported.
    """
    def decorator(func):
        @wraps(func)
        def inner(*passed_args, **passed_kwargs):
            # Enforce no positional args.
            # The first argument of an instance method or class method is
            # always positional, so we need to make an exception for them.
            # Static methods are still validated according to the standard
            # rules. This check happens before methods are bound, so an
            # instance method is still a regular function here.
            max_allowed_passed_args_len = 0
            if is_method and type(func) in (types.FunctionType, classmethod):
                max_allowed_passed_args_len = 1
            if len(passed_args) > max_allowed_passed_args_len:
                raise PositionalError('You cannot call this with positional arguments.')

            # Validate keyword arguments
            validate(kwargs, passed_kwargs, 'keyword arguments')

            # Call callable
            return_value = func(*passed_args, **passed_kwargs)

            # Validate return value
            validate(returns, return_value, 'return value')

            return return_value

        inner.__wrapped__ = func
        # caveat: checking for f.__validated__ will only work if
        # @validate_call is not masked by other decorators except for
        # @classmethod or @staticmethod
        inner.__validated__ = True
        return inner
    return decorator
python
{ "resource": "" }
q15915
PattonResults.dump
train
def dump(self):
    """Dump to file"""
    # No dump file selected -> do nothing
    if self.running_config.output_file:
        # Determine file format
        _, extension = op.splitext(self.running_config.output_file)
        extension = extension.replace(".", "")

        if extension not in self.ALLOWED_DUMP_FORMATS:
            raise PCException(
                f"Extension of dump file is not available. "
                f"Allowed extensions are: "
                f"{', '.join(self.ALLOWED_DUMP_FORMATS)}")

        with open(self.running_config.output_file, "w") as f:
            if extension == "csv":
                csv_writer = csv.writer(f)
                csv_writer.writerow(("# Name", "CPE", "CVE", "Score",
                                     "Summary"))
                csv_writer.writerows(self._to_csv())
            elif extension == "json":
                json.dump(self.results, f, indent=4, sort_keys=True)
            elif extension == "raw":
                f.write(self._to_table())
python
{ "resource": "" }
q15916
on_init
train
def on_init(app):  # pylint: disable=unused-argument
    """
    Run sphinx-apidoc and swg2rst after Sphinx initialization.

    Read the Docs won't run tox or custom shell commands, so we need this
    to avoid checking in the generated reStructuredText files.
    """
    docs_path = os.path.abspath(os.path.dirname(__file__))
    root_path = os.path.abspath(os.path.join(docs_path, '..'))
    apidoc_path = 'sphinx-apidoc'
    swg2rst_path = 'swg2rst'
    if hasattr(sys, 'real_prefix'):  # Check to see if we are in a virtualenv
        # If we are, assemble the path manually
        bin_path = os.path.abspath(os.path.join(sys.prefix, 'bin'))
        apidoc_path = os.path.join(bin_path, apidoc_path)
        swg2rst_path = os.path.join(bin_path, swg2rst_path)
    check_call([apidoc_path, '-o', docs_path,
                os.path.join(root_path, 'user_tasks'),
                os.path.join(root_path, 'user_tasks/migrations')])
    json_path = os.path.join(docs_path, 'swagger.json')
    rst_path = os.path.join(docs_path, 'rest_api.rst')
    check_call([swg2rst_path, json_path, '-f', 'rst', '-o', rst_path])
python
{ "resource": "" }
q15917
_get_or_create_group_parent
train
def _get_or_create_group_parent(message_body, user_id):
    """
    Determine if the given task belongs to a group or not, and if so, get
    or create a status record for the group.

    Arguments:
        message_body (dict): The body of the before_task_publish signal
            for the task in question
        user_id (int): The primary key of the user model record for the
            user who triggered the task. (If using a custom user model,
            this may not be an integer.)

    Returns
    -------
        UserTaskStatus: The status record for the containing group, or
            `None` if there isn't one
    """
    parent_id = message_body.get('taskset', None)
    if not parent_id:
        # Not part of a group
        return None
    parent_class = 'celery.group'
    parent_name = message_body['kwargs'].get('user_task_name', '')
    parent, _ = UserTaskStatus.objects.get_or_create(
        task_id=parent_id,
        defaults={'is_container': True, 'name': parent_name,
                  'task_class': parent_class, 'total_steps': 0,
                  'user_id': user_id})
    if parent_name and not parent.name:
        parent.name = parent_name
        parent.save(update_fields={'name', 'modified'})
    return parent
python
{ "resource": "" }
q15918
_get_user_id
train
def _get_user_id(arguments_dict):
    """
    Get and validate the `user_id` argument to a task derived from
    `UserTaskMixin`.

    Arguments:
        arguments_dict (dict): The parsed positional and keyword
            arguments to the task

    Returns
    -------
        int: The primary key of a user record (may not be an int if
            using a custom user model)
    """
    if 'user_id' not in arguments_dict:
        raise TypeError('Each invocation of a UserTaskMixin subclass must include the user_id')
    user_id = arguments_dict['user_id']
    try:
        get_user_model().objects.get(pk=user_id)
    except (ValueError, get_user_model().DoesNotExist):
        raise TypeError('Invalid user_id: {}'.format(user_id))
    return user_id
python
{ "resource": "" }
q15919
colorbar
train
def colorbar(height, length, colormap):
    """Return the channels of a colorbar.
    """
    cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1))
    cbar = (cbar * (colormap.values.max() - colormap.values.min())
            + colormap.values.min())
    return colormap.colorize(cbar)
python
{ "resource": "" }
q15920
palettebar
train
def palettebar(height, length, colormap):
    """Return the channels of a palettebar.
    """
    cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1))
    cbar = (cbar * (colormap.values.max() + 1 - colormap.values.min())
            + colormap.values.min())
    return colormap.palettize(cbar)
python
{ "resource": "" }
q15921
Colormap.to_rio
train
def to_rio(self):
    """Converts the colormap to a rasterio colormap.
    """
    self.colors = (((self.colors * 1.0 - self.colors.min()) /
                    (self.colors.max() - self.colors.min())) * 255)
    return dict(zip(self.values, tuple(map(tuple, self.colors))))
python
{ "resource": "" }
q15922
StatusViewSet.cancel
train
def cancel(self, request, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Cancel the task associated with the specified status record.

    Arguments:
        request (Request): A POST including a task status record ID

    Returns
    -------
        Response: A JSON response indicating whether the cancellation
            succeeded or not
    """
    status = self.get_object()
    status.cancel()
    serializer = StatusSerializer(status, context={'request': request})
    return Response(serializer.data)
python
{ "resource": "" }
q15923
swagger
train
def swagger(request):  # pylint: disable=unused-argument
    """
    Render Swagger UI and the underlying Open API schema JSON file.
    """
    generator = schemas.SchemaGenerator(title='django-user-tasks REST API')
    return response.Response(generator.get_schema())
python
{ "resource": "" }
q15924
ConditionalOpenAPIRenderer.render
train
def render(self, data, accepted_media_type=None, renderer_context=None):
    """
    Render the appropriate Open API JSON file.
    """
    if 'SWAGGER_JSON_PATH' in os.environ:
        with io.open(os.environ['SWAGGER_JSON_PATH'], 'rb') as f:
            return f.read()
    else:
        return super(ConditionalOpenAPIRenderer, self).render(
            data, accepted_media_type, renderer_context)
python
{ "resource": "" }
q15925
add_rules
train
def add_rules():
    """
    Use the rules provided in this module to implement authorization
    checks for the ``django-user-tasks`` models.

    These rules allow only superusers and the user who triggered a task
    to view its status or artifacts, cancel the task, or delete the
    status information and all its related artifacts. Only superusers
    are allowed to directly modify or delete an artifact (or to modify a
    task status record).
    """
    rules.add_perm('user_tasks.view_usertaskstatus', STATUS_PERMISSION)
    rules.add_perm('user_tasks.cancel_usertaskstatus', STATUS_PERMISSION)
    rules.add_perm('user_tasks.change_usertaskstatus', rules.predicates.is_superuser)
    rules.add_perm('user_tasks.delete_usertaskstatus', STATUS_PERMISSION)
    rules.add_perm('user_tasks.view_usertaskartifact', ARTIFACT_PERMISSION)
    rules.add_perm('user_tasks.change_usertaskartifact', rules.predicates.is_superuser)
    rules.add_perm('user_tasks.delete_usertaskartifact', rules.predicates.is_superuser)
python
{ "resource": "" }
q15926
ArtifactFilterBackend.filter_queryset
train
def filter_queryset(self, request, queryset, view):
    """
    Filter out any artifacts which the requesting user does not have
    permission to view.
    """
    if request.user.is_superuser:
        return queryset
    return queryset.filter(status__user=request.user)
python
{ "resource": "" }
q15927
_image2array
train
def _image2array(filepath):
    '''
    Utility function that converts an image file into 3 numpy arrays that
    can be fed into geo_image.GeoImage in order to generate a PyTROLL
    GeoImage object.
    '''
    im = Pimage.open(filepath).convert('RGB')
    (width, height) = im.size
    _r = np.array(list(im.getdata(0))) / 255.0
    _g = np.array(list(im.getdata(1))) / 255.0
    _b = np.array(list(im.getdata(2))) / 255.0
    _r = _r.reshape((height, width))
    _g = _g.reshape((height, width))
    _b = _b.reshape((height, width))
    return _r, _g, _b
python
{ "resource": "" }
q15928
ycbcr2rgb
train
def ycbcr2rgb(y__, cb_, cr_):
    """Convert the three YCbCr channels to RGB channels.
    """
    kb_ = 0.114
    kr_ = 0.299

    r__ = 2 * cr_ / (1 - kr_) + y__
    b__ = 2 * cb_ / (1 - kb_) + y__
    g__ = (y__ - kr_ * r__ - kb_ * b__) / (1 - kr_ - kb_)

    return r__, g__, b__
python
{ "resource": "" }
q15929
Image._add_channel
train
def _add_channel(self, chn, color_min, color_max):
    """Adds a channel to the image object
    """
    if isinstance(chn, np.ma.core.MaskedArray):
        chn_data = chn.data
        chn_mask = chn.mask
    else:
        chn_data = np.array(chn)
        chn_mask = False
    scaled = ((chn_data - color_min) * 1.0 /
              (color_max - color_min))
    self.channels.append(np.ma.array(scaled, mask=chn_mask))
python
{ "resource": "" }
q15930
Image.is_empty
train
def is_empty(self):
    """Checks for an empty image.
    """
    if (((self.channels == []) and (not self.shape == (0, 0))) or
            ((not self.channels == []) and (self.shape == (0, 0)))):
        raise RuntimeError("Channels-shape mismatch.")
    return self.channels == [] and self.shape == (0, 0)
python
{ "resource": "" }
q15931
Image._pngmeta
train
def _pngmeta(self):
    """It will return GeoImage.tags as a PNG metadata object.

    Inspired by: public domain, Nick Galbreath
    http://blog.modp.com/2007/08/python-pil-and-png-metadata-take-2.html
    """
    reserved = ('interlace', 'gamma', 'dpi', 'transparency', 'aspect')

    try:
        tags = self.tags
    except AttributeError:
        tags = {}

    # Undocumented class
    from PIL import PngImagePlugin
    meta = PngImagePlugin.PngInfo()

    # Copy from tags to new dict
    for k__, v__ in tags.items():
        if k__ not in reserved:
            meta.add_text(k__, v__, 0)

    return meta
python
{ "resource": "" }
q15932
Image._rgb2ycbcr
train
def _rgb2ycbcr(self, mode):
    """Convert the image from RGB mode to YCbCr."""
    self._check_modes(("RGB", "RGBA"))
    (self.channels[0], self.channels[1], self.channels[2]) = \
        rgb2ycbcr(self.channels[0],
                  self.channels[1],
                  self.channels[2])
    if self.fill_value is not None:
        self.fill_value[0:3] = rgb2ycbcr(self.fill_value[0],
                                         self.fill_value[1],
                                         self.fill_value[2])
    self.mode = mode
python
{ "resource": "" }
q15933
Image._ycbcr2rgb
train
def _ycbcr2rgb(self, mode):
    """Convert the image from YCbCr mode to RGB.
    """
    self._check_modes(("YCbCr", "YCbCrA"))
    (self.channels[0], self.channels[1], self.channels[2]) = \
        ycbcr2rgb(self.channels[0],
                  self.channels[1],
                  self.channels[2])
    if self.fill_value is not None:
        self.fill_value[0:3] = ycbcr2rgb(self.fill_value[0],
                                         self.fill_value[1],
                                         self.fill_value[2])
    self.mode = mode
python
{ "resource": "" }
q15934
Image._to_p
train
def _to_p(self, mode):
    """Convert the image to P or PA mode.
    """
    if self.mode.endswith("A"):
        chans = self.channels[:-1]
        alpha = self.channels[-1]
        self._secondary_mode = self.mode[:-1]
    else:
        chans = self.channels
        alpha = None
        self._secondary_mode = self.mode

    palette = []
    selfmask = chans[0].mask
    for chn in chans[1:]:
        selfmask = np.ma.mask_or(selfmask, chn.mask)

    new_chn = np.ma.zeros(self.shape, dtype=int)
    color_nb = 0

    for i in range(self.height):
        for j in range(self.width):
            current_col = tuple([chn[i, j] for chn in chans])
            try:
                # the 'idx =' assignment was missing in the flattened
                # source; without it, a palette hit leaves idx unbound
                idx = next(idx for idx in range(len(palette))
                           if palette[idx] == current_col)
            except StopIteration:
                idx = color_nb
                palette.append(current_col)
                color_nb = color_nb + 1
            new_chn[i, j] = idx

    if self.fill_value is not None:
        if self.mode.endswith("A"):
            current_col = tuple(self.fill_value[:-1])
            fill_alpha = [self.fill_value[-1]]
        else:
            current_col = tuple(self.fill_value)
            fill_alpha = []
        try:
            idx = next(idx for idx in range(len(palette))
                       if palette[idx] == current_col)
        except StopIteration:
            idx = color_nb
            palette.append(current_col)
            color_nb = color_nb + 1
        self.fill_value = [idx] + fill_alpha

    new_chn.mask = selfmask
    self.palette = palette
    if alpha is None:
        self.channels = [new_chn]
    else:
        self.channels = [new_chn, alpha]
    self.mode = mode
python
{ "resource": "" }
q15935
Image._from_p
train
def _from_p(self, mode):
    """Convert the image from P or PA mode.
    """
    self._check_modes(("P", "PA"))

    if self.mode.endswith("A"):
        alpha = self.channels[-1]
    else:
        alpha = None

    chans = []
    cdfs = []
    color_chan = self.channels[0]
    for i in range(len(self.palette[0])):
        cdfs.append(np.zeros(len(self.palette)))
        for j in range(len(self.palette)):
            cdfs[i][j] = self.palette[j][i]
        new_chn = np.ma.array(np.interp(color_chan,
                                        np.arange(len(self.palette)),
                                        cdfs[i]),
                              mask=color_chan.mask)
        chans.append(new_chn)

    if self.fill_value is not None:
        if alpha is not None:
            fill_alpha = self.fill_value[-1]
            self.fill_value = list(self.palette[int(self.fill_value[0])])
            self.fill_value += [fill_alpha]
        else:
            self.fill_value = list(self.palette[int(self.fill_value[0])])

    self.mode = self._secondary_mode
    self.channels = chans
    if alpha is not None:
        self.channels.append(alpha)
        self.mode = self.mode + "A"

    self.convert(mode)
python
{ "resource": "" }
q15936
Image._rgb2l
train
def _rgb2l(self, mode):
    """Convert from RGB to monochrome L.
    """
    self._check_modes(("RGB", "RGBA"))

    kb_ = 0.114
    kr_ = 0.299

    r__ = self.channels[0]
    g__ = self.channels[1]
    b__ = self.channels[2]
    y__ = kr_ * r__ + (1 - kr_ - kb_) * g__ + kb_ * b__

    if self.fill_value is not None:
        self.fill_value = ([rgb2ycbcr(self.fill_value[0],
                                      self.fill_value[1],
                                      self.fill_value[2])[0]] +
                           self.fill_value[3:])

    self.channels = [y__] + self.channels[3:]
    self.mode = mode
python
{ "resource": "" }
q15937
Image._ycbcr2l
train
def _ycbcr2l(self, mode):
    """Convert from YCbCr to L.
    """
    self._check_modes(("YCbCr", "YCbCrA"))

    self.channels = [self.channels[0]] + self.channels[3:]
    if self.fill_value is not None:
        self.fill_value = [self.fill_value[0]] + self.fill_value[3:]
    self.mode = mode
python
{ "resource": "" }
q15938
Image._l2ycbcr
train
def _l2ycbcr(self, mode):
    """Convert from L to YCbCr.
    """
    self._check_modes(("L", "LA"))

    luma = self.channels[0]
    zeros = np.ma.zeros(luma.shape)
    zeros.mask = luma.mask
    self.channels = [luma, zeros, zeros] + self.channels[1:]

    if self.fill_value is not None:
        self.fill_value = [self.fill_value[0], 0, 0] + self.fill_value[1:]

    self.mode = mode
python
{ "resource": "" }
q15939
rgb2xyz
train
def rgb2xyz(r__, g__, b__):
    """RGB to XYZ
    """
    r2_ = r__ / 255.0
    g2_ = g__ / 255.0
    b2_ = b__ / 255.0

    def f__(arr):
        """Forward
        """
        return np.where(arr > 0.04045,
                        ((arr + 0.055) / 1.055) ** 2.4,
                        arr / 12.92)

    r2_ = f__(r2_) * 100
    g2_ = f__(g2_) * 100
    b2_ = f__(b2_) * 100

    x__ = r2_ * 0.4124 + g2_ * 0.3576 + b2_ * 0.1805
    y__ = r2_ * 0.2126 + g2_ * 0.7152 + b2_ * 0.0722
    z__ = r2_ * 0.0193 + g2_ * 0.1192 + b2_ * 0.9505

    return x__, y__, z__
python
{ "resource": "" }
q15940
xyz2rgb
train
def xyz2rgb(x__, y__, z__):
    """XYZ colorspace to RGB
    """
    x2_ = x__ / 100.0
    y2_ = y__ / 100.0
    z2_ = z__ / 100.0

    r__ = x2_ * 3.2406 + y2_ * -1.5372 + z2_ * -0.4986
    g__ = x2_ * -0.9689 + y2_ * 1.8758 + z2_ * 0.0415
    b__ = x2_ * 0.0557 + y2_ * -0.2040 + z2_ * 1.0570

    def finv(arr):
        """Inverse
        """
        return np.where(arr > 0.0031308,
                        1.055 * (arr ** (1.0 / 2.4)) - 0.055,
                        12.92 * arr)

    return finv(r__) * 255, finv(g__) * 255, finv(b__) * 255
python
{ "resource": "" }
q15941
purge_old_user_tasks
train
def purge_old_user_tasks():
    """
    Delete any UserTaskStatus and UserTaskArtifact records older than
    ``settings.USER_TASKS_MAX_AGE``.

    Intended to be run as a scheduled task.
    """
    limit = now() - settings.USER_TASKS_MAX_AGE
    # UserTaskArtifacts will also be removed via deletion cascading
    UserTaskStatus.objects.filter(created__lt=limit).delete()
python
{ "resource": "" }
q15942
color_interp
train
def color_interp(data):
    """Get the color interpretation for this image."""
    from rasterio.enums import ColorInterp as ci
    modes = {'L': [ci.gray],
             'LA': [ci.gray, ci.alpha],
             'YCbCr': [ci.Y, ci.Cb, ci.Cr],
             'YCbCrA': [ci.Y, ci.Cb, ci.Cr, ci.alpha]}

    try:
        mode = ''.join(data['bands'].values)
        return modes[mode]
    except KeyError:
        colors = {'R': ci.red,
                  'G': ci.green,
                  'B': ci.blue,
                  'A': ci.alpha,
                  'C': ci.cyan,
                  'M': ci.magenta,
                  'Y': ci.yellow,
                  'H': ci.hue,
                  'S': ci.saturation,
                  'L': ci.lightness,
                  'K': ci.black,
                  }
        return [colors[band] for band in data['bands'].values]
python
{ "resource": "" }
q15943
XRImage._correct_dims
train
def _correct_dims(data):
    """Standardize dimensions to bands, y, and x."""
    if not hasattr(data, 'dims'):
        raise TypeError("Data must have a 'dims' attribute.")

    # doesn't actually copy the data underneath
    # we don't want our operations to change the user's data
    data = data.copy()

    if 'y' not in data.dims or 'x' not in data.dims:
        if data.ndim != 2:
            raise ValueError("Data must have a 'y' and 'x' dimension")

        # rename dimensions so we can use them
        # don't rename 'x' or 'y' if they already exist
        if 'y' not in data.dims:
            # find a dimension that isn't 'x'
            old_dim = [d for d in data.dims if d != 'x'][0]
            data = data.rename({old_dim: 'y'})
        if 'x' not in data.dims:
            # find a dimension that isn't 'y'
            old_dim = [d for d in data.dims if d != 'y'][0]
            data = data.rename({old_dim: 'x'})

    if "bands" not in data.dims:
        if data.ndim <= 2:
            data = data.expand_dims('bands')
            data['bands'] = ['L']
        else:
            raise ValueError("No 'bands' dimension provided.")

    return data
python
{ "resource": "" }
q15944
XRImage._create_alpha
train
def _create_alpha(self, data, fill_value=None):
    """Create an alpha band DataArray object.

    If `fill_value` is provided and input data is an integer type
    then it is used to determine invalid "null" pixels instead of
    xarray's `isnull` and `notnull` methods.

    The returned array is 1 where data is valid, 0 where invalid.
    """
    not_alpha = [b for b in data.coords['bands'].values if b != 'A']
    null_mask = data.sel(bands=not_alpha)
    if np.issubdtype(data.dtype, np.integer) and fill_value is not None:
        null_mask = null_mask != fill_value
    else:
        null_mask = null_mask.notnull()
    # if any of the bands are valid, we don't want transparency
    null_mask = null_mask.any(dim='bands')
    null_mask = null_mask.expand_dims('bands')
    null_mask['bands'] = ['A']
    # match data dtype
    return null_mask
python
{ "resource": "" }
q15945
XRImage._add_alpha
train
def _add_alpha(self, data, alpha=None):
    """Create an alpha channel and concatenate it to the provided data.

    If ``data`` is an integer type then the alpha band will be scaled
    to use the smallest (min) value as fully transparent and the
    largest (max) value as fully opaque. For float types the alpha
    band spans 0 to 1.
    """
    null_mask = alpha if alpha is not None else self._create_alpha(data)
    # if we are using integer data, then alpha needs to be min-int to max-int
    # otherwise for floats we want 0 to 1
    if np.issubdtype(data.dtype, np.integer):
        # xarray sometimes upcasts this calculation, so cast again
        null_mask = self._scale_to_dtype(null_mask,
                                         data.dtype).astype(data.dtype)
    data = xr.concat([data, null_mask], dim="bands")
    return data
python
{ "resource": "" }
q15946
XRImage._scale_to_dtype
train
def _scale_to_dtype(self, data, dtype):
    """Scale provided data to dtype range assuming a 0-1 range.

    Float input data is assumed to be normalized to a 0 to 1 range.
    Integer input data is not scaled, only clipped. A float output
    type is not scaled since both outputs and inputs are assumed to
    be in the 0-1 range already.
    """
    if np.issubdtype(dtype, np.integer):
        if np.issubdtype(data, np.integer):
            # preserve integer data type
            data = data.clip(np.iinfo(dtype).min, np.iinfo(dtype).max)
        else:
            # scale float data (assumed to be 0 to 1) to full integer space
            dinfo = np.iinfo(dtype)
            data = data.clip(0, 1) * (dinfo.max - dinfo.min) + dinfo.min
        data = data.round()
    return data
python
{ "resource": "" }
q15947
XRImage._from_p
train
def _from_p(self, mode):
    """Convert the image from P or PA to RGB or RGBA."""
    self._check_modes(("P", "PA"))

    if not self.palette:
        raise RuntimeError("Can't convert palettized image, missing palette.")
    pal = np.array(self.palette)
    pal = da.from_array(pal, chunks=pal.shape)

    if pal.shape[1] == 4:
        # colormap's alpha overrides data alpha
        mode = "RGBA"
        alpha = None
    elif self.mode.endswith("A"):
        # add a new/fake 'bands' dimension to the end
        alpha = self.data.sel(bands="A").data[..., None]
        mode = mode + "A" if not mode.endswith("A") else mode
    else:
        alpha = None

    flat_indexes = self.data.sel(bands='P').data.ravel().astype('int64')
    dim_sizes = ((key, val) for key, val in self.data.sizes.items()
                 if key != 'bands')
    dims, new_shape = zip(*dim_sizes)
    dims = dims + ('bands',)
    new_shape = new_shape + (pal.shape[1],)
    new_data = pal[flat_indexes].reshape(new_shape)
    coords = dict(self.data.coords)
    coords["bands"] = list(mode)

    if alpha is not None:
        new_arr = da.concatenate((new_data, alpha), axis=-1)
        data = xr.DataArray(new_arr, coords=coords,
                            attrs=self.data.attrs, dims=dims)
    else:
        data = xr.DataArray(new_data, coords=coords,
                            attrs=self.data.attrs, dims=dims)

    return data
python
{ "resource": "" }
q15948
XRImage._finalize
train
def _finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False,
              cmap=None):
    """Wrapper around 'finalize' method for backwards compatibility."""
    import warnings
    warnings.warn("'_finalize' is deprecated, use 'finalize' instead.",
                  DeprecationWarning)
    return self.finalize(fill_value, dtype, keep_palette, cmap)
python
{ "resource": "" }
q15949
XRImage.finalize
train
def finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False,
             cmap=None):
    """Finalize the image to be written to an output file.

    This adds an alpha band or fills data with a fill_value (if
    specified). It also scales float data to the output range of the
    data type (0-255 for uint8, default). For integer input data this
    method assumes the data is already scaled to the proper desired
    range. It will still fill in invalid values and add an alpha band
    if needed. Integer input data's fill value is determined by a
    special ``_FillValue`` attribute in the ``DataArray`` ``.attrs``
    dictionary.
    """
    if keep_palette and not self.mode.startswith('P'):
        keep_palette = False

    if not keep_palette:
        if self.mode == "P":
            return self.convert("RGB").finalize(fill_value=fill_value,
                                                dtype=dtype,
                                                keep_palette=keep_palette,
                                                cmap=cmap)
        if self.mode == "PA":
            return self.convert("RGBA").finalize(fill_value=fill_value,
                                                 dtype=dtype,
                                                 keep_palette=keep_palette,
                                                 cmap=cmap)

    if np.issubdtype(dtype, np.floating) and fill_value is None:
        logger.warning("Image with floats cannot be transparent, so "
                       "setting fill_value to 0")
        fill_value = 0

    final_data = self.data
    # if the data are integers then this fill value will be used to
    # check for invalid values
    ifill = final_data.attrs.get('_FillValue') if np.issubdtype(
        final_data, np.integer) else None
    if not keep_palette:
        if fill_value is None and not self.mode.endswith('A'):
            # We don't have a fill value or an alpha, let's add an alpha
            alpha = self._create_alpha(final_data, fill_value=ifill)
            final_data = self._scale_to_dtype(final_data,
                                              dtype).astype(dtype)
            final_data = self._add_alpha(final_data, alpha=alpha)
        else:
            # scale float data to the proper dtype
            # this method doesn't cast yet so that we can keep track of
            # NULL values
            final_data = self._scale_to_dtype(final_data, dtype)
            # Add fill_value after all other calculations have been done
            # to make sure it is not scaled for the data type
            if ifill is not None and fill_value is not None:
                # cast fill value to output type so we don't change
                # data type
                fill_value = dtype(fill_value)
                # integer fields have special fill values
                final_data = final_data.where(final_data != ifill,
                                              dtype(fill_value))
            elif fill_value is not None:
                final_data = final_data.fillna(dtype(fill_value))

    final_data = final_data.astype(dtype)
    final_data.attrs = self.data.attrs

    return final_data, ''.join(final_data['bands'].values)
python
{ "resource": "" }
q15950
XRImage.xrify_tuples
train
def xrify_tuples(self, tup):
    """Make xarray.DataArray from tuple."""
    return xr.DataArray(tup,
                        dims=['bands'],
                        coords={'bands': self.data['bands']})
python
{ "resource": "" }
q15951
XRImage.gamma
train
def gamma(self, gamma=1.0):
    """Apply gamma correction to the channels of the image.

    If *gamma* is a tuple, then it should have as many elements as the
    channels of the image, and the gamma correction is applied
    elementwise. If *gamma* is a number, the same gamma correction is
    applied on every channel, if there are several channels in the
    image. The behaviour of :func:`gamma` is undefined outside the
    normal [0,1] range of the channels.
    """
    if isinstance(gamma, (list, tuple)):
        gamma = self.xrify_tuples(gamma)
    elif gamma == 1.0:
        return

    logger.debug("Applying gamma %s", str(gamma))
    attrs = self.data.attrs
    self.data = self.data.clip(min=0)
    self.data **= 1.0 / gamma
    self.data.attrs = attrs
python
{ "resource": "" }
q15952
XRImage.stretch
train
def stretch(self, stretch="crude", **kwargs):
    """Apply stretching to the current image.

    The value of *stretch* sets the type of stretching applied. The
    values "histogram", "linear", "crude" (or "crude-stretch") perform
    respectively histogram equalization, contrast stretching (with 5%
    cutoff on both sides), and contrast stretching without cutoff. The
    value "logarithmic" or "log" will do a logarithmic enhancement
    towards white. If a tuple or a list of two values is given as input,
    then a contrast stretching is performed with the values as cutoff.
    These values should be normalized in the range [0.0, 1.0].
    """
    logger.debug("Applying stretch %s with parameters %s",
                 stretch, str(kwargs))

    # FIXME: do not apply stretch to alpha channel
    if isinstance(stretch, (tuple, list)):
        if len(stretch) == 2:
            self.stretch_linear(cutoffs=stretch)
        else:
            raise ValueError(
                "Stretch tuple must have exactly two elements")
    elif stretch == "linear":
        self.stretch_linear(**kwargs)
    elif stretch == "histogram":
        self.stretch_hist_equalize(**kwargs)
    elif stretch in ["crude", "crude-stretch"]:
        self.crude_stretch(**kwargs)
    elif stretch in ["log", "logarithmic"]:
        self.stretch_logarithmic(**kwargs)
    elif stretch == "no":
        return
    elif isinstance(stretch, str):
        raise ValueError("Stretching method %s not recognized." % stretch)
    else:
        raise TypeError("Stretch parameter must be a string or a tuple.")
python
{ "resource": "" }
q15953
XRImage._compute_quantile
train
def _compute_quantile(data, dims, cutoffs):
    """Helper method for stretch_linear.

    Dask delayed functions must not be internal functions (i.e. created
    inside another function) if they are to be serializable on a
    multi-process scheduler.

    Quantile requires the data to be loaded since it is not supported
    on dask arrays yet.
    """
    # numpy doesn't get a 'quantile' function until 1.15
    # for better backwards compatibility we use xarray's version
    data_arr = xr.DataArray(data, dims=dims)
    # delayed will provide us the fully computed xarray with ndarray
    left, right = data_arr.quantile([cutoffs[0], 1. - cutoffs[1]],
                                    dim=['x', 'y'])
    logger.debug("Interval: left=%s, right=%s", str(left), str(right))
    return left.data, right.data
python
{ "resource": "" }
q15954
XRImage.stretch_linear
train
def stretch_linear(self, cutoffs=(0.005, 0.005)):
    """Stretch linearly the contrast of the current image.

    Use *cutoffs* for left and right trimming.
    """
    logger.debug("Perform a linear contrast stretch.")
    logger.debug("Calculate the histogram quantiles: ")
    logger.debug("Left and right quantiles: " +
                 str(cutoffs[0]) + " " + str(cutoffs[1]))

    cutoff_type = np.float64
    # numpy percentile (which quantile calls) returns 64-bit floats
    # unless the value is a higher order float
    if np.issubdtype(self.data.dtype, np.floating) and \
            np.dtype(self.data.dtype).itemsize > 8:
        cutoff_type = self.data.dtype
    left, right = dask.delayed(self._compute_quantile, nout=2)(
        self.data.data, self.data.dims, cutoffs)
    left_data = da.from_delayed(left,
                                shape=(self.data.sizes['bands'],),
                                dtype=cutoff_type)
    left = xr.DataArray(left_data, dims=('bands',),
                        coords={'bands': self.data['bands']})
    right_data = da.from_delayed(right,
                                 shape=(self.data.sizes['bands'],),
                                 dtype=cutoff_type)
    right = xr.DataArray(right_data, dims=('bands',),
                         coords={'bands': self.data['bands']})

    self.crude_stretch(left, right)
python
{ "resource": "" }
q15955
XRImage.crude_stretch
train
def crude_stretch(self, min_stretch=None, max_stretch=None):
    """Perform simple linear stretching.

    This is done without any cutoff on the current image and normalizes
    to the [0,1] range.
    """
    if min_stretch is None:
        non_band_dims = tuple(x for x in self.data.dims if x != 'bands')
        min_stretch = self.data.min(dim=non_band_dims)
    if max_stretch is None:
        non_band_dims = tuple(x for x in self.data.dims if x != 'bands')
        max_stretch = self.data.max(dim=non_band_dims)

    if isinstance(min_stretch, (list, tuple)):
        min_stretch = self.xrify_tuples(min_stretch)
    if isinstance(max_stretch, (list, tuple)):
        max_stretch = self.xrify_tuples(max_stretch)

    delta = (max_stretch - min_stretch)
    if isinstance(delta, xr.DataArray):
        # fillna if delta is NaN
        scale_factor = (1.0 / delta).fillna(0)
    else:
        scale_factor = 1.0 / delta
    attrs = self.data.attrs
    self.data -= min_stretch
    self.data *= scale_factor
    self.data.attrs = attrs
python
{ "resource": "" }
q15956
XRImage.stretch_hist_equalize
train
def stretch_hist_equalize(self, approximate=False):
    """Stretch the current image's colors through histogram equalization.

    Args:
        approximate (bool): Use a faster less-accurate percentile
            calculation. At the time of writing the dask version of
            `percentile` is not as accurate as the numpy version.
            This will likely change in the future. Current dask
            version 0.17.
    """
    logger.info("Perform a histogram equalized contrast stretch.")

    nwidth = 2048.
    logger.debug("Make histogram bins having equal amount of data, " +
                 "using numpy percentile function:")

    def _band_hist(band_data):
        cdf = da.arange(0., 1., 1. / nwidth, chunks=nwidth)
        if approximate:
            # need a 1D array
            flat_data = band_data.ravel()
            # replace with nanpercentile in the future, if available
            # dask < 0.17 returns all NaNs for this
            bins = da.percentile(flat_data[da.notnull(flat_data)],
                                 cdf * 100.)
        else:
            bins = dask.delayed(np.nanpercentile)(band_data, cdf * 100.)
            bins = da.from_delayed(bins, shape=(nwidth,), dtype=cdf.dtype)
        res = dask.delayed(np.interp)(band_data, bins, cdf)
        res = da.from_delayed(res, shape=band_data.shape,
                              dtype=band_data.dtype)
        return res

    band_results = []
    for band in self.data['bands'].values:
        if band == 'A':
            continue
        band_data = self.data.sel(bands=band)
        res = _band_hist(band_data.data)
        band_results.append(res)

    if 'A' in self.data.coords['bands'].values:
        band_results.append(self.data.sel(bands='A'))
    self.data.data = da.stack(band_results,
                              axis=self.data.dims.index('bands'))
python
{ "resource": "" }
q15957
XRImage.stretch_weber_fechner
train
def stretch_weber_fechner(self, k, s0):
    """Stretch according to the Weber-Fechner law.

    p = k * ln(S / S0)

    p is perception, S is the stimulus, S0 is the stimulus threshold
    (the highest unperceived stimulus), and k is the factor.
    """
    attrs = self.data.attrs
    self.data = k * xu.log(self.data / s0)
    self.data.attrs = attrs
python
{ "resource": "" }
q15958
XRImage.colorize
train
def colorize(self, colormap):
    """Colorize the current image using `colormap`.

    .. note::

        Works only on "L" or "LA" images.

    """
    if self.mode not in ("L", "LA"):
        raise ValueError("Image should be grayscale to colorize")
    if self.mode == "LA":
        alpha = self.data.sel(bands=['A'])
    else:
        alpha = None
    l_data = self.data.sel(bands=['L'])

    def _colorize(l_data, colormap):
        # 'l_data' is (1, rows, cols)
        # 'channels' will be a list of 3 (RGB) or 4 (RGBA) arrays
        channels = colormap.colorize(l_data)
        return np.concatenate(channels, axis=0)

    new_data = l_data.data.map_blocks(
        _colorize, colormap,
        chunks=(colormap.colors.shape[1],) + l_data.data.chunks[1:],
        dtype=np.float64)

    if colormap.colors.shape[1] == 4:
        mode = "RGBA"
    elif alpha is not None:
        new_data = da.concatenate([new_data, alpha.data], axis=0)
        mode = "RGBA"
    else:
        mode = "RGB"

    # copy the coordinates so we don't affect the original
    coords = dict(self.data.coords)
    coords['bands'] = list(mode)
    attrs = self.data.attrs
    dims = self.data.dims
    self.data = xr.DataArray(new_data, coords=coords, attrs=attrs,
                             dims=dims)
python
{ "resource": "" }
q15959
XRImage.palettize
train
def palettize(self, colormap):
    """Palettize the current image using `colormap`.

    .. note::

        Works only on "L" or "LA" images.

    """
    if self.mode not in ("L", "LA"):
        raise ValueError("Image should be grayscale to palettize")
    l_data = self.data.sel(bands=['L'])

    def _palettize(data):
        # returns data and palette, only need data
        return colormap.palettize(data)[0]

    new_data = l_data.data.map_blocks(_palettize, dtype=l_data.dtype)
    self.palette = tuple(colormap.colors)

    if self.mode == "L":
        mode = "P"
    else:
        mode = "PA"
        new_data = da.concatenate([new_data,
                                   self.data.sel(bands=['A'])], axis=0)

    self.data.data = new_data
    self.data.coords['bands'] = list(mode)
python
{ "resource": "" }
q15960
_path_from_env
train
def _path_from_env(variable: str, default: Path) -> Path:
    """Read an environment variable as a path.

    The environment variable with the specified name is read, and its
    value returned as a path. If the environment variable is not set, or
    set to the empty string, the default value is returned.

    Parameters
    ----------
    variable : str
        Name of the environment variable.
    default : Path
        Default value.

    Returns
    -------
    Path
        Value from environment or default.

    """
    # TODO(srstevenson): Use assignment expression in Python 3.8.
    value = os.environ.get(variable)
    if value:
        return Path(value)
    return default
python
{ "resource": "" }
q15961
_paths_from_env
train
def _paths_from_env(variable: str, default: List[Path]) -> List[Path]:
    """Read an environment variable as a list of paths.

    The environment variable with the specified name is read, and its
    value split on colons and returned as a list of paths. If the
    environment variable is not set, or set to the empty string, the
    default value is returned.

    Parameters
    ----------
    variable : str
        Name of the environment variable.
    default : List[Path]
        Default value.

    Returns
    -------
    List[Path]
        Value from environment or default.

    """
    # TODO(srstevenson): Use assignment expression in Python 3.8.
    value = os.environ.get(variable)
    if value:
        return [Path(path) for path in value.split(":")]
    return default
python
{ "resource": "" }
q15962
Basic.add
train
def add(self, user, password):
    """ Adds a user with password """
    if self.__contains__(user):
        raise UserExists
    self.new_users[user] = self._encrypt_password(password) + "\n"
python
{ "resource": "" }
q15963
Basic.pop
train
def pop(self, user):
    """ Deletes a user """
    if not self.__contains__(user):
        raise UserNotExists
    self.new_users.pop(user)
python
{ "resource": "" }
q15964
Basic.change_password
train
def change_password(self, user, password):
    """ Changes user password """
    if not self.__contains__(user):
        raise UserNotExists
    self.new_users[user] = self._encrypt_password(password) + "\n"
python
{ "resource": "" }
q15965
Basic._encrypt_password
train
def _encrypt_password(self, password):
    """Encrypt the password for the given mode """
    if self.encryption_mode.lower() == 'crypt':
        return self._crypt_password(password)
    elif self.encryption_mode.lower() == 'md5':
        return self._md5_password(password)
    elif self.encryption_mode.lower() == 'md5-base':
        return self._md5_base_password(password)
    else:
        raise UnknownEncryptionMode(self.encryption_mode)
python
{ "resource": "" }
q15966
Group.add_user
train
def add_user(self, user, group):
    """ Adds user to a group """
    if self.is_user_in(user, group):
        raise UserAlreadyInAGroup
    self.new_groups.add(group, user)
python
{ "resource": "" }
q15967
Group.delete_user
train
def delete_user(self, user, group):
    """ Deletes user from group """
    if not self.__contains__(group):
        raise GroupNotExists
    if not self.is_user_in(user, group):
        raise UserNotInAGroup
    self.new_groups.popvalue(group, user)
python
{ "resource": "" }
q15968
Swauth._get_concealed_token
train
def _get_concealed_token(self, token):
    """Returns hashed token to be used as object name in Swift.

    Tokens are stored in the auth account, but object names are visible
    in Swift logs. Object names are hashed from the token.
    """
    enc_key = "%s:%s:%s" % (HASH_PATH_PREFIX, token, HASH_PATH_SUFFIX)
    return sha512(enc_key).hexdigest()
python
{ "resource": "" }
q15969
Swauth.authorize
train
def authorize(self, req):
    """Returns None if the request is authorized to continue or a
    standard WSGI response callable if not.
    """
    try:
        version, account, container, obj = split_path(req.path, 1, 4, True)
    except ValueError:
        return HTTPNotFound(request=req)
    if not account or not account.startswith(self.reseller_prefix):
        return self.denied_response(req)
    user_groups = (req.remote_user or '').split(',')
    if '.reseller_admin' in user_groups and \
            account != self.reseller_prefix and \
            account[len(self.reseller_prefix)] != '.':
        req.environ['swift_owner'] = True
        return None
    if account in user_groups and \
            (req.method not in ('DELETE', 'PUT') or container):
        # If the user is admin for the account and is not trying to do an
        # account DELETE or PUT...
        req.environ['swift_owner'] = True
        return None
    if (req.environ.get('swift_sync_key') and
            req.environ['swift_sync_key'] ==
            req.headers.get('x-container-sync-key', None) and
            'x-timestamp' in req.headers and
            (req.remote_addr in self.allowed_sync_hosts or
             get_remote_client(req) in self.allowed_sync_hosts)):
        return None
    referrers, groups = parse_acl(getattr(req, 'acl', None))
    if referrer_allowed(req.referer, referrers):
        if obj or '.rlistings' in groups:
            return None
        return self.denied_response(req)
    if not req.remote_user:
        return self.denied_response(req)
    for user_group in user_groups:
        if user_group in groups:
            return None
    return self.denied_response(req)
python
{ "resource": "" }
q15970
Swauth.denied_response
train
def denied_response(self, req):
    """Returns a standard WSGI response callable with the status of 403
    or 401 depending on whether the REMOTE_USER is set or not.
    """
    if not hasattr(req, 'credentials_valid'):
        req.credentials_valid = None
    if req.remote_user or req.credentials_valid:
        return HTTPForbidden(request=req)
    else:
        return HTTPUnauthorized(request=req)
python
{ "resource": "" }
q15971
Swauth.is_user_reseller_admin
train
def is_user_reseller_admin(self, req, account, user):
    """Returns True if the user is a .reseller_admin.

    :param account: account the user is part of
    :param user: the user
    :returns: True if the user is a .reseller_admin, False if the user
              is not a .reseller_admin, and None if the user doesn't
              exist.
    """
    req.credentials_valid = True
    user_json = self.get_user_detail(req, account, user)
    if user_json is None:
        req.credentials_valid = False
        return False
    user_detail = json.loads(user_json)
    return '.reseller_admin' in (g['name'] for g in user_detail['groups'])
python
{ "resource": "" }
q15972
Swauth.get_itoken
train
def get_itoken(self, env):
    """Returns the current internal token to use for the auth system's
    own actions with other services. Each process will create its own
    itoken and the token will be deleted and recreated based on the
    token_life configuration value. The itoken information is stored in
    memcache because the auth process that is asked by Swift to validate
    the token may not be the same as the auth process that created the
    token.
    """
    if not self.itoken or self.itoken_expires < time() or \
            env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \
            TRUE_VALUES:
        self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex)
        memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken)
        self.itoken_expires = time() + self.token_life
        memcache_client = cache_from_env(env)
        if not memcache_client:
            raise Exception(
                'No memcache set up; required for Swauth middleware')
        memcache_client.set(
            memcache_key,
            (self.itoken_expires,
             '.auth,.reseller_admin,%s.auth' % self.reseller_prefix),
            time=self.token_life)
    return self.itoken
python
{ "resource": "" }
q15973
Swauth.get_admin_detail
train
def get_admin_detail(self, req):
    """Returns the dict for the user specified as the admin in the
    request with the addition of an `account` key set to the admin
    user's account.

    :param req: The swob request to retrieve X-Auth-Admin-User and
                X-Auth-Admin-Key from.
    :returns: The dict for the admin user with the addition of the
              `account` key.
    """
    if ':' not in req.headers.get('x-auth-admin-user', ''):
        return None
    admin_account, admin_user = \
        req.headers.get('x-auth-admin-user').split(':', 1)
    user_json = self.get_user_detail(req, admin_account, admin_user)
    if user_json is None:
        return None
    admin_detail = json.loads(user_json)
    admin_detail['account'] = admin_account
    return admin_detail
python
{ "resource": "" }
q15974
Swauth.get_user_detail
train
def get_user_detail(self, req, account, user):
    """Returns the response body of a GET request for the specified user.

    The body is in JSON format and contains all user information.

    :param req: The swob request
    :param account: the account the user is a member of
    :param user: the user
    :returns: A JSON response with the user detail information, None
              if the user doesn't exist
    """
    path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
    resp = self.make_pre_authed_request(
        req.environ, 'GET', path).get_response(self.app)
    if resp.status_int == 404:
        return None
    if resp.status_int // 100 != 2:
        raise Exception('Could not get user object: %s %s' %
                        (path, resp.status))
    return resp.body
python
{ "resource": "" }
q15975
Swauth.credentials_match
train
def credentials_match(self, user_detail, key):
    """Returns True if the key is valid for the user_detail.

    It uses the auth_encoder matching the auth type the password was
    encoded with to check for a key match.

    :param user_detail: The dict for the user.
    :param key: The key to validate for the user.
    :returns: True if the key is valid for the user, False if not.
    """
    if user_detail:
        creds = user_detail.get('auth')
        try:
            auth_encoder, creds_dict = \
                swauth.authtypes.validate_creds(creds)
        except ValueError as e:
            self.logger.error('%s' % e.args[0])
            return False
    return user_detail and auth_encoder.match(key, creds, **creds_dict)
python
{ "resource": "" }
q15976
Swauth.is_user_changing_own_key
train
def is_user_changing_own_key(self, req, user):
    """Check if the user is changing their own key.

    :param req: The swob.Request to check. This contains the
                x-auth-admin-user and x-auth-admin-key headers, which are
                the credentials of the user sending the request.
    :param user: User whose password is to be changed.
    :returns: True if the user is changing their own key, False if not.
    """
    admin_detail = self.get_admin_detail(req)
    if not admin_detail:
        # The user does not exist
        return False
    # If the user is not an admin/reseller_admin and the x-auth-user-admin
    # or x-auth-user-reseller-admin headers are present in the request,
    # they may be attempting to escalate themselves to
    # admin/reseller_admin!
    if '.admin' not in (g['name'] for g in admin_detail['groups']):
        if req.headers.get('x-auth-user-admin') == 'true' or \
                req.headers.get('x-auth-user-reseller-admin') == 'true':
            return False
    if '.reseller_admin' not in \
            (g['name'] for g in admin_detail['groups']) and \
            req.headers.get('x-auth-user-reseller-admin') == 'true':
        return False
    return req.headers.get('x-auth-admin-user') == user and \
        self.credentials_match(admin_detail,
                               req.headers.get('x-auth-admin-key'))
python
{ "resource": "" }
q15977
Swauth.is_super_admin
train
def is_super_admin(self, req):
    """Returns True if the admin specified in the request represents the
    .super_admin.

    :param req: The swob.Request to check.
    :returns: True if .super_admin.
    """
    return req.headers.get('x-auth-admin-user') == '.super_admin' and \
        self.super_admin_key and \
        req.headers.get('x-auth-admin-key') == self.super_admin_key
python
{ "resource": "" }
q15978
Swauth.is_reseller_admin
train
def is_reseller_admin(self, req, admin_detail=None):
    """Returns True if the admin specified in the request represents a
    .reseller_admin.

    :param req: The swob.Request to check.
    :param admin_detail: The previously retrieved dict from
                         :func:`get_admin_detail` or None for this
                         function to retrieve the admin_detail itself.
    :returns: True if .reseller_admin.
    """
    req.credentials_valid = False
    if self.is_super_admin(req):
        return True
    if not admin_detail:
        admin_detail = self.get_admin_detail(req)
    if not self.credentials_match(admin_detail,
                                  req.headers.get('x-auth-admin-key')):
        return False
    req.credentials_valid = True
    return '.reseller_admin' in (g['name'] for g in admin_detail['groups'])
python
{ "resource": "" }
q15979
validate_creds
train
def validate_creds(creds):
    """Parse user credentials and validate that their format is correct.

    :param creds: User credentials
    :returns: Auth_type class instance and the parsed user credentials as
              a dict
    :raises ValueError: If the credential format is wrong (e.g. a bad
                        auth_type)
    """
    try:
        auth_type, auth_rest = creds.split(':', 1)
    except ValueError:
        raise ValueError("Missing ':' in %s" % creds)
    authtypes = sys.modules[__name__]
    auth_encoder = getattr(authtypes, auth_type.title(), None)
    if auth_encoder is None:
        raise ValueError('Invalid auth_type: %s' % auth_type)
    auth_encoder = auth_encoder()
    parsed_creds = dict(type=auth_type, salt=None, hash=None)
    parsed_creds.update(auth_encoder.validate(auth_rest))
    return auth_encoder, parsed_creds
python
{ "resource": "" }
q15980
Sha1.encode_w_salt
train
def encode_w_salt(self, salt, key):
    """Encodes a user key with salt into a particular format. The result
    of this method will be used internally.

    :param salt: Salt for hashing
    :param key: User's secret key
    :returns: A string representing user credentials
    """
    enc_key = '%s%s' % (salt, key)
    enc_val = hashlib.sha1(enc_key).hexdigest()
    return "sha1:%s$%s" % (salt, enc_val)
python
{ "resource": "" }
q15981
Sha1.encode
train
def encode(self, key):
    """Encodes a user key into a particular format. The result of this
    method will be used by swauth for storing user credentials.

    If a salt is not manually set in the conf file, a random salt will be
    generated and used.

    :param key: User's secret key
    :returns: A string representing user credentials
    """
    salt = self.salt or os.urandom(32).encode('base64').rstrip()
    return self.encode_w_salt(salt, key)
python
{ "resource": "" }
q15982
Sha1.match
train
def match(self, key, creds, salt, **kwargs):
    """Checks whether the user-provided key matches the user's stored
    credentials.

    :param key: User-supplied key
    :param creds: User's stored credentials
    :param salt: Salt for hashing
    :param kwargs: Extra keyword args, accepted for compatibility with
                   the other auth_type classes
    :returns: True if the supplied key is valid, False otherwise
    """
    return self.encode_w_salt(salt, key) == creds
python
{ "resource": "" }
q15983
Sha1.validate
train
def validate(self, auth_rest):
    """Validate that the credential format is correct for Sha1.

    :param auth_rest: The user credentials without the auth_type part
    :returns: Dict with the salt and hash parts of the user credentials
    :raises ValueError: If the credentials don't contain the delimiter
                        between the salt and the hash, or if the hash is
                        not 40 hexadecimal characters.
    """
    try:
        auth_salt, auth_hash = auth_rest.split('$')
    except ValueError:
        raise ValueError("Missing '$' in %s" % auth_rest)
    if len(auth_salt) == 0:
        raise ValueError("Salt must have non-zero length!")
    if len(auth_hash) != 40:
        raise ValueError("Hash must have 40 chars!")
    if not all(c in string.hexdigits for c in auth_hash):
        raise ValueError("Hash must be hexadecimal!")
    return dict(salt=auth_salt, hash=auth_hash)
python
{ "resource": "" }
q15984
ElecSlp.get_profile
train
def get_profile(self, ann_el_demand_per_sector):
    """Get the profiles for the given annual demand.

    Parameters
    ----------
    ann_el_demand_per_sector : dictionary
        Key: sector, value: annual value

    Returns
    -------
    pandas.DataFrame : Table with all profiles
    """
    # Scale the normalized profiles by the annual demand per sector and
    # multiply by 4 to convert quarter-hourly energy values to power.
    return self.slp_frame.multiply(pd.Series(
        ann_el_demand_per_sector), axis=1).dropna(how='all', axis=1) * 4
python
{ "resource": "" }
q15985
HeatBuilding.weighted_temperature
train
def weighted_temperature(self, how='geometric_series'):
    r"""A new temperature vector is generated containing a multi-day
    average temperature as needed in the load profile function.

    Parameters
    ----------
    how : string
        string which type to return ("geometric_series" or "mean")

    Notes
    -----
    Equation for the geometric series of the average temperature [1]_:

    .. math::

        T = \frac{T_{D} + 0.5 \cdot T_{D-1} + 0.25 \cdot T_{D-2} +
            0.125 \cdot T_{D-3}}{1 + 0.5 + 0.25 + 0.125}

    with :math:`T_D` = average temperature on the present day and
    :math:`T_{D-i}` = average temperature on the day - i.

    References
    ----------
    .. [1] `BDEW <https://www.avacon.de/cps/rde/xbcr/avacon/15-06-30_Leitfaden_Abwicklung_SLP_Gas.pdf>`_,
       BDEW Documentation for heat profiles.
    """
    # calculate the daily mean temperature
    temperature = self.df['temperature'].resample('D').mean().reindex(
        self.df.index).fillna(method='ffill').fillna(method='bfill')

    if how == 'geometric_series':
        temperature_mean = (temperature + 0.5 * np.roll(temperature, 24) +
                            0.25 * np.roll(temperature, 48) +
                            0.125 * np.roll(temperature, 72)) / 1.875
    elif how == 'mean':
        temperature_mean = temperature
    else:
        temperature_mean = None

    return temperature_mean
python
{ "resource": "" }
q15986
HeatBuilding.get_temperature_interval
train
def get_temperature_interval(self):
    """Appoints the corresponding temperature interval to each temperature
    in the temperature vector.
    """
    intervals = {
        -20: 1, -19: 1, -18: 1, -17: 1, -16: 1,
        -15: 1, -14: 2, -13: 2, -12: 2, -11: 2,
        -10: 2, -9: 3, -8: 3, -7: 3, -6: 3,
        -5: 3, -4: 4, -3: 4, -2: 4, -1: 4,
        0: 4, 1: 5, 2: 5, 3: 5, 4: 5,
        5: 5, 6: 6, 7: 6, 8: 6, 9: 6,
        10: 6, 11: 7, 12: 7, 13: 7, 14: 7,
        15: 7, 16: 8, 17: 8, 18: 8, 19: 8,
        20: 8, 21: 9, 22: 9, 23: 9, 24: 9,
        25: 9, 26: 10, 27: 10, 28: 10, 29: 10,
        30: 10, 31: 10, 32: 10, 33: 10, 34: 10,
        35: 10, 36: 10, 37: 10, 38: 10, 39: 10,
        40: 10}

    temperature_rounded = [ceil(i) for i in self.df['temperature_geo']]

    temperature_interval = [intervals[i] for i in temperature_rounded]

    return np.transpose(np.array(temperature_interval))
python
{ "resource": "" }
q15987
HeatBuilding.get_sf_values
train
def get_sf_values(self, filename='shlp_hour_factors.csv'):
    """Determine the h-values.

    Parameters
    ----------
    filename : string
        name of the file where the hour factors are stored
    """
    file = os.path.join(self.datapath, filename)
    hour_factors = pd.read_csv(file, index_col=0)
    hour_factors = hour_factors.query(
        'building_class=={0} and shlp_type=="{1}"'.format(
            self.building_class, self.shlp_type))

    # Join the two DataFrames on the columns 'hour' and 'hour_of_the_day'
    # or ['hour', 'weekday'] and ['hour_of_the_day', 'weekday'] if it is
    # not a residential slp.
    residential = self.building_class > 0
    left_cols = ['hour_of_day'] + (['weekday'] if not residential else [])
    right_cols = ['hour'] + (['weekday'] if not residential else [])
    sf_mat = pd.DataFrame.merge(
        hour_factors, self.df, left_on=left_cols, right_on=right_cols,
        how='outer', left_index=True).sort_index()

    # drop unnecessary columns
    drop_cols = (
        ['hour_of_day', 'hour', 'building_class', 'shlp_type',
         'date', 'temperature'] +
        (['weekday_x'] if residential else []) +
        (['weekday_y'] if residential else []) +
        (['weekday'] if not residential else []))
    sf_mat = sf_mat.drop(drop_cols, 1)

    # Determine the h values
    length = len(self.temperature)
    sf = (np.array(sf_mat)[np.array(list(range(0, length)))[:],
                           (self.get_temperature_interval() - 1)[:]])
    return np.array(list(map(float, sf[:])))
python
{ "resource": "" }
q15988
HeatBuilding.get_sigmoid_parameters
train
def get_sigmoid_parameters(self, filename='shlp_sigmoid_factors.csv'):
    """Retrieve the sigmoid parameters from csv files.

    Parameters
    ----------
    filename : string
        name of the file where the sigmoid factors are stored
    """
    file = os.path.join(self.datapath, filename)
    sigmoid = pd.read_csv(file, index_col=0)
    sigmoid = sigmoid.query(
        'building_class=={0} and '.format(self.building_class) +
        'shlp_type=="{0}" and '.format(self.shlp_type) +
        'wind_impact=={0}'.format(self.wind_class))

    a = float(sigmoid['parameter_a'])
    b = float(sigmoid['parameter_b'])
    c = float(sigmoid['parameter_c'])
    if self.ww_incl:
        d = float(sigmoid['parameter_d'])
    else:
        d = 0
    return a, b, c, d
python
{ "resource": "" }
q15989
HeatBuilding.get_weekday_parameters
train
def get_weekday_parameters(self, filename='shlp_weekday_factors.csv'):
    """Retrieve the weekday parameters from a csv file.

    Parameters
    ----------
    filename : string
        name of the file where the weekday factors are stored
    """
    file = os.path.join(self.datapath, filename)
    f_df = pd.read_csv(file, index_col=0)

    tmp_df = f_df.query('shlp_type=="{0}"'.format(self.shlp_type)).drop(
        'shlp_type', axis=1)

    tmp_df['weekdays'] = np.array(list(range(7))) + 1

    return np.array(list(map(float, pd.DataFrame.merge(
        tmp_df, self.df, left_on='weekdays', right_on='weekday',
        how='inner', left_index=True).sort_index()['wochentagsfaktor'])))
python
{ "resource": "" }
q15990
HeatBuilding.get_normalized_bdew_profile
train
def get_normalized_bdew_profile(self):
    """Calculation of the normalized hourly heat demand."""
    self.df['temperature'] = self.temperature.values
    self.df['temperature_geo'] = self.weighted_temperature(
        how='geometric_series')

    sf = self.get_sf_values()

    [a, b, c, d] = self.get_sigmoid_parameters()

    f = self.get_weekday_parameters()

    h = (a / (1 + (b / (self.df['temperature_geo'] - 40)) ** c) + d)
    kw = 1.0 / (sum(h * f) / 24)
    heat_profile_normalized = (kw * h * f * sf)

    return heat_profile_normalized
python
{ "resource": "" }
q15991
rename
train
def rename(oldPath, newPath, **kwargs):
    """Rename the file oldPath to newPath."""
    import os
    return os.rename(oldPath, newPath, **kwargs)
python
{ "resource": "" }
q15992
chown
train
def chown(path, user=None, group=None):
    """Change the ownership of path."""
    import os
    import pwd
    import grp
    uid = pwd.getpwnam(user).pw_uid if user else -1
    gid = grp.getgrnam(group).gr_gid if group else -1
    return os.chown(path, uid, gid)
python
{ "resource": "" }
q15993
chmod
train
def chmod(path, mode):
    """Change the permissions of path."""
    import os
    return os.chmod(path, mode)
python
{ "resource": "" }
q15994
exists
train
def exists(path, **kwargs):
    """Check if a file or directory exists."""
    import os.path
    return os.path.exists(path, **kwargs)
python
{ "resource": "" }
q15995
get
train
def get(path):
    """Read an object from file."""
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
    with open(path, 'rb') as file:
        return pickle.load(file)
python
{ "resource": "" }
q15996
put
train
def put(path, obj):
    """Write an object to file."""
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
    with open(path, 'wb') as file:
        return pickle.dump(obj, file)
python
{ "resource": "" }
q15997
join
train
def join(*args, **kwargs):
    """Join parts of a path together."""
    import os.path
    if _is_list(args[0]):
        return os.path.join(*args[0])
    return os.path.join(*args, **kwargs)
python
{ "resource": "" }
q15998
add_weekdays2df
train
def add_weekdays2df(time_df, holidays=None, holiday_is_sunday=False):
    r"""Return a DataFrame with a weekday column and, optionally, holidays
    marked for the given year.

    Parameters
    ----------
    time_df : pandas DataFrame
        DataFrame to which the weekdays should be added

    Other Parameters
    ----------------
    holidays : dict or list
        The keys of the dict or the items of the list have to be the
        dates of the holidays; holidays get the weekday value 0.
    holiday_is_sunday : boolean
        If set to True, all holidays (0) will be set to sundays (7).

    Returns
    -------
    pandas.DataFrame : DataFrame with weekdays

    Notes
    -----
    Requires pandas > 0.16.
    """
    time_df['weekday'] = time_df.index.weekday + 1
    time_df['date'] = time_df.index.date

    # Set weekday to Holiday (0) for all holidays
    if holidays is not None:
        if isinstance(holidays, dict):
            holidays = list(holidays.keys())
        time_df['weekday'].mask(pd.to_datetime(time_df['date']).isin(
            pd.to_datetime(holidays)), 0, True)

    if holiday_is_sunday:
        time_df.weekday.mask(time_df.weekday == 0, 7, True)

    return time_df
python
{ "resource": "" }
q15999
IndustrialLoadProfile.simple_profile
train
def simple_profile(self, annual_demand, **kwargs):
    """Create an industrial load profile.

    Parameters
    ----------
    annual_demand : float
        Total demand.

    Other Parameters
    ----------------
    am : datetime.time
        beginning of the workday
    pm : datetime.time
        end of the workday
    week : list
        list of weekdays
    weekend : list
        list of weekend days
    profile_factors : dictionary
        dictionary with scaling factors for day and night of weekdays
        and weekend days
    """
    # Day (am to pm), night (pm to am), week day (week),
    # weekend day (weekend)
    am = kwargs.get('am', settime(7, 00, 0))
    pm = kwargs.get('pm', settime(23, 30, 0))

    week = kwargs.get('week', [1, 2, 3, 4, 5])
    weekend = kwargs.get('weekend', [0, 6, 7])

    default_factors = {'week': {'day': 0.8, 'night': 0.6},
                       'weekend': {'day': 0.9, 'night': 0.7}}

    profile_factors = kwargs.get('profile_factors', default_factors)

    self.dataframe['ind'] = 0

    self.dataframe['ind'].mask(
        self.dataframe['weekday'].between_time(am, pm).isin(week),
        profile_factors['week']['day'], True)
    self.dataframe['ind'].mask(
        self.dataframe['weekday'].between_time(pm, am).isin(week),
        profile_factors['week']['night'], True)
    self.dataframe['ind'].mask(
        self.dataframe['weekday'].between_time(am, pm).isin(weekend),
        profile_factors['weekend']['day'], True)
    self.dataframe['ind'].mask(
        self.dataframe['weekday'].between_time(pm, am).isin(weekend),
        profile_factors['weekend']['night'], True)

    if self.dataframe['ind'].isnull().any(axis=0):
        logging.error('NAN value found in industrial load profile')

    # Length of one time step in hours (freq.nanos is in nanoseconds).
    time_interval = self.dataframe.index.freq.nanos / 3.6e12

    return (self.dataframe['ind'] / self.dataframe['ind'].sum() *
            annual_demand / time_interval)
python
{ "resource": "" }