Columns (one value per line in each record, in this order): code (string), signature (string), docstring (string), loss_without_docstring (float64), loss_with_docstring (float64), factor (float64).
all_args = (None, ) + args return inspect.getcallargs(cls.run, *all_args, **kwargs)
def arguments_as_dict(cls, *args, **kwargs)
Generate the arguments dictionary provided to :py:meth:`generate_name` and :py:meth:`calculate_total_steps`. This makes it possible to fetch arguments by name regardless of whether they were passed as positional or keyword arguments. Unnamed positional arguments are provided as a tuple under the key ``pos``.
5.679048
6.679581
0.85021
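A minimal sketch of the mechanism arguments_as_dict relies on: inspect.getcallargs resolves positional and keyword arguments against a function's signature into one name-keyed dict, with leftover positionals collected under the *pos parameter. The run signature below is hypothetical, purely for illustration.

import inspect

def run(self, user_id, name='job', *pos, **kwargs):
    """Hypothetical task signature for illustration only."""

# None stands in for `self`, mirroring the (None,) + args trick in the code above.
call_args = inspect.getcallargs(run, None, 42, 'report', 'extra')
print(call_args)
# roughly: {'self': None, 'user_id': 42, 'name': 'report', 'pos': ('extra',), 'kwargs': {}}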
task_id = self.request.id try: # Most calls are for existing objects, don't waste time # preparing creation arguments unless necessary return UserTaskStatus.objects.get(task_id=task_id) except UserTaskStatus.DoesNotExist: # Probably an eager task that skipped the before_task_publish # signal (or an atomic view where the new record hasn't been # committed yet). Create a record for it. arguments_dict = self.arguments_as_dict(*self.request.args, **self.request.kwargs) name = self.generate_name(arguments_dict) task_class = '.'.join([self.__class__.__module__, self.__class__.__name__]) total_steps = self.calculate_total_steps(arguments_dict) user_id = arguments_dict['user_id'] # Use get_or_create() again just in case another process created it in the meantime return UserTaskStatus.objects.get_or_create( task_id=task_id, defaults={'user_id': user_id, 'name': name, 'task_class': task_class, 'total_steps': total_steps})[0]
def status(self)
Get the :py:class:`~user_tasks.models.UserTaskStatus` model instance for this UserTaskMixin.
5.082603
4.699517
1.081516
from rasterio.enums import ColorInterp as ci modes = {'L': [ci.gray], 'LA': [ci.gray, ci.alpha], 'YCbCr': [ci.Y, ci.Cb, ci.Cr], 'YCbCrA': [ci.Y, ci.Cb, ci.Cr, ci.alpha]} try: mode = ''.join(data['bands'].values) return modes[mode] except KeyError: colors = {'R': ci.red, 'G': ci.green, 'B': ci.blue, 'A': ci.alpha, 'C': ci.cyan, 'M': ci.magenta, 'Y': ci.yellow, 'H': ci.hue, 'S': ci.saturation, 'L': ci.lightness, 'K': ci.black, } return [colors[band] for band in data['bands'].values]
def color_interp(data)
Get the color interpretation for this image.
2.303327
2.279352
1.010518
if not hasattr(data, 'dims'): raise TypeError("Data must have a 'dims' attribute.") # doesn't actually copy the data underneath # we don't want our operations to change the user's data data = data.copy() if 'y' not in data.dims or 'x' not in data.dims: if data.ndim != 2: raise ValueError("Data must have a 'y' and 'x' dimension") # rename dimensions so we can use them # don't rename 'x' or 'y' if they already exist if 'y' not in data.dims: # find a dimension that isn't 'x' old_dim = [d for d in data.dims if d != 'x'][0] data = data.rename({old_dim: 'y'}) if 'x' not in data.dims: # find a dimension that isn't 'y' old_dim = [d for d in data.dims if d != 'y'][0] data = data.rename({old_dim: 'x'}) if "bands" not in data.dims: if data.ndim <= 2: data = data.expand_dims('bands') data['bands'] = ['L'] else: raise ValueError("No 'bands' dimension provided.") return data
def _correct_dims(data)
Standardize dimensions to bands, y, and x.
2.799757
2.66558
1.050337
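A hedged check of what _correct_dims does to a plain 2D array, assuming the function above is in scope together with numpy and xarray: a missing 'bands' dimension is added in front with the single band 'L'.

import numpy as np
import xarray as xr

arr = xr.DataArray(np.zeros((4, 5)), dims=('y', 'x'))
fixed = _correct_dims(arr)
print(fixed.dims, list(fixed['bands'].values))   # ('bands', 'y', 'x') ['L']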
fformat = fformat or os.path.splitext(filename)[1][1:4] if fformat in ('tif', 'jp2') and rasterio: return self.rio_save(filename, fformat=fformat, fill_value=fill_value, compute=compute, keep_palette=keep_palette, cmap=cmap, **format_kwargs) else: return self.pil_save(filename, fformat, fill_value, compute=compute, **format_kwargs)
def save(self, filename, fformat=None, fill_value=None, compute=True, keep_palette=False, cmap=None, **format_kwargs)
Save the image to the given *filename*. Args: filename (str): Output filename fformat (str): File format of output file (optional). Can be one of many image formats supported by the `rasterio` or `PIL` libraries ('jpg', 'png', 'tif'). By default this is determined by the extension of the provided filename. If the format allows, geographical information will be saved to the output file, in the form of grid mapping or ground control points. fill_value (float): Replace invalid data values with this value and do not produce an Alpha band. Default behavior is to create an alpha band. compute (bool): If True (default) write the data to the file immediately. If False the return value is either a `dask.Delayed` object or a tuple of ``(source, target)`` to be passed to `dask.array.store`. keep_palette (bool): Saves the palettized version of the image if set to True. False by default. cmap (Colormap or dict): Colormap to be applied to the image when saving with rasterio, used with keep_palette=True. Should be uint8. format_kwargs: Additional format options to pass to `rasterio` or `PIL` saving methods. Returns: Either `None` if `compute` is True or a `dask.Delayed` object or ``(source, target)`` pair to be passed to `dask.array.store`. If compute is False the return value depends on format and how the image backend is used. If ``(source, target)`` is provided then target is an open file-like object that must be closed by the caller.
2.263149
2.44853
0.924289
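A hedged usage sketch for save(), assuming an existing XRImage-like instance named img; the filenames and keyword values are illustrative and only use the options documented above.

img.save('overview.png')                           # format inferred from the extension
img.save('overview.tif', fill_value=0)             # rasterio/GeoTIFF path, no alpha band
delayed = img.save('overview.png', compute=False)  # defer the actual write
delayed.compute()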
fformat = fformat or os.path.splitext(filename)[1][1:4] drivers = {'jpg': 'JPEG', 'png': 'PNG', 'tif': 'GTiff', 'jp2': 'JP2OpenJPEG'} driver = drivers.get(fformat, fformat) if tags is None: tags = {} data, mode = self.finalize(fill_value, dtype=dtype, keep_palette=keep_palette, cmap=cmap) data = data.transpose('bands', 'y', 'x') data.attrs = self.data.attrs crs = None gcps = None transform = None if driver in ['GTiff', 'JP2OpenJPEG']: if not np.issubdtype(data.dtype, np.floating): format_kwargs.setdefault('compress', 'DEFLATE') photometric_map = { 'RGB': 'RGB', 'RGBA': 'RGB', 'CMYK': 'CMYK', 'CMYKA': 'CMYK', 'YCBCR': 'YCBCR', 'YCBCRA': 'YCBCR', } if mode.upper() in photometric_map: format_kwargs.setdefault('photometric', photometric_map[mode.upper()]) try: crs = rasterio.crs.CRS(data.attrs['area'].proj_dict) west, south, east, north = data.attrs['area'].area_extent height, width = data.sizes['y'], data.sizes['x'] transform = rasterio.transform.from_bounds(west, south, east, north, width, height) except KeyError: # No area logger.info("Couldn't create geotransform") except AttributeError: try: gcps = data.attrs['area'].lons.attrs['gcps'] crs = data.attrs['area'].lons.attrs['crs'] except KeyError: logger.info("Couldn't create geotransform") if "start_time" in data.attrs: stime = data.attrs['start_time'] stime_str = stime.strftime("%Y:%m:%d %H:%M:%S") tags.setdefault('TIFFTAG_DATETIME', stime_str) elif driver == 'JPEG' and 'A' in mode: raise ValueError('JPEG does not support alpha') # FIXME add metadata r_file = RIOFile(filename, 'w', driver=driver, width=data.sizes['x'], height=data.sizes['y'], count=data.sizes['bands'], dtype=dtype, nodata=fill_value, crs=crs, transform=transform, gcps=gcps, **format_kwargs) r_file.open() if not keep_palette: r_file.colorinterp = color_interp(data) r_file.rfile.update_tags(**tags) if keep_palette and cmap is not None: if data.dtype != 'uint8': raise ValueError('Rasterio only supports 8-bit colormaps') try: from trollimage.colormap import Colormap cmap = cmap.to_rio() if isinstance(cmap, Colormap) else cmap r_file.rfile.write_colormap(1, cmap) except AttributeError: raise ValueError("Colormap is not formatted correctly") if compute: # write data to the file now res = da.store(data.data, r_file) r_file.close() return res # provide the data object and the opened file so the caller can # store them when they would like. Caller is responsible for # closing the file return data.data, r_file
def rio_save(self, filename, fformat=None, fill_value=None, dtype=np.uint8, compute=True, tags=None, keep_palette=False, cmap=None, **format_kwargs)
Save the image using rasterio. Overviews can be added to the file using the `overviews` kwarg, eg:: img.rio_save('myfile.tif', overviews=[2, 4, 8, 16])
3.267459
3.318998
0.984472
fformat = fformat or os.path.splitext(filename)[1][1:4] fformat = check_image_format(fformat) if fformat == 'png': # Take care of GeoImage.tags (if any). format_kwargs['pnginfo'] = self._pngmeta() img = self.pil_image(fill_value, compute=False) delay = img.save(filename, fformat, **format_kwargs) if compute: return delay.compute() return delay
def pil_save(self, filename, fformat=None, fill_value=None, compute=True, **format_kwargs)
Save the image to the given *filename* using PIL. For now, the compression level [0-9] is ignored, due to PIL's lack of support. See also :meth:`save`.
5.254472
5.405388
0.97208
not_alpha = [b for b in data.coords['bands'].values if b != 'A'] null_mask = data.sel(bands=not_alpha) if np.issubdtype(data.dtype, np.integer) and fill_value is not None: null_mask = null_mask != fill_value else: null_mask = null_mask.notnull() # if any of the bands are valid, we don't want transparency null_mask = null_mask.any(dim='bands') null_mask = null_mask.expand_dims('bands') null_mask['bands'] = ['A'] # match data dtype return null_mask
def _create_alpha(self, data, fill_value=None)
Create an alpha band DataArray object. If `fill_value` is provided and input data is an integer type then it is used to determine invalid "null" pixels instead of xarray's `isnull` and `notnull` methods. The returned array is 1 where data is valid, 0 where invalid.
3.71937
3.684454
1.009476
null_mask = alpha if alpha is not None else self._create_alpha(data) # if we are using integer data, then alpha needs to be min-int to max-int # otherwise for floats we want 0 to 1 if np.issubdtype(data.dtype, np.integer): # xarray sometimes upcasts this calculation, so cast again null_mask = self._scale_to_dtype(null_mask, data.dtype).astype(data.dtype) data = xr.concat([data, null_mask], dim="bands") return data
def _add_alpha(self, data, alpha=None)
Create an alpha channel and concatenate it to the provided data. If ``data`` is an integer type then the alpha band will be scaled to use the smallest (min) value as fully transparent and the largest (max) value as fully opaque. For float types the alpha band spans 0 to 1.
6.633455
5.695132
1.164759
if np.issubdtype(dtype, np.integer): if np.issubdtype(data, np.integer): # preserve integer data type data = data.clip(np.iinfo(dtype).min, np.iinfo(dtype).max) else: # scale float data (assumed to be 0 to 1) to full integer space dinfo = np.iinfo(dtype) data = data.clip(0, 1) * (dinfo.max - dinfo.min) + dinfo.min data = data.round() return data
def _scale_to_dtype(self, data, dtype)
Scale provided data to dtype range assuming a 0-1 range. Float input data is assumed to be normalized to a 0 to 1 range. Integer input data is not scaled, only clipped. A float output type is not scaled since both outputs and inputs are assumed to be in the 0-1 range already.
2.918255
2.898127
1.006945
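To make the float-to-integer scaling in _scale_to_dtype concrete: for uint8 output, the assumed 0-1 float range is stretched across the full integer range and rounded, as in this small numpy sketch.

import numpy as np

dinfo = np.iinfo(np.uint8)
data = np.array([0.0, 0.5, 1.0])
scaled = np.round(data.clip(0, 1) * (dinfo.max - dinfo.min) + dinfo.min)
print(scaled)   # [  0. 128. 255.]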
if not isinstance(modes, (tuple, list, set)): modes = [modes] if self.mode not in modes: raise ValueError("Image not in suitable mode, expected: %s, got: %s" % (modes, self.mode))
def _check_modes(self, modes)
Check that the image is in one of the given *modes*, raise an exception otherwise.
3.34712
2.621023
1.277028
self._check_modes(("P", "PA")) if not self.palette: raise RuntimeError("Can't convert palettized image, missing palette.") pal = np.array(self.palette) pal = da.from_array(pal, chunks=pal.shape) if pal.shape[1] == 4: # colormap's alpha overrides data alpha mode = "RGBA" alpha = None elif self.mode.endswith("A"): # add a new/fake 'bands' dimension to the end alpha = self.data.sel(bands="A").data[..., None] mode = mode + "A" if not mode.endswith("A") else mode else: alpha = None flat_indexes = self.data.sel(bands='P').data.ravel().astype('int64') dim_sizes = ((key, val) for key, val in self.data.sizes.items() if key != 'bands') dims, new_shape = zip(*dim_sizes) dims = dims + ('bands',) new_shape = new_shape + (pal.shape[1],) new_data = pal[flat_indexes].reshape(new_shape) coords = dict(self.data.coords) coords["bands"] = list(mode) if alpha is not None: new_arr = da.concatenate((new_data, alpha), axis=-1) data = xr.DataArray(new_arr, coords=coords, attrs=self.data.attrs, dims=dims) else: data = xr.DataArray(new_data, coords=coords, attrs=self.data.attrs, dims=dims) return data
def _from_p(self, mode)
Convert the image from P or PA to RGB or RGBA.
4.041479
3.755372
1.076186
self._check_modes(("L", "LA")) bands = ["L"] * 3 if mode[-1] == "A": bands.append("A") data = self.data.sel(bands=bands) data["bands"] = list(mode) return data
def _l2rgb(self, mode)
Convert from L (black and white) to RGB.
5.731101
5.855709
0.97872
import warnings warnings.warn("'_finalize' is deprecated, use 'finalize' instead.", DeprecationWarning) return self.finalize(fill_value, dtype, keep_palette, cmap)
def _finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False, cmap=None)
Wrapper around 'finalize' method for backwards compatibility.
2.786998
2.390736
1.165749
if keep_palette and not self.mode.startswith('P'): keep_palette = False if not keep_palette: if self.mode == "P": return self.convert("RGB").finalize(fill_value=fill_value, dtype=dtype, keep_palette=keep_palette, cmap=cmap) if self.mode == "PA": return self.convert("RGBA").finalize(fill_value=fill_value, dtype=dtype, keep_palette=keep_palette, cmap=cmap) if np.issubdtype(dtype, np.floating) and fill_value is None: logger.warning("Image with floats cannot be transparent, so " "setting fill_value to 0") fill_value = 0 final_data = self.data # if the data are integers then this fill value will be used to check for invalid values ifill = final_data.attrs.get('_FillValue') if np.issubdtype(final_data, np.integer) else None if not keep_palette: if fill_value is None and not self.mode.endswith('A'): # We don't have a fill value or an alpha, let's add an alpha alpha = self._create_alpha(final_data, fill_value=ifill) final_data = self._scale_to_dtype(final_data, dtype).astype(dtype) final_data = self._add_alpha(final_data, alpha=alpha) else: # scale float data to the proper dtype # this method doesn't cast yet so that we can keep track of NULL values final_data = self._scale_to_dtype(final_data, dtype) # Add fill_value after all other calculations have been done to # make sure it is not scaled for the data type if ifill is not None and fill_value is not None: # cast fill value to output type so we don't change data type fill_value = dtype(fill_value) # integer fields have special fill values final_data = final_data.where(final_data != ifill, dtype(fill_value)) elif fill_value is not None: final_data = final_data.fillna(dtype(fill_value)) final_data = final_data.astype(dtype) final_data.attrs = self.data.attrs return final_data, ''.join(final_data['bands'].values)
def finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False, cmap=None)
Finalize the image to be written to an output file. This adds an alpha band or fills data with a fill_value (if specified). It also scales float data to the output range of the data type (0-255 for uint8, default). For integer input data this method assumes the data is already scaled to the proper desired range. It will still fill in invalid values and add an alpha band if needed. Integer input data's fill value is determined by a special ``_FillValue`` attribute in the ``DataArray`` ``.attrs`` dictionary.
3.685409
3.477558
1.059769
channels, mode = self.finalize(fill_value) res = channels.transpose('y', 'x', 'bands') img = dask.delayed(PILImage.fromarray)(np.squeeze(res.data), mode) if compute: img = img.compute() return img
def pil_image(self, fill_value=None, compute=True)
Return a PIL image from the current image. Args: fill_value (int or float): Value to use for NaN null values. See :meth:`~trollimage.xrimage.XRImage.finalize` for more info. compute (bool): Whether to return a fully computed PIL.Image object (True) or return a dask Delayed object representing the Image (False). This is True by default.
5.74965
5.530301
1.039663
return xr.DataArray(tup, dims=['bands'], coords={'bands': self.data['bands']})
def xrify_tuples(self, tup)
Make xarray.DataArray from tuple.
6.833077
4.744897
1.44009
if isinstance(gamma, (list, tuple)): gamma = self.xrify_tuples(gamma) elif gamma == 1.0: return logger.debug("Applying gamma %s", str(gamma)) attrs = self.data.attrs self.data = self.data.clip(min=0) self.data **= 1.0 / gamma self.data.attrs = attrs
def gamma(self, gamma=1.0)
Apply gamma correction to the channels of the image. If *gamma* is a tuple, then it should have as many elements as the channels of the image, and the gamma correction is applied elementwise. If *gamma* is a number, the same gamma correction is applied on every channel, if there are several channels in the image. The behaviour of :func:`gamma` is undefined outside the normal [0,1] range of the channels.
4.751024
4.859772
0.977623
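A scalar sketch of the gamma step above: values are clipped at zero and raised to the power 1/gamma, so gamma = 2 lifts mid-range values.

import numpy as np

channel = np.array([0.0, 0.25, 0.5, 1.0])
gamma = 2.0
print(channel.clip(min=0) ** (1.0 / gamma))   # [0. 0.5 0.7071... 1.]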
logger.debug("Applying stretch %s with parameters %s", stretch, str(kwargs)) # FIXME: do not apply stretch to alpha channel if isinstance(stretch, (tuple, list)): if len(stretch) == 2: self.stretch_linear(cutoffs=stretch) else: raise ValueError( "Stretch tuple must have exactly two elements") elif stretch == "linear": self.stretch_linear(**kwargs) elif stretch == "histogram": self.stretch_hist_equalize(**kwargs) elif stretch in ["crude", "crude-stretch"]: self.crude_stretch(**kwargs) elif stretch in ["log", "logarithmic"]: self.stretch_logarithmic(**kwargs) elif stretch == "no": return elif isinstance(stretch, str): raise ValueError("Stretching method %s not recognized." % stretch) else: raise TypeError("Stretch parameter must be a string or a tuple.")
def stretch(self, stretch="crude", **kwargs)
Apply stretching to the current image. The value of *stretch* sets the type of stretching applied. The values "histogram", "linear", "crude" (or "crude-stretch") perform respectively histogram equalization, contrast stretching (with 5% cutoff on both sides), and contrast stretching without cutoff. The value "logarithmic" or "log" will do a logarithmic enhancement towards white. If a tuple or a list of two values is given as input, then a contrast stretching is performed with the values as cutoff. These values should be normalized in the range [0.0,1.0].
3.145398
2.719589
1.156571
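A hedged usage sketch of the stretch() dispatcher on an existing XRImage-like instance img, following the branches in the code above; the keyword values are illustrative.

img.stretch('linear', cutoffs=(0.005, 0.005))   # -> stretch_linear(cutoffs=...)
img.stretch((0.01, 0.02))                        # two-element tuple -> linear stretch with these cutoffs
img.stretch('histogram')                         # -> stretch_hist_equalize()
img.stretch('log', factor=100.)                  # -> stretch_logarithmic(factor=100.)
img.stretch('no')                                # leave the data untouched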
# numpy doesn't get a 'quantile' function until 1.15 # for better backwards compatibility we use xarray's version data_arr = xr.DataArray(data, dims=dims) # delayed will provide us the fully computed xarray with ndarray left, right = data_arr.quantile([cutoffs[0], 1. - cutoffs[1]], dim=['x', 'y']) logger.debug("Interval: left=%s, right=%s", str(left), str(right)) return left.data, right.data
def _compute_quantile(data, dims, cutoffs)
Helper method for stretch_linear. Dask delayed functions need to be non-internal functions (i.e. not created inside another function) to be serializable on a multi-process scheduler. Quantile requires the data to be loaded since it is not supported on dask arrays yet.
7.461492
7.776195
0.95953
logger.debug("Perform a linear contrast stretch.") logger.debug("Calculate the histogram quantiles: ") logger.debug("Left and right quantiles: " + str(cutoffs[0]) + " " + str(cutoffs[1])) cutoff_type = np.float64 # numpy percentile (which quantile calls) returns 64-bit floats # unless the value is a higher order float if np.issubdtype(self.data.dtype, np.floating) and \ np.dtype(self.data.dtype).itemsize > 8: cutoff_type = self.data.dtype left, right = dask.delayed(self._compute_quantile, nout=2)(self.data.data, self.data.dims, cutoffs) left_data = da.from_delayed(left, shape=(self.data.sizes['bands'],), dtype=cutoff_type) left = xr.DataArray(left_data, dims=('bands',), coords={'bands': self.data['bands']}) right_data = da.from_delayed(right, shape=(self.data.sizes['bands'],), dtype=cutoff_type) right = xr.DataArray(right_data, dims=('bands',), coords={'bands': self.data['bands']}) self.crude_stretch(left, right)
def stretch_linear(self, cutoffs=(0.005, 0.005))
Linearly stretch the contrast of the current image. Use *cutoffs* for left and right trimming.
3.797474
3.758801
1.010289
if min_stretch is None: non_band_dims = tuple(x for x in self.data.dims if x != 'bands') min_stretch = self.data.min(dim=non_band_dims) if max_stretch is None: non_band_dims = tuple(x for x in self.data.dims if x != 'bands') max_stretch = self.data.max(dim=non_band_dims) if isinstance(min_stretch, (list, tuple)): min_stretch = self.xrify_tuples(min_stretch) if isinstance(max_stretch, (list, tuple)): max_stretch = self.xrify_tuples(max_stretch) delta = (max_stretch - min_stretch) if isinstance(delta, xr.DataArray): # fillna if delta is NaN scale_factor = (1.0 / delta).fillna(0) else: scale_factor = 1.0 / delta attrs = self.data.attrs self.data -= min_stretch self.data *= scale_factor self.data.attrs = attrs
def crude_stretch(self, min_stretch=None, max_stretch=None)
Perform simple linear stretching. This is done without any cutoff on the current image and normalizes to the [0,1] range.
2.374639
2.364736
1.004188
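The crude_stretch arithmetic above reduces to (data - min) / (max - min) per band; a scalar check with made-up bounds:

min_stretch, max_stretch = 20.0, 220.0
value = 120.0
scale_factor = 1.0 / (max_stretch - min_stretch)
print((value - min_stretch) * scale_factor)   # 0.5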
logger.info("Perform a histogram equalized contrast stretch.") nwidth = 2048. logger.debug("Make histogram bins having equal amount of data, " + "using numpy percentile function:") def _band_hist(band_data): cdf = da.arange(0., 1., 1. / nwidth, chunks=nwidth) if approximate: # need a 1D array flat_data = band_data.ravel() # replace with nanpercentile in the future, if available # dask < 0.17 returns all NaNs for this bins = da.percentile(flat_data[da.notnull(flat_data)], cdf * 100.) else: bins = dask.delayed(np.nanpercentile)(band_data, cdf * 100.) bins = da.from_delayed(bins, shape=(nwidth,), dtype=cdf.dtype) res = dask.delayed(np.interp)(band_data, bins, cdf) res = da.from_delayed(res, shape=band_data.shape, dtype=band_data.dtype) return res band_results = [] for band in self.data['bands'].values: if band == 'A': continue band_data = self.data.sel(bands=band) res = _band_hist(band_data.data) band_results.append(res) if 'A' in self.data.coords['bands'].values: band_results.append(self.data.sel(bands='A')) self.data.data = da.stack(band_results, axis=self.data.dims.index('bands'))
def stretch_hist_equalize(self, approximate=False)
Stretch the current image's colors through histogram equalization. Args: approximate (bool): Use a faster less-accurate percentile calculation. At the time of writing the dask version of `percentile` is not as accurate as the numpy version. This will likely change in the future. Current dask version 0.17.
4.068623
3.982879
1.021528
logger.debug("Perform a logarithmic contrast stretch.") crange = (0., 1.0) b__ = float(crange[1] - crange[0]) / np.log(factor) c__ = float(crange[0]) def _band_log(arr): slope = (factor - 1.) / float(arr.max() - arr.min()) arr = 1. + (arr - arr.min()) * slope arr = c__ + b__ * da.log(arr) return arr band_results = [] for band in self.data['bands'].values: if band == 'A': continue band_data = self.data.sel(bands=band) res = _band_log(band_data.data) band_results.append(res) if 'A' in self.data.coords['bands'].values: band_results.append(self.data.sel(bands='A')) self.data.data = da.stack(band_results, axis=self.data.dims.index('bands'))
def stretch_logarithmic(self, factor=100.)
Move data into range [1:factor] through normalized logarithm.
3.559741
3.563653
0.998902
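A scalar sketch of the logarithmic stretch above with the default factor of 100: each band is first rescaled to [1, factor] and then passed through a normalized logarithm.

import numpy as np

factor = 100.
crange = (0., 1.0)
b = (crange[1] - crange[0]) / np.log(factor)
arr = np.array([0.0, 0.5, 1.0])
slope = (factor - 1.) / (arr.max() - arr.min())
rescaled = 1. + (arr - arr.min()) * slope
print(crange[0] + b * np.log(rescaled))   # approximately [0., 0.85, 1.]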
attrs = self.data.attrs self.data = k * xu.log(self.data / s0) self.data.attrs = attrs
def stretch_weber_fechner(self, k, s0)
Stretch according to the Weber-Fechner law. p = k.ln(S/S0), where p is perception, S is the stimulus, S0 is the stimulus threshold (the highest unperceived stimulus), and k is the factor.
7.368564
8.793497
0.837956
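The Weber-Fechner stretch above implements p = k * ln(S / S0); a scalar check with made-up k and S0 values:

import numpy as np

k, s0 = 1.0, 0.01
stimulus = np.array([0.01, 0.1, 1.0])
print(k * np.log(stimulus / s0))   # approximately [0., 2.303, 4.605]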
logger.debug("Applying invert with parameters %s", str(invert)) if isinstance(invert, (tuple, list)): invert = self.xrify_tuples(invert) offset = invert.astype(int) scale = (-1) ** offset elif invert: offset = 1 scale = -1 attrs = self.data.attrs self.data = self.data * scale + offset self.data.attrs = attrs
def invert(self, invert=True)
Invert all the channels of an image according to *invert*. If invert is a tuple or a list, elementwise inversion is performed, otherwise all channels are inverted if *invert* is true (default). Note: 'Inverting' means that black becomes white and vice-versa, not that the values are negated!
5.358664
5.136325
1.043288
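The invert step above maps each selected channel x to 1 - x (scale -1, offset 1); a scalar sketch:

import numpy as np

data = np.array([0.0, 0.25, 1.0])
scale, offset = -1, 1
print(data * scale + offset)   # [1.   0.75 0.  ]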
raise NotImplementedError("This method has not be implemented for " "xarray support.") if self.is_empty(): raise ValueError("Cannot merge an empty image.") if self.mode != img.mode: raise ValueError("Cannot merge image of different modes.") selfmask = self.channels[0].mask for chn in self.channels[1:]: selfmask = np.ma.mask_or(selfmask, chn.mask) for i in range(len(self.channels)): self.channels[i] = np.ma.where(selfmask, img.channels[i], self.channels[i]) self.channels[i].mask = np.logical_and(selfmask, img.channels[i].mask)
def merge(self, img)
Use the provided *img* image as a background for the current image, i.e. fill in the current image's missing data from *img*.
3.372835
3.222186
1.046754
if self.mode not in ("L", "LA"): raise ValueError("Image should be grayscale to colorize") if self.mode == "LA": alpha = self.data.sel(bands=['A']) else: alpha = None l_data = self.data.sel(bands=['L']) def _colorize(l_data, colormap): # 'l_data' is (1, rows, cols) # 'channels' will be a list of 3 (RGB) or 4 (RGBA) arrays channels = colormap.colorize(l_data) return np.concatenate(channels, axis=0) new_data = l_data.data.map_blocks(_colorize, colormap, chunks=(colormap.colors.shape[1],) + l_data.data.chunks[1:], dtype=np.float64) if colormap.colors.shape[1] == 4: mode = "RGBA" elif alpha is not None: new_data = da.concatenate([new_data, alpha.data], axis=0) mode = "RGBA" else: mode = "RGB" # copy the coordinates so we don't affect the original coords = dict(self.data.coords) coords['bands'] = list(mode) attrs = self.data.attrs dims = self.data.dims self.data = xr.DataArray(new_data, coords=coords, attrs=attrs, dims=dims)
def colorize(self, colormap)
Colorize the current image using `colormap`. .. note:: Works only on "L" or "LA" images.
3.250314
3.099694
1.048592
if self.mode not in ("L", "LA"): raise ValueError("Image should be grayscale to colorize") l_data = self.data.sel(bands=['L']) def _palettize(data): # returns data and palette, only need data return colormap.palettize(data)[0] new_data = l_data.data.map_blocks(_palettize, dtype=l_data.dtype) self.palette = tuple(colormap.colors) if self.mode == "L": mode = "P" else: mode = "PA" new_data = da.concatenate([new_data, self.data.sel(bands=['A'])], axis=0) self.data.data = new_data self.data.coords['bands'] = list(mode)
def palettize(self, colormap)
Palettize the current image using `colormap`. .. note:: Works only on "L" or "LA" images.
4.256742
3.999699
1.064266
raise NotImplementedError("This method has not be implemented for " "xarray support.") if self.mode != "RGBA" or other.mode != "RGBA": raise ValueError("Images must be in RGBA") src = other dst = self outa = src.channels[3] + dst.channels[3] * (1 - src.channels[3]) for i in range(3): dst.channels[i] = (src.channels[i] * src.channels[3] + dst.channels[i] * dst.channels[3] * (1 - src.channels[3])) / outa dst.channels[i][outa == 0] = 0 dst.channels[3] = outa
def blend(self, other)
Alpha blend *other* on top of the current image.
3.350914
3.100146
1.080889
# TODO(srstevenson): Use assignment expression in Python 3.8. value = os.environ.get(variable) if value: return Path(value) return default
def _path_from_env(variable: str, default: Path) -> Path
Read an environment variable as a path. The environment variable with the specified name is read, and its value returned as a path. If the environment variable is not set, or set to the empty string, the default value is returned. Parameters ---------- variable : str Name of the environment variable. default : Path Default value. Returns ------- Path Value from environment or default.
8.548379
9.432499
0.906269
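A hedged usage sketch of _path_from_env, assuming the function above is importable; the XDG variable name used here is only an example.

import os
from pathlib import Path

os.environ['XDG_CACHE_HOME'] = '/tmp/cache'
print(_path_from_env('XDG_CACHE_HOME', Path.home() / '.cache'))   # /tmp/cache
os.environ['XDG_CACHE_HOME'] = ''
print(_path_from_env('XDG_CACHE_HOME', Path.home() / '.cache'))   # falls back to the default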
# TODO(srstevenson): Use assignment expression in Python 3.8. value = os.environ.get(variable) if value: return [Path(path) for path in value.split(":")] return default
def _paths_from_env(variable: str, default: List[Path]) -> List[Path]
Read an environment variable as a list of paths. The environment variable with the specified name is read, and its value split on colons and returned as a list of paths. If the environment variable is not set, or set to the empty string, the default value is returned. Parameters ---------- variable : str Name of the environment variable. default : List[Path] Default value. Returns ------- List[Path] Value from environment or default.
6.083297
6.880448
0.884143
if self.__contains__(user): raise UserExists self.new_users[user] = self._encrypt_password(password) + "\n"
def add(self, user, password)
Adds a user with password
7.331522
7.096185
1.033164
if not self.__contains__(user): raise UserNotExists self.new_users.pop(user)
def pop(self, user)
Deletes a user
7.595429
8.76944
0.866125
if not self.__contains__(user): raise UserNotExists self.new_users[user] = self._encrypt_password(password) + "\n"
def change_password(self, user, password)
Changes user password
8.458797
8.059211
1.049581
if self.encryption_mode.lower() == 'crypt': return self._crypt_password(password) elif self.encryption_mode.lower() == 'md5': return self._md5_password(password) elif self.encryption_mode.lower() == 'md5-base': return self._md5_base_password(password) else: raise UnknownEncryptionMode(self.encryption_mode)
def _encrypt_password(self, password)
Encrypt the password for the given encryption mode.
2.210447
2.072674
1.066472
def salt(): symbols = ascii_letters + digits return choice(symbols) + choice(symbols) return crypt(password, salt())
def _crypt_password(self, password)
Crypts password
9.60976
10.392938
0.924643
if self.is_user_in(user, group): raise UserAlreadyInAGroup self.new_groups.add(group, user)
def add_user(self, user, group)
Adds user to a group
6.80317
6.223195
1.093196
if not self.__contains__(group): raise GroupNotExists if not self.is_user_in(user, group): raise UserNotInAGroup self.new_groups.popvalue(group, user)
def delete_user(self, user, group)
Deletes user from group
7.158089
6.870847
1.041806
conf = global_conf.copy() conf.update(local_conf) def auth_filter(app): return Swauth(app, conf) return auth_filter
def filter_factory(global_conf, **local_conf)
Returns a WSGI filter app for use with paste.deploy.
2.937856
3.001676
0.978739
if self.default_storage_policy: sp = self.default_storage_policy if headers: headers.update({'X-Storage-Policy': sp}) else: headers = {'X-Storage-Policy': sp} subreq = swift.common.wsgi.make_pre_authed_request( env, method=method, path=path, body=body, headers=headers, agent=self.agent) subreq.environ['swift.source'] = self.swift_source return subreq
def make_pre_authed_request(self, env, method=None, path=None, body=None, headers=None)
Nearly the same as swift.common.wsgi.make_pre_authed_request except that this also always sets the 'swift.source' and user agent. Newer Swift code will support swift_source as a kwarg, but we do it this way so we don't have to have a newer Swift. Since we're doing this anyway, we may as well set the user agent too since we always do that.
2.759477
2.370088
1.164293
enc_key = "%s:%s:%s" % (HASH_PATH_PREFIX, token, HASH_PATH_SUFFIX) return sha512(enc_key).hexdigest()
def _get_concealed_token(self, token)
Returns the hashed token to be used as an object name in Swift. Tokens are stored in the auth account, but object names show up in Swift logs, so the object name is a hash of the token rather than the token itself.
6.629
5.74979
1.152912
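A small sketch of the concealment above: the token is wrapped with the cluster's hash-path prefix and suffix and SHA-512 hashed, so the raw token never shows up as a Swift object name. The prefix, suffix, and token below are made up, and the key is encoded to bytes here for Python 3.

from hashlib import sha512

HASH_PATH_PREFIX, HASH_PATH_SUFFIX = 'changeme', 'changeme2'   # illustrative values
token = 'AUTH_tk0123456789abcdef0123456789abcdef'
enc_key = "%s:%s:%s" % (HASH_PATH_PREFIX, token, HASH_PATH_SUFFIX)
object_name = sha512(enc_key.encode('utf-8')).hexdigest()
print(object_name[:16], '...')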
try: version, account, container, obj = split_path(req.path, 1, 4, True) except ValueError: return HTTPNotFound(request=req) if not account or not account.startswith(self.reseller_prefix): return self.denied_response(req) user_groups = (req.remote_user or '').split(',') if '.reseller_admin' in user_groups and \ account != self.reseller_prefix and \ account[len(self.reseller_prefix)] != '.': req.environ['swift_owner'] = True return None if account in user_groups and \ (req.method not in ('DELETE', 'PUT') or container): # If the user is admin for the account and is not trying to do an # account DELETE or PUT... req.environ['swift_owner'] = True return None if (req.environ.get('swift_sync_key') and req.environ['swift_sync_key'] == req.headers.get('x-container-sync-key', None) and 'x-timestamp' in req.headers and (req.remote_addr in self.allowed_sync_hosts or get_remote_client(req) in self.allowed_sync_hosts)): return None referrers, groups = parse_acl(getattr(req, 'acl', None)) if referrer_allowed(req.referer, referrers): if obj or '.rlistings' in groups: return None return self.denied_response(req) if not req.remote_user: return self.denied_response(req) for user_group in user_groups: if user_group in groups: return None return self.denied_response(req)
def authorize(self, req)
Returns None if the request is authorized to continue or a standard WSGI response callable if not.
3.086317
3.059043
1.008916
if not hasattr(req, 'credentials_valid'): req.credentials_valid = None if req.remote_user or req.credentials_valid: return HTTPForbidden(request=req) else: return HTTPUnauthorized(request=req)
def denied_response(self, req)
Returns a standard WSGI response callable with the status of 403 or 401 depending on whether the REMOTE_USER is set or not.
3.644254
3.021339
1.206172
try: req = Request(env) if self.auth_prefix: req.path_info_pop() req.bytes_transferred = '-' req.client_disconnect = False if 'x-storage-token' in req.headers and \ 'x-auth-token' not in req.headers: req.headers['x-auth-token'] = req.headers['x-storage-token'] if 'eventlet.posthooks' in env: env['eventlet.posthooks'].append( (self.posthooklogger, (req,), {})) return self.handle_request(req)(env, start_response) else: # Lack of posthook support means that we have to log on the # start of the response, rather than after all the data has # been sent. This prevents logging client disconnects # differently than full transmissions. response = self.handle_request(req)(env, start_response) self.posthooklogger(env, req) return response except (Exception, TimeoutError): print("EXCEPTION IN handle: %s: %s" % (format_exc(), env)) start_response('500 Server Error', [('Content-Type', 'text/plain')]) return ['Internal server error.\n']
def handle(self, env, start_response)
WSGI entry point for auth requests (ones that match the self.auth_prefix). Wraps env in swob.Request object and passes it down. :param env: WSGI environment dictionary :param start_response: WSGI callable
4.010405
3.87452
1.035072
req.start_time = time() handler = None try: version, account, user, _junk = split_path(req.path_info, minsegs=0, maxsegs=4, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if version in ('v1', 'v1.0', 'auth'): if req.method == 'GET': handler = self.handle_get_token elif version == 'v2': if not self.super_admin_key: return HTTPNotFound(request=req) req.path_info_pop() if req.method == 'GET': if not account and not user: handler = self.handle_get_reseller elif account: if not user: handler = self.handle_get_account elif account == '.token': req.path_info_pop() handler = self.handle_validate_token else: handler = self.handle_get_user elif req.method == 'PUT': if not user: handler = self.handle_put_account else: handler = self.handle_put_user elif req.method == 'DELETE': if not user: handler = self.handle_delete_account else: handler = self.handle_delete_user elif req.method == 'POST': if account == '.prep': handler = self.handle_prep elif user == '.services': handler = self.handle_set_services else: handler = self.handle_webadmin if not handler: req.response = HTTPBadRequest(request=req) else: req.response = handler(req) return req.response
def handle_request(self, req)
Entry point for auth requests (ones that match the self.auth_prefix). Should return a WSGI-style callable (such as swob.Response). :param req: swob.Request object
2.589865
2.56743
1.008738
if not self.is_super_admin(req): return self.denied_response(req) path = quote('/v1/%s' % self.auth_account) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create the main auth account: %s %s' % (path, resp.status)) path = quote('/v1/%s/.account_id' % self.auth_account) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create container: %s %s' % (path, resp.status)) for container in xrange(16): path = quote('/v1/%s/.token_%x' % (self.auth_account, container)) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create container: %s %s' % (path, resp.status)) return HTTPNoContent(request=req)
def handle_prep(self, req)
Handles the POST v2/.prep call for preparing the backing store Swift cluster for use with the auth subsystem. Can only be called by .super_admin. :param req: The swob.Request to process. :returns: swob.Response, 204 on success
2.261216
2.061023
1.097133
if not self.is_reseller_admin(req): return self.denied_response(req) listing = [] marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not list main auth account: %s %s' % (path, resp.status)) sublisting = json.loads(resp.body) if not sublisting: break for container in sublisting: if container['name'][0] != '.': listing.append({'name': container['name']}) marker = sublisting[-1]['name'].encode('utf-8') return Response(body=json.dumps({'accounts': listing}), content_type=CONTENT_TYPE_JSON)
def handle_get_reseller(self, req)
Handles the GET v2 call for getting general reseller information (currently just a list of accounts). Can only be called by a .reseller_admin. On success, a JSON dictionary will be returned with a single `accounts` key whose value is a list of dicts. Each dict represents an account and currently only contains the single key `name`. For example:: {"accounts": [{"name": "reseller"}, {"name": "test"}, {"name": "test2"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above.
2.994099
2.728014
1.097538
account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain the .services object: %s %s' % (path, resp.status)) services = json.loads(resp.body) listing = [] marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) account_id = resp.headers['X-Container-Meta-Account-Id'] sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': listing.append({'name': obj['name']}) marker = sublisting[-1]['name'].encode('utf-8') return Response(content_type=CONTENT_TYPE_JSON, body=json.dumps({'account_id': account_id, 'services': services, 'users': listing}))
def handle_get_account(self, req)
Handles the GET v2/<account> call for getting account information. Can only be called by an account .admin. On success, a JSON dictionary will be returned containing the keys `account_id`, `services`, and `users`. The `account_id` is the value used when creating service accounts. The `services` value is a dict as described in the :func:`handle_get_token` call. The `users` value is a list of dicts, each dict representing a user and currently only containing the single key `name`. For example:: {"account_id": "AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162", "services": {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}}, "users": [{"name": "tester"}, {"name": "tester3"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above.
2.483912
2.289511
1.08491
if not self.is_reseller_admin(req): return self.denied_response(req) account = req.path_info_pop() if req.path_info != '/.services' or not account or account[0] == '.': return HTTPBadRequest(request=req) try: new_services = json.loads(req.body) except ValueError as err: return HTTPBadRequest(body=str(err)) # Get the current services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) services = json.loads(resp.body) for new_service, value in new_services.iteritems(): if new_service in services: services[new_service].update(value) else: services[new_service] = value # Save the new services information services = json.dumps(services) resp = self.make_pre_authed_request( req.environ, 'PUT', path, services).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save .services object: %s %s' % (path, resp.status)) return Response(request=req, body=services, content_type=CONTENT_TYPE_JSON)
def handle_set_services(self, req)
Handles the POST v2/<account>/.services call for setting services information. Can only be called by a reseller .admin. In the :func:`handle_get_account` (GET v2/<account>) call, a section of the returned JSON dict is `services`. This section looks something like this:: "services": {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}} Making use of this section is described in :func:`handle_get_token`. This function allows setting values within this section for the <account>, allowing the addition of new service end points or updating existing ones. The body of the POST request should contain a JSON dict with the following format:: {"service_name": {"end_point_name": "end_point_value"}} There can be multiple services and multiple end points in the same call. Any new services or end points will be added to the existing set of services and end points. Any existing services with the same service name will be merged with the new end points. Any existing end points with the same end point name will have their values updated. The updated services dictionary will be returned on success. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with the updated services JSON dict as described above
2.448413
2.218865
1.103453
account = req.path_info_pop() user = req.path_info_pop() if req.path_info or not account or account[0] == '.' or not user or \ (user[0] == '.' and user != '.groups'): return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) # get information for each user for the specified # account and create a list of all groups that the users # are part of if user == '.groups': # TODO(gholt): This could be very slow for accounts with a really # large number of users. Speed could be improved by concurrently # requesting user group information. Then again, I don't *know* # it's slow for `normal` use cases, so testing should be done. groups = set() marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: ' '%s %s' % (path, resp.status)) sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': # get list of groups for each user user_json = self.get_user_detail(req, account, obj['name']) if user_json is None: raise Exception('Could not retrieve user object: ' '%s:%s %s' % (account, user, 404)) groups.update( g['name'] for g in json.loads(user_json)['groups']) marker = sublisting[-1]['name'].encode('utf-8') body = json.dumps( {'groups': [{'name': g} for g in sorted(groups)]}) else: # get information for specific user, # if user doesn't exist, return HTTPNotFound body = self.get_user_detail(req, account, user) if body is None: return HTTPNotFound(request=req) display_groups = [g['name'] for g in json.loads(body)['groups']] if ('.admin' in display_groups and not self.is_reseller_admin(req)) or \ ('.reseller_admin' in display_groups and not self.is_super_admin(req)): return self.denied_response(req) return Response(body=body, content_type=CONTENT_TYPE_JSON)
def handle_get_user(self, req)
Handles the GET v2/<account>/<user> call for getting user information. Can only be called by an account .admin. On success, a JSON dict will be returned as described:: {"groups": [ # List of groups the user is a member of {"name": "<act>:<usr>"}, # The first group is a unique user identifier {"name": "<account>"}, # The second group is the auth account name {"name": "<additional-group>"} # There may be additional groups, .admin being a special # group indicating an account admin and .reseller_admin # indicating a reseller admin. ], "auth": "plaintext:<key>" # The auth-type and key for the user; currently only plaintext is # implemented. } For example:: {"groups": [{"name": "test:tester"}, {"name": "test"}, {"name": ".admin"}], "auth": "plaintext:testing"} If the <user> in the request is the special user `.groups`, the JSON dict will contain a single key of `groups` whose value is a list of dicts representing the active groups within the account. Each dict currently has the single key `name`. For example:: {"groups": [{"name": ".admin"}, {"name": "test"}, {"name": "test:tester"}, {"name": "test:tester3"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above.
3.564886
3.248157
1.09751
# Validate path info account = req.path_info_pop() user = req.path_info_pop() key = unquote(req.headers.get('x-auth-user-key', '')) key_hash = unquote(req.headers.get('x-auth-user-key-hash', '')) admin = req.headers.get('x-auth-user-admin') == 'true' reseller_admin = \ req.headers.get('x-auth-user-reseller-admin') == 'true' if reseller_admin: admin = True if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.' or (not key and not key_hash): return HTTPBadRequest(request=req) if key_hash: try: swauth.authtypes.validate_creds(key_hash) except ValueError: return HTTPBadRequest(request=req) user_arg = account + ':' + user if reseller_admin: if not self.is_super_admin(req) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) elif not self.is_account_admin(req, account) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: %s %s' % (path, resp.status)) headers = {'X-Object-Meta-Account-Id': resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) groups = ['%s:%s' % (account, user), account] if admin: groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') auth_value = key_hash or self.auth_encoder().encode(key) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'auth': auth_value, 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) return HTTPCreated(request=req)
def handle_put_user(self, req)
Handles the PUT v2/<account>/<user> call for adding a user to an account. X-Auth-User-Key represents the user's key (url encoded), - OR - X-Auth-User-Key-Hash represents the user's hashed key (url encoded), X-Auth-User-Admin may be set to `true` to create an account .admin, and X-Auth-User-Reseller-Admin may be set to `true` to create a .reseller_admin. Creating users ************** Can only be called by an account .admin unless the user is to be a .reseller_admin, in which case the request must be by .super_admin. Changing password/key ********************* 1) reseller_admin key can be changed by super_admin and by himself. 2) admin key can be changed by any admin in same account, reseller_admin, super_admin and himself. 3) Regular user key can be changed by any admin in his account, reseller_admin, super_admin and himself. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success.
2.754792
2.508427
1.098215
# Validate path info account = req.path_info_pop() user = req.path_info_pop() if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.': return HTTPBadRequest(request=req) # if user to be deleted is reseller_admin, then requesting # user must be the super_admin is_reseller_admin = self.is_user_reseller_admin(req, account, user) if not is_reseller_admin and not req.credentials_valid: # if user to be deleted can't be found, return 404 return HTTPNotFound(request=req) elif is_reseller_admin and not self.is_super_admin(req): return HTTPForbidden(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) # Delete the user's existing token, if any. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) elif resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: object_name = self._get_concealed_token(candidate_token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete possibly existing token: ' '%s %s' % (path, resp.status)) # Delete the user entry itself. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete the user object: %s %s' % (path, resp.status)) return HTTPNoContent(request=req)
def handle_delete_user(self, req)
Handles the DELETE v2/<account>/<user> call for deleting a user from an account. Can only be called by an account .admin. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success.
2.466791
2.407722
1.024533
req.credentials_valid = True user_json = self.get_user_detail(req, account, user) if user_json is None: req.credentials_valid = False return False user_detail = json.loads(user_json) return '.reseller_admin' in (g['name'] for g in user_detail['groups'])
def is_user_reseller_admin(self, req, account, user)
Returns True if the user is a .reseller_admin. :param account: account the user is part of :param user: the user :returns: True if the user is a .reseller_admin, False if the user is not a reseller_admin, and None if the user doesn't exist.
4.429577
4.393416
1.008231
token = req.path_info_pop() if req.path_info or not token.startswith(self.reseller_prefix): return HTTPBadRequest(request=req) expires = groups = None memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) cached_auth_data = memcache_client.get(memcache_key) if cached_auth_data: expires, groups = cached_auth_data if expires < time(): groups = None if not groups: object_name = self._get_concealed_token(token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return HTTPNotFound(request=req) detail = json.loads(resp.body) expires = detail['expires'] if expires < time(): self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) return HTTPNotFound(request=req) groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) groups = ','.join(groups) return HTTPNoContent(headers={'X-Auth-TTL': expires - time(), 'X-Auth-Groups': groups})
def handle_validate_token(self, req)
Handles the GET v2/.token/<token> call for validating a token, usually called by a service like Swift. On a successful validation, X-Auth-TTL will be set for how much longer this token is valid and X-Auth-Groups will contain a comma separated list of groups the user belongs to. The first group listed will be a unique identifier for the user the token represents. .reseller_admin is a special group that indicates the user should be allowed to do anything on any account. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above.
2.969987
2.648001
1.121596
if not urlparsed: urlparsed = self.dsc_parsed2 if urlparsed.scheme == 'http': return HTTPConnection(urlparsed.netloc) else: return HTTPSConnection(urlparsed.netloc)
def get_conn(self, urlparsed=None)
Returns an HTTPConnection based on the urlparse result given or the default Swift cluster (internal url) urlparse result. :param urlparsed: The result from urlparse.urlparse or None to use the default Swift cluster's value
3.63574
4.402852
0.825769
if not self.itoken or self.itoken_expires < time() or \ env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \ TRUE_VALUES: self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) self.itoken_expires = time() + self.token_life memcache_client = cache_from_env(env) if not memcache_client: raise Exception( 'No memcache set up; required for Swauth middleware') memcache_client.set( memcache_key, (self.itoken_expires, '.auth,.reseller_admin,%s.auth' % self.reseller_prefix), time=self.token_life) return self.itoken
def get_itoken(self, env)
Returns the current internal token to use for the auth system's own actions with other services. Each process will create its own itoken and the token will be deleted and recreated based on the token_life configuration value. The itoken information is stored in memcache because the auth process that is asked by Swift to validate the token may not be the same as the auth process that created the token.
4.114079
3.614169
1.138319
if ':' not in req.headers.get('x-auth-admin-user', ''): return None admin_account, admin_user = \ req.headers.get('x-auth-admin-user').split(':', 1) user_json = self.get_user_detail(req, admin_account, admin_user) if user_json is None: return None admin_detail = json.loads(user_json) admin_detail['account'] = admin_account return admin_detail
def get_admin_detail(self, req)
Returns the dict for the user specified as the admin in the request with the addition of an `account` key set to the admin user's account. :param req: The swob request to retrieve X-Auth-Admin-User and X-Auth-Admin-Key from. :returns: The dict for the admin user with the addition of the `account` key.
2.544201
2.200703
1.156085
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return None if resp.status_int // 100 != 2: raise Exception('Could not get user object: %s %s' % (path, resp.status)) return resp.body
def get_user_detail(self, req, account, user)
Returns the response body of a GET request for the specified user The body is in JSON format and contains all user information. :param req: The swob request :param account: the account the user is a member of :param user: the user :returns: A JSON response with the user detail information, None if the user doesn't exist
3.200548
3.124944
1.024194
if user_detail: creds = user_detail.get('auth') try: auth_encoder, creds_dict = \ swauth.authtypes.validate_creds(creds) except ValueError as e: self.logger.error('%s' % e.args[0]) return False return user_detail and auth_encoder.match(key, creds, **creds_dict)
def credentials_match(self, user_detail, key)
Returns True if the key is valid for the user_detail. It uses the auth_encoder type that the password was encoded with to check for a key match. :param user_detail: The dict for the user. :param key: The key to validate for the user. :returns: True if the key is valid for the user, False if not.
5.878875
5.282228
1.112954
admin_detail = self.get_admin_detail(req)
if not admin_detail:
    # The user does not exist
    return False
# If user is not admin/reseller_admin and x-auth-user-admin or
# x-auth-user-reseller-admin headers are present in request, he may be
# attempting to escalate himself as admin/reseller_admin!
if '.admin' not in (g['name'] for g in admin_detail['groups']):
    if req.headers.get('x-auth-user-admin') == 'true' or \
            req.headers.get('x-auth-user-reseller-admin') == 'true':
        return False
if '.reseller_admin' not in \
        (g['name'] for g in admin_detail['groups']) and \
        req.headers.get('x-auth-user-reseller-admin') == 'true':
    return False
return req.headers.get('x-auth-admin-user') == user and \
    self.credentials_match(admin_detail,
                           req.headers.get('x-auth-admin-key'))
def is_user_changing_own_key(self, req, user)
Check if the user is changing their own key. :param req: The swob.Request to check. This contains x-auth-admin-user and x-auth-admin-key headers which are credentials of the user sending the request. :param user: User whose key is to be changed. :returns: True if the user is changing their own key, False if not.
3.501866
3.203114
1.093269
return req.headers.get('x-auth-admin-user') == '.super_admin' and \
    self.super_admin_key and \
    req.headers.get('x-auth-admin-key') == self.super_admin_key
def is_super_admin(self, req)
Returns True if the admin specified in the request represents the .super_admin. :param req: The swob.Request to check. :returns: True if .super_admin.
3.840534
3.479643
1.103715
req.credentials_valid = False
if self.is_super_admin(req):
    return True
if not admin_detail:
    admin_detail = self.get_admin_detail(req)
if not self.credentials_match(admin_detail,
                              req.headers.get('x-auth-admin-key')):
    return False
req.credentials_valid = True
return '.reseller_admin' in (g['name'] for g in admin_detail['groups'])
def is_reseller_admin(self, req, admin_detail=None)
Returns True if the admin specified in the request represents a .reseller_admin. :param req: The swob.Request to check. :param admin_detail: The previously retrieved dict from :func:`get_admin_detail` or None for this function to retrieve the admin_detail itself. :returns: True if .reseller_admin.
3.896848
3.970022
0.981568
try:
    auth_type, auth_rest = creds.split(':', 1)
except ValueError:
    raise ValueError("Missing ':' in %s" % creds)
authtypes = sys.modules[__name__]
auth_encoder = getattr(authtypes, auth_type.title(), None)
if auth_encoder is None:
    raise ValueError('Invalid auth_type: %s' % auth_type)
auth_encoder = auth_encoder()
parsed_creds = dict(type=auth_type, salt=None, hash=None)
parsed_creds.update(auth_encoder.validate(auth_rest))
return auth_encoder, parsed_creds
def validate_creds(creds)
Parse user credentials and validate that they are in the expected format :param creds: User credentials :returns: Auth_type class instance and the parsed user credentials as a dict :raises ValueError: If the credential format is wrong (e.g. a bad auth_type)
3.344953
3.019434
1.107808
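A minimal usage sketch of validate_creds; the salt and digest below are invented placeholders in the expected "sha1:<salt>$<40-hex-digit-hash>" layout.

import swauth.authtypes

creds = 'sha1:salt123$' + 'deadbeef' * 5   # placeholder 40-character hex digest
try:
    auth_encoder, creds_dict = swauth.authtypes.validate_creds(creds)
except ValueError as exc:
    print('malformed credentials: %s' % exc)
else:
    print(creds_dict['type'], creds_dict['salt'])
    # auth_encoder.match(submitted_key, creds, **creds_dict) then checks a login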
enc_key = '%s%s' % (salt, key)
enc_val = hashlib.sha1(enc_key).hexdigest()
return "sha1:%s$%s" % (salt, enc_val)
def encode_w_salt(self, salt, key)
Encodes a user key with salt into a particular format. The result of this method will be used internally. :param salt: Salt for hashing :param key: User's secret key :returns: A string representing user credentials
3.482173
4.764759
0.730818
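For illustration, the stored credential string can be reproduced directly with hashlib; the salt and key are invented, and the .encode() call is only needed on Python 3.

import hashlib

salt, key = 'salt123', 's3cr3t'
digest = hashlib.sha1(('%s%s' % (salt, key)).encode('utf-8')).hexdigest()
print('sha1:%s$%s' % (salt, digest))   # same layout encode_w_salt() returns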
salt = self.salt or os.urandom(32).encode('base64').rstrip()
return self.encode_w_salt(salt, key)
def encode(self, key)
Encodes a user key into a particular format. The result of this method will be used by swauth for storing user credentials. If salt is not manually set in conf file, a random salt will be generated and used. :param key: User's secret key :returns: A string representing user credentials
5.723874
6.764596
0.846152
return self.encode_w_salt(salt, key) == creds
def match(self, key, creds, salt, **kwargs)
Checks whether the user-provided key matches the user's stored credentials :param key: User-supplied key :param creds: User's stored credentials :param salt: Salt for hashing :param kwargs: Extra keyword args for compatibility with other auth_type classes :returns: True if the supplied key is valid, False otherwise
17.36862
28.450893
0.610477
try:
    auth_salt, auth_hash = auth_rest.split('$')
except ValueError:
    raise ValueError("Missing '$' in %s" % auth_rest)
if len(auth_salt) == 0:
    raise ValueError("Salt must have non-zero length!")
if len(auth_hash) != 40:
    raise ValueError("Hash must have 40 chars!")
if not all(c in string.hexdigits for c in auth_hash):
    raise ValueError("Hash must be hexadecimal!")
return dict(salt=auth_salt, hash=auth_hash)
def validate(self, auth_rest)
Validate that the user credentials are in the correct format for Sha1 :param auth_rest: The credentials part without the auth_type prefix :return: Dict with the hash and salt parts of the user credentials :raises ValueError: If the credentials don't contain the delimiter between salt and hash.
2.635314
2.300801
1.14539
# define file path of slp csv data
file_path = os.path.join(self.datapath, 'selp_series.csv')

# Read standard load profile series from csv file
selp_series = pd.read_csv(file_path)
tmp_df = selp_series

# Create an index to merge. The year and month will be ignored only the
# time index is necessary.
index = pd.date_range(
    pd.datetime(2007, 1, 1, 0), periods=2016, freq='15Min')
tmp_df.set_index(index, inplace=True)

# Create empty DataFrame to take the results.
new_df = pd.DataFrame(index=dt_index, columns=slp_types).fillna(0)
new_df = add_weekdays2df(new_df, holidays=holidays,
                         holiday_is_sunday=True)

new_df['hour'] = dt_index.hour + 1
new_df['minute'] = dt_index.minute
time_df = new_df[['date', 'hour', 'minute', 'weekday']].copy()
tmp_df[slp_types] = tmp_df[slp_types].astype(float)

# Inner join the slps on the time_df to the slp's for a whole year
tmp_df['hour_of_day'] = tmp_df.index.hour + 1
tmp_df['minute_of_hour'] = tmp_df.index.minute
left_cols = ['hour_of_day', 'minute_of_hour', 'weekday']
right_cols = ['hour', 'minute', 'weekday']
tmp_df = tmp_df.reset_index()
tmp_df.pop('index')

for p in self.seasons.keys():
    a = pd.datetime(self.year, self.seasons[p][0],
                    self.seasons[p][1], 0, 0)
    b = pd.datetime(self.year, self.seasons[p][2],
                    self.seasons[p][3], 23, 59)
    new_df.update(pd.DataFrame.merge(
        tmp_df[tmp_df['period'] == p[:-1]], time_df[a:b],
        left_on=left_cols, right_on=right_cols,
        how='inner', left_index=True).sort_index().drop(
            ['hour_of_day'], 1))

new_df.drop('date', axis=1, inplace=True)
return new_df.div(new_df.sum(axis=0), axis=1)
def create_bdew_load_profiles(self, dt_index, slp_types, holidays=None)
Calculates the hourly electricity load profile in MWh/h of a region.
3.375271
3.388937
0.995967
return self.slp_frame.multiply(pd.Series( ann_el_demand_per_sector), axis=1).dropna(how='all', axis=1) * 4
def get_profile(self, ann_el_demand_per_sector)
Get the profiles for the given annual demand

Parameters
----------
ann_el_demand_per_sector : dictionary
    Key: sector, value: annual value

Returns
-------
pandas.DataFrame : Table with all profiles
8.065794
9.258354
0.871191
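A usage sketch, assuming this method lives on demandlib's bdew.ElecSlp class (the import path, constructor arguments, and annual demand figures are assumptions for illustration).

import datetime
from demandlib import bdew   # assumed package layout

holidays = {datetime.date(2018, 1, 1): 'New year'}          # optional
e_slp = bdew.ElecSlp(2018, holidays=holidays)
ann_el_demand_per_sector = {'h0': 3000, 'g0': 5000, 'l0': 6000}
elec_demand = e_slp.get_profile(ann_el_demand_per_sector)
print(elec_demand.head())   # one scaled 15-minute profile per sector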
# calculate daily mean temperature
temperature = self.df['temperature'].resample('D').mean().reindex(
    self.df.index).fillna(method='ffill').fillna(method='bfill')

if how == 'geometric_series':
    temperature_mean = (temperature + 0.5 * np.roll(temperature, 24) +
                        0.25 * np.roll(temperature, 48) +
                        0.125 * np.roll(temperature, 72)) / 1.875
elif how == 'mean':
    temperature_mean = temperature
else:
    temperature_mean = None

return temperature_mean
def weighted_temperature(self, how='geometric_series')
r""" A new temperature vector is generated containing a multi-day average temperature as needed in the load profile function. Parameters ---------- how : string string which type to return ("geometric_series" or "mean") Notes ----- Equation for the mathematical series of the average tempaerature [1]_: .. math:: T=\frac{T_{D}+0.5\cdot T_{D-1}+0.25\cdot T_{D-2}+ 0.125\cdot T_{D-3}}{1+0.5+0.25+0.125} with :math:`T_D` = Average temperature on the present day :math:`T_{D-i}` = Average temperature on the day - i References ---------- .. [1] `BDEW <https://www.avacon.de/cps/rde/xbcr/avacon/15-06-30_Leitfaden_Abwicklung_SLP_Gas.pdf>`_, BDEW Documentation for heat profiles.
2.962457
2.758574
1.073909
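A small numeric sketch of the geometric-series weighting above, applied to an hourly series in which each day's 24 values already hold that day's mean (the temperatures are invented).

import numpy as np

daily_means = np.repeat([10.0, 12.0, 8.0, 6.0], 24)   # four example days, hourly
weighted = (daily_means
            + 0.5 * np.roll(daily_means, 24)
            + 0.25 * np.roll(daily_means, 48)
            + 0.125 * np.roll(daily_means, 72)) / 1.875
print(weighted[72])   # day 4: (6 + 0.5*8 + 0.25*12 + 0.125*10) / 1.875 = 7.6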
intervals = ({
    -20: 1, -19: 1, -18: 1, -17: 1, -16: 1,
    -15: 1, -14: 2, -13: 2, -12: 2, -11: 2,
    -10: 2, -9: 3, -8: 3, -7: 3, -6: 3,
    -5: 3, -4: 4, -3: 4, -2: 4, -1: 4,
    0: 4, 1: 5, 2: 5, 3: 5, 4: 5,
    5: 5, 6: 6, 7: 6, 8: 6, 9: 6,
    10: 6, 11: 7, 12: 7, 13: 7, 14: 7,
    15: 7, 16: 8, 17: 8, 18: 8, 19: 8,
    20: 8, 21: 9, 22: 9, 23: 9, 24: 9,
    25: 9, 26: 10, 27: 10, 28: 10, 29: 10,
    30: 10, 31: 10, 32: 10, 33: 10, 34: 10,
    35: 10, 36: 10, 37: 10, 38: 10, 39: 10,
    40: 10})
temperature_rounded = [ceil(i) for i in self.df['temperature_geo']]
temperature_interval = [intervals[i] for i in temperature_rounded]
return np.transpose(np.array(temperature_interval))
def get_temperature_interval(self)
Assigns the corresponding temperature interval to each temperature in the temperature vector.
1.426558
1.380488
1.033373
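As a consistency check (not part of the original code), the lookup table above is equivalent to 5-degree bins starting at -20 °C, clamped to the range 1..10.

from math import ceil

def interval(temperature):
    # 5-degree bins starting at -20 °C, clamped to intervals 1..10
    return min(10, max(1, int(ceil((temperature + 20) / 5.0))))

assert interval(-20) == 1 and interval(-14) == 2 and interval(0) == 4
assert interval(26) == 10 and interval(40) == 10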
file = os.path.join(self.datapath, filename)
hour_factors = pd.read_csv(file, index_col=0)
hour_factors = hour_factors.query(
    'building_class=={0} and shlp_type=="{1}"'.format(
        self.building_class, self.shlp_type))

# Join the two DataFrames on the columns 'hour' and 'hour_of_the_day'
# or ['hour' 'weekday'] and ['hour_of_the_day', 'weekday'] if it is
# not a residential slp.
residential = self.building_class > 0
left_cols = ['hour_of_day'] + (['weekday'] if not residential else [])
right_cols = ['hour'] + (['weekday'] if not residential else [])

sf_mat = pd.DataFrame.merge(
    hour_factors, self.df, left_on=left_cols, right_on=right_cols,
    how='outer', left_index=True).sort_index()

# drop unnecessary columns
drop_cols = (
    ['hour_of_day', 'hour', 'building_class', 'shlp_type',
     'date', 'temperature'] +
    (['weekday_x'] if residential else []) +
    (['weekday_y'] if residential else []) +
    (['weekday'] if not residential else []))
sf_mat = sf_mat.drop(drop_cols, 1)

# Determine the h values
length = len(self.temperature)
sf = (np.array(sf_mat)[np.array(list(range(0, length)))[:],
                       (self.get_temperature_interval() - 1)[:]])
return np.array(list(map(float, sf[:])))
def get_sf_values(self, filename='shlp_hour_factors.csv')
Determine the h-values

Parameters
----------
filename : string
    name of the file where the hour factors are stored
3.682416
3.637347
1.012391
file = os.path.join(self.datapath, filename)
sigmoid = pd.read_csv(file, index_col=0)
sigmoid = sigmoid.query(
    'building_class=={0} and '.format(self.building_class) +
    'shlp_type=="{0}" and '.format(self.shlp_type) +
    'wind_impact=={0}'.format(self.wind_class))

a = float(sigmoid['parameter_a'])
b = float(sigmoid['parameter_b'])
c = float(sigmoid['parameter_c'])
if self.ww_incl:
    d = float(sigmoid['parameter_d'])
else:
    d = 0
return a, b, c, d
def get_sigmoid_parameters(self, filename='shlp_sigmoid_factors.csv')
Retrieve the sigmoid parameters from csv-files

Parameters
----------
filename : string
    name of the file where the sigmoid factors are stored
3.034334
3.14439
0.964999
file = os.path.join(self.datapath, filename)
f_df = pd.read_csv(file, index_col=0)
tmp_df = f_df.query('shlp_type=="{0}"'.format(self.shlp_type)).drop(
    'shlp_type', axis=1)
tmp_df['weekdays'] = np.array(list(range(7))) + 1

return np.array(list(map(float, pd.DataFrame.merge(
    tmp_df, self.df, left_on='weekdays', right_on='weekday',
    how='inner', left_index=True).sort_index()['wochentagsfaktor'])))
def get_weekday_parameters(self, filename='shlp_weekday_factors.csv')
Retrieve the weekday parameters from a csv-file

Parameters
----------
filename : string
    name of the file where the weekday factors are stored
4.003665
4.05528
0.987272
self.df['temperature'] = self.temperature.values
self.df['temperature_geo'] = self.weighted_temperature(
    how='geometric_series')

sf = self.get_sf_values()
[a, b, c, d] = self.get_sigmoid_parameters()
f = self.get_weekday_parameters()

h = (a / (1 + (b / (self.df['temperature_geo'] - 40)) ** c) + d)
kw = 1.0 / (sum(h * f) / 24)
heat_profile_normalized = (kw * h * f * sf)
return heat_profile_normalized
def get_normalized_bdew_profile(self)
Calculation of the normalized hourly heat demand
7.505176
6.874385
1.09176
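A sketch of the sigmoid h = a / (1 + (b / (T - 40)) ** c) + d used above, with invented parameters; the real values come from shlp_sigmoid_factors.csv.

import numpy as np

a, b, c, d = 3.0, -37.0, 5.5, 0.1             # invented example parameters
temperature_geo = np.array([-10.0, 0.0, 10.0, 20.0])
h = a / (1 + (b / (temperature_geo - 40)) ** c) + d
print(h)   # the heat demand factor falls as the weighted temperature rises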
import os.path
return os.path.isfile(path, **kwargs)
def isfile(path, **kwargs)
Check if *path* is a file
3.977523
3.627219
1.096577
import os.path
return os.path.isdir(path, **kwargs)
def isdir(path, **kwargs)
Check if *path* is a directory
3.93406
3.768369
1.043969
import os
return os.rename(oldPath, newPath, **kwargs)
def rename(oldPath, newPath, **kwargs)
Rename the file *oldPath* to *newPath*
3.889026
3.320909
1.171073
import os
import pwd
import grp

uid = pwd.getpwnam(user).pw_uid if user else -1
gid = grp.getgrnam(group).gr_gid if group else -1
return os.chown(path, uid, gid)
def chown(path, user=None, group=None)
Change the ownership of *path*
1.877162
1.885856
0.99539
import os
return os.chmod(path, mode)
def chmod(path, mode)
Change the permissions of *path*
3.478689
3.57984
0.971744
import os.path
return os.path.abspath(path, **kwargs)
def abspath(path, **kwargs)
Return the absolute path of *path*
3.98851
3.236591
1.232318
import os.path
return os.path.normpath(path, **kwargs)
def normalize(path, **kwargs)
Return the normalized path of *path*
5.569032
4.719737
1.179945
if recursive:
    import shutil
    return shutil.rmtree(path, **kwargs)
else:
    import os
    return os.rmdir(path, **kwargs)
def rmdir(path, recursive=True, **kwargs)
Remove the directory *path*
3.558256
3.541146
1.004832
import os
if recursive:
    os.makedirs(path, **kwargs)
else:
    os.mkdir(path, **kwargs)
def mkdir(path, recursive=True, **kwargs)
Unix equivalent *mkdir*
2.494758
2.29037
1.089238
import os
try:
    OPEN_FUNC(path, 'a+').close()
except IOError:
    os.utime(path, None)
def touch(path)
Unix equivalent *touch* @src: http://stackoverflow.com/a/1158096
6.004103
6.873361
0.873532
import os.path
return os.path.exists(path, **kwargs)
def exists(path, **kwargs)
Check if file or directory exists
4.716627
4.094746
1.151873
import os
for f in os.listdir(path):
    if isfile(join(path, f)):
        yield join(path, f) if path != '.' else f
def list(path='.')
Generator that yields all files of *path*
3.680135
2.916484
1.261839
import os
for f in os.listdir(path):
    if isdir(join(path, f)):
        yield join(path, f) if path != '.' else f
def listdirs(path='.')
Generator that yields all directories of *path*
3.641056
3.053483
1.192427
import fnmatch
import os

if recursive:
    for root, dirnames, filenames in os.walk(path):
        for pat in _to_list(pattern):
            for filename in fnmatch.filter(filenames, pat):
                filepath = join(abspath(root), filename)
                for excl in _to_list(exclude):
                    if excl and fnmatch.fnmatch(filepath, excl):
                        break
                else:
                    yield filepath
else:
    for pat in _to_list(pattern):
        for filename in fnmatch.filter(list(path), pat):
            filepath = join(abspath(path), filename)
            for excl in _to_list(exclude):
                if excl and fnmatch.fnmatch(filepath, excl):
                    break
            else:
                yield filepath
def find(pattern, path='.', exclude=None, recursive=True)
Find files that match *pattern* in *path*
1.94494
1.969308
0.987627
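A usage sketch of find(); the pattern, directory, and exclude glob are examples only.

# Recursively collect Python files, skipping anything under a tests directory.
for filepath in find('*.py', path='.', exclude='*/tests/*'):
    print(filepath)

# Non-recursive variant limited to the top level of a directory.
print(sorted(find('*.csv', path='/tmp/data', recursive=False)))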
import fnmatch
import os

if recursive:
    for root, dirnames, filenames in os.walk(path):
        for pat in _to_list(pattern):
            for dirname in fnmatch.filter(dirnames, pat):
                dirpath = join(abspath(root), dirname)
                for excl in _to_list(exclude):
                    if excl and fnmatch.fnmatch(dirpath, excl):
                        break
                else:
                    yield dirpath
else:
    for pat in _to_list(pattern):
        for dirname in fnmatch.filter(listdirs(path), pat):
            dirpath = join(abspath(path), dirname)
            for excl in _to_list(exclude):
                if excl and fnmatch.fnmatch(dirpath, excl):
                    break
            else:
                yield dirpath
def finddirs(pattern, path='.', exclude=None, recursive=True)
Find directories that match *pattern* in *path*
1.847183
1.8647
0.990606
mode = 'wb' if not append else 'ab'
with OPEN_FUNC(path, mode) as _file:
    if raw:
        import shutil
        shutil.copyfileobj(content, _file)
    else:
        _file.write(content.encode(encoding))
def write(path, content, encoding="UTF-8", append=False, raw=False)
Write *content* to file *path*
3.198038
3.586751
0.891625
with OPEN_FUNC(path, 'rb') as _file:
    cont = _file.read()
return cont.decode(encoding)
def read(path, encoding="UTF-8")
Read and return content from file *path*
6.460264
7.029009
0.919086
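A round trip with write() and read(); the file name is arbitrary.

write('/tmp/example.txt', 'first line\n')
write('/tmp/example.txt', 'second line\n', append=True)
print(read('/tmp/example.txt'))   # 'first line\nsecond line\n'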
try:
    import cPickle as pickle
except ImportError:
    import pickle

with open(path, 'rb') as file:
    return pickle.load(file)
def get(path)
Read an object from file
2.568588
2.616261
0.981778
try:
    import cPickle as pickle
except ImportError:
    import pickle

with open(path, 'wb') as file:
    return pickle.dump(obj, file)
def put(path, obj)
Write an object to file
2.657472
2.894285
0.918179
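An object round trip with put() and get() (pickle under the hood; the path is arbitrary).

put('/tmp/state.pkl', {'count': 3, 'items': ['a', 'b', 'c']})
print(get('/tmp/state.pkl'))      # {'count': 3, 'items': ['a', 'b', 'c']}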
import os.path
if _is_list(args[0]):
    return os.path.join(*args[0])
return os.path.join(*args, **kwargs)
def join(*args, **kwargs)
Join parts of a path together
3.54365
3.127172
1.13318
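join() accepts either separate path parts or a single list of parts, as the short sketch below shows.

print(join('data', 'raw', 'file.csv'))     # data/raw/file.csv
print(join(['data', 'raw', 'file.csv']))   # same result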
import os.path
name, ext = os.path.splitext(path, **kwargs)
return ext
def extname(path, **kwargs)
Return the extension from *path*
4.886436
3.808707
1.282964
import os.path
if ext is False:
    return os.path.basename(path).replace(extname(path), "")
else:
    return os.path.basename(path).replace(ext, "")
def basename(path, ext="")
Return the file base name from *path*
3.379994
3.152314
1.072226
return join(dirname(path), basename(path, ext=False) + suffix + extname(path))
def add_suffix(path, suffix="")
Adds a suffix to a filename *path*
7.172579
7.617202
0.941629
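The name helpers compose as shown below; the example path is made up, and dirname() is assumed to be defined elsewhere in the module.

print(extname('/tmp/report.csv'))            # .csv
print(basename('/tmp/report.csv', '.csv'))   # report
print(add_suffix('/tmp/report.csv', '_v2'))  # /tmp/report_v2.csv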
import sys
if not exists(path):
    raise ValueError('Path %s does not exist' % path)
sys.path.insert(1, path)
def addpath(path)
Add *path* to system path
3.810773
3.754188
1.015072