sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def with_fields(self, *fields):
    """Returns list of characters with information for certain fields.

    Parameters
    ----------
    *fields : list of str
        fields for which information should be available

    Returns
    -------
    :class:`sqlalchemy.orm.query.Query` :
        list of matches
    """
    # Reflected SQLAlchemy mapping for the Unihan table.
    Unihan = self.sql.base.classes.Unihan
    query = self.sql.session.query(Unihan)
    for field in fields:
        # NOTE(review): Column(field) builds a new, table-unbound column
        # object; presumably it renders as the bare column name in SQL.
        # Confirm whether getattr(Unihan, field) would be the safer form.
        query = query.filter(Column(field).isnot(None))
    return query
Returns list of characters with information for certain fields. Parameters ---------- *fields : list of str fields for which information should be available Returns ------- :class:`sqlalchemy.orm.query.Query` : list of matches
entailment
def entropy(data=None, prob=None, method='nearest-neighbors', bins=None, errorVal=1e-5, units='bits'):
    '''
    Given a probability distribution (prob) or an iterable of symbols (data)
    compute and return its continuous entropy.

    inputs:
    ------
        data:       samples by dimensions ndarray
        prob:       iterable with probabilities
        method:     'nearest-neighbors', 'gaussian', or 'bin'
        bins:       either a list of num_bins, or a list of lists containing
                    the bin edges
        errorVal:   if prob is given, 'entropy' checks that the sum is about 1.
                    It raises an error if abs(sum(prob)-1) >= errorVal
        units:      either 'bits' or 'nats'

    Raises
    ------
    ValueError : on inconsistent prob/data arguments or unrecognized units.

    Different Methods:
        'nearest-neighbors' computes the binless entropy (bits) of a random
        vector using average nearest neighbors distance (Kozachenko and
        Leonenko, 1987). For a review see Beirlant et al., 2001 or
        Chandler & Field, 2007.

        'gaussian' computes the binless entropy based on estimating the
        covariance matrix and assuming the data is normally distributed.

        'bin' discretizes the data and computes the discrete entropy.
    '''
    if prob is None and data is None:
        raise ValueError("%s.entropy requires either 'prob' or 'data' to be defined" % __name__)
    if prob is not None and data is not None:
        raise ValueError("%s.entropy requires only 'prob' or 'data to be given but not both" % __name__)
    if prob is not None and not isinstance(prob, np.ndarray):
        raise TypeError("'entropy' in '%s' needs 'prob' to be an ndarray" % __name__)
    if prob is not None and abs(prob.sum()-1) > errorVal:
        raise ValueError("parameter 'prob' in '%s.entropy' should sum to 1" % __name__)

    if data is not None:
        num_samples = data.shape[0]
        if len(data.shape) == 1:
            num_dimensions = 1
        else:
            num_dimensions = data.shape[1]

    if method == 'nearest-neighbors':
        # imported lazily so the other methods do not require sklearn/scipy
        from sklearn.neighbors import NearestNeighbors
        from scipy.special import gamma

        if data is None:
            raise ValueError('Nearest neighbors entropy requires original data')

        k = num_dimensions if len(data.shape) > 1 else 1

        nbrs = NearestNeighbors(n_neighbors=2, algorithm='auto').fit(data)
        distances, indices = nbrs.kneighbors(data)
        # take nearest-neighbor distance (first column is always zero)
        rho = distances[:, 1]
        # volume coefficient of the k-dimensional unit ball
        Ak = (k*np.pi**(float(k)/float(2)))/gamma(float(k)/float(2)+1)

        # 0.577215... is the Euler-Mascheroni constant (np.euler_gamma)
        # was: `units is 'bits'` -- identity comparison with a string
        # literal only works by interning accident; use equality.
        if units == 'bits':
            return k*np.mean(np.log2(rho)) + np.log2(num_samples*Ak/k) + np.log2(np.exp(1))*np.euler_gamma
        elif units == 'nats':
            return k*np.mean(np.log(rho)) + np.log(num_samples*Ak/k) + np.log(np.exp(1))*np.euler_gamma
        else:
            # was a print() that silently returned None
            raise ValueError('Units not recognized: {}'.format(units))

    elif method == 'gaussian':
        from numpy.linalg import det

        if data is None:
            # was a copy-pasted "Nearest neighbors" message
            raise ValueError('Gaussian entropy requires original data')

        detCov = det(np.dot(data.transpose(), data)/num_samples)
        normalization = (2*np.pi*np.exp(1))**num_dimensions
        if detCov == 0:
            # degenerate covariance: differential entropy diverges to -inf
            return -np.inf
        if units == 'bits':
            return 0.5*np.log2(normalization*detCov)
        elif units == 'nats':
            return 0.5*np.log(normalization*detCov)
        else:
            raise ValueError('Units not recognized: {}'.format(units))

    elif method == 'bin':
        if prob is None and bins is None:
            raise ValueError('Either prob or bins must be specified.')
        if data is not None:
            prob = symbols_to_prob(data, bins=bins)

        if units == 'bits':
            logProb = np.log2(prob)
        elif units == 'nats':
            logProb = np.log(prob)
        else:
            raise ValueError('Units not recognized: {}'.format(units))

        # zero-probability bins give -inf; zero them so 0*log(0) drops out
        logProb[logProb == -np.inf] = 0

        # not using np.dot here because prob, logProb are nd arrays
        return -float(np.sum(prob * logProb))
given a probability distribution (prob) or an iterable of symbols (data) compute and return its continuous entropy. inputs: ------ data: samples by dimensions ndarray prob: iterable with probabilities method: 'nearest-neighbors', 'gaussian', or 'bin' bins: either a list of num_bins, or a list of lists containing the bin edges errorVal: if prob is given, 'entropy' checks that the sum is about 1. It raises an error if abs(sum(prob)-1) >= errorVal units: either 'bits' or 'nats' Different Methods: 'nearest-neighbors' computes the binless entropy (bits) of a random vector using average nearest neighbors distance (Kozachenko and Leonenko, 1987). For a review see Beirlant et al., 2001 or Chandler & Field, 2007. 'gaussian' computes the binless entropy based on estimating the covariance matrix and assuming the data is normally distributed. 'bin' discretizes the data and computes the discrete entropy.
entailment
def mi(x, y, bins_x=None, bins_y=None, bins_xy=None, method='nearest-neighbors', units='bits'):
    '''
    Compute and return the mutual information between x and y.

    inputs:
    -------
        x, y:   numpy arrays of shape samples x dimension
        method: 'nearest-neighbors', 'gaussian', or 'bin'
        units:  'bits' or 'nats'

    output:
    -------
        mi:     float

    Notes:
    ------
        if you are trying to mix several symbols together as in
        mi(x, (y0,y1,...)), try
        info[p] = _info.mi(x, info.combine_symbols(y0, y1, ...))
    '''
    # zip objects are single-pass iterators in python3; materialize them so
    # the data can be traversed more than once below. On python2, zip is a
    # function and isinstance(x, zip) raises TypeError -- catch only that
    # instead of the previous bare `except:` which hid every failure.
    try:
        if isinstance(x, zip):
            x = list(x)
        if isinstance(y, zip):
            y = list(y)
    except TypeError:
        pass

    # promote 1d arrays to column vectors; plain sequences without a .shape
    # attribute raise AttributeError and pass through unchanged.
    try:
        if len(x.shape) == 1:
            x = np.expand_dims(x, 1)
        if len(y.shape) == 1:
            y = np.expand_dims(y, 1)
    except (AttributeError, TypeError):
        pass

    HX = entropy(data=x, bins=bins_x, method=method, units=units)
    HY = entropy(data=y, bins=bins_y, method=method, units=units)
    HXY = entropy(data=np.concatenate([x, y], axis=1), bins=bins_xy, method=method, units=units)

    # I(X;Y) = H(X) + H(Y) - H(X,Y)
    return HX + HY - HXY
compute and return the mutual information between x and y inputs: ------- x, y: numpy arrays of shape samples x dimension method: 'nearest-neighbors', 'gaussian', or 'bin' units: 'bits' or 'nats' output: ------- mi: float Notes: ------ if you are trying to mix several symbols together as in mi(x, (y0,y1,...)), try info[p] = _info.mi(x, info.combine_symbols(y0, y1, ...) )
entailment
def cond_entropy(x, y, bins_y=None, bins_xy=None, method='nearest-neighbors', units='bits'):
    '''
    Compute the conditional entropy H(X|Y) = H(X,Y) - H(Y).

    method: 'nearest-neighbors', 'gaussian', or 'bin'
            if 'bin', bins_y and bins_xy must be provided
    units:  'bits' or 'nats'
    '''
    joint = np.concatenate([x, y], axis=1)
    joint_entropy = entropy(data=joint, bins=bins_xy, method=method, units=units)
    marginal_entropy = entropy(data=y, bins=bins_y, method=method, units=units)
    return joint_entropy - marginal_entropy
compute the conditional entropy H(X|Y). method: 'nearest-neighbors', 'gaussian', or 'bin' if 'bin' need to provide bins_y, and bins_xy units: 'bits' or 'nats'
entailment
def entropy(data=None, prob=None, tol=1e-5):
    '''
    Given a probability distribution (prob) or an iterable of symbols (data),
    compute and return its discrete entropy in bits.

    inputs:
    ------
        data:   iterable of symbols
        prob:   iterable with probabilities
        tol:    if prob is given, checks that the probabilities sum to ~1;
                raises ValueError when abs(sum(prob)-1) >= tol
    '''
    have_prob = prob is not None
    have_data = data is not None

    if not have_prob and not have_data:
        raise ValueError("%s.entropy requires either 'prob' or 'data' to be defined" % __name__)
    if have_prob and have_data:
        raise ValueError("%s.entropy requires only 'prob' or 'data to be given but not both" % __name__)
    if have_prob and not isinstance(prob, np.ndarray):
        raise TypeError("'entropy' in '%s' needs 'prob' to be an ndarray" % __name__)
    if have_prob and abs(prob.sum()-1) > tol:
        raise ValueError("parameter 'prob' in '%s.entropy' should sum to 1" % __name__)

    if have_data:
        prob = symbols_to_prob(data).prob()

    # -inf entries come from zero-probability symbols; zero them so that
    # 0 * log(0) contributes nothing to the sum
    log_prob = np.log2(prob)
    log_prob[log_prob == -np.inf] = 0

    return -float(np.dot(prob, log_prob))
given a probability distribution (prob) or an iterable of symbols (data) compute and return its entropy inputs: ------ data: iterable of symbols prob: iterable with probabilities tol: if prob is given, 'entropy' checks that the sum is about 1. It raises an error if abs(sum(prob)-1) >= tol
entailment
def symbols_to_prob(symbols):
    '''
    Return a Counter (dict subclass) mapping each symbol to its empirical
    probability.

    input:
    -----
        symbols:    iterable of hashable items

        works well if symbols is a zip of iterables
    '''
    counts = Counter(symbols)
    # Counter() already consumed 'symbols' -- it may be a single-pass
    # iterator such as a zip object in python3 -- so derive the total from
    # the counts themselves. The previous len(list(symbols)) re-iterated
    # the exhausted iterator, got 0, and divided by zero.
    N = float(sum(counts.values()))
    for symbol in counts:
        counts[symbol] /= N
    return counts
Return a dict mapping symbols to probability. input: ----- symbols: iterable of hashable items works well if symbols is a zip of iterables
entailment
def combine_symbols(*args):
    '''
    Combine several parallel symbol sequences into one 'super'-symbol
    sequence. Element i of the result is the tuple of the i-th items of
    every argument; all arguments must have the same length and contain
    hashable items.

    usage:
        1) combine two numeric symbol streams into one
            x = numpy.random.randint(0, 4, 1000)
            y = numpy.random.randint(0, 2, 1000)
            z = combine_symbols(x, y)

        2) combine a letter and a number
            s = 'abcd'
            x = numpy.random.randint(0, 4, 1000)
            y = [s[randint(4)] for i in range(1000)]
            z = combine_symbols(x, y)

        3) for a 2d ndarray with samples_N, properties_N = data.shape,
           combine all properties into one symbol per sample:
            combined_symbol = combine_symbols(*data.T)

        4) if instead properties_N, samples_N = data.shape:
            combined_symbol = combine_symbols(*data)
    '''
    expected = len(args[0])
    if any(len(seq) != expected for seq in args):
        raise ValueError("combine_symbols got inputs with different sizes")
    return tuple(zip(*args))
Combine different symbols into a 'super'-symbol args can be an iterable of iterables that support hashing see example for 2D ndarray input usage: 1) combine two symbols, each a number into just one symbol x = numpy.random.randint(0,4,1000) y = numpy.random.randint(0,2,1000) z = combine_symbols(x,y) 2) combine a letter and a number s = 'abcd' x = numpy.random.randint(0,4,1000) y = [s[randint(4)] for i in range(1000)] z = combine_symbols(x,y) 3) suppose you are running an experiment and for each sample, you measure 3 different properties and you put the data into a 2d ndarray such that: samples_N, properties_N = data.shape and you want to combine all 3 different properties into just 1 symbol In this case you have to find a way to impute each property as an independent array combined_symbol = combine_symbols(*data.T) 4) if data from 3) is such that: properties_N, samples_N = data.shape then run: combined_symbol = combine_symbols(*data)
entailment
def mi(x, y):
    '''
    Compute and return the mutual information between x and y.

    inputs:
    -------
        x, y: iterables of hashable items

    output:
    -------
        mi:     float

    Notes:
    ------
        if you are trying to mix several symbols together as in
        mi(x, (y0,y1,...)), try
        info[p] = _info.mi(x, info.combine_symbols(y0, y1, ...))
    '''
    # zip objects are single-pass iterators in python3; materialize them so
    # they can be traversed several times below. On python2, zip is a
    # function and isinstance(x, zip) raises TypeError -- catch only that
    # instead of the previous bare `except:` which hid every failure.
    try:
        if isinstance(x, zip):
            x = list(x)
        if isinstance(y, zip):
            y = list(y)
    except TypeError:
        pass

    probX = symbols_to_prob(x).prob()
    probY = symbols_to_prob(y).prob()
    probXY = symbols_to_prob(combine_symbols(x, y)).prob()

    # I(X;Y) = H(X) + H(Y) - H(X,Y)
    return entropy(prob=probX) + entropy(prob=probY) - entropy(prob=probXY)
compute and return the mutual information between x and y inputs: ------- x, y: iterables of hashable items output: ------- mi: float Notes: ------ if you are trying to mix several symbols together as in mi(x, (y0,y1,...)), try info[p] = _info.mi(x, info.combine_symbols(y0, y1, ...) )
entailment
def cond_mi(x, y, z):
    '''
    compute and return the mutual information between x and y given z,
    I(x, y | z)

    inputs:
    -------
        x, y, z:   iterables with discrete symbols

    output:
    -------
        mi:     float

    implementation notes:
    ---------------------
        I(x, y | z) = H(x | z) - H(x | y, z)
                    = H(x, z) - H(z) - ( H(x, y, z) - H(y, z) )
                    = H(x, z) + H(y, z) - H(z) - H(x, y, z)
    '''
    # entropies of the joint and marginal distributions in the identity above
    H_xz = entropy(prob=symbols_to_prob(combine_symbols(x, z)).prob())
    H_yz = entropy(prob=symbols_to_prob(combine_symbols(y, z)).prob())
    H_xyz = entropy(prob=symbols_to_prob(combine_symbols(x, y, z)).prob())
    H_z = entropy(prob=symbols_to_prob(z).prob())

    return H_xz + H_yz - H_xyz - H_z
compute and return the mutual information between x and y given z, I(x, y | z) inputs: ------- x, y, z: iterables with discrete symbols output: ------- mi: float implementation notes: --------------------- I(x, y | z) = H(x | z) - H(x | y, z) = H(x, z) - H(z) - ( H(x, y, z) - H(y,z) ) = H(x, z) + H(y, z) - H(z) - H(x, y, z)
entailment
def mi_chain_rule(X, y):
    '''
    Decompose the information between all X and y according to the chain
    rule and return all the terms in the chain rule.

    Inputs:
    -------
        X:  iterable of iterables. You should be able to compute
            [mi(x, y) for x in X]
        y:  iterable of symbols

    output:
    -------
        ndarray: terms of the chain rule

    Implementation notes:
        I(X; y) = I(x0, x1, ..., xn; y)
                = I(x0; y) + I(x1; y | x0) + I(x2; y | x0, x1) + ...
                  + I(xn; y | x0, x1, ..., xn-1)
    '''
    terms = np.zeros(len(X))

    # the first term is a plain (unconditional) mutual information
    terms[0] = mi(X[0], y)

    # each following term conditions on all preceding x's
    for idx in range(1, len(X)):
        terms[idx] = cond_mi(X[idx], y, X[:idx])

    return terms
Decompose the information between all X and y according to the chain rule and return all the terms in the chain rule. Inputs: ------- X: iterable of iterables. You should be able to compute [mi(x, y) for x in X] y: iterable of symbols output: ------- ndarray: terms of chain rule Implementation notes: I(X; y) = I(x0, x1, ..., xn; y) = I(x0; y) + I(x1;y | x0) + I(x2; y | x0, x1) + ... + I(xn; y | x0, x1, ..., xn-1)
entailment
def KL_divergence(P, Q):
    '''
    Compute the KL divergence between distributions P and Q.

    P and Q should be dictionaries linking symbols to probabilities, and
    they must have identical key sets.

    Raises
    ------
    ValueError : if P and Q do not have the same keys.
    '''
    # 'assert' is stripped when python runs with -O; validate explicitly.
    if P.keys() != Q.keys():
        raise ValueError("KL_divergence requires P and Q to have the same keys")

    distance = 0
    for k in P.keys():
        # by convention 0 * log(0/q) == 0, so zero-probability symbols
        # contribute nothing (previously log(0) raised / returned -inf)
        if P[k] == 0:
            continue
        distance += P[k] * log(P[k]/Q[k])

    return distance
Compute the KL divergence between distributions P and Q P and Q should be dictionaries linking symbols to probabilities. the keys to P and Q should be the same.
entailment
def bin(x, bins, maxX=None, minX=None):
    '''
    Bin signal x using the given bins.

    If minX / maxX are None they default to the full range of the signal.
    Otherwise everything above maxX goes to the last bin and everything
    below minX to bin 0 (effectively the same as clipping x first).

    input:
    -----
        x:      signal to be binned, some sort of iterable (ndarray)
        bins:   int, number of bins
                iterable, bin edges
        maxX:   clips data above maxX
        minX:   clips data below minX

    output:
    ------
        binnedX:    x after being binned
        bins:       bin edges used; if input 'bins' was already an iterable
                    the same iterable is returned

    example:
        # make 10 bins of equal length spanning from x.min() to x.max()
        bin(x, 10)

        # use predefined bins such that each bin has the same number of
        # points (maximizes entropy)
        binsN = 10
        percentiles = list(np.arange(0, 100.1, 100/binsN))
        bins = np.percentile(x, percentiles)
        bin(x, bins)
    '''
    upper = x.max() if maxX is None else maxX
    lower = x.min() if minX is None else minX

    if not np.iterable(bins):
        # equal-width edges; tiny epsilon so the maximum falls in the last bin
        bins = np.linspace(lower, upper + 1e-5, bins + 1)

    # np.digitize only accepts 1d input: flatten, digitize, restore shape
    return np.digitize(x.ravel(), bins).reshape(x.shape), bins
bin signal x using 'binsN' bin. If minX, maxX are None, they default to the full range of the signal. If they are not None, everything above maxX gets assigned to binsN-1 and everything below minX gets assigned to 0, this is effectively the same as clipping x before passing it to 'bin' input: ----- x: signal to be binned, some sort of iterable bins: int, number of bins iterable, bin edges maxX: clips data above maxX minX: clips data below maxX output: ------ binnedX: x after being binned bins: bins used for binning. if input 'bins' is already an iterable it just returns the same iterable example: # make 10 bins of equal length spanning from x.min() to x.max() bin(x, 10) # use predefined bins such that each bin has the same number of points (maximize entropy) binsN = 10 percentiles = list(np.arange(0, 100.1, 100/binsN)) bins = np.percentile(x, percentiles) bin(x, bins)
entailment
def linkcode_resolve(domain, info):  # NOQA: C901
    """
    Determine the URL corresponding to Python object

    Notes
    -----
    From https://github.com/numpy/numpy/blob/v1.15.1/doc/source/conf.py,
    7c49cfa on Jul 31. License BSD-3.
    https://github.com/numpy/numpy/blob/v1.15.1/LICENSE.txt
    """
    # only Python objects are linkable
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    # resolve the dotted attribute path starting from the imported module
    submod = sys.modules.get(modname)
    if submod is None:
        return None

    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except Exception:
            return None

    # strip decorators, which would resolve to the source of the decorator
    # possibly an upstream bug in getsourcefile, bpo-1764286
    try:
        unwrap = inspect.unwrap
    except AttributeError:
        # inspect.unwrap is unavailable on very old Pythons; skip
        pass
    else:
        obj = unwrap(obj)

    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        return None

    # build a GitHub '#Lstart-Lend' anchor spanning the object's source lines
    try:
        source, lineno = inspect.getsourcelines(obj)
    except Exception:
        lineno = None

    if lineno:
        linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
    else:
        linespec = ""

    # source path relative to the installed package root
    fn = relpath(fn, start=dirname(cihai.__file__))

    # dev builds link to master; releases link to the version tag
    if 'dev' in about['__version__']:
        return "%s/blob/master/%s/%s%s" % (
            about['__github__'],
            about['__package_name__'],
            fn,
            linespec,
        )
    else:
        return "%s/blob/v%s/%s/%s%s" % (
            about['__github__'],
            about['__version__'],
            about['__package_name__'],
            fn,
            linespec,
        )
Determine the URL corresponding to Python object Notes ----- From https://github.com/numpy/numpy/blob/v1.15.1/doc/source/conf.py, 7c49cfa on Jul 31. License BSD-3. https://github.com/numpy/numpy/blob/v1.15.1/LICENSE.txt
entailment
def inv(z): """$= z^{-1} \mod q$, for z != 0""" # Adapted from curve25519_athlon.c in djb's Curve25519. z = qdiv(z) z2 = z * z % PRIME # 2 z9 = pow2(z2, 2) * z % PRIME # 9 z11 = z9 * z2 % PRIME # 11 z2_5_0 = (z11*z11) % PRIME * z9 % PRIME # 31 == 2^5 - 2^0 z2_10_0 = pow2(z2_5_0, 5) * z2_5_0 % PRIME # 2^10 - 2^0 z2_20_0 = pow2(z2_10_0, 10) * z2_10_0 % PRIME # ... z2_40_0 = pow2(z2_20_0, 20) * z2_20_0 % PRIME z2_50_0 = pow2(z2_40_0, 10) * z2_10_0 % PRIME z2_100_0 = pow2(z2_50_0, 50) * z2_50_0 % PRIME z2_200_0 = pow2(z2_100_0, 100) * z2_100_0 % PRIME z2_250_0 = pow2(z2_200_0, 50) * z2_50_0 % PRIME # 2^250 - 2^0 return pow2(z2_250_0, 5) * z11 % PRIME
$= z^{-1} \mod q$, for z != 0
entailment
def scalarmult_B(e):
    """
    Implements scalarmult(B, e) more efficiently, using the precomputed
    table Bpow of doublings of the base point.
    """
    # scalarmult(B, L) is the identity, so reduce the scalar first
    e %= L
    result = IDENT
    # classic double-and-add: bit i of e selects Bpow[i] = 2^i * B
    for i in range(253):
        if e & 1:
            result = edwards_add(P=result, Q=Bpow[i])
        e >>= 1  # same as e //= 2 for the non-negative e guaranteed above
    assert e == 0, e
    return result
Implements scalarmult(B, e) more efficiently.
entailment
def recover(y):
    """ given a value y, recover the preimage x """
    # Candidate x^2 = (y^2 - 1) / (D*y^2 + 1) from the curve equation,
    # then a square root via p^((PRIME+3)/8) -- the standard trick for
    # primes = 5 (mod 8), which PRIME presumably is (confirm).
    p = (y*y - 1) * inverse(D*y*y + 1)
    x = powmod(p, (PRIME+3) // 8, PRIME)

    # If x^2 != p, multiply by 2^((PRIME-1)/4) -- presumably a square
    # root of -1 mod PRIME -- to obtain the other root candidate.
    if (x*x - p) % PRIME != 0:
        i = powmod(2, (PRIME-1) // 4, PRIME)
        x = (x*i) % PRIME

    # normalize to the even root (sign convention for point compression)
    if x % 2 != 0:
        x = PRIME - x

    return x
given a value y, recover the preimage x
entailment
def __rubberband(y, sr, **kwargs):
    '''Execute rubberband

    Parameters
    ----------
    y : np.ndarray [shape=(n,) or (n, c)]
        Audio time series, either single or multichannel

    sr : int > 0
        sampling rate of y

    **kwargs
        keyword arguments to rubberband

    Returns
    -------
    y_mod : np.ndarray [shape=(n,) or (n, c)]
        `y` after rubberband transformation

    Raises
    ------
    RuntimeError
        if the rubberband-cli binary cannot be executed
    '''
    assert sr > 0

    # Get the input and output tempfile; close the fds immediately so the
    # external process can (re)open the paths on all platforms
    fd, infile = tempfile.mkstemp(suffix='.wav')
    os.close(fd)
    fd, outfile = tempfile.mkstemp(suffix='.wav')
    os.close(fd)

    # dump the audio to disk for the CLI tool
    sf.write(infile, y, sr)

    try:
        # Execute rubberband ('-q' suppresses its progress output)
        arguments = [__RUBBERBAND_UTIL, '-q']

        # flatten kwargs into 'key value' command-line pairs
        for key, value in six.iteritems(kwargs):
            arguments.append(str(key))
            arguments.append(str(value))

        arguments.extend([infile, outfile])

        subprocess.check_call(arguments, stdout=DEVNULL, stderr=DEVNULL)

        # Load the processed audio.
        y_out, _ = sf.read(outfile, always_2d=True)

        # make sure that output dimensions matches input
        if y.ndim == 1:
            y_out = np.squeeze(y_out)

    except OSError as exc:
        # OSError here means the binary itself could not be launched
        six.raise_from(RuntimeError('Failed to execute rubberband. '
                                    'Please verify that rubberband-cli '
                                    'is installed.'),
                       exc)

    finally:
        # Remove temp files
        os.unlink(infile)
        os.unlink(outfile)

    return y_out
Execute rubberband Parameters ---------- y : np.ndarray [shape=(n,) or (n, c)] Audio time series, either single or multichannel sr : int > 0 sampling rate of y **kwargs keyword arguments to rubberband Returns ------- y_mod : np.ndarray [shape=(n,) or (n, c)] `y` after rubberband transformation
entailment
def time_stretch(y, sr, rate, rbargs=None):
    '''Apply a time stretch of `rate` to an audio time series.

    This uses the `tempo` form for rubberband, so the higher the rate,
    the faster the playback.

    Parameters
    ----------
    y : np.ndarray [shape=(n,) or (n, c)]
        Audio time series, either single or multichannel

    sr : int > 0
        Sampling rate of `y`

    rate : float > 0
        Desired playback rate.

    rbargs
        Additional keyword parameters for rubberband

        See `rubberband -h` for details.

    Returns
    -------
    y_stretch : np.ndarray
        Time-stretched audio

    Raises
    ------
    ValueError
        if `rate <= 0`
    '''
    if rate <= 0:
        raise ValueError('rate must be strictly positive')

    # a unit rate is a no-op; return the input untouched
    if rate == 1.0:
        return y

    rbargs = {} if rbargs is None else rbargs
    rbargs.setdefault('--tempo', rate)

    return __rubberband(y, sr, **rbargs)
Apply a time stretch of `rate` to an audio time series. This uses the `tempo` form for rubberband, so the higher the rate, the faster the playback. Parameters ---------- y : np.ndarray [shape=(n,) or (n, c)] Audio time series, either single or multichannel sr : int > 0 Sampling rate of `y` rate : float > 0 Desired playback rate. rbargs Additional keyword parameters for rubberband See `rubberband -h` for details. Returns ------- y_stretch : np.ndarray Time-stretched audio Raises ------ ValueError if `rate <= 0`
entailment
def timemap_stretch(y, sr, time_map, rbargs=None):
    '''Apply a timemap stretch to an audio time series.

    A timemap stretch allows non-linear time-stretching by mapping source to
    target sample frame numbers for fixed time points within the audio data.

    This uses the `time` and `timemap` form for rubberband.

    Parameters
    ----------
    y : np.ndarray [shape=(n,) or (n, c)]
        Audio time series, either single or multichannel

    sr : int > 0
        Sampling rate of `y`

    time_map : list
        Each element is a tuple `t` of length 2 which corresponds to the
        source sample position and target sample position.

        If `t[1] < t[0]` the track will be sped up in this area.

        `time_map[-1]` must correspond to the lengths of the source audio
        and target audio.

    rbargs
        Additional keyword parameters for rubberband

        See `rubberband -h` for details.

    Returns
    -------
    y_stretch : np.ndarray
        Time-stretched audio

    Raises
    ------
    ValueError
        if `time_map` is not monotonic
        if `time_map` is not non-negative
        if `time_map[-1][0]` is not the input audio length
    '''
    if rbargs is None:
        rbargs = dict()

    is_positive = all(t[0] >= 0 and t[1] >= 0 for t in time_map)
    is_monotonic = all(time_map[i][0] <= time_map[i+1][0] and
                       time_map[i][1] <= time_map[i+1][1]
                       for i in range(len(time_map)-1))

    if not is_positive:
        raise ValueError('time_map should be non-negative')
    if not is_monotonic:
        raise ValueError('time_map is not monotonic')
    if time_map[-1][0] != len(y):
        raise ValueError('time_map[-1] should correspond to the last sample')

    # overall stretch ratio; renamed from 'time_stretch', which shadowed
    # the module-level time_stretch() function
    stretch_ratio = time_map[-1][1] * 1.0 / time_map[-1][0]
    rbargs.setdefault('--time', stretch_ratio)

    stretch_file = tempfile.NamedTemporaryFile(mode='w', suffix='.txt',
                                               delete=False)
    try:
        for src, dst in time_map:
            # was '{:0} {:1}' -- those are width format specs, not
            # positional indices; it only worked by accident
            stretch_file.write('{} {}\n'.format(src, dst))
        stretch_file.close()

        rbargs.setdefault('--timemap', stretch_file.name)
        y_stretch = __rubberband(y, sr, **rbargs)
    finally:
        # Remove temp file
        os.unlink(stretch_file.name)

    return y_stretch
Apply a timemap stretch to an audio time series. A timemap stretch allows non-linear time-stretching by mapping source to target sample frame numbers for fixed time points within the audio data. This uses the `time` and `timemap` form for rubberband. Parameters ---------- y : np.ndarray [shape=(n,) or (n, c)] Audio time series, either single or multichannel sr : int > 0 Sampling rate of `y` time_map : list Each element is a tuple `t` of length 2 which corresponds to the source sample position and target sample position. If `t[1] < t[0]` the track will be sped up in this area. `time_map[-1]` must correspond to the lengths of the source audio and target audio. rbargs Additional keyword parameters for rubberband See `rubberband -h` for details. Returns ------- y_stretch : np.ndarray Time-stretched audio Raises ------ ValueError if `time_map` is not monotonic if `time_map` is not non-negative if `time_map[-1][0]` is not the input audio length
entailment
def pitch_shift(y, sr, n_steps, rbargs=None):
    '''Apply a pitch shift to an audio time series.

    Parameters
    ----------
    y : np.ndarray [shape=(n,) or (n, c)]
        Audio time series, either single or multichannel

    sr : int > 0
        Sampling rate of `y`

    n_steps : float
        Shift by `n_steps` semitones.

    rbargs
        Additional keyword parameters for rubberband

        See `rubberband -h` for details.

    Returns
    -------
    y_shift : np.ndarray
        Pitch-shifted audio
    '''
    # a zero-semitone shift is a no-op; return the input untouched
    if n_steps == 0:
        return y

    rbargs = {} if rbargs is None else rbargs
    rbargs.setdefault('--pitch', n_steps)

    return __rubberband(y, sr, **rbargs)
Apply a pitch shift to an audio time series. Parameters ---------- y : np.ndarray [shape=(n,) or (n, c)] Audio time series, either single or multichannel sr : int > 0 Sampling rate of `y` n_steps : float Shift by `n_steps` semitones. rbargs Additional keyword parameters for rubberband See `rubberband -h` for details. Returns ------- y_shift : np.ndarray Pitch-shifted audio
entailment
def kuten_to_gb2312(kuten):
    """
    Convert GB kuten / quwei form (94 zones * 94 points) to GB2312-1980 /
    ISO-2022-CN hex (internal representation)
    """
    zone = int(kuten[:2])
    point = int(kuten[2:])
    # shift both coordinates by 0x20 into the GB2312 code range
    gb2312 = "%s%s" % (hexd(zone + 0x20), hexd(point + 0x20))
    assert isinstance(gb2312, bytes)
    return gb2312
Convert GB kuten / quwei form (94 zones * 94 points) to GB2312-1980 / ISO-2022-CN hex (internal representation)
entailment
def gb2312_to_euc(gb2312hex):
    """
    Convert GB2312-1980 hex (internal representation) to EUC-CN hex
    (the "external encoding")
    """
    # EUC-CN sets the high bit (adds 0x80) on each GB2312 byte
    high = hexd(int(gb2312hex[:2], 16) + 0x80)
    low = hexd(int(gb2312hex[2:], 16) + 0x80)
    euc = "%s%s" % (high, low)
    assert isinstance(euc, bytes)
    return euc
Convert GB2312-1980 hex (internal representation) to EUC-CN hex (the "external encoding")
entailment
def euc_to_python(hexstr):
    """
    Convert an EUC-CN (GB2312) hex string (e.g. b'd2bb') to a Python
    unicode string (e.g. u'\\u4e00').
    """
    hi = hexstr[0:2]
    lo = hexstr[2:4]
    # build the textual escape b'\xd2\xbb' from the two hex-digit pairs
    gb_enc = b'\\x' + hi + b'\\x' + lo
    # Interpret the textual '\xNN' escapes as real bytes, then decode those
    # bytes as GB2312. The previous direct gb_enc.decode("gb2312") decoded
    # the literal characters '\', 'x', 'd', ... and returned the escape
    # text instead of the encoded character (cf. euc_to_unicode).
    gb_enc = gb_enc.decode('unicode_escape').encode('latin1')
    return gb_enc.decode("gb2312")
Convert a EUC-CN (GB2312) hex to a Python unicode string.
entailment
def euc_to_utf8(euchex):
    """
    Convert EUC hex (e.g. "d2bb") to UTF8 hex (e.g. "e4 b8 80").
    """
    # UTF-8 byte sequence of the character decoded from the EUC hex
    utf8 = euc_to_python(euchex).encode("utf-8")
    # NOTE(review): the chain below reinterprets the UTF-8 bytes via
    # unicode_escape/latin1 and then decodes them as *euc-jp*, which looks
    # inconsistent with the GB2312 / EUC-CN pipeline used elsewhere in this
    # module, and the function returns a decoded string rather than the
    # "UTF8 hex" the docstring promises -- confirm this is intentional.
    uf8 = utf8.decode('unicode_escape')
    uf8 = uf8.encode('latin1')
    uf8 = uf8.decode('euc-jp')
    return uf8
Convert EUC hex (e.g. "d2bb") to UTF8 hex (e.g. "e4 b8 80").
entailment
def ucn_to_unicode(ucn):
    """
    Convert a Unicode Universal Character Number (e.g. "U+4E00" or "4E00")
    to Python unicode (u'\\u4e00')
    """
    if isinstance(ucn, string_types):
        # NOTE(review): str.strip("U+") strips any leading/trailing 'U' or
        # '+' characters (a character set, not a prefix). Fine for
        # well-formed input since hex digits never include 'U' or '+',
        # but lowercase "u+4e00" is not handled.
        ucn = ucn.strip("U+")
        if len(ucn) > int(4):
            # beyond the BMP: build a textual '\UXXXXXXXX' escape and let
            # unicode_escape turn it into the actual character
            char = b'\U' + format(int(ucn, 16), '08x').encode('latin1')
            char = char.decode('unicode_escape')
        else:
            char = unichr(int(ucn, 16))
    else:
        # numeric input is treated as a code point directly
        char = unichr(ucn)
    assert isinstance(char, text_type)
    return char
Convert a Unicode Universal Character Number (e.g. "U+4E00" or "4E00") to Python unicode (u'\\u4e00')
entailment
def euc_to_unicode(hexstr):
    """
    Return EUC-CN (GB2312) hex to a Python unicode.

    Parameters
    ----------
    hexstr : bytes
        four hex digits naming an EUC-CN code point, e.g. ``b'd2bb'``

    Returns
    -------
    unicode :
        Python unicode e.g. ``u'\\u4e00'`` / '一'.
    """
    hi = hexstr[0:2]
    lo = hexstr[2:4]
    # build the textual escape b'\xd2\xbb' from the two hex-digit pairs;
    # hi and lo are only 2 characters long, no risk with eval-ing them
    gb_enc = b'\\x' + hi + b'\\x' + lo
    assert isinstance(gb_enc, bytes)
    # unicode_escape turns the textual '\xNN' escapes into characters,
    # latin1 maps those characters 1:1 back to raw bytes, and gb2312
    # finally decodes the byte pair into the character.
    # Requires coercing back to text_type in 2.7
    gb_enc = gb_enc.decode('unicode_escape')
    gb_enc = gb_enc.encode('latin1')
    gb_enc = gb_enc.decode('gb2312')
    assert isinstance(gb_enc, text_type)
    return gb_enc
Return EUC-CN (GB2312) hex to a Python unicode. Parameters ---------- hexstr : bytes Returns ------- unicode : Python unicode e.g. ``u'\\u4e00'`` / '一'. Examples -------- >>> u'\u4e00'.encode('gb2312').decode('utf-8') u'\u04bb' >>> (b'\\x' + b'd2' + b'\\x' + b'bb').replace('\\x', '') \\ ... .decode('hex').decode('utf-8') u'\u04bb' Note: bytes don't have a ``.replace``: >>> gb_enc = gb_enc.replace('\\x', '').decode('hex') >>> gb_enc.decode('string_escape') # Won't work with Python 3.x.
entailment
def python_to_ucn(uni_char, as_bytes=False):
    """
    Return UCN character from Python Unicode character.

    Converts a one character Python unicode string (e.g. u'\\u4e00') to the
    corresponding Unicode UCN ('U+4E00').
    """
    # '\u4e00' -> '\\u4e00' (escape text), then keep just the hex digits
    escaped = uni_char.encode('unicode_escape').decode('latin1')
    hex_digits = text_type(escaped).replace('\\', '').upper().lstrip('U')
    if len(hex_digits) > int(4):
        # drop the zero padding Python adds to 32-bit \U escapes
        hex_digits = hex_digits.lstrip("0")
    ucn = "U+" + hex_digits.upper()
    if as_bytes:
        ucn = ucn.encode('latin1')
    return ucn
Return UCN character from Python Unicode character. Converts a one character Python unicode string (e.g. u'\\u4e00') to the corresponding Unicode UCN ('U+4E00').
entailment
def python_to_euc(uni_char, as_bytes=False):
    """
    Return EUC character from a Python Unicode character.

    Converts a one character Python unicode string (e.g. u'\\u4e00') to the
    corresponding EUC hex ('d2bb').
    """
    # repr() of the GB2312-encoded bytes is "b'\xd2\xbb'" on python3 (or
    # "'\xd2\xbb'" on python2); slicing off repr's wrapping and stripping
    # the '\x' markers and quotes leaves just the hex digits.
    euc = repr(uni_char.encode("gb2312"))[1:-1].replace("\\x", "").strip("'")
    if as_bytes:
        euc = euc.encode('utf-8')
        # previously this assert ran unconditionally, which failed on
        # python3 whenever as_bytes=False (euc is then a str, not bytes)
        assert isinstance(euc, bytes)
    return euc
Return EUC character from a Python Unicode character. Converts a one character Python unicode string (e.g. u'\\u4e00') to the corresponding EUC hex ('d2bb').
entailment
def ucnstring_to_unicode(ucn_string):
    """Return ucnstring as Unicode."""
    # ucnstring_to_python returns UTF-8 encoded bytes; decode back to text
    decoded = ucnstring_to_python(ucn_string).decode('utf-8')
    assert isinstance(decoded, text_type)
    return decoded
Return ucnstring as Unicode.
entailment
def ucnstring_to_python(ucn_string): """ Return string with Unicode UCN (e.g. "U+4E00") to native Python Unicode (u'\\u4e00'). """ res = re.findall("U\+[0-9a-fA-F]*", ucn_string) for r in res: ucn_string = ucn_string.replace(text_type(r), text_type(ucn_to_unicode(r))) ucn_string = ucn_string.encode('utf-8') assert isinstance(ucn_string, bytes) return ucn_string
Return string with Unicode UCN (e.g. "U+4E00") to native Python Unicode (u'\\u4e00').
entailment
def parse_var(var): """ Returns a tuple consisting of a string and a tag, or None, if none is specified. """ bits = var.split("<", 1) if len(bits) < 2: tag = None else: tag = bits[1] return ucn_to_unicode(bits[0]), tag
Returns a tuple consisting of a string and a tag, or None, if none is specified.
entailment
def from_file(cls, config_path=None, *args, **kwargs): """ Create a Cihai instance from a JSON or YAML config. Parameters ---------- config_path : str, optional path to custom config file Returns ------- :class:`Cihai` : application object """ config_reader = kaptan.Kaptan() config = {} if config_path: if not os.path.exists(config_path): raise exc.CihaiException( '{0} does not exist.'.format(os.path.abspath(config_path)) ) if not any( config_path.endswith(ext) for ext in ('json', 'yml', 'yaml', 'ini') ): raise exc.CihaiException( '{0} does not have a yaml,yml,json,ini extend.'.format( os.path.abspath(config_path) ) ) else: custom_config = config_reader.import_config(config_path).get() config = merge_dict(config, custom_config) return cls(config)
Create a Cihai instance from a JSON or YAML config. Parameters ---------- config_path : str, optional path to custom config file Returns ------- :class:`Cihai` : application object
entailment
def _process_locale(self, locale): """Return True if this locale should be processed.""" if locale.lower().startswith('en'): return False return (locale in self.enabled_locales or self.reverse_locale_map.get(locale.lower(), None) in self.enabled_locales or locale in self.lower_locales or self.reverse_locale_map.get(locale.lower(), None) in self.lower_locales )
Return True if this locale should be processed.
entailment
def desk_locale(self, locale): """Return the Desk-style locale for locale.""" locale = locale.lower().replace('-', '_') return self.vendor_locale_map.get(locale, locale)
Return the Desk-style locale for locale.
entailment
def push(self): """Push topics to Transifex.""" tx = Tx(self.tx_project_slug) # asssemble the template catalog template = babel.messages.catalog.Catalog() for topic in self.desk.topics(): if topic.show_in_portal: template.add(topic.name) # serialize the catalog as a PO file template_po = StringIO() babel.messages.pofile.write_po(template_po, template) # upload/update the catalog resource tx.create_or_update_resource( self.TOPIC_STRINGS_SLUG, DEFAULT_SOURCE_LANGUAGE, "Help Center Topics", template_po.getvalue(), i18n_type='PO', project_slug=self.tx_project_slug, )
Push topics to Transifex.
entailment
def pull(self): """Pull topics from Transifex.""" topic_stats = txlib.api.statistics.Statistics.get( project_slug=self.tx_project_slug, resource_slug=self.TOPIC_STRINGS_SLUG, ) translated = {} # for each language for locale in self.enabled_locales: if not self._process_locale(locale): continue locale_stats = getattr(topic_stats, locale, None) if locale_stats is None: self.log.debug('Locale %s not present when pulling topics.' % (locale,)) continue if locale_stats['completed'] == '100%': # get the resource from Tx translation = txlib.api.translations.Translation.get( project_slug=self.tx_project_slug, slug=self.TOPIC_STRINGS_SLUG, lang=locale, ) translated[locale] = babel.messages.pofile.read_po( StringIO(translation.content.encode('utf-8')) ) # now that we've pulled everything from Tx, upload to Desk for topic in self.desk.topics(): for locale in translated: if topic.name in translated[locale]: self.log.debug( 'Updating topic (%s) for locale (%s)' % (topic.name, locale), ) if locale in topic.translations: topic.translations[locale].update( name=translated[locale][topic.name].string, ) else: topic.translations.create( locale=locale, name=translated[locale][topic.name].string, ) else: self.log.error( 'Topic name (%s) does not exist in locale (%s)' % (topic['name'], locale), )
Pull topics from Transifex.
entailment
def make_resource_document(self, title, content, tags=[],): """Return a single HTML document containing the title and content.""" assert "<html>" not in content assert "<body>" not in content return """ <html> <head><title>%(title)s</title></head> <body> %(content)s </body> """ % dict( title=title, content=content, )
Return a single HTML document containing the title and content.
entailment
def parse_resource_document(self, content): """Return a dict with the keys title, content, tags for content.""" content = content.strip() if not content.startswith('<html>'): # this is not a full HTML doc, probably content w/o title, tags, etc return dict(body=content) result = {} if '<title>' in content and '</title>' in content: result['subject'] = content[content.find('<title>') + 7:content.find('</title>')].strip() result['body'] = content[content.find('<body>') + 6:content.find('</body>')].strip() return result
Return a dict with the keys title, content, tags for content.
entailment
def push(self): """Push tutorials to Transifex.""" tx = Tx(self.tx_project_slug) if self.options.resources: articles = [ self.desk.articles().by_id(r.strip()) for r in self.options.resources.split(',') ] else: articles = self.desk.articles() for a in articles: self.log.debug( 'Inspecting Desk resource %s', a.api_href ) for translation in a.translations.items().values(): our_locale = self.desk_to_our_locale(translation.locale) self.log.debug('Checking locale %s', translation.locale) if not self._process_locale(translation.locale): self.log.debug('Skipping locale.') continue # make sure the project exists in Tx tx.get_project(our_locale) a_id = a.api_href.rsplit('/', 1)[1] if (self.options.force or not tx.resource_exists(a_id, our_locale) or translation.outdated ): self.log.info('Resource %(id)s out of date in %(locale)s; updating.' % {'id': a_id, 'locale': our_locale, }, ) tx.create_or_update_resource( a_id, our_locale, self.make_resource_title(a), self.make_resource_document(a.subject, a.body), )
Push tutorials to Transifex.
entailment
def get_project(self, locale, source_language_code=DEFAULT_SOURCE_LANGUAGE, **kwargs): """ Gets or creates the Transifex project for the current project prefix and locale :param locale: A locale to which content is to be translated :type locale: string :param source_language_code: The language of the original untranslated content (i.e. Spanish), defaults to DEFAULT_SOURCE_LANGUAGE, which is English :type source_language_code: string, optional :return: The Transifex project to which resources can be pushed or pulled :rtype: project.Project """ try: locale_project = project.Project.get(slug=self.get_project_slug(locale)) except NotFoundError: locale_project = project.Project( slug=self.get_project_slug(locale), ) defaults = { 'name': 'Help Center (%s)' % (locale, ), 'description': 'Help Center pages to translate to %s' % ( locale, ), 'source_language_code': source_language_code, 'private': True, } valid_keys = ('name','description') defaults.update( dict((k,v) for k,v in kwargs.iteritems() if k in valid_keys) ) for k,v in defaults.iteritems(): setattr(locale_project, k, v) locale_project.save() return locale_project
Gets or creates the Transifex project for the current project prefix and locale :param locale: A locale to which content is to be translated :type locale: string :param source_language_code: The language of the original untranslated content (i.e. Spanish), defaults to DEFAULT_SOURCE_LANGUAGE, which is English :type source_language_code: string, optional :return: The Transifex project to which resources can be pushed or pulled :rtype: project.Project
entailment
def translation_exists(self, slug, lang): """Return True if the translation exists for this slug.""" try: return translations.Translation.get( project_slug=self.get_project_slug(lang), slug=slug, lang=lang, ) except (NotFoundError, RemoteServerError): pass return False
Return True if the translation exists for this slug.
entailment
def list_resources(self, lang): """Return a sequence of resources for a given lang. Each Resource is a dict containing the slug, name, i18n_type, source_language_code and the category. """ return registry.registry.http_handler.get( '/api/2/project/%s/resources/' % ( self.get_project_slug(lang),) )
Return a sequence of resources for a given lang. Each Resource is a dict containing the slug, name, i18n_type, source_language_code and the category.
entailment
def resources(self, lang, slug): """Generate a list of Resources in the Project. Yields dicts from the Tx API, with keys including the slug, name, i18n_type, source_language_code, and category. """ resource = resources.Resource.get( project_slug=self.get_project_slug(lang), slug=slug, ) return resource
Generate a list of Resources in the Project. Yields dicts from the Tx API, with keys including the slug, name, i18n_type, source_language_code, and category.
entailment
def resource_exists(self, slug, locale, project_slug=None): """Return True if a Resource with the given slug exists in locale.""" try: resource = resources.Resource.get( project_slug=project_slug or self.get_project_slug(locale), slug=slug, ) return resource except NotFoundError: pass return None
Return True if a Resource with the given slug exists in locale.
entailment
def session_request(session, url, **kwargs): """Do HTTP/S request and return response as a string.""" try: response = session(url, **kwargs) response.raise_for_status() return response.text except requests.exceptions.HTTPError as errh: _LOGGER.debug("%s, %s", response, errh) raise_error(response.status_code) except requests.exceptions.ConnectionError as errc: _LOGGER.debug("%s", errc) raise RequestError("Connection error: {}".format(errc)) except requests.exceptions.Timeout as errt: _LOGGER.debug("%s", errt) raise RequestError("Timeout: {}".format(errt)) except requests.exceptions.RequestException as err: _LOGGER.debug("%s", err) raise RequestError("Unknown error: {}".format(err))
Do HTTP/S request and return response as a string.
entailment
def get_event_list(config): """Get a dict of supported events from device.""" eventinstances = session_request( config.session.post, device_event_url.format( proto=config.web_proto, host=config.host, port=config.port), auth=config.session.auth, headers=headers, data=request_xml) raw_event_list = _prepare_event(eventinstances) event_list = {} for entry in MAP + METAMAP: instance = raw_event_list try: for item in sum(entry[MAP_BASE].values(), []): instance = instance[item] except KeyError: continue event_list[entry[MAP_TYPE]] = instance return event_list
Get a dict of supported events from device.
entailment
def _prepare_event(eventinstances): """Converts event instances to a relevant dictionary.""" import xml.etree.ElementTree as ET def parse_event(events): """Find all events inside of an topicset list. MessageInstance signals that subsequent children will contain source and data descriptions. """ def clean_attrib(attrib={}): """Clean up child attributes by removing XML namespace.""" attributes = {} for key, value in attrib.items(): attributes[key.split('}')[-1]] = value return attributes description = {} for child in events: child_tag = child.tag.split('}')[-1] child_attrib = clean_attrib(child.attrib) if child_tag != 'MessageInstance': description[child_tag] = { **child_attrib, **parse_event(child)} elif child_tag == 'MessageInstance': description = {} for item in child: tag = item.tag.split('}')[-1] description[tag] = clean_attrib(item[0].attrib) return description root = ET.fromstring(eventinstances) return parse_event(root[0][0][0])
Converts event instances to a relevant dictionary.
entailment
def url(self): """Represent device base url.""" return URL.format(http=self.web_proto, host=self.host, port=self.port)
Represent device base url.
entailment
def process_raw(self, raw: dict) -> None: """Pre-process raw dict. Prepare parameters to work with APIItems. """ raw_ports = {} for param in raw: port_index = REGEX_PORT_INDEX.search(param).group(0) if port_index not in raw_ports: raw_ports[port_index] = {} name = param.replace(IOPORT + '.I' + port_index + '.', '') raw_ports[port_index][name] = raw[param] super().process_raw(raw_ports)
Pre-process raw dict. Prepare parameters to work with APIItems.
entailment
def name(self) -> str: """Return name relevant to direction.""" if self.direction == DIRECTION_IN: return self.raw.get('Input.Name', '') return self.raw.get('Output.Name', '')
Return name relevant to direction.
entailment
def action(self, action): r"""Activate or deactivate an output. Use the <wait> option to activate/deactivate the port for a limited period of time. <Port ID> = Port name. Default: Name from Output.Name <a> = Action character. /=active, \=inactive <wait> = Delay before the next action. Unit: milliseconds Note: The :, / and \ characters must be percent-encoded in the URI. See Percent encoding. Example: To set output 1 to active, use 1:/. In the URI, the action argument becomes action=1%3A%2F """ if not self.direction == DIRECTION_OUT: return port_action = quote( '{port}:{action}'.format(port=int(self.id)+1, action=action), safe='' ) url = URL + ACTION.format(action=port_action) self._request('get', url)
r"""Activate or deactivate an output. Use the <wait> option to activate/deactivate the port for a limited period of time. <Port ID> = Port name. Default: Name from Output.Name <a> = Action character. /=active, \=inactive <wait> = Delay before the next action. Unit: milliseconds Note: The :, / and \ characters must be percent-encoded in the URI. See Percent encoding. Example: To set output 1 to active, use 1:/. In the URI, the action argument becomes action=1%3A%2F
entailment
def initialize_params(self, preload_data=True) -> None: """Load device parameters and initialize parameter management. Preload data can be disabled to selectively load params afterwards. """ params = '' if preload_data: params = self.request('get', param_url) self.params = Params(params, self.request)
Load device parameters and initialize parameter management. Preload data can be disabled to selectively load params afterwards.
entailment
def initialize_ports(self) -> None: """Load IO port parameters for device.""" if not self.params: self.initialize_params(preload_data=False) self.params.update_ports() self.ports = Ports(self.params, self.request)
Load IO port parameters for device.
entailment
def initialize_users(self) -> None: """Load device user data and initialize user management.""" users = self.request('get', pwdgrp_url) self.users = Users(users, self.request)
Load device user data and initialize user management.
entailment
def request(self, method, path, **kwargs): """Prepare HTTP request.""" if method == 'get': session_method = self.config.session.get elif method == 'post': session_method = self.config.session.post else: raise AxisException url = self.config.url + path result = session_request(session_method, url, **kwargs) _LOGGER.debug("Response: %s from %s", result, self.config.host) return result
Prepare HTTP request.
entailment
def new_event(self, event_data: str) -> None: """New event to process.""" event = self.parse_event_xml(event_data) if EVENT_OPERATION in event: self.manage_event(event)
New event to process.
entailment
def parse_event_xml(self, event_data) -> dict: """Parse metadata xml.""" event = {} event_xml = event_data.decode() message = MESSAGE.search(event_xml) if not message: return {} event[EVENT_OPERATION] = message.group(EVENT_OPERATION) topic = TOPIC.search(event_xml) if topic: event[EVENT_TOPIC] = topic.group(EVENT_TOPIC) source = SOURCE.search(event_xml) if source: event[EVENT_SOURCE] = source.group(EVENT_SOURCE) event[EVENT_SOURCE_IDX] = source.group(EVENT_SOURCE_IDX) data = DATA.search(event_xml) if data: event[EVENT_TYPE] = data.group(EVENT_TYPE) event[EVENT_VALUE] = data.group(EVENT_VALUE) _LOGGER.debug(event) return event
Parse metadata xml.
entailment
def manage_event(self, event) -> None: """Received new metadata. Operation initialized means new event, also happens if reconnecting. Operation changed updates existing events state. """ name = EVENT_NAME.format( topic=event[EVENT_TOPIC], source=event.get(EVENT_SOURCE_IDX)) if event[EVENT_OPERATION] == 'Initialized' and name not in self.events: for event_class in EVENT_CLASSES: if event_class.TOPIC in event[EVENT_TOPIC]: self.events[name] = event_class(event) self.signal('add', name) return _LOGGER.debug('Unsupported event %s', event[EVENT_TOPIC]) elif event[EVENT_OPERATION] == 'Changed' and name in self.events: self.events[name].state = event[EVENT_VALUE]
Received new metadata. Operation initialized means new event, also happens if reconnecting. Operation changed updates existing events state.
entailment
def state(self, state: str) -> None: """Update state of event.""" self._state = state for callback in self._callbacks: callback()
Update state of event.
entailment
def remove_callback(self, callback) -> None: """Remove callback.""" if callback in self._callbacks: self._callbacks.remove(callback)
Remove callback.
entailment
def enable_events(self, event_callback=None) -> None: """Enable events for stream.""" self.event = EventManager(event_callback) self.stream.event = self.event
Enable events for stream.
entailment
def update_brand(self) -> None: """Update brand group of parameters.""" self.update(path=URL_GET + GROUP.format(group=BRAND))
Update brand group of parameters.
entailment
def update_ports(self) -> None: """Update port groups of parameters.""" self.update(path=URL_GET + GROUP.format(group=INPUT)) self.update(path=URL_GET + GROUP.format(group=IOPORT)) self.update(path=URL_GET + GROUP.format(group=OUTPUT))
Update port groups of parameters.
entailment
def ports(self) -> dict: """Create a smaller dictionary containing all ports.""" return { param: self[param].raw for param in self if param.startswith(IOPORT) }
Create a smaller dictionary containing all ports.
entailment
def update_properties(self) -> None: """Update properties group of parameters.""" self.update(path=URL_GET + GROUP.format(group=PROPERTIES))
Update properties group of parameters.
entailment
def process_raw(self, raw: str) -> None: """Pre-process raw string. Prepare parameters to work with APIItems. """ raw_params = dict(group.split('=', 1) for group in raw.splitlines()) super().process_raw(raw_params)
Pre-process raw string. Prepare parameters to work with APIItems.
entailment
def create(self, user: str, *, pwd: str, sgrp: str, comment: str=None) -> None: """Create new user.""" data = { 'action': 'add', 'user': user, 'pwd': pwd, 'grp': 'users', 'sgrp': sgrp } if comment: data['comment'] = comment self._request('post', URL, data=data)
Create new user.
entailment
def delete(self, user: str) -> None: """Remove user.""" data = { 'action': 'remove', 'user': user } self._request('post', URL, data=data)
Remove user.
entailment
def process_raw(self, raw: str) -> None: """Pre-process raw string. Prepare users to work with APIItems. Create booleans with user levels. """ raw_dict = dict(group.split('=') for group in raw.splitlines()) raw_users = { user: { group: user in REGEX_STRING.findall(raw_dict[group]) for group in [ADMIN, OPERATOR, VIEWER, PTZ] } for user in REGEX_STRING.findall(raw_dict['users']) } super().process_raw(raw_users)
Pre-process raw string. Prepare users to work with APIItems. Create booleans with user levels.
entailment
def start(self): """Start session.""" conn = self.loop.create_connection( lambda: self, self.session.host, self.session.port) task = self.loop.create_task(conn) task.add_done_callback(self.init_done)
Start session.
entailment
def init_done(self, fut): """Server ready. If we get OSError during init the device is not available. """ try: if fut.exception(): fut.result() except OSError as err: _LOGGER.debug('RTSP got exception %s', err) self.stop() self.callback(SIGNAL_FAILED)
Server ready. If we get OSError during init the device is not available.
entailment
def stop(self): """Stop session.""" if self.transport: self.transport.write(self.method.TEARDOWN().encode()) self.transport.close() self.rtp.stop()
Stop session.
entailment
def connection_made(self, transport): """Connect to device is successful. Start configuring RTSP session. Schedule time out handle in case device doesn't respond. """ self.transport = transport self.transport.write(self.method.message.encode()) self.time_out_handle = self.loop.call_later( TIME_OUT_LIMIT, self.time_out)
Connect to device is successful. Start configuring RTSP session. Schedule time out handle in case device doesn't respond.
entailment
def data_received(self, data): """Got response on RTSP session. Manage time out handle since response came in a reasonable time. Update session parameters with latest response. If state is playing schedule keep-alive. """ self.time_out_handle.cancel() self.session.update(data.decode()) if self.session.state == STATE_STARTING: self.transport.write(self.method.message.encode()) self.time_out_handle = self.loop.call_later( TIME_OUT_LIMIT, self.time_out) elif self.session.state == STATE_PLAYING: self.callback(SIGNAL_PLAYING) if self.session.session_timeout != 0: interval = self.session.session_timeout - 5 self.loop.call_later(interval, self.keep_alive) else: self.stop()
Got response on RTSP session. Manage time out handle since response came in a reasonable time. Update session parameters with latest response. If state is playing schedule keep-alive.
entailment
def time_out(self): """If we don't get a response within time the RTSP request time out. This usually happens if device isn't available on specified IP. """ _LOGGER.warning('Response timed out %s', self.session.host) self.stop() self.callback(SIGNAL_FAILED)
If we don't get a response within time the RTSP request time out. This usually happens if device isn't available on specified IP.
entailment
def message(self): """Return RTSP method based on sequence number from session.""" message = self.message_methods[self.session.method]() _LOGGER.debug(message) return message
Return RTSP method based on sequence number from session.
entailment
def OPTIONS(self, authenticate=True): """Request options device supports.""" message = "OPTIONS " + self.session.url + " RTSP/1.0\r\n" message += self.sequence message += self.authentication if authenticate else '' message += self.user_agent message += self.session_id message += '\r\n' return message
Request options device supports.
entailment
def DESCRIBE(self): """Request description of what services RTSP server make available.""" message = "DESCRIBE " + self.session.url + " RTSP/1.0\r\n" message += self.sequence message += self.authentication message += self.user_agent message += "Accept: application/sdp\r\n" message += '\r\n' return message
Request description of what services RTSP server make available.
entailment
def SETUP(self): """Set up stream transport.""" message = "SETUP " + self.session.control_url + " RTSP/1.0\r\n" message += self.sequence message += self.authentication message += self.user_agent message += self.transport message += '\r\n' return message
Set up stream transport.
entailment
def PLAY(self): """RTSP session is ready to send data.""" message = "PLAY " + self.session.url + " RTSP/1.0\r\n" message += self.sequence message += self.authentication message += self.user_agent message += self.session_id message += '\r\n' return message
RTSP session is ready to send data.
entailment
def authentication(self): """Generate authentication string.""" if self.session.digest: authentication = self.session.generate_digest() elif self.session.basic: authentication = self.session.generate_basic() else: return '' return "Authorization: " + authentication + '\r\n'
Generate authentication string.
entailment
def transport(self): """Generate transport string.""" transport = "Transport: RTP/AVP;unicast;client_port={}-{}\r\n" return transport.format( str(self.session.rtp_port), str(self.session.rtcp_port))
Generate transport string.
entailment
def state(self): """Which state the session is in. Starting - all messages needed to get stream started. Playing - keep-alive messages every self.session_timeout. """ if self.method in ['OPTIONS', 'DESCRIBE', 'SETUP', 'PLAY']: state = STATE_STARTING elif self.method in ['KEEP-ALIVE']: state = STATE_PLAYING else: state = STATE_STOPPED _LOGGER.debug('RTSP session (%s) state %s', self.host, state) return state
Which state the session is in. Starting - all messages needed to get stream started. Playing - keep-alive messages every self.session_timeout.
entailment
def update(self, response): """Update session information from device response. Increment sequence number when starting stream, not when playing. If device requires authentication resend previous message with auth. """ data = response.splitlines() _LOGGER.debug('Received data %s from %s', data, self.host) while data: line = data.pop(0) if 'RTSP/1.0' in line: self.rtsp_version = int(line.split(' ')[0][5]) self.status_code = int(line.split(' ')[1]) self.status_text = line.split(' ')[2] elif 'CSeq' in line: self.sequence_ack = int(line.split(': ')[1]) elif 'Date' in line: self.date = line.split(': ')[1] elif 'Public' in line: self.methods_ack = line.split(': ')[1].split(', ') elif "WWW-Authenticate: Basic" in line: self.basic = True self.realm = line.split('"')[1] elif "WWW-Authenticate: Digest" in line: self.digest = True self.realm = line.split('"')[1] self.nonce = line.split('"')[3] self.stale = (line.split('stale=')[1] == 'TRUE') elif 'Content-Type' in line: self.content_type = line.split(': ')[1] elif 'Content-Base' in line: self.content_base = line.split(': ')[1] elif 'Content-Length' in line: self.content_length = int(line.split(': ')[1]) elif 'Session' in line: self.session_id = line.split(': ')[1].split(";")[0] if '=' in line: self.session_timeout = int(line.split(': ')[1].split('=')[1]) elif 'Transport' in line: self.transport_ack = line.split(': ')[1] elif 'Range' in line: self.range = line.split(': ')[1] elif 'RTP-Info' in line: self.rtp_info = line.split(': ')[1] elif not line: if data: self.sdp = data break if self.sdp: stream_found = False for param in self.sdp: if not stream_found and 'm=application' in param: stream_found = True elif stream_found and 'a=control:rtsp' in param: self.control_url = param.split(':', 1)[1] break if self.status_code == 200: if self.state == STATE_STARTING: self.sequence += 1 elif self.status_code == 401: # Device requires authorization, do not increment to next method pass else: # If device configuration is correct we 
should never get here _LOGGER.debug( "%s RTSP %s %s", self.host, self.status_code, self.status_text)
Update session information from device response. Increment sequence number when starting stream, not when playing. If device requires authentication resend previous message with auth.
entailment
def generate_digest(self): """RFC 2617.""" from hashlib import md5 ha1 = self.username + ':' + self.realm + ':' + self.password HA1 = md5(ha1.encode('UTF-8')).hexdigest() ha2 = self.method + ':' + self.url HA2 = md5(ha2.encode('UTF-8')).hexdigest() encrypt_response = HA1 + ':' + self.nonce + ':' + HA2 response = md5(encrypt_response.encode('UTF-8')).hexdigest() digest_auth = 'Digest ' digest_auth += 'username=\"' + self.username + "\", " digest_auth += 'realm=\"' + self.realm + "\", " digest_auth += "algorithm=\"MD5\", " digest_auth += 'nonce=\"' + self.nonce + "\", " digest_auth += 'uri=\"' + self.url + "\", " digest_auth += 'response=\"' + response + '\"' return digest_auth
RFC 2617.
entailment
def generate_basic(self): """RFC 2617.""" from base64 import b64encode if not self.basic_auth: creds = self.username + ':' + self.password self.basic_auth = 'Basic ' self.basic_auth += b64encode(creds.encode('UTF-8')).decode('UTF-8') return self.basic_auth
RFC 2617.
entailment
def retry(ExceptionToCheck, tries=10, timeout_secs=1.0, logger=None, callback_by_exception=None): """ Retry calling the decorated function using an exponential backoff. :param callback_by_exception: callback/method invocation on certain exceptions :type callback_by_exception: None or dict """ def deco_retry(f): def f_retry(*args, **kwargs): mtries, mdelay = tries, timeout_secs run_one_last_time = True while mtries > 1: try: return f(*args, **kwargs) except ExceptionToCheck as e: # check if this exception is something the caller wants special handling for callback_errors = callback_by_exception or {} for error_type in callback_errors: if isinstance(e, error_type): callback_logic = callback_by_exception[error_type] should_break_out = run_one_last_time = False if isinstance(callback_logic, (list, tuple)): callback_logic, should_break_out = callback_logic if isinstance(should_break_out, (list, tuple)): should_break_out, run_one_last_time = should_break_out callback_logic() if should_break_out: # caller requests we stop handling this exception break # traceback.print_exc() half_interval = mdelay * 0.10 # interval size actual_delay = random.uniform(mdelay - half_interval, mdelay + half_interval) msg = "Retrying in %.2f seconds ..." % actual_delay if logger is None: logging.exception(msg) else: logger.exception(msg) time.sleep(actual_delay) mtries -= 1 mdelay *= 2 if run_one_last_time: # one exception may be all the caller wanted in certain cases return f(*args, **kwargs) return f_retry # true decorator return deco_retry
Retry calling the decorated function using an exponential backoff. :param callback_by_exception: callback/method invocation on certain exceptions :type callback_by_exception: None or dict
entailment
def stream_url(self): """Build url for stream.""" rtsp_url = RTSP_URL.format( host=self.config.host, video=self.video_query, audio=self.audio_query, event=self.event_query) _LOGGER.debug(rtsp_url) return rtsp_url
Build url for stream.
entailment
def session_callback(self, signal): """Signalling from stream session. Data - new data available for processing. Playing - Connection is healthy. Retry - if there is no connection to device. """ if signal == SIGNAL_DATA: self.event.new_event(self.data) elif signal == SIGNAL_FAILED: self.retry() if signal in [SIGNAL_PLAYING, SIGNAL_FAILED] and \ self.connection_status_callback: self.connection_status_callback(signal)
Signalling from stream session. Data - new data available for processing. Playing - Connection is healthy. Retry - if there is no connection to device.
entailment
def start(self): """Start stream.""" if not self.stream or self.stream.session.state == STATE_STOPPED: self.stream = RTSPClient( self.config.loop, self.stream_url, self.config.host, self.config.username, self.config.password, self.session_callback) self.stream.start()
Start stream.
entailment
def stop(self): """Stop stream.""" if self.stream and self.stream.session.state != STATE_STOPPED: self.stream.stop()
Stop stream.
entailment
def retry(self): """No connection to device, retry connection after 15 seconds.""" self.stream = None self.config.loop.call_later(RETRY_TIMER, self.start) _LOGGER.debug('Reconnecting to %s', self.config.host)
No connection to device, retry connection after 15 seconds.
entailment
def connect_mysql(): """ return an inspector object """ MySQLConnection.get_characterset_info = MySQLConnection.get_charset db = create_engine(engine_name) db.echo = True db.connect() return db
return an inspector object
entailment
def check_perm(user_id, permission_code):
    """
    Checks whether a user has permission to perform an action.

    The permission_code parameter should be a permission contained in tPerm.

    If the user does not have permission to perform an action, a
    PermissionError is raised.

    :param user_id: ID of the user being checked
    :param permission_code: code of the permission (tPerm.code) to verify
    :raises PermissionError: if the permission code does not exist, or the
        user does not hold the permission
    """
    # Resolve the permission code to its row; a missing code is an error in
    # its own right, reported separately from a denied permission.
    try:
        perm = db.DBSession.query(Perm).filter(Perm.code==permission_code).one()
    except NoResultFound:
        raise PermissionError("Nonexistent permission type: %s"%(permission_code))

    # Walk user -> role -> role-permission to confirm the user holds the
    # permission. The query's success (one row) is the check; `res` itself
    # is unused.
    # NOTE(review): `join(Perm, Perm.id==perm.id)` joins Perm on a constant
    # rather than relating it to RoleUser/RolePerm -- looks like it should
    # constrain RolePerm.perm_id to the user's roles; verify the join logic.
    try:
        res = db.DBSession.query(User).join(RoleUser, RoleUser.user_id==User.id).\
            join(Perm, Perm.id==perm.id).\
            join(RolePerm, RolePerm.perm_id==Perm.id).filter(User.id==user_id).one()
    except NoResultFound:
        raise PermissionError("Permission denied. User %s does not have permission %s"%
                              (user_id, permission_code))
Checks whether a user has permission to perform an action. The permission_code parameter should be a permission contained in tPerm. If the user does not have permission to perfom an action, a permission error is thrown.
entailment
def required_perms(*req_perms):
    """Decorator factory ensuring the caller holds every listed permission.

    Each permission code in ``req_perms`` is verified via ``check_perm``
    (which raises ``PermissionError`` on failure) before the wrapped
    function executes. The wrapped function must receive ``user_id`` as a
    keyword argument.
    """
    def decorate(target):
        @wraps(target)
        def checked_call(*args, **kwargs):
            uid = kwargs.get("user_id")
            for code in req_perms:
                check_perm(uid, code)
            return target(*args, **kwargs)
        return checked_call
    return decorate
Decorator applied to functions requiring caller to possess permission Takes args tuple of required perms and raises PermissionsError via check_perm if these are not a subset of user perms
entailment
def required_role(req_role):
    """Decorator factory ensuring the caller holds the role ``req_role``.

    The wrapped function must receive ``user_id`` as a keyword argument;
    a ``PermissionError`` is raised when the user lacks the role.
    """
    def decorate(target):
        @wraps(target)
        def checked_call(*args, **kwargs):
            uid = kwargs.get("user_id")
            # A single matching row proves the user holds the role.
            try:
                db.DBSession.query(RoleUser).filter(RoleUser.user_id==uid).join(Role, Role.code==req_role).one()
            except NoResultFound:
                raise PermissionError("Permission denied. User %s does not have role %s"%
                                  (uid, req_role))
            return target(*args, **kwargs)
        return checked_call
    return decorate
Decorator applied to functions requiring caller to possess the specified role
entailment
def get_time_period(period_name):
    """Map a time-period name to its hydra-compatible abbreviation.

    Lookup is case-insensitive; an unrecognised name raises ``Exception``.
    """
    key = period_name.lower()
    abbreviation = time_map.get(key)
    if abbreviation is None:
        raise Exception("Symbol %s not recognised as a time period"%period_name)
    return abbreviation
Given a time period name, fetch the hydra-compatible time abbreviation.
entailment
def get_datetime(timestamp):
    """
    Turn a string timestamp into a datetime.

    First tries to use dateutil. Failing that it tries to guess the time
    format and converts it manually using strptime.

    :param timestamp: timestamp string (or an existing ``datetime``)
    :raises ValueError: if ``timestamp`` is a bare number, which cannot be
        interpreted as a date reliably
    :returns: A timezone unaware timestamp.
    """
    # Reject plain numbers up front: "1234" is ambiguous as a date.
    timestamp_is_float = False
    try:
        float(timestamp)
        timestamp_is_float = True
    except (ValueError, TypeError):
        pass

    if timestamp_is_float:
        raise ValueError("Timestamp %s is a float"%(timestamp,))

    # First try to use dateutil. Failing that, continue with manual parsing.
    try:
        parsed_dt = parse(timestamp, dayfirst=False)
        if parsed_dt.tzinfo is None:
            return parsed_dt
        else:
            return parsed_dt.replace(tzinfo=None)
    except Exception:
        # Deliberate best-effort: any parse failure falls through to the
        # format-guessing path below.
        pass

    if isinstance(timestamp, datetime):
        return timestamp

    fmt = guess_timefmt(timestamp)
    if fmt is None:
        fmt = FORMAT

    try:
        ts_time = datetime.strptime(timestamp, fmt)
    except ValueError as e:
        # BUG FIX: Python 3 exceptions have no ``.message`` attribute; the
        # original ``e.message`` raised AttributeError here and masked the
        # intended recovery. Use str(e) instead.
        err_text = str(e)
        if err_text.split(' ', 1)[0].strip() == 'unconverted':
            # strptime reports "unconverted data remains: +HHMM" when the
            # string carries a UTC offset the format lacks: strip the offset,
            # re-parse, then apply the offset manually.
            utcoffset = err_text.split()[3].strip()
            timestamp = timestamp.replace(utcoffset, '')
            ts_time = datetime.strptime(timestamp, fmt)
            # Apply offset
            tzoffset = timedelta(hours=int(utcoffset[0:3]), minutes=int(utcoffset[3:5]))
            ts_time -= tzoffset
        else:
            # Bare raise preserves the original traceback.
            raise

    return ts_time
Turn a string timestamp into a date time. First tries to use dateutil. Failing that it tries to guess the time format and converts it manually using stfptime. @returns: A timezone unaware timestamp.
entailment
def timestamp_to_ordinal(timestamp):
    """Convert a soap-interface timestamp to the database time format.

    The result is the Gregorian ordinal of the date (1 = January 1st,
    year 1) plus the elapsed fraction of the day, quantized to 20 decimal
    places. Returns None for a None input.
    """
    if timestamp is None:
        return None

    moment = get_datetime(timestamp)

    # Seconds elapsed since that day's midnight, as a day fraction.
    midnight = datetime(moment.year, moment.month, moment.day, 0, 0, 0)
    seconds_into_day = (moment - midnight).total_seconds()
    day_fraction = (Decimal(repr(seconds_into_day)) / Decimal(86400)).quantize(
        Decimal('.00000000000000000001'), rounding=ROUND_HALF_UP)

    ordinal = Decimal(moment.toordinal()) + day_fraction
    log.debug("%s converted to %s", timestamp, ordinal)
    return ordinal
Convert a timestamp as defined in the soap interface to the time format stored in the database.
entailment
def date_to_string(date, seasonal=False):
    """Convert a date to a standard string used by Hydra.

    A normal conversion yields e.g. '2013-10-03T00:49:17.568000'. When
    ``seasonal`` is True, the year is replaced by the configured seasonal
    key (default '9999'), producing a yearly-recurring timestamp that
    Hydra recognises as seasonal.
    """
    # Fetched unconditionally to mirror the configured default handling.
    seasonal_key = config.get('DEFAULT', 'seasonal_key', '9999')
    tail = '-%m-%dT%H:%M:%S.%f'
    fmt = (seasonal_key + tail) if seasonal else ('%Y' + tail)
    return date.strftime(fmt)
Convert a date to a standard string used by Hydra. The resulting string looks like this:: '2013-10-03 00:49:17.568-0400' Hydra also accepts seasonal time series (yearly recurring). If the flag ``seasonal`` is set to ``True``, this function will generate a string recognised by Hydra as seasonal time stamp.
entailment