sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def choices(tree):
    """
    Get the 'address' of each leaf node in terms of internal node choices

    :param tree: binary tree encoded as nested 2-tuples whose leaves are
        scalar indices
    :return: (n, n-1) array; addr[leaf, node] is 0/1 for the branch taken at
        that internal node on the path to the leaf, NaN where the node is not
        on the leaf's path
    """
    # `leaves` is defined elsewhere in this module; n leaves imply n-1
    # internal nodes in a full binary tree.
    n = len(leaves(tree))
    addr = np.nan * np.ones((n, n-1))

    def _addresses(node, index, choices):
        # index is the index of the current internal node
        # choices is a list of (indice, 0/1) choices made
        if np.isscalar(node):
            # Leaf: record the 0/1 decision made at every ancestor
            for i, choice in choices:
                addr[node, i] = choice
            return index
        elif isinstance(node, tuple) and len(node) == 2:
            # Internal node: left child extends choices with 0, right with 1.
            # Indices are assigned in pre-order; the return value threads the
            # next free internal-node index through the recursion.
            newindex = _addresses(node[0], index+1, choices + [(index, 0)])
            newindex = _addresses(node[1], newindex, choices + [(index, 1)])
            return newindex
        else:
            raise Exception("Not a tree!")

    _addresses(tree, 0, [])
    return addr
Get the 'address' of each leaf node in terms of internal node choices
entailment
def adjacency(tree):
    """
    Construct the adjacency matrix of the tree

    :param tree: binary tree encoded as nested 2-tuples whose leaves are scalars
    :return: (N, N) symmetric 0/1 array, N = total number of nodes
    """
    # `ids` (defined elsewhere in this module) maps each node to a matrix index
    dd = ids(tree)
    N = len(dd)
    A = np.zeros((N, N))

    def _adj(node):
        if np.isscalar(node):
            # Leaves have no children; nothing to connect
            return
        elif isinstance(node, tuple) and len(node) == 2:
            # Connect this node to each child in both directions, then recurse
            A[dd[node], dd[node[0]]] = 1
            A[dd[node[0]], dd[node]] = 1
            _adj(node[0])
            A[dd[node], dd[node[1]]] = 1
            A[dd[node[1]], dd[node]] = 1
            _adj(node[1])

    _adj(tree)
    return A
Construct the adjacency matrix of the tree :param tree: :return:
entailment
def max_likelihood(self, data, weights=None, stats=None, lmbda=0.1):
    """
    As an alternative to MCMC with Polya-gamma augmentation, we also
    implement maximum likelihood learning via gradient descent with
    autograd. This follows the pybasicbayes convention.

    :param data: list of tuples, (x,y), for each dataset.
    :param weights: Not used in this implementation.
    :param stats: Not used in this implementation.
    :param lmbda: L2 penalty strength on the weights, scaled by dataset size.
    """
    import autograd.numpy as anp
    from autograd import value_and_grad, hessian_vector_product
    from scipy.optimize import minimize

    assert weights is None
    assert stats is None

    # Normalize a single (x, y) tuple into a one-element list
    if not isinstance(data, list):
        assert isinstance(data, tuple) and len(data) == 2
        data = [data]

    # Define a helper function for the log of the logistic fn
    def loglogistic(psi):
        return psi - anp.log(1+anp.exp(psi))

    # optimize each row of A and b
    for n in range(self.D_out):
        # Define an objective function for the n-th row of hstack((A, b))
        # This is the negative log likelihood of the n-th column of data.
        def nll(abn):
            an, bn = abn[:-1], abn[-1]
            T = 0
            ll = 0
            for (x, y) in data:
                T += x.shape[0]
                yn = y[:, n]
                psi = anp.dot(x, an) + bn
                # Bernoulli log likelihood under the logistic link
                ll += anp.sum(yn * loglogistic(psi))
                ll += anp.sum((1 - yn) * loglogistic(-1. * psi))

            # Include a penalty on the weights
            ll -= lmbda * T * anp.sum(an**2)
            ll -= lmbda * T * bn**2

            # Normalize by the number of data points
            return -1 * ll / T

        abn0 = np.concatenate((self.A[n], self.b[n]))
        # Newton-CG with autograd-supplied gradient and Hessian-vector product
        res = minimize(value_and_grad(nll), abn0,
                       tol=1e-3,
                       method="Newton-CG",
                       jac=True,
                       hessp=hessian_vector_product(nll))

        assert res.success
        self.A[n] = res.x[:-1]
        self.b[n] = res.x[-1]
As an alternative to MCMC with Polya-gamma augmentation, we also implement maximum likelihood learning via gradient descent with autograd. This follows the pybasicbayes convention. :param data: list of tuples, (x,y), for each dataset. :param weights: Not used in this implementation. :param stats: Not used in this implementation.
entailment
def resample(self, data, mask=None, omega=None):
    """
    Multinomial regression is somewhat special. We have to compute the
    kappa functions for the entire dataset, not just for one column of
    the data at a time.

    :param data: (x, y) tuple or list of such tuples
    :param mask: optional list of boolean arrays matching the y shapes
    :param omega: optional pre-sampled Polya-gamma auxiliary variables
    """
    if not isinstance(data, list):
        assert isinstance(data, tuple) and len(data) == 2, \
            "datas must be an (x,y) tuple or a list of such tuples"
        data = [data]

    # Default mask includes every observation
    if mask is None:
        mask = [np.ones(y.shape, dtype=bool) for x, y in data]

    # Resample auxiliary variables if they are not given
    if omega is None:
        omega = self._resample_auxiliary_variables(data)

    # Make copies of parameters (for sample collection in calling methods)
    self.A = self.A.copy()
    self.b = self.b.copy()

    D = self.D_in
    for n in range(self.D_out):
        # Resample C_{n,:} given z, omega[:,n], and kappa[:,n]
        # Prior over the augmented weight vector [A[n], b[n]]
        prior_Sigma = np.zeros((D + 1, D + 1))
        prior_Sigma[:D, :D] = self.sigmasq_A[n]
        prior_Sigma[D, D] = self.sigmasq_b[n]
        prior_J = np.linalg.inv(prior_Sigma)

        prior_h = prior_J.dot(np.concatenate((self.mu_A[n], [self.mu_b[n]])))

        # Accumulate likelihood contributions in information form
        lkhd_h = np.zeros(D + 1)
        lkhd_J = np.zeros((D + 1, D + 1))

        for d, m, o in zip(data, mask, omega):
            if isinstance(d, tuple):
                x, y = d
            else:
                # Concatenated array: first D columns are x, the rest y
                x, y = d[:, :D], d[:, D:]
            # Augment x with a column of ones for the bias term
            augx = np.hstack((x, np.ones((x.shape[0], 1))))
            J = o * m
            h = self.kappa_func(y) * m

            lkhd_J += (augx * J[:, n][:, None]).T.dot(augx)
            lkhd_h += h[:, n].T.dot(augx)

        post_h = prior_h + lkhd_h
        post_J = prior_J + lkhd_J

        # Draw [A[n], b[n]] jointly from the Gaussian conditional
        joint_sample = sample_gaussian(J=post_J, h=post_h)
        self.A[n, :] = joint_sample[:D]
        self.b[n] = joint_sample[D]
Multinomial regression is somewhat special. We have to compute the kappa functions for the entire dataset, not just for one column of the data at a time.
entailment
def get_claims_for(self, user_id, requested_claims):
    # type: (str, Mapping[str, Optional[Mapping[str, Union[str, List[str]]]]) -> Dict[str, Union[str, List[str]]]
    """
    Filter the userinfo based on which claims where requested.

    :param user_id: user identifier
    :param requested_claims: see <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter">
        "OpenID Connect Core 1.0", Section 5.5</a> for structure
    :return: All requested claims available from the userinfo.
    """
    available = self._db[user_id]
    selected = {}
    # Keep only the requested claims the user actually has a value for
    for claim_name in requested_claims:
        if claim_name in available:
            selected[claim_name] = available[claim_name]
    return selected
Filter the userinfo based on which claims where requested. :param user_id: user identifier :param requested_claims: see <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter"> "OpenID Connect Core 1.0", Section 5.5</a> for structure :return: All requested claims available from the userinfo.
entailment
def create_authorization_code(self, authorization_request, subject_identifier, scope=None):
    # type: (oic.oic.message.AuthorizationRequest, str, Optional[List[str]]) -> str
    """
    Creates an authorization code bound to the authorization request and the
    authenticated user identified by the subject identifier.

    :param authorization_request: the authorization request the code is bound to
    :param subject_identifier: identifier of the authenticated user
    :param scope: granted scope; defaults to the scope of the authorization request
    :return: the new authorization code
    :raise InvalidSubjectIdentifier: if the subject identifier is unknown
    """
    if not self._is_valid_subject_identifier(subject_identifier):
        raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))

    scope = ' '.join(scope or authorization_request['scope'])

    logger.debug('creating authz code for scope=%s', scope)

    authorization_code = rand_str()
    authz_info = {
        'used': False,
        'exp': int(time.time()) + self.authorization_code_lifetime,
        'sub': subject_identifier,
        'granted_scope': scope,
        self.KEY_AUTHORIZATION_REQUEST: authorization_request.to_dict()
    }
    self.authorization_codes[authorization_code] = authz_info

    logger.debug('new authz_code=%s to client_id=%s for sub=%s valid_until=%s',
                 authorization_code, authorization_request['client_id'],
                 subject_identifier, authz_info['exp'])
    return authorization_code
Creates an authorization code bound to the authorization request and the authenticated user identified by the subject identifier.
entailment
def create_access_token(self, authorization_request, subject_identifier, scope=None):
    # type: (oic.oic.message.AuthorizationRequest, str, Optional[List[str]]) -> se_leg_op.access_token.AccessToken
    """
    Creates an access token bound to the authentication request and the
    authenticated user identified by the subject identifier.

    :param authorization_request: the authorization request the token is bound to
    :param subject_identifier: identifier of the authenticated user
    :param scope: granted scope; defaults to the scope of the authorization request
    :return: the new access token
    :raise InvalidSubjectIdentifier: if the subject identifier is unknown
    """
    if not self._is_valid_subject_identifier(subject_identifier):
        raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))
    scope = scope or authorization_request['scope']

    return self._create_access_token(subject_identifier, authorization_request.to_dict(), ' '.join(scope))
Creates an access token bound to the authentication request and the authenticated user identified by the subject identifier.
entailment
def _create_access_token(self, subject_identifier, auth_req, granted_scope, current_scope=None):
    # type: (str, Mapping[str, Union[str, List[str]]], str, Optional[str]) -> se_leg_op.access_token.AccessToken
    """
    Creates an access token bound to the subject identifier, client id and
    requested scope.

    :param subject_identifier: identifier of the authenticated user
    :param auth_req: authorization request as a plain dict
    :param granted_scope: space-separated scope originally granted
    :param current_scope: optional narrower scope for this token (used when
        refreshing); defaults to the granted scope
    :return: the new access token
    """
    access_token = AccessToken(rand_str(), self.access_token_lifetime)
    scope = current_scope or granted_scope

    logger.debug('creating access token for scope=%s', scope)

    authz_info = {
        'iat': int(time.time()),
        'exp': int(time.time()) + self.access_token_lifetime,
        'sub': subject_identifier,
        'client_id': auth_req['client_id'],
        'aud': [auth_req['client_id']],
        'scope': scope,
        'granted_scope': granted_scope,
        'token_type': access_token.BEARER_TOKEN_TYPE,
        self.KEY_AUTHORIZATION_REQUEST: auth_req
    }
    self.access_tokens[access_token.value] = authz_info

    logger.debug('new access_token=%s to client_id=%s for sub=%s valid_until=%s',
                 access_token.value, auth_req['client_id'], subject_identifier,
                 authz_info['exp'])
    return access_token
Creates an access token bound to the subject identifier, client id and requested scope.
entailment
def exchange_code_for_token(self, authorization_code):
    # type: (str) -> se_leg_op.access_token.AccessToken
    """
    Exchanges an authorization code for an access token.

    :param authorization_code: code previously issued by create_authorization_code
    :return: the new access token
    :raise InvalidAuthorizationCode: if the code is unknown, already used,
        or expired
    """
    if authorization_code not in self.authorization_codes:
        raise InvalidAuthorizationCode('{} unknown'.format(authorization_code))

    authz_info = self.authorization_codes[authorization_code]
    if authz_info['used']:
        # Authorization codes are single-use
        logger.debug('detected already used authz_code=%s', authorization_code)
        raise InvalidAuthorizationCode('{} has already been used'.format(authorization_code))
    elif authz_info['exp'] < int(time.time()):
        logger.debug('detected expired authz_code=%s, now=%s > exp=%s ',
                     authorization_code, int(time.time()), authz_info['exp'])
        raise InvalidAuthorizationCode('{} has expired'.format(authorization_code))

    # Mark the code as consumed before issuing the token
    authz_info['used'] = True

    access_token = self._create_access_token(authz_info['sub'],
                                             authz_info[self.KEY_AUTHORIZATION_REQUEST],
                                             authz_info['granted_scope'])

    logger.debug('authz_code=%s exchanged to access_token=%s', authorization_code,
                 access_token.value)
    return access_token
Exchanges an authorization code for an access token.
entailment
def introspect_access_token(self, access_token_value):
    # type: (str) -> Dict[str, Union[str, List[str]]]
    """
    Returns authorization data associated with the access token.
    See <a href="https://tools.ietf.org/html/rfc7662">"Token Introspection", Section 2.2</a>.

    :param access_token_value: the access token to introspect
    :return: introspection response; 'active' reflects whether the token
        has expired, plus any stored fields that are valid introspection
        response parameters
    :raise InvalidAccessToken: if the access token is unknown
    """
    if access_token_value not in self.access_tokens:
        raise InvalidAccessToken('{} unknown'.format(access_token_value))

    authz_info = self.access_tokens[access_token_value]
    introspection = {'active': authz_info['exp'] >= int(time.time())}
    # Only expose fields that are defined introspection response parameters
    introspection_params = {k: v for k, v in authz_info.items()
                            if k in TokenIntrospectionResponse.c_param}
    introspection.update(introspection_params)
    return introspection
Returns authorization data associated with the access token. See <a href="https://tools.ietf.org/html/rfc7662">"Token Introspection", Section 2.2</a>.
entailment
def create_refresh_token(self, access_token_value):
    # type: (str) -> str
    """
    Creates a refresh token bound to the specified access token.

    :param access_token_value: the access token the refresh token is bound to
    :return: the new refresh token, or None if refresh tokens are disabled
        (falsy refresh_token_lifetime)
    :raise InvalidAccessToken: if the access token is unknown
    """
    if access_token_value not in self.access_tokens:
        raise InvalidAccessToken('{} unknown'.format(access_token_value))

    if not self.refresh_token_lifetime:
        # Refresh tokens are disabled for this provider.
        # (Fixed duplicated word "for for" in the log message.)
        logger.debug('no refresh token issued for access_token=%s', access_token_value)
        return None

    refresh_token = rand_str()
    authz_info = {'access_token': access_token_value,
                  'exp': int(time.time()) + self.refresh_token_lifetime}
    self.refresh_tokens[refresh_token] = authz_info

    logger.debug('issued refresh_token=%s expiring=%d for access_token=%s',
                 refresh_token, authz_info['exp'], access_token_value)
    return refresh_token
Creates an refresh token bound to the specified access token.
entailment
def use_refresh_token(self, refresh_token, scope=None):
    # type (str, Optional[List[str]]) -> Tuple[se_leg_op.access_token.AccessToken, Optional[str]]
    """
    Creates a new access token, and refresh token, based on the supplied
    refresh token.

    :param refresh_token: the refresh token presented by the client
    :param scope: optional requested scope; must be a subset of the granted scope
    :return: new access token and new refresh token if the old one had an
        expiration time
    :raise InvalidRefreshToken: if the refresh token is unknown or expired
    :raise InvalidScope: if the requested scope exceeds the granted scope
    """
    if refresh_token not in self.refresh_tokens:
        raise InvalidRefreshToken('{} unknown'.format(refresh_token))

    refresh_token_info = self.refresh_tokens[refresh_token]
    if 'exp' in refresh_token_info and refresh_token_info['exp'] < int(time.time()):
        raise InvalidRefreshToken('{} has expired'.format(refresh_token))

    authz_info = self.access_tokens[refresh_token_info['access_token']]

    if scope:
        # A refreshed token may narrow the scope, never widen it
        if not requested_scope_is_allowed(scope, authz_info['granted_scope']):
            logger.debug('trying to refresh token with superset scope, requested_scope=%s, granted_scope=%s',
                         scope, authz_info['granted_scope'])
            raise InvalidScope('Requested scope includes non-granted value')
        scope = ' '.join(scope)
        logger.debug('refreshing token with new scope, old_scope=%s -> new_scope=%s',
                     authz_info['scope'], scope)
    else:
        # OAuth 2.0: scope: "[...] if omitted is treated as equal to the scope originally granted by the resource owner"
        scope = authz_info['granted_scope']

    new_access_token = self._create_access_token(authz_info['sub'],
                                                 authz_info[self.KEY_AUTHORIZATION_REQUEST],
                                                 authz_info['granted_scope'],
                                                 scope)

    new_refresh_token = None
    if self.refresh_token_threshold \
            and 'exp' in refresh_token_info \
            and refresh_token_info['exp'] - int(time.time()) < self.refresh_token_threshold:
        # refresh token is close to expiry, issue a new one
        new_refresh_token = self.create_refresh_token(new_access_token.value)
    else:
        # Keep the old refresh token, re-pointing it at the new access token
        self.refresh_tokens[refresh_token]['access_token'] = new_access_token.value

    logger.debug('refreshed tokens, new_access_token=%s new_refresh_token=%s old_refresh_token=%s',
                 new_access_token, new_refresh_token, refresh_token)
    return new_access_token, new_refresh_token
Creates a new access token, and refresh token, based on the supplied refresh token. :return: new access token and new refresh token if the old one had an expiration time
entailment
def get_subject_identifier(self, subject_type, user_id, sector_identifier=None):
    # type: (str, str, str) -> str
    """
    Returns a subject identifier for the local user identifier.

    :param subject_type: 'pairwise' or 'public', see
        <a href="http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes">
        "OpenID Connect Core 1.0", Section 8</a>.
    :param user_id: local user identifier
    :param sector_identifier: the client's sector identifier,
        see <a href="http://openid.net/specs/openid-connect-core-1_0.html#Terminology">
        "OpenID Connect Core 1.0", Section 1.2</a>
    :raise ValueError: if subject_type is unknown, or 'pairwise' is requested
        without a sector_identifier
    """
    if user_id not in self.subject_identifiers:
        self.subject_identifiers[user_id] = {}

    if subject_type == 'public':
        if 'public' not in self.subject_identifiers[user_id]:
            # Bug fix: update the user's entry instead of replacing it with
            # {'public': ...}, which silently discarded any previously issued
            # pairwise identifiers. Read-modify-write mirrors the pairwise
            # branch in case the mapping is a persistence wrapper.
            sub_map = self.subject_identifiers[user_id]
            sub_map['public'] = self._subject_identifier_factory.create_public_identifier(user_id)
            self.subject_identifiers[user_id] = sub_map
            # Bug fix: the format directive was 'sub=%', a broken '%'-spec
            logger.debug('created new public sub=%s for user_id=%s',
                         sub_map['public'], user_id)
        sub = self.subject_identifiers[user_id]['public']
        logger.debug('returning public sub=%s', sub)
        return sub
    elif subject_type == 'pairwise':
        if not sector_identifier:
            raise ValueError('sector_identifier cannot be None or empty')
        subject_id = self._subject_identifier_factory.create_pairwise_identifier(user_id, sector_identifier)
        logger.debug('returning pairwise sub=%s for user_id=%s and sector_identifier=%s',
                     subject_id, user_id, sector_identifier)
        # Record the pairwise identifier, de-duplicating via a set
        sub = self.subject_identifiers[user_id]
        pairwise_set = set(sub.get('pairwise', []))
        pairwise_set.add(subject_id)
        sub['pairwise'] = list(pairwise_set)
        self.subject_identifiers[user_id] = sub
        return subject_id

    raise ValueError('Unknown subject_type={}'.format(subject_type))
Returns a subject identifier for the local user identifier. :param subject_type: 'pairwise' or 'public', see <a href="http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes"> "OpenID Connect Core 1.0", Section 8</a>. :param user_id: local user identifier :param sector_identifier: the client's sector identifier, see <a href="http://openid.net/specs/openid-connect-core-1_0.html#Terminology"> "OpenID Connect Core 1.0", Section 1.2</a>
entailment
def authorization_request_verify(authentication_request):
    """
    Verifies that all required parameters and correct values are included
    in the authentication request.

    :param authentication_request: the authentication request to verify
    :raise InvalidAuthenticationRequest: if the authentication is incorrect
    """
    try:
        authentication_request.verify()
    except MessageException as e:
        # Surface pyoidc message errors as an OAuth 'invalid_request'
        raise InvalidAuthenticationRequest(str(e), authentication_request,
                                           oauth_error='invalid_request') from e
Verifies that all required parameters and correct values are included in the authentication request. :param authentication_request: the authentication request to verify :raise InvalidAuthenticationRequest: if the authentication is incorrect
entailment
def client_id_is_known(provider, authentication_request):
    """
    Verifies the client identifier is known.

    :param provider: provider instance
    :param authentication_request: the authentication request to verify
    :raise InvalidAuthenticationRequest: if the client_id is unknown
    """
    if authentication_request['client_id'] not in provider.clients:
        logger.error('Unknown client_id \'{}\''.format(authentication_request['client_id']))
        raise InvalidAuthenticationRequest('Unknown client_id',
                                           authentication_request,
                                           oauth_error='unauthorized_client')
Verifies the client identifier is known. :param provider: provider instance :param authentication_request: the authentication request to verify :raise InvalidAuthenticationRequest: if the client_id is unknown
entailment
def redirect_uri_is_in_registered_redirect_uris(provider, authentication_request):
    """
    Verifies the redirect uri is registered for the client making the request.

    :param provider: provider instance
    :param authentication_request: authentication request to verify
    :raise InvalidAuthenticationRequest: if the redirect uri is not registered
    """
    error = InvalidAuthenticationRequest('Redirect uri is not registered',
                                         authentication_request,
                                         oauth_error="invalid_request")
    try:
        allowed_redirect_uris = provider.clients[authentication_request['client_id']]['redirect_uris']
    except KeyError as e:
        # Either the client or its redirect_uris entry is missing
        logger.error('client metadata is missing redirect_uris')
        raise error

    if authentication_request['redirect_uri'] not in allowed_redirect_uris:
        logger.error("Redirect uri \'{0}\' is not registered for this client".format(authentication_request['redirect_uri']))
        raise error
Verifies the redirect uri is registered for the client making the request. :param provider: provider instance :param authentication_request: authentication request to verify :raise InvalidAuthenticationRequest: if the redirect uri is not registered
entailment
def response_type_is_in_registered_response_types(provider, authentication_request):
    """
    Verifies that the requested response type is allowed for the client making the request.

    :param provider: provider instance
    :param authentication_request: authentication request to verify
    :raise InvalidAuthenticationRequest: if the response type is not allowed
    """
    error = InvalidAuthenticationRequest('Response type is not registered',
                                         authentication_request,
                                         oauth_error='invalid_request')
    try:
        allowed_response_types = provider.clients[authentication_request['client_id']]['response_types']
    except KeyError as e:
        # Either the client or its response_types entry is missing
        logger.error('client metadata is missing response_types')
        raise error

    if not is_allowed_response_type(authentication_request['response_type'],
                                    allowed_response_types):
        logger.error('Response type \'{}\' is not registered'.format(' '.join(authentication_request['response_type'])))
        raise error
Verifies that the requested response type is allowed for the client making the request. :param provider: provider instance :param authentication_request: authentication request to verify :raise InvalidAuthenticationRequest: if the response type is not allowed
entailment
def userinfo_claims_only_specified_when_access_token_is_issued(authentication_request):
    """
    According to <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter">
    "OpenID Connect Core 1.0", Section 5.5</a>: "When the userinfo member is used, the request MUST
    also use a response_type value that results in an Access Token being issued to the Client for
    use at the UserInfo Endpoint."

    :param authentication_request: the authentication request to verify
    :raise InvalidAuthenticationRequest: if the requested claims can not be returned according to
        the request
    """
    # response_type=['id_token'] is the only flow that yields no access token
    issues_access_token = authentication_request['response_type'] != ['id_token']
    requests_userinfo_claims = ('claims' in authentication_request
                                and 'userinfo' in authentication_request['claims'])

    if requests_userinfo_claims and not issues_access_token:
        raise InvalidAuthenticationRequest('Userinfo claims cannot be requested, when response_type=\'id_token\'',
                                           authentication_request,
                                           oauth_error='invalid_request')
According to <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter"> "OpenID Connect Core 1.0", Section 5.5</a>: "When the userinfo member is used, the request MUST also use a response_type value that results in an Access Token being issued to the Client for use at the UserInfo Endpoint." :param authentication_request: the authentication request to verify :raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request
entailment
def registration_request_verify(registration_request):
    """
    Verifies that all required parameters and correct values are included
    in the client registration request.

    :param registration_request: the registration request to verify
    :raise InvalidClientRegistrationRequest: if the registration is incorrect
    """
    try:
        registration_request.verify()
    except MessageException as e:
        # Surface pyoidc message errors as an OAuth 'invalid_request'
        raise InvalidClientRegistrationRequest(str(e), registration_request,
                                               oauth_error='invalid_request') from e
Verifies that all required parameters and correct values are included in the client registration request. :param registration_request: the authentication request to verify :raise InvalidClientRegistrationRequest: if the registration is incorrect
entailment
def client_preferences_match_provider_capabilities(provider, registration_request):
    """
    Verifies that all requested preferences in the client metadata can be
    fulfilled by this provider.

    :param provider: provider instance
    :param registration_request: the registration request to verify
    :raise InvalidClientRegistrationRequest: if the registration is incorrect
    """
    def match(client_preference, provider_capability):
        if isinstance(client_preference, list):
            # deal with comparing space separated values, e.g. 'response_types', without considering the order
            # at least one requested preference must be matched
            return len(find_common_values(client_preference, provider_capability)) > 0

        return client_preference in provider_capability

    for client_preference in registration_request.keys():
        if client_preference not in PREFERENCE2PROVIDER:
            # metadata parameter that shouldn't be matched
            continue

        provider_capability = PREFERENCE2PROVIDER[client_preference]
        if not match(registration_request[client_preference],
                     provider.configuration_information[provider_capability]):
            raise InvalidClientRegistrationRequest(
                'Could not match client preference {}={} with provider capability {}={}'.format(
                    client_preference, registration_request[client_preference],
                    provider_capability, provider.configuration_information[provider_capability]),
                registration_request, oauth_error='invalid_request')
Verifies that all requested preferences in the client metadata can be fulfilled by this provider. :param registration_request: the authentication request to verify :raise InvalidClientRegistrationRequest: if the registration is incorrect
entailment
def _psi_n(x, n, b): """ Compute the n-th term in the infinite sum of the Jacobi density. """ return 2**(b-1) / gamma(b) * (-1)**n * \ np.exp(gammaln(n+b) - gammaln(n+1) + np.log(2*n+b) - 0.5 * np.log(2*np.pi*x**3) - (2*n+b)**2 / (8.*x))
Compute the n-th term in the infinite sum of the Jacobi density.
entailment
def _tilt(omega, b, psi): """ Compute the tilt of the PG density for value omega and tilt psi. :param omega: point at which to evaluate the density :param psi: tilt parameter """ return np.cosh(psi/2.0)**b * np.exp(-psi**2/2.0 * omega)
Compute the tilt of the PG density for value omega and tilt psi. :param omega: point at which to evaluate the density :param psi: tilt parameter
entailment
def pgpdf(omega, b, psi, trunc=200):
    """
    Approximate the density log PG(omega | b, psi) using a
    truncation of the density written as an infinite sum.

    :param omega: point at which to evaluate density
    :param b: first parameter of PG
    :param psi: tilting of PG
    :param trunc: number of terms in sum
    """
    # Sum the first `trunc` terms of the alternating series
    terms = np.array([_psi_n(omega, k, b) for k in np.arange(trunc)])
    pdf = np.sum(terms, axis=0)
    # Account for tilting
    return pdf * _tilt(omega, b, psi)
Approximate the density log PG(omega | b, psi) using a truncation of the density written as an infinite sum. :param omega: point at which to evaluate density :param b: first parameter of PG :param psi: tilting of PG :param trunc: number of terms in sum
entailment
def psi_to_pi(psi, axis=None):
    """
    Convert psi to a probability vector pi via stick breaking:
    pi[k] = logistic(psi[k]) * (remaining stick mass).

    :param psi: Length K-1 vector (or 2D array of such rows; with `axis`,
        any-dimensional array whose `axis` has length K-1)
    :param axis: axis along which to apply the stick breaking; None handles
        the 1D/2D cases explicitly
    :return: Length K normalized probability vector
    """
    if axis is None:
        if psi.ndim == 1:
            K = psi.size + 1
            pi = np.zeros(K)

            # Set pi[1..K-1]
            stick = 1.0
            for k in range(K-1):
                pi[k] = logistic(psi[k]) * stick
                stick -= pi[k]

            # Set the last output
            pi[-1] = stick

            # DEBUG
            assert np.allclose(pi.sum(), 1.0)

        elif psi.ndim == 2:
            M, Km1 = psi.shape
            K = Km1 + 1
            pi = np.zeros((M,K))

            # Set pi[1..K-1]
            stick = np.ones(M)
            for k in range(K-1):
                pi[:,k] = logistic(psi[:,k]) * stick
                stick -= pi[:,k]

            # Set the last output
            pi[:,-1] = stick

            # DEBUG
            assert np.allclose(pi.sum(axis=1), 1.0)

        else:
            raise ValueError("psi must be 1 or 2D")
    else:
        # General case: stick breaking along an arbitrary axis.
        # Output has the same shape as psi except `axis` grows by one.
        K = psi.shape[axis] + 1
        pi = np.zeros([psi.shape[dim] if dim != axis else K for dim in range(psi.ndim)])
        stick = np.ones(psi.shape[:axis] + psi.shape[axis+1:])
        for k in range(K-1):
            # Slice out the k-th entry along `axis`
            inds = [slice(None) if dim != axis else k for dim in range(psi.ndim)]
            pi[inds] = logistic(psi[inds]) * stick
            stick -= pi[inds]
        # Remaining stick mass becomes the last entry along `axis`
        pi[[slice(None) if dim != axis else -1 for dim in range(psi.ndim)]] = stick
        assert np.allclose(pi.sum(axis=axis), 1.)

    return pi
Convert psi to a probability vector pi :param psi: Length K-1 vector :return: Length K normalized probability vector
entailment
def verify_client_authentication(clients, parsed_request, authz_header=None):
    # type: (Mapping[str, str], Mapping[str, Mapping[str, Any]], Optional[str]) -> bool
    """
    Verifies client authentication at the token endpoint, see
    <a href="https://tools.ietf.org/html/rfc6749#section-2.3.1">"The OAuth 2.0 Authorization Framework", Section 2.3.1</a>

    :param parsed_request: key-value pairs from parsed urlencoded request
    :param clients: clients db
    :param authz_header: the HTTP Authorization header value
    :return: the authenticated client_id
    :raise InvalidClientAuthentication: if the client authentication was incorrect
    """
    client_id = None
    client_secret = None
    authn_method = None
    if authz_header:
        logger.debug('client authentication in Authorization header %s', authz_header)

        authz_scheme = authz_header.split(maxsplit=1)[0]
        if authz_scheme == 'Basic':
            authn_method = 'client_secret_basic'
            credentials = authz_header[len('Basic '):]
            # Restore stripped base64 padding. `-len % 4` is 0 for an already
            # padded value; the previous `4 - len % 4` appended '====' in that
            # case, which breaks decoding.
            missing_padding = -len(credentials) % 4
            if missing_padding:
                credentials += '=' * missing_padding
            try:
                auth = base64.urlsafe_b64decode(credentials.encode('utf-8')).decode('utf-8')
            except ValueError:
                # ValueError covers both binascii.Error (malformed base64)
                # and UnicodeDecodeError (non-UTF-8 payload).
                raise InvalidClientAuthentication('Could not decode userid/password from authorization header')
            # Split on the first ':' only — the secret itself may contain ':'
            client_id, sep, client_secret = auth.partition(':')
            if not sep:
                raise InvalidClientAuthentication('Invalid userid/password format in authorization header')
        else:
            raise InvalidClientAuthentication('Unknown scheme in authorization header, {} != Basic'.format(authz_scheme))
    elif 'client_id' in parsed_request:
        logger.debug('client authentication in request body %s', parsed_request)
        client_id = parsed_request['client_id']
        if 'client_secret' in parsed_request:
            authn_method = 'client_secret_post'
            client_secret = parsed_request['client_secret']
        else:
            # Public client: no secret supplied
            authn_method = 'none'
            client_secret = None

    if client_id not in clients:
        raise InvalidClientAuthentication('client_id \'{}\' unknown'.format(client_id))
    client_info = clients[client_id]
    if client_secret != client_info.get('client_secret', None):
        raise InvalidClientAuthentication('Incorrect client_secret')

    # The client must authenticate with its registered method
    expected_authn_method = client_info.get('token_endpoint_auth_method', 'client_secret_basic')
    if authn_method != expected_authn_method:
        raise InvalidClientAuthentication(
            'Wrong authentication method used, MUST use \'{}\''.format(expected_authn_method))

    return client_id
Verifies client authentication at the token endpoint, see <a href="https://tools.ietf.org/html/rfc6749#section-2.3.1">"The OAuth 2.0 Authorization Framework", Section 2.3.1</a> :param parsed_request: key-value pairs from parsed urlencoded request :param clients: clients db :param authz_header: the HTTP Authorization header value :return: the unmodified parsed request :raise InvalidClientAuthentication: if the client authentication was incorrect
entailment
def add_visualization(self, visualization, size_x=6, size_y=3, col=0, row=0):
    """
    Adds the visualization to the dashboard. Leave col and row = 0 for automatic
    placement of the visualization. Visualizations are placed on a grid with 12
    columns and unlimited rows.

    :param visualization: previously loaded visualization
    :param size_x: width of the panel
    :param size_y: height of the panel
    :param col: 1-based column of the top left corner, leave 0 for automatic placement
    :param row: 1-based row of the top left corner, leave 0 for automatic placement
    :return: newly created panel or None
    """
    new_panel_index = self.get_max_index()+1
    if col and row:
        # Explicit placement: build the panel directly at (col, row)
        new_panel = {
            'col': col,
            'row': row,
            'size_x': size_x,
            'size_y': size_y,
            'panelIndex': new_panel_index,
            'type': 'visualization',
            'id': visualization.id
        }
        self.panels.append(new_panel)
        return new_panel
    else:
        # Automatic placement via append_panel; may fail (None) if the
        # panel cannot fit on the 12-column grid
        new_panel = append_panel(self.panels, size_x, size_y)
        if new_panel:
            new_panel['id'] = visualization.id
            new_panel['panelIndex'] = new_panel_index
            new_panel['type'] = 'visualization'
        return new_panel
Adds the visualization to the dashboard. Leave col and row = 0 for automatic placement of the visualization. Visualizations are placed on a grid with 12 columns and unlimited rows. :param visualization: previously loaded visualization :param size_x width of the panel :param size_y height of the panel :param col 1-based column of the top left corner, leave 0 for automatic placement :param row 1-based row of the top left corner, leave 0 for automatic placement :return: newly created panel or None
entailment
def remove_visualization(self, visualization_id):
    """
    Removes all visualizations with the specified id from the dashboard

    :param visualization_id: id of the visualization(s) to remove
    :return: None
    """
    # Rebuild the list instead of deleting while iterating: `del` during
    # iteration shifts the indices and skips the element following each
    # removal, so consecutive duplicates were left behind. Slice assignment
    # mutates the existing list object in place.
    self.panels[:] = [panel for panel in self.panels
                      if panel['id'] != visualization_id]
Removes all visualizations with the specified id from the dashboard :param visualization_id: :return:
entailment
def get_all(self):
    """
    Returns a list of all visualizations

    :return: list of the Visualization instances
    """
    query = {'query': {'match_all': {}}}
    response = self.es.search(index=self.index, doc_type=self.doc_type, body=query)
    hits = response['hits']
    if not hits['total']:
        return []
    return [Visualization.from_kibana(hit) for hit in hits['hits']]
Returns a list of all visualizations :return: list of the Visualization instances
entailment
def add(self, visualization):
    """
    Creates a new visualization

    :param visualization: instance of Visualization
    :return: Elasticsearch create-response
    """
    # Fall back to a generated id when the visualization has none
    doc_id = visualization.id or str(uuid.uuid1())
    return self.es.create(index=self.index,
                          id=doc_id,
                          doc_type=self.doc_type,
                          body=visualization.to_kibana(),
                          refresh=True)
Creates a new visualization :param visualization: instance of Visualization :return:
entailment
def update(self, visualization):
    """
    Updates existing visualization

    :param visualization: instance of Visualization that was previously loaded
    :return: Elasticsearch update-response
    """
    partial_doc = {'doc': visualization.to_kibana()}
    return self.es.update(index=self.index,
                          id=visualization.id,
                          doc_type=self.doc_type,
                          body=partial_doc,
                          refresh=True)
Updates existing visualization :param visualization: instance of Visualization that was previously loaded :return:
entailment
def remove(self, visualization): """ Deletes the visualization :param visualization: instance of Visualization that was previously loaded :return: """ res = self.es.delete(index=self.index, id=visualization.id, doc_type=self.doc_type, refresh=True) return res
Deletes the visualization :param visualization: instance of Visualization that was previously loaded :return:
entailment
def bottoms(panels): """ Finds bottom lines of all panels :param panels: :return: sorted by row list of tuples representing lines (col, row , col + len, row) """ bottom_lines = [(p['col'], p['row'] + p['size_y'], p['col'] + p['size_x'], p['row'] + p['size_y']) for p in panels] return sorted(bottom_lines, key=lambda l: l[1], reverse=True)
Finds bottom lines of all panels :param panels: :return: sorted by row list of tuples representing lines (col, row , col + len, row)
entailment
def find_shape(bottom_lines, max_len): """ Finds a shape of lowest horizontal lines with step=1 :param bottom_lines: :param max_len: :return: list of levels (row values), list indexes are columns """ shape = [1] * max_len for i in range(max_len): for line in bottom_lines: if line[0] <= i + 1 < line[2]: shape[i] = line[1] break return shape
Finds a shape of lowest horizontal lines with step=1 :param bottom_lines: :param max_len: :return: list of levels (row values), list indexes are columns
entailment
def longest_lines(shape): """ Creates lines from shape :param shape: :return: list of dictionaries with col,row,len fields """ lines = [] for level in set(shape): count = 0 for i in range(len(shape)): if shape[i] <= level: count += 1 elif count: lines.append({'row': level, 'col': i - count + 1, 'len': count}) count = 0 if count: lines.append({'row': level, 'col': i - count + 2, 'len': count}) return sorted(lines, key=lambda l: l['row'])
Creates lines from shape :param shape: :return: list of dictionaries with col,row,len fields
entailment
def append_panel(panels, size_x, size_y, max_col=12): """ Appends a panel to the list of panels. Finds the highest palce at the left for the new panel. :param panels: :param size_x: :param size_y: :param max_col: :return: a new panel or None if it is not possible to place a panel with such size_x """ bottom_lines = bottoms(panels) shape = find_shape(bottom_lines, max_col) lines = longest_lines(shape) line = find_place(lines, size_x) if not line: return panel = { 'col': line['col'], 'row': line['row'], 'size_x': size_x, 'size_y': size_y, } panels.append(panel) return panel
Appends a panel to the list of panels. Finds the highest palce at the left for the new panel. :param panels: :param size_x: :param size_y: :param max_col: :return: a new panel or None if it is not possible to place a panel with such size_x
entailment
def run_from_argv(self, argv): """ Set the default Gherkin test runner for its options to be parsed. """ self.test_runner = test_runner_class super(Command, self).run_from_argv(argv)
Set the default Gherkin test runner for its options to be parsed.
entailment
def handle(self, *test_labels, **options): """ Set the default Gherkin test runner. """ if not options.get('testrunner', None): options['testrunner'] = test_runner_class return super(Command, self).handle(*test_labels, **options)
Set the default Gherkin test runner.
entailment
def django_url(step, url=None): """ The URL for a page from the test server. :param step: A Gherkin step :param url: If specified, the relative URL to append. """ base_url = step.test.live_server_url if url: return urljoin(base_url, url) else: return base_url
The URL for a page from the test server. :param step: A Gherkin step :param url: If specified, the relative URL to append.
entailment
def namespace(self, namespace, to=None): """ Filter by namespace. Try to guess which field to use in lookup. Accept 'to' argument if you need to specify. """ fields = get_apphook_field_names(self.model) if not fields: raise ValueError( ugettext( 'Can\'t find any relation to an ApphookConfig model in {0}' ).format(self.model.__name__) ) if to and to not in fields: raise ValueError( ugettext( 'Can\'t find relation to ApphookConfig model named ' '"{0}" in "{1}"' ).format(to, self.model.__name__) ) if len(fields) > 1 and to not in fields: raise ValueError( ugettext( '"{0}" has {1} relations to an ApphookConfig model.' ' Please, specify which one to use in argument "to".' ' Choices are: {2}' ).format( self.model.__name__, len(fields), ', '.join(fields) ) ) else: if not to: to = fields[0] lookup = '{0}__namespace'.format(to) kwargs = {lookup: namespace} return self.filter(**kwargs)
Filter by namespace. Try to guess which field to use in lookup. Accept 'to' argument if you need to specify.
entailment
def _app_config_select(self, request, obj): """ Return the select value for apphook configs :param request: request object :param obj: current object :return: False if no preselected value is available (more than one or no apphook config is present), apphook config instance if exactly one apphook config is defined or apphook config defined in the request or in the current object, False otherwise """ if not obj and not request.GET.get(self.app_config_attribute, False): config_model = get_apphook_model(self.model, self.app_config_attribute) if config_model.objects.count() == 1: return config_model.objects.first() return None elif obj and getattr(obj, self.app_config_attribute, False): return getattr(obj, self.app_config_attribute) elif request.GET.get(self.app_config_attribute, False): config_model = get_apphook_model(self.model, self.app_config_attribute) return config_model.objects.get( pk=int(request.GET.get(self.app_config_attribute, False)) ) return False
Return the select value for apphook configs :param request: request object :param obj: current object :return: False if no preselected value is available (more than one or no apphook config is present), apphook config instance if exactly one apphook config is defined or apphook config defined in the request or in the current object, False otherwise
entailment
def _set_config_defaults(self, request, form, obj=None): """ Cycle through app_config_values and sets the form value according to the options in the current apphook config. self.app_config_values is a dictionary containing config options as keys, form fields as values:: app_config_values = { 'apphook_config': 'form_field', ... } :param request: request object :param form: model form for the current model :param obj: current object :return: form with defaults set """ for config_option, field in self.app_config_values.items(): if field in form.base_fields: form.base_fields[field].initial = self.get_config_data(request, obj, config_option) return form
Cycle through app_config_values and sets the form value according to the options in the current apphook config. self.app_config_values is a dictionary containing config options as keys, form fields as values:: app_config_values = { 'apphook_config': 'form_field', ... } :param request: request object :param form: model form for the current model :param obj: current object :return: form with defaults set
entailment
def get_fieldsets(self, request, obj=None): """ If the apphook config must be selected first, returns a fieldset with just the app config field and help text :param request: :param obj: :return: """ app_config_default = self._app_config_select(request, obj) if app_config_default is None and request.method == 'GET': return (_(self.app_config_selection_title), {'fields': (self.app_config_attribute, ), 'description': _(self.app_config_selection_desc)}), else: return super(ModelAppHookConfig, self).get_fieldsets(request, obj)
If the apphook config must be selected first, returns a fieldset with just the app config field and help text :param request: :param obj: :return:
entailment
def get_config_data(self, request, obj, name): """ Method that retrieves a configuration option for a specific AppHookConfig instance :param request: the request object :param obj: the model instance :param name: name of the config option as defined in the config form :return value: config value or None if no app config is found """ return_value = None config = None if obj: try: config = getattr(obj, self.app_config_attribute, False) except ObjectDoesNotExist: # pragma: no cover pass if not config and self.app_config_attribute in request.GET: config_model = get_apphook_model(self.model, self.app_config_attribute) try: config = config_model.objects.get(pk=request.GET[self.app_config_attribute]) except config_model.DoesNotExist: # pragma: no cover pass if config: return_value = getattr(config, name) return return_value
Method that retrieves a configuration option for a specific AppHookConfig instance :param request: the request object :param obj: the model instance :param name: name of the config option as defined in the config form :return value: config value or None if no app config is found
entailment
def get_form(self, request, obj=None, **kwargs): """ Provides a flexible way to get the right form according to the context For the add view it checks whether the app_config is set; if not, a special form to select the namespace is shown, which is reloaded after namespace selection. If only one namespace exists, the current is selected and the normal form is used. """ form = super(ModelAppHookConfig, self).get_form(request, obj, **kwargs) if self.app_config_attribute not in form.base_fields: return form app_config_default = self._app_config_select(request, obj) if app_config_default: form.base_fields[self.app_config_attribute].initial = app_config_default get = copy.copy(request.GET) get[self.app_config_attribute] = app_config_default.pk request.GET = get elif app_config_default is None and request.method == 'GET': class InitialForm(form): class Meta(form.Meta): fields = (self.app_config_attribute,) form = InitialForm form = self._set_config_defaults(request, form, obj) return form
Provides a flexible way to get the right form according to the context For the add view it checks whether the app_config is set; if not, a special form to select the namespace is shown, which is reloaded after namespace selection. If only one namespace exists, the current is selected and the normal form is used.
entailment
def _models_generator(): """ Build a hash of model verbose names to models """ for app in apps.get_app_configs(): for model in app.get_models(): yield (str(model._meta.verbose_name).lower(), model) yield (str(model._meta.verbose_name_plural).lower(), model)
Build a hash of model verbose names to models
entailment
def get_model(name): """ Convert a model's verbose name to the model class. This allows us to use the models verbose name in steps. """ model = MODELS.get(name.lower(), None) assert model, "Could not locate model by name '%s'" % name return model
Convert a model's verbose name to the model class. This allows us to use the models verbose name in steps.
entailment
def reset_sequence(model): """ Reset the ID sequence for a model. """ sql = connection.ops.sequence_reset_sql(no_style(), [model]) for cmd in sql: connection.cursor().execute(cmd)
Reset the ID sequence for a model.
entailment
def _dump_model(model, attrs=None): """ Dump the model fields for debugging. """ fields = [] for field in model._meta.fields: fields.append((field.name, str(getattr(model, field.name)))) if attrs is not None: for attr in attrs: fields.append((attr, str(getattr(model, attr)))) for field in model._meta.many_to_many: vals = getattr(model, field.name) fields.append((field.name, '{val} ({count})'.format( val=', '.join(map(str, vals.all())), count=vals.count(), ))) print(', '.join( '{0}={1}'.format(field, value) for field, value in fields ))
Dump the model fields for debugging.
entailment
def _model_exists_step(self, model, should_exist): """ Test for the existence of a model matching the given data. """ model = get_model(model) data = guess_types(self.hashes) queryset = model.objects try: existence_check = _TEST_MODEL[model] except KeyError: existence_check = test_existence failed = 0 try: for hash_ in data: match = existence_check(queryset, hash_) if should_exist: assert match, \ "%s does not exist: %s" % (model.__name__, hash_) else: assert not match, \ "%s exists: %s" % (model.__name__, hash_) except AssertionError as exc: print(exc) failed += 1 if failed: print("Rows in DB are:") for existing_model in queryset.all(): _dump_model(existing_model, attrs=[k[1:] for k in data[0].keys() if k.startswith('@')]) if should_exist: raise AssertionError("%i rows missing" % failed) else: raise AssertionError("%i rows found" % failed)
Test for the existence of a model matching the given data.
entailment
def write_models(model, data, field): """ :param model: a Django model class :param data: a list of hashes to build models from :param field: a field name to match models on, or None :returns: a list of models written Create or update models for each data hash. `field` is the field that is used to get the existing models out of the database to update them; otherwise, if ``field=None``, new models are created. Useful when registering custom tests with :func:`writes_models`. """ written = [] for hash_ in data: if field: if field not in hash_: raise KeyError(("The \"%s\" field is required for all update " "operations") % field) model_kwargs = {field: hash_[field]} model_obj = model.objects.get(**model_kwargs) for to_set, val in hash_.items(): setattr(model_obj, to_set, val) model_obj.save() else: model_obj = model.objects.create(**hash_) written.append(model_obj) reset_sequence(model) return written
:param model: a Django model class :param data: a list of hashes to build models from :param field: a field name to match models on, or None :returns: a list of models written Create or update models for each data hash. `field` is the field that is used to get the existing models out of the database to update them; otherwise, if ``field=None``, new models are created. Useful when registering custom tests with :func:`writes_models`.
entailment
def _write_models_step(self, model, field=None): """ Write or update a model. """ model = get_model(model) data = guess_types(self.hashes) try: func = _WRITE_MODEL[model] except KeyError: func = partial(write_models, model) func(data, field)
Write or update a model.
entailment
def _create_models_for_relation_step(self, rel_model_name, rel_key, rel_value, model): """ Create a new model linked to the given model. Syntax: And `model` with `field` "`value`" has `new model` in the database: Example: .. code-block:: gherkin And project with name "Ball Project" has goals in the database: | description | | To have fun playing with balls of twine | """ model = get_model(model) lookup = {rel_key: rel_value} rel_model = get_model(rel_model_name).objects.get(**lookup) data = guess_types(self.hashes) for hash_ in data: hash_['%s' % rel_model_name] = rel_model try: func = _WRITE_MODEL[model] except KeyError: func = partial(write_models, model) func(data, None)
Create a new model linked to the given model. Syntax: And `model` with `field` "`value`" has `new model` in the database: Example: .. code-block:: gherkin And project with name "Ball Project" has goals in the database: | description | | To have fun playing with balls of twine |
entailment
def _create_m2m_links_step(self, rel_model_name, rel_key, rel_value, relation_name): """ Link many-to-many models together. Syntax: And `model` with `field` "`value`" is linked to `other model` in the database: Example: .. code-block:: gherkin And article with name "Guidelines" is linked to tags in the database: | name | | coding | | style | """ lookup = {rel_key: rel_value} rel_model = get_model(rel_model_name).objects.get(**lookup) relation = None for m2m in rel_model._meta.many_to_many: if relation_name in (m2m.name, m2m.verbose_name): relation = getattr(rel_model, m2m.name) break if not relation: try: relation = getattr(rel_model, relation_name) except AttributeError: pass assert relation, \ "%s does not have a many-to-many relation named '%s'" % ( rel_model._meta.verbose_name.capitalize(), relation_name, ) m2m_model = relation.model for hash_ in self.hashes: relation.add(m2m_model.objects.get(**hash_))
Link many-to-many models together. Syntax: And `model` with `field` "`value`" is linked to `other model` in the database: Example: .. code-block:: gherkin And article with name "Guidelines" is linked to tags in the database: | name | | coding | | style |
entailment
def _model_count_step(self, count, model): """ Count the number of models in the database. Example: .. code-block:: gherkin Then there should be 0 goals in the database """ model = get_model(model) expected = int(count) found = model.objects.count() assert found == expected, "Expected %d %s, found %d." % \ (expected, model._meta.verbose_name_plural, found)
Count the number of models in the database. Example: .. code-block:: gherkin Then there should be 0 goals in the database
entailment
def mail_sent_count(self, count): """ Test that `count` mails have been sent. Syntax: I have sent `count` emails Example: .. code-block:: gherkin Then I have sent 2 emails """ expected = int(count) actual = len(mail.outbox) assert expected == actual, \ "Expected to send {0} email(s), got {1}.".format(expected, actual)
Test that `count` mails have been sent. Syntax: I have sent `count` emails Example: .. code-block:: gherkin Then I have sent 2 emails
entailment
def mail_sent_content(self, text, part): """ Test an email contains (assert text in) the given text in the relevant message part (accessible as an attribute on the email object). This step strictly applies whitespace. Syntax: I have sent an email with "`text`" in the `part` Example: .. code-block:: gherkin Then I have sent an email with "pandas" in the body """ if not any(text in getattr(email, part) for email in mail.outbox): dump_emails(part) raise AssertionError( "No email contained expected text in the {0}.".format(part))
Test an email contains (assert text in) the given text in the relevant message part (accessible as an attribute on the email object). This step strictly applies whitespace. Syntax: I have sent an email with "`text`" in the `part` Example: .. code-block:: gherkin Then I have sent an email with "pandas" in the body
entailment
def mail_sent_contains_html(self): """ Test that an email contains the HTML (assert HTML in) in the multiline as one of its MIME alternatives. The HTML is normalised by passing through Django's :func:`django.test.html.parse_html`. Example: .. code-block:: gherkin And I have sent an email with the following HTML alternative: \"\"\" <p><strong>Name:</strong> Sir Panda</p> <p><strong>Phone:</strong> 0400000000</p> <p><strong>Email:</strong> sir.panda@pand.as</p> \"\"\" """ for email in mail.outbox: try: html = next(content for content, mime in email.alternatives if mime == 'text/html') dom1 = parse_html(html) dom2 = parse_html(self.multiline) assert_in(dom1, dom2) except AssertionError as exc: print("Email did not match", exc) # we intentionally eat the exception continue return True raise AssertionError("No email contained the HTML")
Test that an email contains the HTML (assert HTML in) in the multiline as one of its MIME alternatives. The HTML is normalised by passing through Django's :func:`django.test.html.parse_html`. Example: .. code-block:: gherkin And I have sent an email with the following HTML alternative: \"\"\" <p><strong>Name:</strong> Sir Panda</p> <p><strong>Phone:</strong> 0400000000</p> <p><strong>Email:</strong> sir.panda@pand.as</p> \"\"\"
entailment
def dump_emails(part): """Show the sent emails' tested parts, to aid in debugging.""" print("Sent emails:") for email in mail.outbox: print(getattr(email, part))
Show the sent emails' tested parts, to aid in debugging.
entailment
def namespace_url(context, view_name, *args, **kwargs): """ Returns an absolute URL matching named view with its parameters and the provided application instance namespace. If no namespace is passed as a kwarg (or it is "" or None), this templatetag will look into the request object for the app_config's namespace. If there is still no namespace found, this tag will act like the normal {% url ... %} template tag. Normally, this tag will return whatever is returned by the ultimate call to reverse, which also means it will raise NoReverseMatch if reverse() cannot find a match. This behaviour can be override by suppling a 'default' kwarg with the value of what should be returned when no match is found. """ namespace = kwargs.pop('namespace', None) if not namespace: namespace, __ = get_app_instance(context['request']) if namespace: namespace += ':' reverse = partial( urls.reverse, '{0:s}{1:s}'.format(namespace, view_name)) # We're explicitly NOT happy to just re-raise the exception, as that may # adversely affect stack traces. if 'default' not in kwargs: if kwargs: return reverse(kwargs=kwargs) elif args: return reverse(args=args) else: return reverse() default = kwargs.pop('default', None) try: if kwargs: return reverse(kwargs=kwargs) elif args: return reverse(args=args) else: return reverse() except urls.NoReverseMatch: return default
Returns an absolute URL matching named view with its parameters and the provided application instance namespace. If no namespace is passed as a kwarg (or it is "" or None), this templatetag will look into the request object for the app_config's namespace. If there is still no namespace found, this tag will act like the normal {% url ... %} template tag. Normally, this tag will return whatever is returned by the ultimate call to reverse, which also means it will raise NoReverseMatch if reverse() cannot find a match. This behaviour can be override by suppling a 'default' kwarg with the value of what should be returned when no match is found.
entailment
def get_app_instance(request): """ Returns a tuple containing the current namespace and the AppHookConfig instance :param request: request object :return: namespace, config """ app = None if getattr(request, 'current_page', None) and request.current_page.application_urls: app = apphook_pool.get_apphook(request.current_page.application_urls) if app and app.app_config: try: config = None with override(get_language_from_request(request, check_path=True)): namespace = resolve(request.path_info).namespace config = app.get_config(namespace) return namespace, config except Resolver404: pass return '', None
Returns a tuple containing the current namespace and the AppHookConfig instance :param request: request object :return: namespace, config
entailment
def setup_config(form_class, config_model=None): """ Register the provided form as config form for the provided config model This can be used as a decorator by adding a `model` attribute to the config form:: @setup_config class ExampleConfigForm(AppDataForm): model = ExampleConfig :param form_class: Form class derived from AppDataForm :param config_model: Model class derived from AppHookConfig :return: """ # allow use as a decorator if config_model is None: return setup_config(form_class, form_class.model) app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
Register the provided form as config form for the provided config model This can be used as a decorator by adding a `model` attribute to the config form:: @setup_config class ExampleConfigForm(AppDataForm): model = ExampleConfig :param form_class: Form class derived from AppDataForm :param config_model: Model class derived from AppHookConfig :return:
entailment
def _get_apphook_field_names(model): """ Return all foreign key field names for a AppHookConfig based model """ from .models import AppHookConfig # avoid circular dependencies fields = [] for field in model._meta.fields: if isinstance(field, ForeignKey) and issubclass(field.remote_field.model, AppHookConfig): fields.append(field) return [field.name for field in fields]
Return all foreign key field names for a AppHookConfig based model
entailment
def get_apphook_field_names(model): """ Cache app-hook field names on model :param model: model class or object :return: list of foreign key field names to AppHookConfigs """ key = APP_CONFIG_FIELDS_KEY.format( app_label=model._meta.app_label, model_name=model._meta.object_name ).lower() if not hasattr(model, key): field_names = _get_apphook_field_names(model) setattr(model, key, field_names) return getattr(model, key)
Cache app-hook field names on model :param model: model class or object :return: list of foreign key field names to AppHookConfigs
entailment
def get_apphook_configs(obj): """ Get apphook configs for an object obj :param obj: any model instance :return: list of apphook configs for given obj """ keys = get_apphook_field_names(obj) return [getattr(obj, key) for key in keys] if keys else []
Get apphook configs for an object obj :param obj: any model instance :return: list of apphook configs for given obj
entailment
def data_received(self, data): """Receive data from the protocol. Called when asyncio.Protocol detects received data from network. """ _LOGGER.debug("Starting: data_received") _LOGGER.debug('Received %d bytes from PLM: %s', len(data), binascii.hexlify(data)) self._buffer.put_nowait(data) asyncio.ensure_future(self._peel_messages_from_buffer(), loop=self._loop) _LOGGER.debug("Finishing: data_received")
Receive data from the protocol. Called when asyncio.Protocol detects received data from network.
entailment
def connection_lost(self, exc): """Reestablish the connection to the transport. Called when asyncio.Protocol loses the network connection. """ if exc is None: _LOGGER.warning('End of file received from Insteon Modem') else: _LOGGER.warning('Lost connection to Insteon Modem: %s', exc) self.transport = None asyncio.ensure_future(self.pause_writing(), loop=self.loop) if self._connection_lost_callback: self._connection_lost_callback()
Reestablish the connection to the transport. Called when asyncio.Protocol loses the network connection.
entailment
def add_all_link_done_callback(self, callback): """Register a callback to be invoked when the ALDB is loaded.""" _LOGGER.debug('Added new callback %s ', callback) self._cb_load_all_link_db_done.append(callback)
Register a callback to be invoked when the ALDB is loaded.
entailment
def add_device_not_active_callback(self, callback): """Register callback to be invoked when a device is not responding.""" _LOGGER.debug('Added new callback %s ', callback) self._cb_device_not_active.append(callback)
Register callback to be invoked when a device is not responding.
entailment
def poll_devices(self): """Request status updates from each device.""" for addr in self.devices: device = self.devices[addr] if not device.address.is_x10: device.async_refresh_state()
Request status updates from each device.
entailment
def send_msg(self, msg, wait_nak=True, wait_timeout=WAIT_TIMEOUT): """Place a message on the send queue for sending. Message are sent in the order they are placed in the queue. """ msg_info = MessageInfo(msg=msg, wait_nak=wait_nak, wait_timeout=wait_timeout) _LOGGER.debug("Queueing msg: %s", msg) self._send_queue.put_nowait(msg_info)
Place a message on the send queue for sending. Message are sent in the order they are placed in the queue.
entailment
def start_all_linking(self, mode, group): """Put the IM into All-Linking mode. Puts the IM into All-Linking mode for 4 minutes. Parameters: mode: 0 | 1 | 3 | 255 0 - PLM is responder 1 - PLM is controller 3 - Device that initiated All-Linking is Controller 255 = Delete All-Link group: All-Link group number (0 - 255) """ msg = StartAllLinking(mode, group) self.send_msg(msg)
Put the IM into All-Linking mode. Puts the IM into All-Linking mode for 4 minutes. Parameters: mode: 0 | 1 | 3 | 255 0 - PLM is responder 1 - PLM is controller 3 - Device that initiated All-Linking is Controller 255 = Delete All-Link group: All-Link group number (0 - 255)
entailment
def add_x10_device(self, housecode, unitcode, feature='OnOff'): """Add an X10 device based on a feature description. Current features are: - OnOff - Dimmable - Sensor - AllUnitsOff - AllLightsOn - AllLightsOff """ device = insteonplm.devices.create_x10(self, housecode, unitcode, feature) if device: self.devices[device.address.id] = device return device
Add an X10 device based on a feature description. Current features are: - OnOff - Dimmable - Sensor - AllUnitsOff - AllLightsOn - AllLightsOff
entailment
def device_not_active(self, addr): """Handle inactive devices.""" self.aldb_device_handled(addr) for callback in self._cb_device_not_active: callback(addr)
Handle inactive devices.
entailment
def aldb_device_handled(self, addr): """Remove device from ALDB device list.""" if isinstance(addr, Address): remove_addr = addr.id else: remove_addr = addr try: self._aldb_devices.pop(remove_addr) _LOGGER.debug('Removed ALDB device %s', remove_addr) except KeyError: _LOGGER.debug('Device %s not in ALDB device list', remove_addr) _LOGGER.debug('ALDB device count: %d', len(self._aldb_devices))
Remove device from ALDB device list.
entailment
def manage_aldb_record(self, control_code, control_flags, group, address, data1, data2, data3): """Update an IM All-Link record. Control Code values: - 0x00 Find First Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. You will receive an ACK at the end of the returned message if such an ALL-Link Record exists, or else a NAK if it doesn’t. If the record exists, the IM will return it in an ALL-Link Record Response (0x51) message. - 0x01 Find Next Search for the next ALL-Link Record following the one found using <Control Code> 0x00 above. This allows you to find both Controller and Responder records for a given <ALL-Link Group> and <ID>. Be sure to use the same <ALL-Link Group> and <ID> (bytes 5 – 8) as you used for <Control Code> 0x00. You will receive an ACK at the end of the returned message if another matching ALL-Link Record exists, or else a NAK if it doesn’t. If the record exists, the IM will return it in an ALL-Link Record Response (0x51) message. - 0x20 Modify First Found or Add Modify an existing or else add a new ALL-Link Record for either a Controller or Responder. Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. If such an ALL-Link Record exists, overwrite it with the data in bytes 4 – 11; otherwise, create a new ALL-Link Record using bytes 4 – 11. Note that the IM will copy <ALL-Link Record Flags> you supplied in byte 4 below directly into the <ALL-Link Record Flags> byte of the ALL-Link Record in an ALDB-L (linear) database. Use caution, because you can damage an ALDB-L if you misuse this Command. For instance, if you zero the <ALL-Link Record Flags> byte in the first ALL-Link Record, the IM’s ALDB-L database will then appear empty. 
- 0x40 Modify First Controller Found or Add Modify an existing or else add a new Controller (master) ALL-Link Record. Starting at the top of the ALDB, search for the first ALL-Link Controller Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. An ALL-Link Controller Record has bit 6 of its <ALL-Link Record Flags> byte set to 1. If such a Controller ALL-Link Record exists, overwrite it with the data in bytes 5 – 11; otherwise, create a new ALL-Link Record using bytes 5 – 11. In either case, the IM will set bit 6 of the <ALL-Link Record Flags> byte in the ALL-Link Record to 1 to indicate that the record is for a Controller. - 0x41 Modify First Responder Found or Add Modify an existing or else add a new Responder (slave) ALLLink Record. Starting at the top of the ALDB, search for the first ALL-Link Responder Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. An ALL-Link Responder Record has bit 6 of its <ALL-Link Record Flags> byte cleared to 0. If such a Responder ALL-Link Record exists, overwrite it with the data in bytes 5 – 11; otherwise, create a new ALL-Link Record using bytes 5 – 11. In either case, The IM will clear bit 6 of the <ALL-Link Record Flags> byte in the ALL-Link Record to 0 to indicate that the record is for a Responder. - 0x80 Delete First Found Delete an ALL-Link Record. Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. You will receive an ACK at the end of the returned message if such an ALL-Link Record existed and was deleted, or else a NAK no such record exists. """ msg = ManageAllLinkRecord(control_code, control_flags, group, address, data1, data2, data3) self.send_msg(msg)
Update an IM All-Link record. Control Code values: - 0x00 Find First Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. You will receive an ACK at the end of the returned message if such an ALL-Link Record exists, or else a NAK if it doesn’t. If the record exists, the IM will return it in an ALL-Link Record Response (0x51) message. - 0x01 Find Next Search for the next ALL-Link Record following the one found using <Control Code> 0x00 above. This allows you to find both Controller and Responder records for a given <ALL-Link Group> and <ID>. Be sure to use the same <ALL-Link Group> and <ID> (bytes 5 – 8) as you used for <Control Code> 0x00. You will receive an ACK at the end of the returned message if another matching ALL-Link Record exists, or else a NAK if it doesn’t. If the record exists, the IM will return it in an ALL-Link Record Response (0x51) message. - 0x20 Modify First Found or Add Modify an existing or else add a new ALL-Link Record for either a Controller or Responder. Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. If such an ALL-Link Record exists, overwrite it with the data in bytes 4 – 11; otherwise, create a new ALL-Link Record using bytes 4 – 11. Note that the IM will copy <ALL-Link Record Flags> you supplied in byte 4 below directly into the <ALL-Link Record Flags> byte of the ALL-Link Record in an ALDB-L (linear) database. Use caution, because you can damage an ALDB-L if you misuse this Command. For instance, if you zero the <ALL-Link Record Flags> byte in the first ALL-Link Record, the IM’s ALDB-L database will then appear empty. - 0x40 Modify First Controller Found or Add Modify an existing or else add a new Controller (master) ALL-Link Record. 
Starting at the top of the ALDB, search for the first ALL-Link Controller Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. An ALL-Link Controller Record has bit 6 of its <ALL-Link Record Flags> byte set to 1. If such a Controller ALL-Link Record exists, overwrite it with the data in bytes 5 – 11; otherwise, create a new ALL-Link Record using bytes 5 – 11. In either case, the IM will set bit 6 of the <ALL-Link Record Flags> byte in the ALL-Link Record to 1 to indicate that the record is for a Controller. - 0x41 Modify First Responder Found or Add Modify an existing or else add a new Responder (slave) ALLLink Record. Starting at the top of the ALDB, search for the first ALL-Link Responder Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. An ALL-Link Responder Record has bit 6 of its <ALL-Link Record Flags> byte cleared to 0. If such a Responder ALL-Link Record exists, overwrite it with the data in bytes 5 – 11; otherwise, create a new ALL-Link Record using bytes 5 – 11. In either case, The IM will clear bit 6 of the <ALL-Link Record Flags> byte in the ALL-Link Record to 0 to indicate that the record is for a Responder. - 0x80 Delete First Found Delete an ALL-Link Record. Starting at the top of the ALDB, search for the first ALL-Link Record matching the <ALL-Link Group> and <ID> in bytes 5 – 8. The search ignores byte 4, <ALL-Link Record Flags>. You will receive an ACK at the end of the returned message if such an ALL-Link Record existed and was deleted, or else a NAK no such record exists.
entailment
async def pause_writing(self): """Pause writing.""" self._restart_writer = False if self._writer_task: self._writer_task.remove_done_callback(self.restart_writing) self._writer_task.cancel() await self._writer_task await asyncio.sleep(0, loop=self._loop)
Pause writing.
entailment
def restart_writing(self, task=None): """Resume writing.""" if self._restart_writer: self._writer_task = asyncio.ensure_future( self._get_message_from_send_queue(), loop=self._loop) self._writer_task.add_done_callback(self.restart_writing)
Resume writing.
entailment
def _get_plm_info(self): """Request PLM Info.""" _LOGGER.info('Requesting Insteon Modem Info') msg = GetImInfo() self.send_msg(msg, wait_nak=True, wait_timeout=.5)
Request PLM Info.
entailment
def _load_all_link_database(self): """Load the ALL-Link Database into object.""" _LOGGER.debug("Starting: _load_all_link_database") self.devices.state = 'loading' self._get_first_all_link_record() _LOGGER.debug("Ending: _load_all_link_database")
Load the ALL-Link Database into object.
entailment
def _get_first_all_link_record(self): """Request first ALL-Link record.""" _LOGGER.debug("Starting: _get_first_all_link_record") _LOGGER.info('Requesting ALL-Link Records') if self.aldb.status == ALDBStatus.LOADED: self._next_all_link_rec_nak_retries = 3 self._handle_get_next_all_link_record_nak(None) return self.aldb.clear() self._next_all_link_rec_nak_retries = 0 msg = GetFirstAllLinkRecord() self.send_msg(msg, wait_nak=True, wait_timeout=.5) _LOGGER.debug("Ending: _get_first_all_link_record")
Request first ALL-Link record.
entailment
def _get_next_all_link_record(self): """Request next ALL-Link record.""" _LOGGER.debug("Starting: _get_next_all_link_record") _LOGGER.debug("Requesting Next All-Link Record") msg = GetNextAllLinkRecord() self.send_msg(msg, wait_nak=True, wait_timeout=.5) _LOGGER.debug("Ending: _get_next_all_link_record")
Request next ALL-Link record.
entailment
def x10_all_units_off(self, housecode): """Send the X10 All Units Off command.""" if isinstance(housecode, str): housecode = housecode.upper() else: raise TypeError('Housecode must be a string') msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_UNITS_OFF) self.send_msg(msg) self._x10_command_to_device(housecode, X10_COMMAND_ALL_UNITS_OFF, msg)
Send the X10 All Units Off command.
entailment
def x10_all_lights_off(self, housecode): """Send the X10 All Lights Off command.""" msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_LIGHTS_OFF) self.send_msg(msg) self._x10_command_to_device(housecode, X10_COMMAND_ALL_LIGHTS_OFF, msg)
Send the X10 All Lights Off command.
entailment
def x10_all_lights_on(self, housecode): """Send the X10 All Lights Off command.""" msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_LIGHTS_ON) self.send_msg(msg) self._x10_command_to_device(housecode, X10_COMMAND_ALL_LIGHTS_ON, msg)
Send the X10 All Lights Off command.
entailment
def connection_made(self, transport): """Start the PLM connection process. Called when asyncio.Protocol establishes the network connection. """ _LOGGER.info('Connection established to PLM') self.transport = transport self._restart_writer = True self.restart_writing() # Testing to see if this fixes the 2413S issue self.transport.serial.timeout = 1 self.transport.serial.write_timeout = 1 self.transport.set_write_buffer_limits(128) # limit = self.transport.get_write_buffer_size() # _LOGGER.debug('Write buffer size is %d', limit) if self._aldb.status != ALDBStatus.LOADED: asyncio.ensure_future(self._setup_devices(), loop=self._loop)
Start the PLM connection process. Called when asyncio.Protocol establishes the network connection.
entailment
def connection_made(self, transport): """Start the Hub connection process. Called when asyncio.Protocol establishes the network connection. """ _LOGGER.info('Connection established to Hub') _LOGGER.debug('Transport: %s', transport) self.transport = transport self._restart_writer = True self.restart_writing() if self._aldb.status != ALDBStatus.LOADED: asyncio.ensure_future(self._setup_devices(), loop=self._loop)
Start the Hub connection process. Called when asyncio.Protocol establishes the network connection.
entailment
def create(plm, address, cat, subcat, firmware=None): """Create a device from device info data.""" from insteonplm.devices.ipdb import IPDB ipdb = IPDB() product = ipdb[[cat, subcat]] deviceclass = product.deviceclass device = None if deviceclass is not None: device = deviceclass(plm, address, cat, subcat, product.product_key, product.description, product.model) return device
Create a device from device info data.
entailment
def create_x10(plm, housecode, unitcode, feature): """Create an X10 device from a feature definition.""" from insteonplm.devices.ipdb import IPDB ipdb = IPDB() product = ipdb.x10(feature) deviceclass = product.deviceclass device = None if deviceclass: device = deviceclass(plm, housecode, unitcode) return device
Create an X10 device from a feature definition.
entailment
def id_request(self): """Request a device ID from a device.""" import inspect curframe = inspect.currentframe() calframe = inspect.getouterframes(curframe, 2) _LOGGER.debug('caller name: %s', calframe[1][3]) msg = StandardSend(self.address, COMMAND_ID_REQUEST_0X10_0X00) self._plm.send_msg(msg)
Request a device ID from a device.
entailment
def product_data_request(self): """Request product data from a device. Not supported by all devices. Required after 01-Feb-2007. """ msg = StandardSend(self._address, COMMAND_PRODUCT_DATA_REQUEST_0X03_0X00) self._send_msg(msg)
Request product data from a device. Not supported by all devices. Required after 01-Feb-2007.
entailment
def assign_to_all_link_group(self, group=0x01): """Assign a device to an All-Link Group. The default is group 0x01. """ msg = StandardSend(self._address, COMMAND_ASSIGN_TO_ALL_LINK_GROUP_0X01_NONE, cmd2=group) self._send_msg(msg)
Assign a device to an All-Link Group. The default is group 0x01.
entailment
def delete_from_all_link_group(self, group): """Delete a device to an All-Link Group.""" msg = StandardSend(self._address, COMMAND_DELETE_FROM_ALL_LINK_GROUP_0X02_NONE, cmd2=group) self._send_msg(msg)
Delete a device to an All-Link Group.
entailment
def fx_username(self): """Get FX Username. Only required for devices that support FX Commands. FX Addressee responds with an ED 0x0301 FX Username Response message """ msg = StandardSend(self._address, COMMAND_FX_USERNAME_0X03_0X01) self._send_msg(msg)
Get FX Username. Only required for devices that support FX Commands. FX Addressee responds with an ED 0x0301 FX Username Response message
entailment
def device_text_string_request(self): """Get FX Username. Only required for devices that support FX Commands. FX Addressee responds with an ED 0x0301 FX Username Response message. """ msg = StandardSend(self._address, COMMAND_FX_USERNAME_0X03_0X01) self._send_msg(msg)
Get FX Username. Only required for devices that support FX Commands. FX Addressee responds with an ED 0x0301 FX Username Response message.
entailment
def enter_linking_mode(self, group=0x01): """Tell a device to enter All-Linking Mode. Same as holding down the Set button for 10 sec. Default group is 0x01. Not supported by i1 devices. """ msg = ExtendedSend(self._address, COMMAND_ENTER_LINKING_MODE_0X09_NONE, cmd2=group, userdata=Userdata()) msg.set_checksum() self._send_msg(msg)
Tell a device to enter All-Linking Mode. Same as holding down the Set button for 10 sec. Default group is 0x01. Not supported by i1 devices.
entailment
def enter_unlinking_mode(self, group): """Unlink a device from an All-Link group. Not supported by i1 devices. """ msg = StandardSend(self._address, COMMAND_ENTER_UNLINKING_MODE_0X0A_NONE, cmd2=group) self._send_msg(msg)
Unlink a device from an All-Link group. Not supported by i1 devices.
entailment
def get_engine_version(self): """Get the device engine version.""" msg = StandardSend(self._address, COMMAND_GET_INSTEON_ENGINE_VERSION_0X0D_0X00) self._send_msg(msg)
Get the device engine version.
entailment
def ping(self): """Ping a device.""" msg = StandardSend(self._address, COMMAND_PING_0X0F_0X00) self._send_msg(msg)
Ping a device.
entailment
def create_default_links(self): """Create the default links between the IM and the device.""" self._plm.manage_aldb_record(0x40, 0xe2, 0x00, self.address, self.cat, self.subcat, self.product_key) self.manage_aldb_record(0x41, 0xa2, 0x00, self._plm.address, self._plm.cat, self._plm.subcat, self._plm.product_key) for link in self._stateList: state = self._stateList[link] if state.is_responder: # IM is controller self._plm.manage_aldb_record(0x40, 0xe2, link, self._address, 0x00, 0x00, 0x00) # Device is responder self.manage_aldb_record(0x41, 0xa2, link, self._plm.address, state.linkdata1, state.linkdata2, state.linkdata3) if state.is_controller: # IM is responder self._plm.manage_aldb_record(0x41, 0xa2, link, self._address, 0x00, 0x00, 0x00) # Device is controller self.manage_aldb_record(0x40, 0xe2, link, self._plm.address, 0x00, 0x00, 0x00) self.read_aldb()
Create the default links between the IM and the device.
entailment
def read_aldb(self, mem_addr=0x0000, num_recs=0): """Read the device All-Link Database.""" if self._aldb.version == ALDBVersion.Null: _LOGGER.info('Device %s does not contain an All-Link Database', self._address.human) else: _LOGGER.info('Reading All-Link Database for device %s', self._address.human) asyncio.ensure_future(self._aldb.load(mem_addr, num_recs), loop=self._plm.loop) self._aldb.add_loaded_callback(self._aldb_loaded_callback)
Read the device All-Link Database.
entailment