Dataset schema (one row per extracted function):

    repository_name            string  (7 to 55 chars)
    func_path_in_repository    string  (4 to 223 chars)
    func_name                  string  (1 to 134 chars)
    whole_func_string          string  (75 to 104k chars)
    language                   string  (1 class)
    func_code_string           string  (75 to 104k chars)
    func_code_tokens           list    (19 to 28.4k items)
    func_documentation_string  string  (1 to 46.9k chars)
    func_documentation_tokens  list    (1 to 1.97k items)
    split_name                 string  (1 class)
    func_code_url              string  (87 to 315 chars)
twisted/txacme
src/txacme/client.py
Client._parse_authorization
def _parse_authorization(cls, response, uri=None):
    """
    Parse an authorization resource.
    """
    links = _parse_header_links(response)
    try:
        new_cert_uri = links[u'next'][u'url']
    except KeyError:
        raise errors.ClientError('"next" link missing')
    return (
        response.json()
        .addCallback(
            lambda body: messages.AuthorizationResource(
                body=messages.Authorization.from_json(body),
                uri=cls._maybe_location(response, uri=uri),
                new_cert_uri=new_cert_uri))
        )
python
Parse an authorization resource.
[ "Parse", "an", "authorization", "resource", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L303-L319
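The `_parse_header_links` helper above is internal to txacme. As a rough sketch of what such a parser does with an ACME response's Link header, here is a simplified, hypothetical standalone version (the regex handles only the common `<url>;rel="name"` form, not every RFC 5988 corner case):

import re

def parse_header_links(value):
    # Build {rel: {'url': ...}} from a Link header value (simplified sketch).
    links = {}
    for url, params in re.findall(r'<([^>]*)>\s*((?:;[^,]*)*)', value):
        for param in params.split(';'):
            if '=' in param:
                key, _, val = param.strip().partition('=')
                if key == 'rel':
                    links[val.strip('"')] = {'url': url}
    return links

print(parse_header_links('<https://example.com/acme/new-cert>;rel="next"'))
# {'next': {'url': 'https://example.com/acme/new-cert'}}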
twisted/txacme
src/txacme/client.py
Client._check_authorization
def _check_authorization(cls, authzr, identifier):
    """
    Check that the authorization we got is the one we expected.
    """
    if authzr.body.identifier != identifier:
        raise errors.UnexpectedUpdate(authzr)
    return authzr
python
Check that the authorization we got is the one we expected.
[ "Check", "that", "the", "authorization", "we", "got", "is", "the", "one", "we", "expected", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L322-L328
twisted/txacme
src/txacme/client.py
Client.answer_challenge
def answer_challenge(self, challenge_body, response):
    """
    Respond to an authorization challenge.

    :param ~acme.messages.ChallengeBody challenge_body: The challenge being
        responded to.
    :param ~acme.challenges.ChallengeResponse response: The response to the
        challenge.

    :return: The updated challenge resource.
    :rtype: Deferred[`~acme.messages.ChallengeResource`]
    """
    action = LOG_ACME_ANSWER_CHALLENGE(
        challenge_body=challenge_body, response=response)
    with action.context():
        return (
            DeferredContext(
                self._client.post(challenge_body.uri, response))
            .addCallback(self._parse_challenge)
            .addCallback(self._check_challenge, challenge_body)
            .addCallback(
                tap(lambda c:
                    action.add_success_fields(challenge_resource=c)))
            .addActionFinish())
python
Respond to an authorization challenge. :param ~acme.messages.ChallengeBody challenge_body: The challenge being responded to. :param ~acme.challenges.ChallengeResponse response: The response to the challenge. :return: The updated challenge resource. :rtype: Deferred[`~acme.messages.ChallengeResource`]
[ "Respond", "to", "an", "authorization", "challenge", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L330-L353
twisted/txacme
src/txacme/client.py
Client._parse_challenge
def _parse_challenge(cls, response):
    """
    Parse a challenge resource.
    """
    links = _parse_header_links(response)
    try:
        authzr_uri = links['up']['url']
    except KeyError:
        raise errors.ClientError('"up" link missing')
    return (
        response.json()
        .addCallback(
            lambda body: messages.ChallengeResource(
                authzr_uri=authzr_uri,
                body=messages.ChallengeBody.from_json(body)))
        )
python
Parse a challenge resource.
[ "Parse", "a", "challenge", "resource", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L356-L371
twisted/txacme
src/txacme/client.py
Client._check_challenge
def _check_challenge(cls, challenge, challenge_body):
    """
    Check that the challenge resource we got is the one we expected.
    """
    if challenge.uri != challenge_body.uri:
        raise errors.UnexpectedUpdate(challenge.uri)
    return challenge
python
Check that the challenge resource we got is the one we expected.
[ "Check", "that", "the", "challenge", "resource", "we", "got", "is", "the", "one", "we", "expected", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L374-L380
twisted/txacme
src/txacme/client.py
Client.poll
def poll(self, authzr):
    """
    Update an authorization from the server (usually to check its status).
    """
    action = LOG_ACME_POLL_AUTHORIZATION(authorization=authzr)
    with action.context():
        return (
            DeferredContext(self._client.get(authzr.uri))
            # Spec says we should get 202 while pending, Boulder actually
            # sends us 200 always, so just don't check.
            # .addCallback(self._expect_response, http.ACCEPTED)
            .addCallback(
                lambda res:
                self._parse_authorization(res, uri=authzr.uri)
                .addCallback(
                    self._check_authorization, authzr.body.identifier)
                .addCallback(
                    lambda authzr:
                    (authzr,
                     self.retry_after(res, _now=self._clock.seconds)))
            )
            .addCallback(tap(
                lambda a_r: action.add_success_fields(
                    authorization=a_r[0], retry_after=a_r[1])))
            .addActionFinish())
python
Update an authorization from the server (usually to check its status).
[ "Update", "an", "authorization", "from", "the", "server", "(", "usually", "to", "check", "its", "status", ")", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L382-L406
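Since `poll` fires with an `(authzr, retry_after)` tuple, a caller typically reschedules itself until the authorization leaves the pending state. A minimal sketch of such a loop, assuming `acme.messages.STATUS_PENDING` and a clock usable with `twisted.internet.task.deferLater` (txacme ships its own polling helper; this hypothetical function is not it):

from acme import messages
from twisted.internet import task

def poll_until_done(client, authzr, clock):
    # Re-poll after the server-suggested delay while still pending.
    def check(result):
        authzr, retry_after = result
        if authzr.body.status != messages.STATUS_PENDING:
            return authzr
        return task.deferLater(
            clock, retry_after, poll_until_done, client, authzr, clock)
    return client.poll(authzr).addCallback(check)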
twisted/txacme
src/txacme/client.py
Client.retry_after
def retry_after(cls, response, default=5, _now=time.time):
    """
    Parse the Retry-After value from a response.
    """
    val = response.headers.getRawHeaders(b'retry-after', [default])[0]
    try:
        return int(val)
    except ValueError:
        return http.stringToDatetime(val) - _now()
python
Parse the Retry-After value from a response.
[ "Parse", "the", "Retry", "-", "After", "value", "from", "a", "response", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L409-L417
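Note the two branches: an integer Retry-After is a relative delay in seconds, while anything else is parsed as an HTTP-date and converted to a relative delay. A standalone illustration using only the standard library (`parsedate_to_datetime` stands in for Twisted's `http.stringToDatetime`; the function name is hypothetical):

import time
from email.utils import parsedate_to_datetime

def retry_after_seconds(value, default=5, _now=time.time):
    # Interpret a Retry-After header: delta-seconds or an HTTP-date.
    if value is None:
        return default
    try:
        return int(value)
    except ValueError:
        return parsedate_to_datetime(value).timestamp() - _now()

print(retry_after_seconds('120'))   # 120
print(retry_after_seconds(None))    # 5, the default, as in retry_after above
print(retry_after_seconds('Wed, 21 Oct 2015 07:28:00 GMT') < 0)  # True: date in the past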
twisted/txacme
src/txacme/client.py
Client.request_issuance
def request_issuance(self, csr):
    """
    Request a certificate.

    Authorizations should have already been completed for all of the names
    requested in the CSR.

    Note that unlike `acme.client.Client.request_issuance`, the certificate
    resource will have the body data as raw bytes.

    ..  seealso:: `txacme.util.csr_for_names`

    ..  todo:: Delayed issuance is not currently supported, the server must
        issue the requested certificate immediately.

    :param csr: A certificate request message: normally
        `txacme.messages.CertificateRequest` or
        `acme.messages.CertificateRequest`.

    :rtype: Deferred[`acme.messages.CertificateResource`]
    :return: The issued certificate.
    """
    action = LOG_ACME_REQUEST_CERTIFICATE()
    with action.context():
        return (
            DeferredContext(
                self._client.post(
                    self.directory[csr], csr,
                    content_type=DER_CONTENT_TYPE,
                    headers=Headers({b'Accept': [DER_CONTENT_TYPE]})))
            .addCallback(self._expect_response, http.CREATED)
            .addCallback(self._parse_certificate)
            .addActionFinish())
python
Request a certificate. Authorizations should have already been completed for all of the names requested in the CSR. Note that unlike `acme.client.Client.request_issuance`, the certificate resource will have the body data as raw bytes. .. seealso:: `txacme.util.csr_for_names` .. todo:: Delayed issuance is not currently supported, the server must issue the requested certificate immediately. :param csr: A certificate request message: normally `txacme.messages.CertificateRequest` or `acme.messages.CertificateRequest`. :rtype: Deferred[`acme.messages.CertificateResource`] :return: The issued certificate.
[ "Request", "a", "certificate", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L419-L451
twisted/txacme
src/txacme/client.py
Client._parse_certificate
def _parse_certificate(cls, response):
    """
    Parse a response containing a certificate resource.
    """
    links = _parse_header_links(response)
    try:
        cert_chain_uri = links[u'up'][u'url']
    except KeyError:
        cert_chain_uri = None
    return (
        response.content()
        .addCallback(
            lambda body: messages.CertificateResource(
                uri=cls._maybe_location(response),
                cert_chain_uri=cert_chain_uri,
                body=body))
        )
python
Parse a response containing a certificate resource.
[ "Parse", "a", "response", "containing", "a", "certificate", "resource", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L454-L470
twisted/txacme
src/txacme/client.py
Client.fetch_chain
def fetch_chain(self, certr, max_length=10):
    """
    Fetch the intermediary chain for a certificate.

    :param acme.messages.CertificateResource certr: The certificate to
        fetch the chain for.
    :param int max_length: The maximum length of the chain that will be
        fetched.

    :rtype: Deferred[List[`acme.messages.CertificateResource`]]
    :return: The issuer certificate chain, ordered with the trust anchor
        last.
    """
    action = LOG_ACME_FETCH_CHAIN()
    with action.context():
        if certr.cert_chain_uri is None:
            return succeed([])
        elif max_length < 1:
            raise errors.ClientError('chain too long')
        return (
            DeferredContext(
                self._client.get(
                    certr.cert_chain_uri,
                    content_type=DER_CONTENT_TYPE,
                    headers=Headers({b'Accept': [DER_CONTENT_TYPE]})))
            .addCallback(self._parse_certificate)
            .addCallback(
                lambda issuer:
                self.fetch_chain(issuer, max_length=max_length - 1)
                .addCallback(lambda chain: [issuer] + chain))
            .addActionFinish())
python
Fetch the intermediary chain for a certificate. :param acme.messages.CertificateResource certr: The certificate to fetch the chain for. :param int max_length: The maximum length of the chain that will be fetched. :rtype: Deferred[List[`acme.messages.CertificateResource`]] :return: The issuer certificate chain, ordered with the trust anchor last.
[ "Fetch", "the", "intermediary", "chain", "for", "a", "certificate", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L472-L502
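The recursion bottoms out when a certificate has no cert_chain_uri ("up" link), and max_length bounds the depth. The shape of the traversal, with an in-memory store standing in for HTTP fetches (all names here hypothetical):

def fetch_chain(store, uri, max_length=10):
    # Follow rel="up" links; the trust anchor ends up last.
    if uri is None:
        return []
    if max_length < 1:
        raise RuntimeError('chain too long')
    body, up_uri = store[uri]
    return [body] + fetch_chain(store, up_uri, max_length - 1)

store = {
    'intermediate-uri': ('intermediate cert', 'root-uri'),
    'root-uri': ('root cert', None),
}
print(fetch_chain(store, 'intermediate-uri'))
# ['intermediate cert', 'root cert']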
twisted/txacme
src/txacme/client.py
JWSClient._wrap_in_jws
def _wrap_in_jws(self, nonce, obj):
    """
    Wrap ``JSONDeSerializable`` object in JWS.

    ..  todo:: Implement ``acmePath``.

    :param ~josepy.interfaces.JSONDeSerializable obj:
    :param bytes nonce:

    :rtype: `bytes`
    :return: JSON-encoded data
    """
    with LOG_JWS_SIGN(key_type=self._key.typ, alg=self._alg.name,
                      nonce=nonce):
        jobj = obj.json_dumps().encode()
        return (
            JWS.sign(
                payload=jobj, key=self._key, alg=self._alg, nonce=nonce)
            .json_dumps()
            .encode())
python
Wrap ``JSONDeSerializable`` object in JWS. .. todo:: Implement ``acmePath``. :param ~josepy.interfaces.JSONDeSerializable obj: :param bytes nonce: :rtype: `bytes` :return: JSON-encoded data
[ "Wrap", "JSONDeSerializable", "object", "in", "JWS", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L678-L697
twisted/txacme
src/txacme/client.py
JWSClient._check_response
def _check_response(cls, response, content_type=JSON_CONTENT_TYPE):
    """
    Check response content and its type.

    ..  note::

        Unlike :mod:`acme.client`, checking is strict.

    :param bytes content_type: Expected Content-Type response header.  If
        the response Content-Type does not match, :exc:`ClientError` is
        raised.

    :raises .ServerError: If server response body carries HTTP Problem
        (draft-ietf-appsawg-http-problem-00).
    :raises ~acme.errors.ClientError: In case of other networking errors.
    """
    def _got_failure(f):
        f.trap(ValueError)
        return None

    def _got_json(jobj):
        if 400 <= response.code < 600:
            if response_ct == JSON_ERROR_CONTENT_TYPE and jobj is not None:
                raise ServerError(
                    messages.Error.from_json(jobj), response)
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        elif response_ct != content_type:
            raise errors.ClientError(
                'Unexpected response Content-Type: {0!r}'.format(
                    response_ct))
        elif content_type == JSON_CONTENT_TYPE and jobj is None:
            raise errors.ClientError(response)
        return response

    response_ct = response.headers.getRawHeaders(
        b'Content-Type', [None])[0]
    action = LOG_JWS_CHECK_RESPONSE(
        expected_content_type=content_type,
        response_content_type=response_ct)
    with action.context():
        # TODO: response.json() is called twice, once here, and
        # once in _get and _post clients
        return (
            DeferredContext(response.json())
            .addErrback(_got_failure)
            .addCallback(_got_json)
            .addActionFinish())
python
Check response content and its type. .. note:: Unlike :mod:`acme.client`, checking is strict. :param bytes content_type: Expected Content-Type response header. If the response Content-Type does not match, :exc:`ClientError` is raised. :raises .ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises ~acme.errors.ClientError: In case of other networking errors.
[ "Check", "response", "content", "and", "its", "type", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L700-L748
twisted/txacme
src/txacme/client.py
JWSClient._send_request
def _send_request(self, method, url, *args, **kwargs):
    """
    Send HTTP request.

    :param str method: The HTTP method to use.
    :param str url: The URL to make the request to.

    :return: Deferred firing with the HTTP response.
    """
    action = LOG_JWS_REQUEST(url=url)
    with action.context():
        headers = kwargs.setdefault('headers', Headers())
        headers.setRawHeaders(b'user-agent', [self._user_agent])
        kwargs.setdefault('timeout', self.timeout)
        return (
            DeferredContext(
                self._treq.request(method, url, *args, **kwargs))
            .addCallback(
                tap(lambda r: action.add_success_fields(
                    code=r.code,
                    content_type=r.headers.getRawHeaders(
                        b'content-type', [None])[0])))
            .addActionFinish())
python
Send HTTP request. :param str method: The HTTP method to use. :param str url: The URL to make the request to. :return: Deferred firing with the HTTP response.
[ "Send", "HTTP", "request", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L750-L772
twisted/txacme
src/txacme/client.py
JWSClient.head
def head(self, url, *args, **kwargs):
    """
    Send HEAD request without checking the response.

    Note that ``_check_response`` is not called, as there will be no
    response body to check.

    :param str url: The URL to make the request to.
    """
    with LOG_JWS_HEAD().context():
        return DeferredContext(
            self._send_request(u'HEAD', url, *args, **kwargs)
        ).addActionFinish()
python
Send HEAD request without checking the response. Note that ``_check_response`` is not called, as there will be no response body to check. :param str url: The URL to make the request to.
[ "Send", "HEAD", "request", "without", "checking", "the", "response", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L774-L786
twisted/txacme
src/txacme/client.py
JWSClient.get
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
    """
    Send GET request and check response.

    :param str method: The HTTP method to use.
    :param str url: The URL to make the request to.

    :raises txacme.client.ServerError: If server response body carries HTTP
        Problem (draft-ietf-appsawg-http-problem-00).
    :raises acme.errors.ClientError: In case of other protocol errors.

    :return: Deferred firing with the checked HTTP response.
    """
    with LOG_JWS_GET().context():
        return (
            DeferredContext(self._send_request(u'GET', url, **kwargs))
            .addCallback(self._check_response, content_type=content_type)
            .addActionFinish())
python
Send GET request and check response. :param str method: The HTTP method to use. :param str url: The URL to make the request to. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors. :return: Deferred firing with the checked HTTP response.
[ "Send", "GET", "request", "and", "check", "response", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L788-L805
twisted/txacme
src/txacme/client.py
JWSClient._add_nonce
def _add_nonce(self, response):
    """
    Store a nonce from a response we received.

    :param twisted.web.iweb.IResponse response: The HTTP response.

    :return: The response, unmodified.
    """
    nonce = response.headers.getRawHeaders(
        REPLAY_NONCE_HEADER, [None])[0]
    with LOG_JWS_ADD_NONCE(raw_nonce=nonce) as action:
        if nonce is None:
            raise errors.MissingNonce(response)
        else:
            try:
                decoded_nonce = Header._fields['nonce'].decode(
                    nonce.decode('ascii')
                )
                action.add_success_fields(nonce=decoded_nonce)
            except DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            self._nonces.add(decoded_nonce)
            return response
python
Store a nonce from a response we received. :param twisted.web.iweb.IResponse response: The HTTP response. :return: The response, unmodified.
[ "Store", "a", "nonce", "from", "a", "response", "we", "received", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L807-L829
twisted/txacme
src/txacme/client.py
JWSClient._get_nonce
def _get_nonce(self, url):
    """
    Get a nonce to use in a request, removing it from the nonces on hand.
    """
    action = LOG_JWS_GET_NONCE()
    if len(self._nonces) > 0:
        with action:
            nonce = self._nonces.pop()
            action.add_success_fields(nonce=nonce)
            return succeed(nonce)
    else:
        with action.context():
            return (
                DeferredContext(self.head(url))
                .addCallback(self._add_nonce)
                .addCallback(lambda _: self._nonces.pop())
                .addCallback(tap(
                    lambda nonce: action.add_success_fields(nonce=nonce)))
                .addActionFinish())
python
Get a nonce to use in a request, removing it from the nonces on hand.
[ "Get", "a", "nonce", "to", "use", "in", "a", "request", "removing", "it", "from", "the", "nonces", "on", "hand", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L831-L849
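Together, `_add_nonce` and `_get_nonce` maintain a pool of replay nonces: responses feed it, requests drain it, and an extra HEAD request refills it when empty. The bookkeeping, reduced to a plain synchronous class (a hypothetical sketch, with no Deferreds or logging):

class NoncePool:
    def __init__(self, fetch_nonce):
        self._nonces = set()
        self._fetch_nonce = fetch_nonce  # in txacme: a HEAD request to the server

    def add(self, nonce):
        self._nonces.add(nonce)

    def get(self):
        # Use a cached nonce if we have one, otherwise fetch a fresh one.
        if not self._nonces:
            self.add(self._fetch_nonce())
        return self._nonces.pop()

pool = NoncePool(fetch_nonce=lambda: 'fresh-nonce-from-head')
pool.add('nonce-from-last-response')
print(pool.get())  # 'nonce-from-last-response'
print(pool.get())  # 'fresh-nonce-from-head'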
twisted/txacme
src/txacme/client.py
JWSClient._post
def _post(self, url, obj, content_type, **kwargs):
    """
    POST an object and check the response.

    :param str url: The URL to request.
    :param ~josepy.interfaces.JSONDeSerializable obj: The serializable
        payload of the request.
    :param bytes content_type: The expected content type of the response.

    :raises txacme.client.ServerError: If server response body carries HTTP
        Problem (draft-ietf-appsawg-http-problem-00).
    :raises acme.errors.ClientError: In case of other protocol errors.
    """
    with LOG_JWS_POST().context():
        headers = kwargs.setdefault('headers', Headers())
        headers.setRawHeaders(b'content-type', [JSON_CONTENT_TYPE])
        return (
            DeferredContext(self._get_nonce(url))
            .addCallback(self._wrap_in_jws, obj)
            .addCallback(
                lambda data: self._send_request(
                    u'POST', url, data=data, **kwargs))
            .addCallback(self._add_nonce)
            .addCallback(self._check_response, content_type=content_type)
            .addActionFinish())
python
POST an object and check the response. :param str url: The URL to request. :param ~josepy.interfaces.JSONDeSerializable obj: The serializable payload of the request. :param bytes content_type: The expected content type of the response. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors.
[ "POST", "an", "object", "and", "check", "the", "response", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L851-L875
twisted/txacme
src/txacme/client.py
JWSClient.post
def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
    """
    POST an object and check the response.  Retry once if a badNonce error
    is received.

    :param str url: The URL to request.
    :param ~josepy.interfaces.JSONDeSerializable obj: The serializable
        payload of the request.
    :param bytes content_type: The expected content type of the response.
        By default, JSON.

    :raises txacme.client.ServerError: If server response body carries HTTP
        Problem (draft-ietf-appsawg-http-problem-00).
    :raises acme.errors.ClientError: In case of other protocol errors.
    """
    def retry_bad_nonce(f):
        f.trap(ServerError)
        # The current RFC draft defines the namespace as
        # urn:ietf:params:acme:error:<code>, but earlier drafts (and some
        # current implementations) use urn:acme:error:<code> instead. We
        # don't really care about the namespace here, just the error code.
        if f.value.message.typ.split(':')[-1] == 'badNonce':
            # If one nonce is bad, others likely are too. Let's clear them
            # and re-add the one we just got.
            self._nonces.clear()
            self._add_nonce(f.value.response)
            return self._post(url, obj, content_type, **kwargs)
        return f
    return (
        self._post(url, obj, content_type, **kwargs)
        .addErrback(retry_bad_nonce))
python
POST an object and check the response. Retry once if a badNonce error is received. :param str url: The URL to request. :param ~josepy.interfaces.JSONDeSerializable obj: The serializable payload of the request. :param bytes content_type: The expected content type of the response. By default, JSON. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors.
[ "POST", "an", "object", "and", "check", "the", "response", ".", "Retry", "once", "if", "a", "badNonce", "error", "is", "received", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/client.py#L877-L907
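The namespace-agnostic comparison in `retry_bad_nonce` works because both the RFC-style and the older draft-style error types end in the bare error code:

for typ in ('urn:ietf:params:acme:error:badNonce', 'urn:acme:error:badNonce'):
    print(typ.split(':')[-1] == 'badNonce')  # True for both namespaces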
twisted/txacme
src/txacme/challenges/_libcloud.py
_daemon_thread
def _daemon_thread(*a, **kw):
    """
    Create a `threading.Thread`, but always set ``daemon``.
    """
    thread = Thread(*a, **kw)
    thread.daemon = True
    return thread
python
Create a `threading.Thread`, but always set ``daemon``.
[ "Create", "a", "threading", ".", "Thread", "but", "always", "set", "daemon", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L18-L24
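Usage is the same as `threading.Thread`, except the resulting thread will not keep the interpreter alive at shutdown. A self-contained sketch:

import time
from threading import Thread

def _daemon_thread(*a, **kw):
    thread = Thread(*a, **kw)
    thread.daemon = True  # killed at interpreter exit instead of blocking it
    return thread

t = _daemon_thread(target=time.sleep, args=(3600,))
t.start()
# The process can exit immediately; the sleeping daemon thread is discarded.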
twisted/txacme
src/txacme/challenges/_libcloud.py
_defer_to_worker
def _defer_to_worker(deliver, worker, work, *args, **kwargs):
    """
    Run a task in a worker, delivering the result as a ``Deferred`` in the
    reactor thread.
    """
    deferred = Deferred()

    def wrapped_work():
        try:
            result = work(*args, **kwargs)
        except BaseException:
            f = Failure()
            deliver(lambda: deferred.errback(f))
        else:
            deliver(lambda: deferred.callback(result))
    worker.do(wrapped_work)
    return deferred
python
Run a task in a worker, delivering the result as a ``Deferred`` in the reactor thread.
[ "Run", "a", "task", "in", "a", "worker", "delivering", "the", "result", "as", "a", "Deferred", "in", "the", "reactor", "thread", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L27-L43
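Here `deliver` is typically something like `reactor.callFromThread` and `worker` wraps a real thread. With trivial synchronous stand-ins, the control flow can be exercised directly (the stub names are hypothetical; requires Twisted):

from twisted.internet.defer import Deferred
from twisted.python.failure import Failure

class ImmediateWorker:
    def do(self, work):
        work()  # a real worker would run this on another thread

def _defer_to_worker(deliver, worker, work, *args, **kwargs):
    deferred = Deferred()

    def wrapped_work():
        try:
            result = work(*args, **kwargs)
        except BaseException:
            f = Failure()
            deliver(lambda: deferred.errback(f))
        else:
            deliver(lambda: deferred.callback(result))
    worker.do(wrapped_work)
    return deferred

d = _defer_to_worker(lambda f: f(), ImmediateWorker(), lambda x: x * 2, 21)
d.addCallback(print)  # 42, fired synchronously by the immediate worker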
twisted/txacme
src/txacme/challenges/_libcloud.py
_split_zone
def _split_zone(server_name, zone_name):
    """
    Split the zone portion off from a DNS label.

    :param str server_name: The full DNS label.
    :param str zone_name: The zone name suffix.
    """
    server_name = server_name.rstrip(u'.')
    zone_name = zone_name.rstrip(u'.')
    if not (server_name == zone_name or
            server_name.endswith(u'.' + zone_name)):
        raise NotInZone(server_name=server_name, zone_name=zone_name)
    return server_name[:-len(zone_name)].rstrip(u'.')
python
Split the zone portion off from a DNS label. :param str server_name: The full DNS label. :param str zone_name: The zone name suffix.
[ "Split", "the", "zone", "portion", "off", "from", "a", "DNS", "label", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L46-L58
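For example, importing the private helper from the module path shown in this row:

from txacme.challenges._libcloud import _split_zone

print(_split_zone(u'_acme-challenge.www.example.com.', u'example.com'))
# '_acme-challenge.www'
print(_split_zone(u'example.com', u'example.com'))
# ''  (the label and the zone coincide)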
twisted/txacme
src/txacme/challenges/_libcloud.py
_get_existing
def _get_existing(driver, zone_name, server_name, validation):
    """
    Get existing validation records.
    """
    if zone_name is None:
        zones = sorted(
            (z for z in driver.list_zones()
             if server_name.rstrip(u'.')
             .endswith(u'.' + z.domain.rstrip(u'.'))),
            key=lambda z: len(z.domain),
            reverse=True)
        if len(zones) == 0:
            raise NotInZone(server_name=server_name, zone_name=None)
    else:
        zones = [
            z for z in driver.list_zones()
            if z.domain == zone_name]
        if len(zones) == 0:
            raise ZoneNotFound(zone_name=zone_name)
    zone = zones[0]
    subdomain = _split_zone(server_name, zone.domain)
    existing = [
        record for record in zone.list_records()
        if record.name == subdomain and
        record.type == 'TXT' and
        record.data == validation]
    return zone, existing, subdomain
python
Get existing validation records.
[ "Get", "existing", "validation", "records", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L61-L90
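When no zone name is configured, the longest matching zone suffix wins, i.e. the most specific zone. That selection in isolation, with fake zone objects and hypothetical data:

from collections import namedtuple

Zone = namedtuple('Zone', ['domain'])
zones = [Zone('example.com'), Zone('www.example.com'), Zone('other.org')]
server_name = '_acme-challenge.www.example.com'

matches = sorted(
    (z for z in zones
     if server_name.rstrip('.').endswith('.' + z.domain.rstrip('.'))),
    key=lambda z: len(z.domain), reverse=True)
print(matches[0].domain)  # 'www.example.com': the most specific zone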
twisted/txacme
src/txacme/challenges/_libcloud.py
_validation
def _validation(response):
    """
    Get the validation value for a challenge response.
    """
    h = hashlib.sha256(response.key_authorization.encode("utf-8"))
    return b64encode(h.digest()).decode()
python
Get the validation value for a challenge response.
[ "Get", "the", "validation", "value", "for", "a", "challenge", "response", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L93-L98
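This is the DNS-01 validation value: the SHA-256 digest of the key authorization, jose-base64 encoded. Assuming `b64encode` above is josepy's unpadded urlsafe variant, an equivalent standard-library sketch is:

import hashlib
from base64 import urlsafe_b64encode

def dns01_validation(key_authorization):
    # jose base64 = urlsafe alphabet with the '=' padding stripped.
    digest = hashlib.sha256(key_authorization.encode('utf-8')).digest()
    return urlsafe_b64encode(digest).rstrip(b'=').decode()

print(dns01_validation('token.account-key-thumbprint'))  # 43-character value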
twisted/txacme
src/txacme/endpoint.py
load_or_create_client_key
def load_or_create_client_key(pem_path):
    """
    Load the client key from a directory, creating it if it does not exist.

    ..  note:: The client key that will be created will be a 2048-bit RSA
        key.

    :type pem_path: ``twisted.python.filepath.FilePath``
    :param pem_path: The certificate directory to use, as with the endpoint.
    """
    acme_key_file = pem_path.asTextMode().child(u'client.key')
    if acme_key_file.exists():
        key = serialization.load_pem_private_key(
            acme_key_file.getContent(),
            password=None,
            backend=default_backend())
    else:
        key = generate_private_key(u'rsa')
        acme_key_file.setContent(
            key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.TraditionalOpenSSL,
                encryption_algorithm=serialization.NoEncryption()))
    return JWKRSA(key=key)
python
Load the client key from a directory, creating it if it does not exist. .. note:: The client key that will be created will be a 2048-bit RSA key. :type pem_path: ``twisted.python.filepath.FilePath`` :param pem_path: The certificate directory to use, as with the endpoint.
[ "Load", "the", "client", "key", "from", "a", "directory", "creating", "it", "if", "it", "does", "not", "exist", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/endpoint.py#L131-L154
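A hypothetical usage sketch: the first call generates and persists client.key, and a later call loads the same key back instead of creating a new one (the directory path here is illustrative):

from twisted.python.filepath import FilePath

from txacme.endpoint import load_or_create_client_key  # assumed import path

pem_dir = FilePath(b'/var/lib/txacme')  # assumed certificate directory
jwk = load_or_create_client_key(pem_dir)  # writes client.key on first run
# A second call finds the existing PEM file and returns the same key material.
jwk_again = load_or_create_client_key(pem_dir)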
twisted/txacme
src/txacme/endpoint.py
_parse
def _parse(reactor, directory, pemdir, *args, **kwargs): """ Parse a txacme endpoint description. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :param str pemdir: The path to the certificate directory to use. """ def colon_join(items): return ':'.join([item.replace(':', '\\:') for item in items]) sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()]) pem_path = FilePath(pemdir).asTextMode() acme_key = load_or_create_client_key(pem_path) return AutoTLSEndpoint( reactor=reactor, directory=directory, client_creator=partial(Client.from_url, key=acme_key, alg=RS256), cert_store=DirectoryStore(pem_path), cert_mapping=HostDirectoryMap(pem_path), sub_endpoint=serverFromString(reactor, sub))
python
def _parse(reactor, directory, pemdir, *args, **kwargs): """ Parse a txacme endpoint description. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :param str pemdir: The path to the certificate directory to use. """ def colon_join(items): return ':'.join([item.replace(':', '\\:') for item in items]) sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()]) pem_path = FilePath(pemdir).asTextMode() acme_key = load_or_create_client_key(pem_path) return AutoTLSEndpoint( reactor=reactor, directory=directory, client_creator=partial(Client.from_url, key=acme_key, alg=RS256), cert_store=DirectoryStore(pem_path), cert_mapping=HostDirectoryMap(pem_path), sub_endpoint=serverFromString(reactor, sub))
[ "def", "_parse", "(", "reactor", ",", "directory", ",", "pemdir", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "colon_join", "(", "items", ")", ":", "return", "':'", ".", "join", "(", "[", "item", ".", "replace", "(", "':'", ",", "'\\\\:'", ")", "for", "item", "in", "items", "]", ")", "sub", "=", "colon_join", "(", "list", "(", "args", ")", "+", "[", "'='", ".", "join", "(", "item", ")", "for", "item", "in", "kwargs", ".", "items", "(", ")", "]", ")", "pem_path", "=", "FilePath", "(", "pemdir", ")", ".", "asTextMode", "(", ")", "acme_key", "=", "load_or_create_client_key", "(", "pem_path", ")", "return", "AutoTLSEndpoint", "(", "reactor", "=", "reactor", ",", "directory", "=", "directory", ",", "client_creator", "=", "partial", "(", "Client", ".", "from_url", ",", "key", "=", "acme_key", ",", "alg", "=", "RS256", ")", ",", "cert_store", "=", "DirectoryStore", "(", "pem_path", ")", ",", "cert_mapping", "=", "HostDirectoryMap", "(", "pem_path", ")", ",", "sub_endpoint", "=", "serverFromString", "(", "reactor", ",", "sub", ")", ")" ]
Parse a txacme endpoint description. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :param str pemdir: The path to the certificate directory to use.
[ "Parse", "a", "txacme", "endpoint", "description", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/endpoint.py#L157-L177
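The colon_join helper above reassembles the leftover positional and keyword pieces into a Twisted server-endpoint description, escaping embedded colons. A standalone sketch with hypothetical pieces:

def colon_join(items):
    return ':'.join([item.replace(':', '\\:') for item in items])

args = ['tcp', '443']
kwargs = {'interface': '::1'}
sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()])
print(sub)  # tcp:443:interface=\:\:1 -- colons inside values get escaped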
alexwlchan/lazyreader
lazyreader.py
lazyread
def lazyread(f, delimiter): """ Generator which continually reads ``f`` to the next instance of ``delimiter``. This allows you to do batch processing on the contents of ``f`` without loading the entire file into memory. :param f: Any file-like object which has a ``.read()`` method. :param delimiter: Delimiter on which to split up the file. """ # Get an empty string to start with. We need to make sure that if the # file is opened in binary mode, we're using byte strings, and similar # for Unicode. Otherwise trying to update the running string will # hit a TypeError. try: running = f.read(0) except Exception as e: # The boto3 APIs don't let you read zero bytes from an S3 object, but # they always return bytestrings, so in this case we know what to # start with. if e.__class__.__name__ == 'IncompleteReadError': running = b'' else: raise while True: new_data = f.read(1024) # When a call to read() returns nothing, we're at the end of the file. if not new_data: yield running return # Otherwise, update the running stream and look for instances of # the delimiter. Remember we might have read more than one delimiter # since the last time we checked running += new_data while delimiter in running: curr, running = running.split(delimiter, 1) yield curr + delimiter
python
def lazyread(f, delimiter): """ Generator which continually reads ``f`` to the next instance of ``delimiter``. This allows you to do batch processing on the contents of ``f`` without loading the entire file into memory. :param f: Any file-like object which has a ``.read()`` method. :param delimiter: Delimiter on which to split up the file. """ # Get an empty string to start with. We need to make sure that if the # file is opened in binary mode, we're using byte strings, and similar # for Unicode. Otherwise trying to update the running string will # hit a TypeError. try: running = f.read(0) except Exception as e: # The boto3 APIs don't let you read zero bytes from an S3 object, but # they always return bytestrings, so in this case we know what to # start with. if e.__class__.__name__ == 'IncompleteReadError': running = b'' else: raise while True: new_data = f.read(1024) # When a call to read() returns nothing, we're at the end of the file. if not new_data: yield running return # Otherwise, update the running stream and look for instances of # the delimiter. Remember we might have read more than one delimiter # since the last time we checked running += new_data while delimiter in running: curr, running = running.split(delimiter, 1) yield curr + delimiter
[ "def", "lazyread", "(", "f", ",", "delimiter", ")", ":", "# Get an empty string to start with. We need to make sure that if the", "# file is opened in binary mode, we're using byte strings, and similar", "# for Unicode. Otherwise trying to update the running string will", "# hit a TypeError.", "try", ":", "running", "=", "f", ".", "read", "(", "0", ")", "except", "Exception", "as", "e", ":", "# The boto3 APIs don't let you read zero bytes from an S3 object, but", "# they always return bytestrings, so in this case we know what to", "# start with.", "if", "e", ".", "__class__", ".", "__name__", "==", "'IncompleteReadError'", ":", "running", "=", "b''", "else", ":", "raise", "while", "True", ":", "new_data", "=", "f", ".", "read", "(", "1024", ")", "# When a call to read() returns nothing, we're at the end of the file.", "if", "not", "new_data", ":", "yield", "running", "return", "# Otherwise, update the running stream and look for instances of", "# the delimiter. Remember we might have read more than one delimiter", "# since the last time we checked", "running", "+=", "new_data", "while", "delimiter", "in", "running", ":", "curr", ",", "running", "=", "running", ".", "split", "(", "delimiter", ",", "1", ")", "yield", "curr", "+", "delimiter" ]
Generator which continually reads ``f`` to the next instance of ``delimiter``. This allows you to do batch processing on the contents of ``f`` without loading the entire file into memory. :param f: Any file-like object which has a ``.read()`` method. :param delimiter: Delimiter on which to split up the file.
[ "Generator", "which", "continually", "reads", "f", "to", "the", "next", "instance", "of", "delimiter", "." ]
train
https://github.com/alexwlchan/lazyreader/blob/918c408efba015efc1d67b05d1e4b373ac9d1192/lazyreader.py#L3-L44
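A usage sketch for lazyread, using an in-memory stream so the example is self-contained (the import path is assumed from the module name above). Each yielded chunk keeps its trailing delimiter, except the final remainder:

from io import BytesIO

from lazyreader import lazyread  # assumed import path

stream = BytesIO(b'alpha;beta;gamma')
for chunk in lazyread(stream, delimiter=b';'):
    print(chunk)
# b'alpha;'
# b'beta;'
# b'gamma'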
twisted/txacme
src/txacme/util.py
generate_private_key
def generate_private_key(key_type): """ Generate a random private key using sensible parameters. :param str key_type: The type of key to generate. One of: ``rsa``. """ if key_type == u'rsa': return rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend()) raise ValueError(key_type)
python
def generate_private_key(key_type): """ Generate a random private key using sensible parameters. :param str key_type: The type of key to generate. One of: ``rsa``. """ if key_type == u'rsa': return rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend()) raise ValueError(key_type)
[ "def", "generate_private_key", "(", "key_type", ")", ":", "if", "key_type", "==", "u'rsa'", ":", "return", "rsa", ".", "generate_private_key", "(", "public_exponent", "=", "65537", ",", "key_size", "=", "2048", ",", "backend", "=", "default_backend", "(", ")", ")", "raise", "ValueError", "(", "key_type", ")" ]
Generate a random private key using sensible parameters. :param str key_type: The type of key to generate. One of: ``rsa``.
[ "Generate", "a", "random", "private", "key", "using", "sensible", "parameters", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L20-L29
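Hypothetical usage: generate a key and dump its public half as PEM, using the same cryptography primitives the function itself relies on:

from cryptography.hazmat.primitives import serialization

from txacme.util import generate_private_key  # assumed import path

key = generate_private_key(u'rsa')
pem = key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo)
print(pem.decode())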
twisted/txacme
src/txacme/util.py
generate_tls_sni_01_cert
def generate_tls_sni_01_cert(server_name, key_type=u'rsa', _generate_private_key=None): """ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. """ key = (_generate_private_key or generate_private_key)(key_type) name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')]) cert = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(datetime.now() - timedelta(seconds=3600)) .not_valid_after(datetime.now() + timedelta(seconds=3600)) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .add_extension( x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return (cert, key)
python
def generate_tls_sni_01_cert(server_name, key_type=u'rsa', _generate_private_key=None): """ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. """ key = (_generate_private_key or generate_private_key)(key_type) name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')]) cert = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(datetime.now() - timedelta(seconds=3600)) .not_valid_after(datetime.now() + timedelta(seconds=3600)) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .add_extension( x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return (cert, key)
[ "def", "generate_tls_sni_01_cert", "(", "server_name", ",", "key_type", "=", "u'rsa'", ",", "_generate_private_key", "=", "None", ")", ":", "key", "=", "(", "_generate_private_key", "or", "generate_private_key", ")", "(", "key_type", ")", "name", "=", "x509", ".", "Name", "(", "[", "x509", ".", "NameAttribute", "(", "NameOID", ".", "COMMON_NAME", ",", "u'acme.invalid'", ")", "]", ")", "cert", "=", "(", "x509", ".", "CertificateBuilder", "(", ")", ".", "subject_name", "(", "name", ")", ".", "issuer_name", "(", "name", ")", ".", "not_valid_before", "(", "datetime", ".", "now", "(", ")", "-", "timedelta", "(", "seconds", "=", "3600", ")", ")", ".", "not_valid_after", "(", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "3600", ")", ")", ".", "serial_number", "(", "int", "(", "uuid", ".", "uuid4", "(", ")", ")", ")", ".", "public_key", "(", "key", ".", "public_key", "(", ")", ")", ".", "add_extension", "(", "x509", ".", "SubjectAlternativeName", "(", "[", "x509", ".", "DNSName", "(", "server_name", ")", "]", ")", ",", "critical", "=", "False", ")", ".", "sign", "(", "private_key", "=", "key", ",", "algorithm", "=", "hashes", ".", "SHA256", "(", ")", ",", "backend", "=", "default_backend", "(", ")", ")", ")", "return", "(", "cert", ",", "key", ")" ]
Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key.
[ "Generate", "a", "certificate", "/", "key", "pair", "for", "responding", "to", "a", "tls", "-", "sni", "-", "01", "challenge", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L32-L62
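Hypothetical usage: build the throwaway pair and read the SAN back out of the resulting certificate:

from cryptography import x509

from txacme.util import generate_tls_sni_01_cert  # assumed import path

cert, key = generate_tls_sni_01_cert(u'example.invalid')
san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
print(san.value.get_values_for_type(x509.DNSName))  # ['example.invalid']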
twisted/txacme
src/txacme/util.py
tap
def tap(f): """ "Tap" a Deferred callback chain with a function whose return value is ignored. """ @wraps(f) def _cb(res, *a, **kw): d = maybeDeferred(f, res, *a, **kw) d.addCallback(lambda ignored: res) return d return _cb
python
def tap(f): """ "Tap" a Deferred callback chain with a function whose return value is ignored. """ @wraps(f) def _cb(res, *a, **kw): d = maybeDeferred(f, res, *a, **kw) d.addCallback(lambda ignored: res) return d return _cb
[ "def", "tap", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_cb", "(", "res", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "d", "=", "maybeDeferred", "(", "f", ",", "res", ",", "*", "a", ",", "*", "*", "kw", ")", "d", ".", "addCallback", "(", "lambda", "ignored", ":", "res", ")", "return", "d", "return", "_cb" ]
"Tap" a Deferred callback chain with a function whose return value is ignored.
[ "Tap", "a", "Deferred", "callback", "chain", "with", "a", "function", "whose", "return", "value", "is", "ignored", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L65-L75
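A usage sketch for tap: the tapped function runs for its side effect only, and the original result keeps flowing down the Deferred chain (Python 3 syntax here, purely for the demonstration; the import path is assumed):

from twisted.internet.defer import succeed

from txacme.util import tap  # assumed import path

def record(value):
    print('observed:', value)
    return 'ignored'  # tap discards this return value

d = succeed(42)
d.addCallback(tap(record))         # prints "observed: 42"
d.addCallback(lambda v: print(v))  # prints 42, not "ignored"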
twisted/txacme
src/txacme/util.py
decode_csr
def decode_csr(b64der): """ Decode JOSE Base-64 DER-encoded CSR. :param str b64der: The encoded CSR. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The decoded CSR. """ try: return x509.load_der_x509_csr( decode_b64jose(b64der), default_backend()) except ValueError as error: raise DeserializationError(error)
python
def decode_csr(b64der): """ Decode JOSE Base-64 DER-encoded CSR. :param str b64der: The encoded CSR. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The decoded CSR. """ try: return x509.load_der_x509_csr( decode_b64jose(b64der), default_backend()) except ValueError as error: raise DeserializationError(error)
[ "def", "decode_csr", "(", "b64der", ")", ":", "try", ":", "return", "x509", ".", "load_der_x509_csr", "(", "decode_b64jose", "(", "b64der", ")", ",", "default_backend", "(", ")", ")", "except", "ValueError", "as", "error", ":", "raise", "DeserializationError", "(", "error", ")" ]
Decode JOSE Base-64 DER-encoded CSR. :param str b64der: The encoded CSR. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The decoded CSR.
[ "Decode", "JOSE", "Base", "-", "64", "DER", "-", "encoded", "CSR", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L89-L102
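A round-trip sketch, assuming decode_b64jose accepts unpadded URL-safe Base64 (which the stdlib encoding below produces) and that decode_csr is importable from txacme.util:

from base64 import urlsafe_b64encode

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509.oid import NameOID

from txacme.util import decode_csr  # assumed import path

key = rsa.generate_private_key(65537, 2048, default_backend())
csr = (x509.CertificateSigningRequestBuilder()
       .subject_name(x509.Name(
           [x509.NameAttribute(NameOID.COMMON_NAME, u'example.com')]))
       .sign(key, hashes.SHA256(), default_backend()))

# JOSE-style (unpadded, URL-safe) base64 of the DER bytes, then decode back.
b64der = urlsafe_b64encode(csr.public_bytes(Encoding.DER)).rstrip(b'=')
assert decode_csr(b64der.decode()).subject == csr.subject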
twisted/txacme
src/txacme/util.py
csr_for_names
def csr_for_names(names, key): """ Generate a certificate signing request for the given names and private key. .. seealso:: `acme.client.Client.request_issuance` .. seealso:: `generate_private_key` :param ``List[str]``: One or more names (subjectAltName) for which to request a certificate. :param key: A Cryptography private key object. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The certificate request message. """ if len(names) == 0: raise ValueError('Must have at least one name') if len(names[0]) > 64: common_name = u'san.too.long.invalid' else: common_name = names[0] return ( x509.CertificateSigningRequestBuilder() .subject_name(x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, common_name)])) .add_extension( x509.SubjectAlternativeName(list(map(x509.DNSName, names))), critical=False) .sign(key, hashes.SHA256(), default_backend()))
python
def csr_for_names(names, key): """ Generate a certificate signing request for the given names and private key. .. seealso:: `acme.client.Client.request_issuance` .. seealso:: `generate_private_key` :param ``List[str]``: One or more names (subjectAltName) for which to request a certificate. :param key: A Cryptography private key object. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The certificate request message. """ if len(names) == 0: raise ValueError('Must have at least one name') if len(names[0]) > 64: common_name = u'san.too.long.invalid' else: common_name = names[0] return ( x509.CertificateSigningRequestBuilder() .subject_name(x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, common_name)])) .add_extension( x509.SubjectAlternativeName(list(map(x509.DNSName, names))), critical=False) .sign(key, hashes.SHA256(), default_backend()))
[ "def", "csr_for_names", "(", "names", ",", "key", ")", ":", "if", "len", "(", "names", ")", "==", "0", ":", "raise", "ValueError", "(", "'Must have at least one name'", ")", "if", "len", "(", "names", "[", "0", "]", ")", ">", "64", ":", "common_name", "=", "u'san.too.long.invalid'", "else", ":", "common_name", "=", "names", "[", "0", "]", "return", "(", "x509", ".", "CertificateSigningRequestBuilder", "(", ")", ".", "subject_name", "(", "x509", ".", "Name", "(", "[", "x509", ".", "NameAttribute", "(", "NameOID", ".", "COMMON_NAME", ",", "common_name", ")", "]", ")", ")", ".", "add_extension", "(", "x509", ".", "SubjectAlternativeName", "(", "list", "(", "map", "(", "x509", ".", "DNSName", ",", "names", ")", ")", ")", ",", "critical", "=", "False", ")", ".", "sign", "(", "key", ",", "hashes", ".", "SHA256", "(", ")", ",", "default_backend", "(", ")", ")", ")" ]
Generate a certificate signing request for the given names and private key. .. seealso:: `acme.client.Client.request_issuance` .. seealso:: `generate_private_key` :param ``List[str]``: One or more names (subjectAltName) for which to request a certificate. :param key: A Cryptography private key object. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The certificate request message.
[ "Generate", "a", "certificate", "signing", "request", "for", "the", "given", "names", "and", "private", "key", "." ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L105-L133
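Hypothetical usage, pairing it with generate_private_key from the same module; the common name is taken from the first (sufficiently short) name:

from txacme.util import csr_for_names, generate_private_key  # assumed paths

key = generate_private_key(u'rsa')
csr = csr_for_names([u'example.com', u'www.example.com'], key)
print(csr.subject)             # CN=example.com
print(csr.is_signature_valid)  # True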
samuelcolvin/python-devtools
devtools/debug.py
Debug._wrap_parse
def _wrap_parse(code, filename): """ async wrapper is required to avoid await calls raising a SyntaxError """ code = 'async def wrapper():\n' + indent(code, ' ') return ast.parse(code, filename=filename).body[0].body[0].value
python
def _wrap_parse(code, filename): """ async wrapper is required to avoid await calls raising a SyntaxError """ code = 'async def wrapper():\n' + indent(code, ' ') return ast.parse(code, filename=filename).body[0].body[0].value
[ "def", "_wrap_parse", "(", "code", ",", "filename", ")", ":", "code", "=", "'async def wrapper():\\n'", "+", "indent", "(", "code", ",", "' '", ")", "return", "ast", ".", "parse", "(", "code", ",", "filename", "=", "filename", ")", ".", "body", "[", "0", "]", ".", "body", "[", "0", "]", ".", "value" ]
async wrapper is required to avoid await calls raising a SyntaxError
[ "async", "wrapper", "is", "required", "to", "avoid", "await", "calls", "raising", "a", "SyntaxError" ]
train
https://github.com/samuelcolvin/python-devtools/blob/fb0021b3e6815348a28c1d2bf11b50b8f0bd511a/devtools/debug.py#L274-L279
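The trick above is easy to verify standalone: `await` is a syntax error at module level, but parses once wrapped in an async function body, and the expression is then dug back out as body[0].body[0].value:

import ast
from textwrap import indent

code = 'await fetch()'  # hypothetical expression containing await
wrapped = 'async def wrapper():\n' + indent(code, ' ')
# body[0] is the AsyncFunctionDef, .body[0] the Expr, .value the Await node.
node = ast.parse(wrapped, filename='<demo>').body[0].body[0].value
print(type(node).__name__)  # Await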
twisted/txacme
docs/conf.py
linkcode_resolve
def linkcode_resolve(domain, info): """ Determine the URL corresponding to Python object """ if domain != 'py': return None modname = info['module'] fullname = info['fullname'] submod = sys.modules.get(modname) if submod is None: return None obj = submod for part in fullname.split('.'): try: obj = getattr(obj, part) except: return None try: fn = inspect.getsourcefile(obj) except: fn = None if not fn: return None try: source, lineno = inspect.findsource(obj) except: lineno = None if lineno: linespec = "#L%d" % (lineno + 1) else: linespec = "" fn = relpath(fn, start='..') return "https://github.com/mithrandi/txacme/blob/%s/%s%s" % ( txacme_version_info['full-revisionid'], fn, linespec)
python
def linkcode_resolve(domain, info): """ Determine the URL corresponding to Python object """ if domain != 'py': return None modname = info['module'] fullname = info['fullname'] submod = sys.modules.get(modname) if submod is None: return None obj = submod for part in fullname.split('.'): try: obj = getattr(obj, part) except: return None try: fn = inspect.getsourcefile(obj) except: fn = None if not fn: return None try: source, lineno = inspect.findsource(obj) except: lineno = None if lineno: linespec = "#L%d" % (lineno + 1) else: linespec = "" fn = relpath(fn, start='..') return "https://github.com/mithrandi/txacme/blob/%s/%s%s" % ( txacme_version_info['full-revisionid'], fn, linespec)
[ "def", "linkcode_resolve", "(", "domain", ",", "info", ")", ":", "if", "domain", "!=", "'py'", ":", "return", "None", "modname", "=", "info", "[", "'module'", "]", "fullname", "=", "info", "[", "'fullname'", "]", "submod", "=", "sys", ".", "modules", ".", "get", "(", "modname", ")", "if", "submod", "is", "None", ":", "return", "None", "obj", "=", "submod", "for", "part", "in", "fullname", ".", "split", "(", "'.'", ")", ":", "try", ":", "obj", "=", "getattr", "(", "obj", ",", "part", ")", "except", ":", "return", "None", "try", ":", "fn", "=", "inspect", ".", "getsourcefile", "(", "obj", ")", "except", ":", "fn", "=", "None", "if", "not", "fn", ":", "return", "None", "try", ":", "source", ",", "lineno", "=", "inspect", ".", "findsource", "(", "obj", ")", "except", ":", "lineno", "=", "None", "if", "lineno", ":", "linespec", "=", "\"#L%d\"", "%", "(", "lineno", "+", "1", ")", "else", ":", "linespec", "=", "\"\"", "fn", "=", "relpath", "(", "fn", ",", "start", "=", "'..'", ")", "return", "\"https://github.com/mithrandi/txacme/blob/%s/%s%s\"", "%", "(", "txacme_version_info", "[", "'full-revisionid'", "]", ",", "fn", ",", "linespec", ")" ]
Determine the URL corresponding to Python object
[ "Determine", "the", "URL", "corresponding", "to", "Python", "object" ]
train
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/docs/conf.py#L326-L359
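A hypothetical invocation, mimicking what sphinx.ext.linkcode does for each documented object during the docs build; the module and fullname values here are illustrative:

url = linkcode_resolve('py', {'module': 'txacme.util', 'fullname': 'tap'})
print(url)
# e.g. https://github.com/mithrandi/txacme/blob/<revision>/src/txacme/util.py#L66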
cga-harvard/Hypermap-Registry
hypermap/aggregator/solr.py
SolrHypermap.layers_to_solr
def layers_to_solr(self, layers): """ Sync n layers in Solr. """ layers_dict_list = [] layers_success_ids = [] layers_errors_ids = [] for layer in layers: layer_dict, message = layer2dict(layer) if not layer_dict: layers_errors_ids.append([layer.id, message]) LOGGER.error(message) else: layers_dict_list.append(layer_dict) layers_success_ids.append(layer.id) layers_json = json.dumps(layers_dict_list) try: url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL headers = {"content-type": "application/json"} params = {"commitWithin": 1500} requests.post(url_solr_update, data=layers_json, params=params, headers=headers) LOGGER.info('Solr synced for the given layers') except Exception: message = "Error saving solr records: %s" % sys.exc_info()[1] layers_errors_ids.append([-1, message]) LOGGER.error(message) return False, layers_errors_ids return True, layers_errors_ids
python
def layers_to_solr(self, layers): """ Sync n layers in Solr. """ layers_dict_list = [] layers_success_ids = [] layers_errors_ids = [] for layer in layers: layer_dict, message = layer2dict(layer) if not layer_dict: layers_errors_ids.append([layer.id, message]) LOGGER.error(message) else: layers_dict_list.append(layer_dict) layers_success_ids.append(layer.id) layers_json = json.dumps(layers_dict_list) try: url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL headers = {"content-type": "application/json"} params = {"commitWithin": 1500} requests.post(url_solr_update, data=layers_json, params=params, headers=headers) LOGGER.info('Solr synced for the given layers') except Exception: message = "Error saving solr records: %s" % sys.exc_info()[1] layers_errors_ids.append([-1, message]) LOGGER.error(message) return False, layers_errors_ids return True, layers_errors_ids
[ "def", "layers_to_solr", "(", "self", ",", "layers", ")", ":", "layers_dict_list", "=", "[", "]", "layers_success_ids", "=", "[", "]", "layers_errors_ids", "=", "[", "]", "for", "layer", "in", "layers", ":", "layer_dict", ",", "message", "=", "layer2dict", "(", "layer", ")", "if", "not", "layer_dict", ":", "layers_errors_ids", ".", "append", "(", "[", "layer", ".", "id", ",", "message", "]", ")", "LOGGER", ".", "error", "(", "message", ")", "else", ":", "layers_dict_list", ".", "append", "(", "layer_dict", ")", "layers_success_ids", ".", "append", "(", "layer", ".", "id", ")", "layers_json", "=", "json", ".", "dumps", "(", "layers_dict_list", ")", "try", ":", "url_solr_update", "=", "'%s/solr/hypermap/update/json/docs'", "%", "SEARCH_URL", "headers", "=", "{", "\"content-type\"", ":", "\"application/json\"", "}", "params", "=", "{", "\"commitWithin\"", ":", "1500", "}", "requests", ".", "post", "(", "url_solr_update", ",", "data", "=", "layers_json", ",", "params", "=", "params", ",", "headers", "=", "headers", ")", "LOGGER", ".", "info", "(", "'Solr synced for the given layers'", ")", "except", "Exception", ":", "message", "=", "\"Error saving solr records: %s\"", "%", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "layers_errors_ids", ".", "append", "(", "[", "-", "1", ",", "message", "]", ")", "LOGGER", ".", "error", "(", "message", ")", "return", "False", ",", "layers_errors_ids", "return", "True", ",", "layers_errors_ids" ]
Sync n layers in Solr.
[ "Sync", "n", "layers", "in", "Solr", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L19-L50
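The Solr call at the heart of the method, as a standalone sketch with a hypothetical SEARCH_URL and a hand-made document in place of the serialized layers:

import json

import requests

SEARCH_URL = 'http://localhost:8983'  # assumed Solr base URL
docs = [{'uuid': 'abc-123', 'title': 'A test layer'}]  # hypothetical layer dict

requests.post(
    '%s/solr/hypermap/update/json/docs' % SEARCH_URL,
    data=json.dumps(docs),
    params={'commitWithin': 1500},  # auto-commit within 1.5 seconds
    headers={'content-type': 'application/json'})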
cga-harvard/Hypermap-Registry
hypermap/aggregator/solr.py
SolrHypermap.layer_to_solr
def layer_to_solr(self, layer): """ Sync a layer in Solr. """ success = True message = 'Synced layer id %s to Solr' % layer.id layer_dict, message = layer2dict(layer) if not layer_dict: success = False else: layer_json = json.dumps(layer_dict) try: url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL headers = {"content-type": "application/json"} params = {"commitWithin": 1500} res = requests.post(url_solr_update, data=layer_json, params=params, headers=headers) res = res.json() if 'error' in res: success = False message = "Error syncing layer id %s to Solr: %s" % (layer.id, res["error"].get("msg")) except Exception, e: success = False message = "Error syncing layer id %s to Solr: %s" % (layer.id, sys.exc_info()[1]) LOGGER.error(e, exc_info=True) if success: LOGGER.info(message) else: LOGGER.error(message) return success, message
python
def layer_to_solr(self, layer): """ Sync a layer in Solr. """ success = True message = 'Synced layer id %s to Solr' % layer.id layer_dict, message = layer2dict(layer) if not layer_dict: success = False else: layer_json = json.dumps(layer_dict) try: url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL headers = {"content-type": "application/json"} params = {"commitWithin": 1500} res = requests.post(url_solr_update, data=layer_json, params=params, headers=headers) res = res.json() if 'error' in res: success = False message = "Error syncing layer id %s to Solr: %s" % (layer.id, res["error"].get("msg")) except Exception, e: success = False message = "Error syncing layer id %s to Solr: %s" % (layer.id, sys.exc_info()[1]) LOGGER.error(e, exc_info=True) if success: LOGGER.info(message) else: LOGGER.error(message) return success, message
[ "def", "layer_to_solr", "(", "self", ",", "layer", ")", ":", "success", "=", "True", "message", "=", "'Synced layer id %s to Solr'", "%", "layer", ".", "id", "layer_dict", ",", "message", "=", "layer2dict", "(", "layer", ")", "if", "not", "layer_dict", ":", "success", "=", "False", "else", ":", "layer_json", "=", "json", ".", "dumps", "(", "layer_dict", ")", "try", ":", "url_solr_update", "=", "'%s/solr/hypermap/update/json/docs'", "%", "SEARCH_URL", "headers", "=", "{", "\"content-type\"", ":", "\"application/json\"", "}", "params", "=", "{", "\"commitWithin\"", ":", "1500", "}", "res", "=", "requests", ".", "post", "(", "url_solr_update", ",", "data", "=", "layer_json", ",", "params", "=", "params", ",", "headers", "=", "headers", ")", "res", "=", "res", ".", "json", "(", ")", "if", "'error'", "in", "res", ":", "success", "=", "False", "message", "=", "\"Error syncing layer id %s to Solr: %s\"", "%", "(", "layer", ".", "id", ",", "res", "[", "\"error\"", "]", ".", "get", "(", "\"msg\"", ")", ")", "except", "Exception", ",", "e", ":", "success", "=", "False", "message", "=", "\"Error syncing layer id %s to Solr: %s\"", "%", "(", "layer", ".", "id", ",", "sys", ".", "exc_info", "(", ")", "[", "1", "]", ")", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "if", "success", ":", "LOGGER", ".", "info", "(", "message", ")", "else", ":", "LOGGER", ".", "error", "(", "message", ")", "return", "success", ",", "message" ]
Sync a layer in Solr.
[ "Sync", "a", "layer", "in", "Solr", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L52-L81
cga-harvard/Hypermap-Registry
hypermap/aggregator/solr.py
SolrHypermap.clear_solr
def clear_solr(self, catalog="hypermap"): """Clear all indexes in the solr core""" solr_url = "{0}/solr/{1}".format(SEARCH_URL, catalog) solr = pysolr.Solr(solr_url, timeout=60) solr.delete(q='*:*') LOGGER.debug('Solr core cleared')
python
def clear_solr(self, catalog="hypermap"): """Clear all indexes in the solr core""" solr_url = "{0}/solr/{1}".format(SEARCH_URL, catalog) solr = pysolr.Solr(solr_url, timeout=60) solr.delete(q='*:*') LOGGER.debug('Solr core cleared')
[ "def", "clear_solr", "(", "self", ",", "catalog", "=", "\"hypermap\"", ")", ":", "solr_url", "=", "\"{0}/solr/{1}\"", ".", "format", "(", "SEARCH_URL", ",", "catalog", ")", "solr", "=", "pysolr", ".", "Solr", "(", "solr_url", ",", "timeout", "=", "60", ")", "solr", ".", "delete", "(", "q", "=", "'*:*'", ")", "LOGGER", ".", "debug", "(", "'Solr core cleared'", ")" ]
Clear all indexes in the solr core
[ "Clear", "all", "indexes", "in", "the", "solr", "core" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L83-L88
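An equivalent standalone sketch, assuming a local Solr instance with a "hypermap" core:

import pysolr

solr = pysolr.Solr('http://localhost:8983/solr/hypermap', timeout=60)
solr.delete(q='*:*')  # removes every document in the core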
cga-harvard/Hypermap-Registry
hypermap/aggregator/solr.py
SolrHypermap.update_schema
def update_schema(self, catalog="hypermap"): """ set the mapping in solr. :param catalog: core :return: """ schema_url = "{0}/solr/{1}/schema".format(SEARCH_URL, catalog) print schema_url # create a special type to draw better heatmaps. location_rpt_quad_5m_payload = { "add-field-type": { "name": "location_rpt_quad_5m", "class": "solr.SpatialRecursivePrefixTreeFieldType", "geo": False, "worldBounds": "ENVELOPE(-180, 180, 180, -180)", "prefixTree": "packedQuad", "distErrPct": "0.025", "maxDistErr": "0.001", "distanceUnits": "degrees" } } requests.post(schema_url, json=location_rpt_quad_5m_payload) # create a special type to implement ngrm text for search. text_ngrm_payload = { "add-field-type": { "name": "text_ngrm", "class": "solr.TextField", "positionIncrementGap": "100", "indexAnalyzer": { "tokenizer": { "class": "solr.WhitespaceTokenizerFactory" }, "filters": [ { "class": "solr.NGramFilterFactory", "minGramSize": "1", "maxGramSize": "50" }, { "class": "solr.LowerCaseFilterFactory" } ] }, "queryAnalyzer": { "tokenizer": { "class": "solr.WhitespaceTokenizerFactory" }, "filters": [ { "class": "solr.LowerCaseFilterFactory", } ] } } } requests.post(schema_url, json=text_ngrm_payload) # now the other fields fields = [ {"name": "abstract", "type": "string"}, {"name": "abstract_txt", "type": "text_ngrm"}, {"name": "area", "type": "pdouble"}, {"name": "availability", "type": "string"}, {"name": "bbox", "type": "location_rpt_quad_5m"}, {"name": "domain_name", "type": "string"}, {"name": "is_public", "type": "boolean"}, {"name": "is_valid", "type": "boolean"}, {"name": "keywords", "type": "string", "multiValued": True}, {"name": "last_status", "type": "boolean"}, {"name": "layer_category", "type": "string"}, {"name": "layer_date", "type": "pdate", "docValues": True}, {"name": "layer_datetype", "type": "string"}, {"name": "layer_id", "type": "plong"}, {"name": "layer_originator", "type": "string"}, {"name": "layer_originator_txt", "type": "text_ngrm"}, {"name": "layer_username", "type": "string"}, {"name": "layer_username_txt", "type": "text_ngrm"}, {"name": "location", "type": "string"}, {"name": "max_x", "type": "pdouble"}, {"name": "max_y", "type": "pdouble"}, {"name": "min_x", "type": "pdouble"}, {"name": "min_y", "type": "pdouble"}, {"name": "name", "type": "string"}, {"name": "recent_reliability", "type": "pdouble"}, {"name": "reliability", "type": "pdouble"}, {"name": "service_id", "type": "plong"}, {"name": "service_type", "type": "string"}, {"name": "srs", "type": "string", "multiValued": True}, {"name": "tile_url", "type": "string"}, {"name": "title", "type": "string"}, {"name": "title_txt", "type": "text_ngrm"}, {"name": "type", "type": "string"}, {"name": "url", "type": "string"}, {"name": "uuid", "type": "string", "required": True}, {"name": "centroid_y", "type": "pdouble"}, {"name": "centroid_x", "type": "pdouble"}, ] copy_fields = [ {"source": "*", "dest": "_text_"}, {"source": "title", "dest": "title_txt"}, {"source": "abstract", "dest": "abstract_txt"}, {"source": "layer_originator", "dest": "layer_originator_txt"}, {"source": "layer_username", "dest": "layer_username_txt"}, ] headers = { "Content-type": "application/json" } for field in fields: data = { "add-field": field } requests.post(schema_url, json=data, headers=headers) for field in copy_fields: data = { "add-copy-field": field } print data requests.post(schema_url, json=data, headers=headers)
python
def update_schema(self, catalog="hypermap"): """ set the mapping in solr. :param catalog: core :return: """ schema_url = "{0}/solr/{1}/schema".format(SEARCH_URL, catalog) print schema_url # create a special type to draw better heatmaps. location_rpt_quad_5m_payload = { "add-field-type": { "name": "location_rpt_quad_5m", "class": "solr.SpatialRecursivePrefixTreeFieldType", "geo": False, "worldBounds": "ENVELOPE(-180, 180, 180, -180)", "prefixTree": "packedQuad", "distErrPct": "0.025", "maxDistErr": "0.001", "distanceUnits": "degrees" } } requests.post(schema_url, json=location_rpt_quad_5m_payload) # create a special type to implement ngrm text for search. text_ngrm_payload = { "add-field-type": { "name": "text_ngrm", "class": "solr.TextField", "positionIncrementGap": "100", "indexAnalyzer": { "tokenizer": { "class": "solr.WhitespaceTokenizerFactory" }, "filters": [ { "class": "solr.NGramFilterFactory", "minGramSize": "1", "maxGramSize": "50" }, { "class": "solr.LowerCaseFilterFactory" } ] }, "queryAnalyzer": { "tokenizer": { "class": "solr.WhitespaceTokenizerFactory" }, "filters": [ { "class": "solr.LowerCaseFilterFactory", } ] } } } requests.post(schema_url, json=text_ngrm_payload) # now the other fields fields = [ {"name": "abstract", "type": "string"}, {"name": "abstract_txt", "type": "text_ngrm"}, {"name": "area", "type": "pdouble"}, {"name": "availability", "type": "string"}, {"name": "bbox", "type": "location_rpt_quad_5m"}, {"name": "domain_name", "type": "string"}, {"name": "is_public", "type": "boolean"}, {"name": "is_valid", "type": "boolean"}, {"name": "keywords", "type": "string", "multiValued": True}, {"name": "last_status", "type": "boolean"}, {"name": "layer_category", "type": "string"}, {"name": "layer_date", "type": "pdate", "docValues": True}, {"name": "layer_datetype", "type": "string"}, {"name": "layer_id", "type": "plong"}, {"name": "layer_originator", "type": "string"}, {"name": "layer_originator_txt", "type": "text_ngrm"}, {"name": "layer_username", "type": "string"}, {"name": "layer_username_txt", "type": "text_ngrm"}, {"name": "location", "type": "string"}, {"name": "max_x", "type": "pdouble"}, {"name": "max_y", "type": "pdouble"}, {"name": "min_x", "type": "pdouble"}, {"name": "min_y", "type": "pdouble"}, {"name": "name", "type": "string"}, {"name": "recent_reliability", "type": "pdouble"}, {"name": "reliability", "type": "pdouble"}, {"name": "service_id", "type": "plong"}, {"name": "service_type", "type": "string"}, {"name": "srs", "type": "string", "multiValued": True}, {"name": "tile_url", "type": "string"}, {"name": "title", "type": "string"}, {"name": "title_txt", "type": "text_ngrm"}, {"name": "type", "type": "string"}, {"name": "url", "type": "string"}, {"name": "uuid", "type": "string", "required": True}, {"name": "centroid_y", "type": "pdouble"}, {"name": "centroid_x", "type": "pdouble"}, ] copy_fields = [ {"source": "*", "dest": "_text_"}, {"source": "title", "dest": "title_txt"}, {"source": "abstract", "dest": "abstract_txt"}, {"source": "layer_originator", "dest": "layer_originator_txt"}, {"source": "layer_username", "dest": "layer_username_txt"}, ] headers = { "Content-type": "application/json" } for field in fields: data = { "add-field": field } requests.post(schema_url, json=data, headers=headers) for field in copy_fields: data = { "add-copy-field": field } print data requests.post(schema_url, json=data, headers=headers)
[ "def", "update_schema", "(", "self", ",", "catalog", "=", "\"hypermap\"", ")", ":", "schema_url", "=", "\"{0}/solr/{1}/schema\"", ".", "format", "(", "SEARCH_URL", ",", "catalog", ")", "print", "schema_url", "# create a special type to draw better heatmaps.", "location_rpt_quad_5m_payload", "=", "{", "\"add-field-type\"", ":", "{", "\"name\"", ":", "\"location_rpt_quad_5m\"", ",", "\"class\"", ":", "\"solr.SpatialRecursivePrefixTreeFieldType\"", ",", "\"geo\"", ":", "False", ",", "\"worldBounds\"", ":", "\"ENVELOPE(-180, 180, 180, -180)\"", ",", "\"prefixTree\"", ":", "\"packedQuad\"", ",", "\"distErrPct\"", ":", "\"0.025\"", ",", "\"maxDistErr\"", ":", "\"0.001\"", ",", "\"distanceUnits\"", ":", "\"degrees\"", "}", "}", "requests", ".", "post", "(", "schema_url", ",", "json", "=", "location_rpt_quad_5m_payload", ")", "# create a special type to implement ngrm text for search.", "text_ngrm_payload", "=", "{", "\"add-field-type\"", ":", "{", "\"name\"", ":", "\"text_ngrm\"", ",", "\"class\"", ":", "\"solr.TextField\"", ",", "\"positionIncrementGap\"", ":", "\"100\"", ",", "\"indexAnalyzer\"", ":", "{", "\"tokenizer\"", ":", "{", "\"class\"", ":", "\"solr.WhitespaceTokenizerFactory\"", "}", ",", "\"filters\"", ":", "[", "{", "\"class\"", ":", "\"solr.NGramFilterFactory\"", ",", "\"minGramSize\"", ":", "\"1\"", ",", "\"maxGramSize\"", ":", "\"50\"", "}", ",", "{", "\"class\"", ":", "\"solr.LowerCaseFilterFactory\"", "}", "]", "}", ",", "\"queryAnalyzer\"", ":", "{", "\"tokenizer\"", ":", "{", "\"class\"", ":", "\"solr.WhitespaceTokenizerFactory\"", "}", ",", "\"filters\"", ":", "[", "{", "\"class\"", ":", "\"solr.LowerCaseFilterFactory\"", ",", "}", "]", "}", "}", "}", "requests", ".", "post", "(", "schema_url", ",", "json", "=", "text_ngrm_payload", ")", "# now the other fields", "fields", "=", "[", "{", "\"name\"", ":", "\"abstract\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"abstract_txt\"", ",", "\"type\"", ":", "\"text_ngrm\"", "}", ",", "{", "\"name\"", ":", "\"area\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"availability\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"bbox\"", ",", "\"type\"", ":", "\"location_rpt_quad_5m\"", "}", ",", "{", "\"name\"", ":", "\"domain_name\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"is_public\"", ",", "\"type\"", ":", "\"boolean\"", "}", ",", "{", "\"name\"", ":", "\"is_valid\"", ",", "\"type\"", ":", "\"boolean\"", "}", ",", "{", "\"name\"", ":", "\"keywords\"", ",", "\"type\"", ":", "\"string\"", ",", "\"multiValued\"", ":", "True", "}", ",", "{", "\"name\"", ":", "\"last_status\"", ",", "\"type\"", ":", "\"boolean\"", "}", ",", "{", "\"name\"", ":", "\"layer_category\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"layer_date\"", ",", "\"type\"", ":", "\"pdate\"", ",", "\"docValues\"", ":", "True", "}", ",", "{", "\"name\"", ":", "\"layer_datetype\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"layer_id\"", ",", "\"type\"", ":", "\"plong\"", "}", ",", "{", "\"name\"", ":", "\"layer_originator\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"layer_originator_txt\"", ",", "\"type\"", ":", "\"text_ngrm\"", "}", ",", "{", "\"name\"", ":", "\"layer_username\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"layer_username_txt\"", ",", "\"type\"", ":", "\"text_ngrm\"", "}", ",", "{", "\"name\"", ":", "\"location\"", ",", "\"type\"", 
":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"max_x\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"max_y\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"min_x\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"min_y\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"name\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"recent_reliability\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"reliability\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"service_id\"", ",", "\"type\"", ":", "\"plong\"", "}", ",", "{", "\"name\"", ":", "\"service_type\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"srs\"", ",", "\"type\"", ":", "\"string\"", ",", "\"multiValued\"", ":", "True", "}", ",", "{", "\"name\"", ":", "\"tile_url\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"title\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"title_txt\"", ",", "\"type\"", ":", "\"text_ngrm\"", "}", ",", "{", "\"name\"", ":", "\"type\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"url\"", ",", "\"type\"", ":", "\"string\"", "}", ",", "{", "\"name\"", ":", "\"uuid\"", ",", "\"type\"", ":", "\"string\"", ",", "\"required\"", ":", "True", "}", ",", "{", "\"name\"", ":", "\"centroid_y\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "{", "\"name\"", ":", "\"centroid_x\"", ",", "\"type\"", ":", "\"pdouble\"", "}", ",", "]", "copy_fields", "=", "[", "{", "\"source\"", ":", "\"*\"", ",", "\"dest\"", ":", "\"_text_\"", "}", ",", "{", "\"source\"", ":", "\"title\"", ",", "\"dest\"", ":", "\"title_txt\"", "}", ",", "{", "\"source\"", ":", "\"abstract\"", ",", "\"dest\"", ":", "\"abstract_txt\"", "}", ",", "{", "\"source\"", ":", "\"layer_originator\"", ",", "\"dest\"", ":", "\"layer_originator_txt\"", "}", ",", "{", "\"source\"", ":", "\"layer_username\"", ",", "\"dest\"", ":", "\"layer_username_txt\"", "}", ",", "]", "headers", "=", "{", "\"Content-type\"", ":", "\"application/json\"", "}", "for", "field", "in", "fields", ":", "data", "=", "{", "\"add-field\"", ":", "field", "}", "requests", ".", "post", "(", "schema_url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "for", "field", "in", "copy_fields", ":", "data", "=", "{", "\"add-copy-field\"", ":", "field", "}", "print", "data", "requests", ".", "post", "(", "schema_url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")" ]
set the mapping in solr. :param catalog: core :return:
[ "set", "the", "mapping", "in", "solr", ".", ":", "param", "catalog", ":", "core", ":", "return", ":" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L99-L221
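One Schema API call from the routine above, as a standalone sketch against a hypothetical local Solr; the payload shape mirrors the source:

import requests

schema_url = 'http://localhost:8983/solr/hypermap/schema'  # assumed URL
payload = {'add-field': {'name': 'title', 'type': 'string'}}
requests.post(schema_url, json=payload,
              headers={'Content-type': 'application/json'})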
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
create_layer_from_metadata_xml
def create_layer_from_metadata_xml(resourcetype, xml, monitor=False, service=None, catalog=None): """ Create a layer / keyword list from a metadata record if it does not already exist. """ from models import gen_anytext, Layer if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2': # Dublin core md = CswRecord(etree.fromstring(xml)) layer = Layer( is_monitored=monitor, name=md.title, title=md.title, abstract=md.abstract, xml=xml, service=service, catalog=catalog, anytext=gen_anytext(md.title, md.abstract, md.subjects) ) if hasattr(md, 'alternative'): layer.name = md.alternative if md.bbox is not None: layer.bbox_x0 = format_float(md.bbox.minx) layer.bbox_y0 = format_float(md.bbox.miny) layer.bbox_x1 = format_float(md.bbox.maxx) layer.bbox_y1 = format_float(md.bbox.maxy) layer.wkt_geometry = bbox2wktpolygon([md.bbox.minx, md.bbox.miny, md.bbox.maxx, md.bbox.maxy]) return layer, md.subjects
python
def create_layer_from_metadata_xml(resourcetype, xml, monitor=False, service=None, catalog=None): """ Create a layer / keyword list from a metadata record if it does not already exist. """ from models import gen_anytext, Layer if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2': # Dublin core md = CswRecord(etree.fromstring(xml)) layer = Layer( is_monitored=monitor, name=md.title, title=md.title, abstract=md.abstract, xml=xml, service=service, catalog=catalog, anytext=gen_anytext(md.title, md.abstract, md.subjects) ) if hasattr(md, 'alternative'): layer.name = md.alternative if md.bbox is not None: layer.bbox_x0 = format_float(md.bbox.minx) layer.bbox_y0 = format_float(md.bbox.miny) layer.bbox_x1 = format_float(md.bbox.maxx) layer.bbox_y1 = format_float(md.bbox.maxy) layer.wkt_geometry = bbox2wktpolygon([md.bbox.minx, md.bbox.miny, md.bbox.maxx, md.bbox.maxy]) return layer, md.subjects
[ "def", "create_layer_from_metadata_xml", "(", "resourcetype", ",", "xml", ",", "monitor", "=", "False", ",", "service", "=", "None", ",", "catalog", "=", "None", ")", ":", "from", "models", "import", "gen_anytext", ",", "Layer", "if", "resourcetype", "==", "'http://www.opengis.net/cat/csw/2.0.2'", ":", "# Dublin core", "md", "=", "CswRecord", "(", "etree", ".", "fromstring", "(", "xml", ")", ")", "layer", "=", "Layer", "(", "is_monitored", "=", "monitor", ",", "name", "=", "md", ".", "title", ",", "title", "=", "md", ".", "title", ",", "abstract", "=", "md", ".", "abstract", ",", "xml", "=", "xml", ",", "service", "=", "service", ",", "catalog", "=", "catalog", ",", "anytext", "=", "gen_anytext", "(", "md", ".", "title", ",", "md", ".", "abstract", ",", "md", ".", "subjects", ")", ")", "if", "hasattr", "(", "md", ",", "'alternative'", ")", ":", "layer", ".", "name", "=", "md", ".", "alternative", "if", "md", ".", "bbox", "is", "not", "None", ":", "layer", ".", "bbox_x0", "=", "format_float", "(", "md", ".", "bbox", ".", "minx", ")", "layer", ".", "bbox_y0", "=", "format_float", "(", "md", ".", "bbox", ".", "miny", ")", "layer", ".", "bbox_x1", "=", "format_float", "(", "md", ".", "bbox", ".", "maxx", ")", "layer", ".", "bbox_y1", "=", "format_float", "(", "md", ".", "bbox", ".", "maxy", ")", "layer", ".", "wkt_geometry", "=", "bbox2wktpolygon", "(", "[", "md", ".", "bbox", ".", "minx", ",", "md", ".", "bbox", ".", "miny", ",", "md", ".", "bbox", ".", "maxx", ",", "md", ".", "bbox", ".", "maxy", "]", ")", "return", "layer", ",", "md", ".", "subjects" ]
Create a layer / keyword list from a metadata record if it does not already exist.
[ "Create", "a", "layer", "/", "keyword", "list", "from", "a", "metadata", "record", "if", "it", "does", "not", "already", "exist", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L28-L59
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
create_service_from_endpoint
def create_service_from_endpoint(endpoint, service_type, title=None, abstract=None, catalog=None): """ Create a service from an endpoint if it does not already exist. """ from models import Service if Service.objects.filter(url=endpoint, catalog=catalog).count() == 0: # check if endpoint is valid request = requests.get(endpoint) if request.status_code == 200: LOGGER.debug('Creating a %s service for endpoint=%s catalog=%s' % (service_type, endpoint, catalog)) service = Service( type=service_type, url=endpoint, title=title, abstract=abstract, csw_type='service', catalog=catalog ) service.save() return service else: LOGGER.warning('This endpoint is invalid, status code is %s' % request.status_code) else: LOGGER.warning('A service for this endpoint %s in catalog %s already exists' % (endpoint, catalog)) return None
python
def create_service_from_endpoint(endpoint, service_type, title=None, abstract=None, catalog=None): """ Create a service from an endpoint if it does not already exist. """ from models import Service if Service.objects.filter(url=endpoint, catalog=catalog).count() == 0: # check if endpoint is valid request = requests.get(endpoint) if request.status_code == 200: LOGGER.debug('Creating a %s service for endpoint=%s catalog=%s' % (service_type, endpoint, catalog)) service = Service( type=service_type, url=endpoint, title=title, abstract=abstract, csw_type='service', catalog=catalog ) service.save() return service else: LOGGER.warning('This endpoint is invalid, status code is %s' % request.status_code) else: LOGGER.warning('A service for this endpoint %s in catalog %s already exists' % (endpoint, catalog)) return None
[ "def", "create_service_from_endpoint", "(", "endpoint", ",", "service_type", ",", "title", "=", "None", ",", "abstract", "=", "None", ",", "catalog", "=", "None", ")", ":", "from", "models", "import", "Service", "if", "Service", ".", "objects", ".", "filter", "(", "url", "=", "endpoint", ",", "catalog", "=", "catalog", ")", ".", "count", "(", ")", "==", "0", ":", "# check if endpoint is valid", "request", "=", "requests", ".", "get", "(", "endpoint", ")", "if", "request", ".", "status_code", "==", "200", ":", "LOGGER", ".", "debug", "(", "'Creating a %s service for endpoint=%s catalog=%s'", "%", "(", "service_type", ",", "endpoint", ",", "catalog", ")", ")", "service", "=", "Service", "(", "type", "=", "service_type", ",", "url", "=", "endpoint", ",", "title", "=", "title", ",", "abstract", "=", "abstract", ",", "csw_type", "=", "'service'", ",", "catalog", "=", "catalog", ")", "service", ".", "save", "(", ")", "return", "service", "else", ":", "LOGGER", ".", "warning", "(", "'This endpoint is invalid, status code is %s'", "%", "request", ".", "status_code", ")", "else", ":", "LOGGER", ".", "warning", "(", "'A service for this endpoint %s in catalog %s already exists'", "%", "(", "endpoint", ",", "catalog", ")", ")", "return", "None" ]
Create a service from an endpoint if it does not already exist.
[ "Create", "a", "service", "from", "an", "endpoint", "if", "it", "does", "not", "already", "exists", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L62-L82
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
create_services_from_endpoint
def create_services_from_endpoint(url, catalog, greedy_opt=True): """ Generate service/services from an endpoint. WMS, WMTS, TMS endpoints correspond to a single service. ESRI, CSW endpoints corrispond to many services. :return: imported, message """ # this variable will collect any exception message during the routine. # will be used in the last step to send a message if "detected" var is False. messages = [] num_created = 0 endpoint = get_sanitized_endpoint(url) try: urllib2.urlopen(endpoint, timeout=10) except Exception as e: message = traceback.format_exception(*sys.exc_info()) LOGGER.error('Cannot open this endpoint: %s' % endpoint) LOGGER.error('ERROR MESSAGE: %s' % message) LOGGER.error(e, exc_info=True) return False, message detected = False # handle specific service types for some domains (WorldMap, Wrapper...) parsed_uri = urlparse(endpoint) domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri) if domain == 'http://worldmap.harvard.edu/': service_type = 'Hypermap:WorldMap' title = 'Harvard WorldMap' abstract = 'Harvard WorldMap' endpoint = domain detected = True if domain in [ 'http://maps.nypl.org/', 'http://mapwarper.net/', 'http://warp.worldmap.harvard.edu/', ]: service_type = 'Hypermap:WARPER' title = 'Warper at %s' % domain abstract = 'Warper at %s' % domain detected = True # test if it is CSW, WMS, TMS, WMTS or Esri # CSW try: csw = CatalogueServiceWeb(endpoint) service_type = 'OGC:CSW' service_links = {} detected = True typenames = 'csw:Record' outputschema = 'http://www.opengis.net/cat/csw/2.0.2' if 'csw_harvest_pagesize' in settings.REGISTRY_PYCSW['manager']: pagesize = int(settings.REGISTRY_PYCSW['manager']['csw_harvest_pagesize']) else: pagesize = 10 LOGGER.debug('Harvesting CSW %s' % endpoint) # now get all records # get total number of records to loop against try: csw.getrecords2(typenames=typenames, resulttype='hits', outputschema=outputschema) matches = csw.results['matches'] except: # this is a CSW, but server rejects query raise RuntimeError(csw.response) if pagesize > matches: pagesize = matches LOGGER.info('Harvesting %d CSW records' % matches) # loop over all catalogue records incrementally for r in range(1, matches+1, pagesize): LOGGER.info('Parsing %s from %s' % (r, matches)) try: csw.getrecords2(typenames=typenames, startposition=r, maxrecords=pagesize, outputschema=outputschema, esn='full') except Exception as err: # this is a CSW, but server rejects query raise RuntimeError(csw.response) for k, v in csw.records.items(): # try to parse metadata try: LOGGER.info('Looking for service links') LOGGER.debug('Looking for service links via dct:references') if v.references: for ref in v.references: scheme = None if ref['scheme'] in [st[0] for st in SERVICE_TYPES]: if ref['url'] not in service_links: scheme = ref['scheme'] service_links[ref['url']] = scheme else: # loose detection scheme = detect_metadata_url_scheme(ref['url']) if scheme is not None: if ref['url'] not in service_links: service_links[ref['url']] = scheme if scheme is None: continue try: service = create_service_from_endpoint(ref['url'], scheme, catalog=catalog) if service is not None: num_created = num_created + 1 LOGGER.info('Found %s services on endpoint' % num_created) except Exception, e: LOGGER.error('Could not create service for %s : %s' % (scheme, ref['url'])) LOGGER.error(e, exc_info=True) LOGGER.debug('Looking for service links via the GeoNetwork-ish dc:URI') if v.uris: for u in v.uris: # loose detection scheme = detect_metadata_url_scheme(u['url']) if scheme is not None: if u['url'] not in service_links: service_links[u['url']] = scheme else: continue try: service = create_service_from_endpoint(u['url'], scheme, catalog=catalog) if service is not None: num_created = num_created + 1 LOGGER.info('Found %s services on endpoint' % num_created) except Exception, e: LOGGER.error('Could not create service for %s : %s' % (scheme, u['url'])) LOGGER.error(e, exc_info=True) except Exception as err: # parsing failed for some reason LOGGER.warning('Metadata parsing failed %s', err) LOGGER.error(err, exc_info=True) except XMLSyntaxError as e: # This is not XML, so likely not a CSW. Moving on. pass except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) # WMS if not detected: try: service = get_wms_version_negotiate(endpoint, timeout=10) service_type = 'OGC:WMS' title = service.identification.title, abstract = service.identification.abstract detected = True except XMLSyntaxError as e: # This is not XML, so likely not a WMS. Moving on. pass except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) # TMS if not detected: try: service = TileMapService(endpoint, timeout=10) service_type = 'OSGeo:TMS' title = service.identification.title, abstract = service.identification.abstract detected = True except XMLSyntaxError as e: # This is not XML, so likely not a TsMS. Moving on. pass except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) # WMTS if not detected: try: # @tomkralidis timeout is not implemented for WebMapTileService? service = WebMapTileService(endpoint) service_type = 'OGC:WMTS' title = service.identification.title, abstract = service.identification.abstract detected = True except XMLSyntaxError as e: # This is not XML, so likely not a WMTS. Moving on. pass except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) # if detected, let's create the service if detected and service_type != 'OGC:CSW': try: service = create_service_from_endpoint( endpoint, service_type, title, abstract=abstract, catalog=catalog ) if service is not None: num_created = num_created + 1 except XMLSyntaxError as e: # This is not XML, so likely not a OGC:CSW. Moving on. pass except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) # Esri # a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services # we can safely assume the following condition (at least it is true for 1170 services) # we need to test this as arcrest.Folder can freeze with not esri url such as this one: # http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices if '/rest/services' in endpoint: if not detected: try: esri = arcrest.Folder(endpoint) service_type = 'ESRI' detected = True service_to_process, folder_to_process = esri.services, esri.folders if not greedy_opt: folder_to_process = [] sections = service_url_parse(url) service_to_process = get_single_service(esri, sections) processed_services = process_esri_services(service_to_process, catalog) num_created = num_created + len(processed_services) for folder in folder_to_process: folder_services = process_esri_services(folder.services, catalog) num_created = num_created + len(folder_services) except Exception as e: LOGGER.error(e, exc_info=True) messages.append(str(e)) if detected: return True, '%s service/s created' % num_created else: m = '|'.join(messages) return False, 'ERROR! Could not detect service type for ' \ 'endpoint %s or already existing. messages=(%s)' % (endpoint, m)
python
def create_services_from_endpoint(url, catalog, greedy_opt=True):
    """
    Generate service/services from an endpoint.
    WMS, WMTS, TMS endpoints correspond to a single service.
    ESRI, CSW endpoints correspond to many services.
    :return: imported, message
    """
    # this variable will collect any exception message during the routine.
    # will be used in the last step to send a message if "detected" var is False.
    messages = []
    num_created = 0
    endpoint = get_sanitized_endpoint(url)
    try:
        urllib2.urlopen(endpoint, timeout=10)
    except Exception as e:
        message = traceback.format_exception(*sys.exc_info())
        LOGGER.error('Cannot open this endpoint: %s' % endpoint)
        LOGGER.error('ERROR MESSAGE: %s' % message)
        LOGGER.error(e, exc_info=True)
        return False, message

    detected = False

    # handle specific service types for some domains (WorldMap, Warper...)
    parsed_uri = urlparse(endpoint)
    domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
    if domain == 'http://worldmap.harvard.edu/':
        service_type = 'Hypermap:WorldMap'
        title = 'Harvard WorldMap'
        abstract = 'Harvard WorldMap'
        endpoint = domain
        detected = True
    if domain in [
        'http://maps.nypl.org/',
        'http://mapwarper.net/',
        'http://warp.worldmap.harvard.edu/',
    ]:
        service_type = 'Hypermap:WARPER'
        title = 'Warper at %s' % domain
        abstract = 'Warper at %s' % domain
        detected = True

    # test if it is CSW, WMS, TMS, WMTS or Esri

    # CSW
    try:
        csw = CatalogueServiceWeb(endpoint)
        service_type = 'OGC:CSW'
        service_links = {}
        detected = True
        typenames = 'csw:Record'
        outputschema = 'http://www.opengis.net/cat/csw/2.0.2'
        if 'csw_harvest_pagesize' in settings.REGISTRY_PYCSW['manager']:
            pagesize = int(settings.REGISTRY_PYCSW['manager']['csw_harvest_pagesize'])
        else:
            pagesize = 10
        LOGGER.debug('Harvesting CSW %s' % endpoint)
        # now get all records
        # get total number of records to loop against
        try:
            csw.getrecords2(typenames=typenames, resulttype='hits', outputschema=outputschema)
            matches = csw.results['matches']
        except:  # this is a CSW, but server rejects query
            raise RuntimeError(csw.response)
        if pagesize > matches:
            pagesize = matches
        LOGGER.info('Harvesting %d CSW records' % matches)
        # loop over all catalogue records incrementally
        for r in range(1, matches + 1, pagesize):
            LOGGER.info('Parsing %s from %s' % (r, matches))
            try:
                csw.getrecords2(typenames=typenames, startposition=r,
                                maxrecords=pagesize, outputschema=outputschema, esn='full')
            except Exception as err:  # this is a CSW, but server rejects query
                raise RuntimeError(csw.response)
            for k, v in csw.records.items():
                # try to parse metadata
                try:
                    LOGGER.info('Looking for service links')
                    LOGGER.debug('Looking for service links via dct:references')
                    if v.references:
                        for ref in v.references:
                            scheme = None
                            if ref['scheme'] in [st[0] for st in SERVICE_TYPES]:
                                if ref['url'] not in service_links:
                                    scheme = ref['scheme']
                                    service_links[ref['url']] = scheme
                            else:
                                # loose detection
                                scheme = detect_metadata_url_scheme(ref['url'])
                                if scheme is not None:
                                    if ref['url'] not in service_links:
                                        service_links[ref['url']] = scheme
                            if scheme is None:
                                continue
                            try:
                                service = create_service_from_endpoint(ref['url'], scheme, catalog=catalog)
                                if service is not None:
                                    num_created = num_created + 1
                                    LOGGER.info('Found %s services on endpoint' % num_created)
                            except Exception, e:
                                LOGGER.error('Could not create service for %s : %s' % (scheme, ref['url']))
                                LOGGER.error(e, exc_info=True)
                    LOGGER.debug('Looking for service links via the GeoNetwork-ish dc:URI')
                    if v.uris:
                        for u in v.uris:
                            # loose detection
                            scheme = detect_metadata_url_scheme(u['url'])
                            if scheme is not None:
                                if u['url'] not in service_links:
                                    service_links[u['url']] = scheme
                            else:
                                continue
                            try:
                                service = create_service_from_endpoint(u['url'], scheme, catalog=catalog)
                                if service is not None:
                                    num_created = num_created + 1
                                    LOGGER.info('Found %s services on endpoint' % num_created)
                            except Exception, e:
                                LOGGER.error('Could not create service for %s : %s' % (scheme, u['url']))
                                LOGGER.error(e, exc_info=True)
                except Exception as err:  # parsing failed for some reason
                    LOGGER.warning('Metadata parsing failed %s', err)
                    LOGGER.error(err, exc_info=True)
    except XMLSyntaxError as e:
        # This is not XML, so likely not a CSW. Moving on.
        pass
    except Exception as e:
        LOGGER.error(e, exc_info=True)
        messages.append(str(e))

    # WMS
    if not detected:
        try:
            service = get_wms_version_negotiate(endpoint, timeout=10)
            service_type = 'OGC:WMS'
            title = service.identification.title,
            abstract = service.identification.abstract
            detected = True
        except XMLSyntaxError as e:
            # This is not XML, so likely not a WMS. Moving on.
            pass
        except Exception as e:
            LOGGER.error(e, exc_info=True)
            messages.append(str(e))

    # TMS
    if not detected:
        try:
            service = TileMapService(endpoint, timeout=10)
            service_type = 'OSGeo:TMS'
            title = service.identification.title,
            abstract = service.identification.abstract
            detected = True
        except XMLSyntaxError as e:
            # This is not XML, so likely not a TMS. Moving on.
            pass
        except Exception as e:
            LOGGER.error(e, exc_info=True)
            messages.append(str(e))

    # WMTS
    if not detected:
        try:
            # @tomkralidis timeout is not implemented for WebMapTileService?
            service = WebMapTileService(endpoint)
            service_type = 'OGC:WMTS'
            title = service.identification.title,
            abstract = service.identification.abstract
            detected = True
        except XMLSyntaxError as e:
            # This is not XML, so likely not a WMTS. Moving on.
            pass
        except Exception as e:
            LOGGER.error(e, exc_info=True)
            messages.append(str(e))

    # if detected, let's create the service
    if detected and service_type != 'OGC:CSW':
        try:
            service = create_service_from_endpoint(
                endpoint,
                service_type,
                title,
                abstract=abstract,
                catalog=catalog
            )
            if service is not None:
                num_created = num_created + 1
        except XMLSyntaxError as e:
            # This is not XML, so likely not a OGC:CSW. Moving on.
            pass
        except Exception as e:
            LOGGER.error(e, exc_info=True)
            messages.append(str(e))

    # Esri
    # a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services
    # we can safely assume the following condition (at least it is true for 1170 services)
    # we need to test this as arcrest.Folder can freeze with not esri url such as this one:
    # http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices
    if '/rest/services' in endpoint:
        if not detected:
            try:
                esri = arcrest.Folder(endpoint)
                service_type = 'ESRI'
                detected = True
                service_to_process, folder_to_process = esri.services, esri.folders
                if not greedy_opt:
                    folder_to_process = []
                    sections = service_url_parse(url)
                    service_to_process = get_single_service(esri, sections)
                processed_services = process_esri_services(service_to_process, catalog)
                num_created = num_created + len(processed_services)
                for folder in folder_to_process:
                    folder_services = process_esri_services(folder.services, catalog)
                    num_created = num_created + len(folder_services)
            except Exception as e:
                LOGGER.error(e, exc_info=True)
                messages.append(str(e))

    if detected:
        return True, '%s service/s created' % num_created
    else:
        m = '|'.join(messages)
        return False, 'ERROR! Could not detect service type for ' \
                      'endpoint %s or already existing. messages=(%s)' % (endpoint, m)
[ "def", "create_services_from_endpoint", "(", "url", ",", "catalog", ",", "greedy_opt", "=", "True", ")", ":", "# this variable will collect any exception message during the routine.", "# will be used in the last step to send a message if \"detected\" var is False.", "messages", "=", "[", "]", "num_created", "=", "0", "endpoint", "=", "get_sanitized_endpoint", "(", "url", ")", "try", ":", "urllib2", ".", "urlopen", "(", "endpoint", ",", "timeout", "=", "10", ")", "except", "Exception", "as", "e", ":", "message", "=", "traceback", ".", "format_exception", "(", "*", "sys", ".", "exc_info", "(", ")", ")", "LOGGER", ".", "error", "(", "'Cannot open this endpoint: %s'", "%", "endpoint", ")", "LOGGER", ".", "error", "(", "'ERROR MESSAGE: %s'", "%", "message", ")", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "return", "False", ",", "message", "detected", "=", "False", "# handle specific service types for some domains (WorldMap, Wrapper...)", "parsed_uri", "=", "urlparse", "(", "endpoint", ")", "domain", "=", "'{uri.scheme}://{uri.netloc}/'", ".", "format", "(", "uri", "=", "parsed_uri", ")", "if", "domain", "==", "'http://worldmap.harvard.edu/'", ":", "service_type", "=", "'Hypermap:WorldMap'", "title", "=", "'Harvard WorldMap'", "abstract", "=", "'Harvard WorldMap'", "endpoint", "=", "domain", "detected", "=", "True", "if", "domain", "in", "[", "'http://maps.nypl.org/'", ",", "'http://mapwarper.net/'", ",", "'http://warp.worldmap.harvard.edu/'", ",", "]", ":", "service_type", "=", "'Hypermap:WARPER'", "title", "=", "'Warper at %s'", "%", "domain", "abstract", "=", "'Warper at %s'", "%", "domain", "detected", "=", "True", "# test if it is CSW, WMS, TMS, WMTS or Esri", "# CSW", "try", ":", "csw", "=", "CatalogueServiceWeb", "(", "endpoint", ")", "service_type", "=", "'OGC:CSW'", "service_links", "=", "{", "}", "detected", "=", "True", "typenames", "=", "'csw:Record'", "outputschema", "=", "'http://www.opengis.net/cat/csw/2.0.2'", "if", "'csw_harvest_pagesize'", "in", "settings", ".", "REGISTRY_PYCSW", "[", "'manager'", "]", ":", "pagesize", "=", "int", "(", "settings", ".", "REGISTRY_PYCSW", "[", "'manager'", "]", "[", "'csw_harvest_pagesize'", "]", ")", "else", ":", "pagesize", "=", "10", "LOGGER", ".", "debug", "(", "'Harvesting CSW %s'", "%", "endpoint", ")", "# now get all records", "# get total number of records to loop against", "try", ":", "csw", ".", "getrecords2", "(", "typenames", "=", "typenames", ",", "resulttype", "=", "'hits'", ",", "outputschema", "=", "outputschema", ")", "matches", "=", "csw", ".", "results", "[", "'matches'", "]", "except", ":", "# this is a CSW, but server rejects query", "raise", "RuntimeError", "(", "csw", ".", "response", ")", "if", "pagesize", ">", "matches", ":", "pagesize", "=", "matches", "LOGGER", ".", "info", "(", "'Harvesting %d CSW records'", "%", "matches", ")", "# loop over all catalogue records incrementally", "for", "r", "in", "range", "(", "1", ",", "matches", "+", "1", ",", "pagesize", ")", ":", "LOGGER", ".", "info", "(", "'Parsing %s from %s'", "%", "(", "r", ",", "matches", ")", ")", "try", ":", "csw", ".", "getrecords2", "(", "typenames", "=", "typenames", ",", "startposition", "=", "r", ",", "maxrecords", "=", "pagesize", ",", "outputschema", "=", "outputschema", ",", "esn", "=", "'full'", ")", "except", "Exception", "as", "err", ":", "# this is a CSW, but server rejects query", "raise", "RuntimeError", "(", "csw", ".", "response", ")", "for", "k", ",", "v", "in", "csw", ".", "records", ".", "items", "(", ")", ":", 
"# try to parse metadata", "try", ":", "LOGGER", ".", "info", "(", "'Looking for service links'", ")", "LOGGER", ".", "debug", "(", "'Looking for service links via dct:references'", ")", "if", "v", ".", "references", ":", "for", "ref", "in", "v", ".", "references", ":", "scheme", "=", "None", "if", "ref", "[", "'scheme'", "]", "in", "[", "st", "[", "0", "]", "for", "st", "in", "SERVICE_TYPES", "]", ":", "if", "ref", "[", "'url'", "]", "not", "in", "service_links", ":", "scheme", "=", "ref", "[", "'scheme'", "]", "service_links", "[", "ref", "[", "'url'", "]", "]", "=", "scheme", "else", ":", "# loose detection", "scheme", "=", "detect_metadata_url_scheme", "(", "ref", "[", "'url'", "]", ")", "if", "scheme", "is", "not", "None", ":", "if", "ref", "[", "'url'", "]", "not", "in", "service_links", ":", "service_links", "[", "ref", "[", "'url'", "]", "]", "=", "scheme", "if", "scheme", "is", "None", ":", "continue", "try", ":", "service", "=", "create_service_from_endpoint", "(", "ref", "[", "'url'", "]", ",", "scheme", ",", "catalog", "=", "catalog", ")", "if", "service", "is", "not", "None", ":", "num_created", "=", "num_created", "+", "1", "LOGGER", ".", "info", "(", "'Found %s services on endpoint'", "%", "num_created", ")", "except", "Exception", ",", "e", ":", "LOGGER", ".", "error", "(", "'Could not create service for %s : %s'", "%", "(", "scheme", ",", "ref", "[", "'url'", "]", ")", ")", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "LOGGER", ".", "debug", "(", "'Looking for service links via the GeoNetwork-ish dc:URI'", ")", "if", "v", ".", "uris", ":", "for", "u", "in", "v", ".", "uris", ":", "# loose detection", "scheme", "=", "detect_metadata_url_scheme", "(", "u", "[", "'url'", "]", ")", "if", "scheme", "is", "not", "None", ":", "if", "u", "[", "'url'", "]", "not", "in", "service_links", ":", "service_links", "[", "u", "[", "'url'", "]", "]", "=", "scheme", "else", ":", "continue", "try", ":", "service", "=", "create_service_from_endpoint", "(", "u", "[", "'url'", "]", ",", "scheme", ",", "catalog", "=", "catalog", ")", "if", "service", "is", "not", "None", ":", "num_created", "=", "num_created", "+", "1", "LOGGER", ".", "info", "(", "'Found %s services on endpoint'", "%", "num_created", ")", "except", "Exception", ",", "e", ":", "LOGGER", ".", "error", "(", "'Could not create service for %s : %s'", "%", "(", "scheme", ",", "u", "[", "'url'", "]", ")", ")", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "except", "Exception", "as", "err", ":", "# parsing failed for some reason", "LOGGER", ".", "warning", "(", "'Metadata parsing failed %s'", ",", "err", ")", "LOGGER", ".", "error", "(", "err", ",", "exc_info", "=", "True", ")", "except", "XMLSyntaxError", "as", "e", ":", "# This is not XML, so likely not a CSW. Moving on.", "pass", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "# WMS", "if", "not", "detected", ":", "try", ":", "service", "=", "get_wms_version_negotiate", "(", "endpoint", ",", "timeout", "=", "10", ")", "service_type", "=", "'OGC:WMS'", "title", "=", "service", ".", "identification", ".", "title", ",", "abstract", "=", "service", ".", "identification", ".", "abstract", "detected", "=", "True", "except", "XMLSyntaxError", "as", "e", ":", "# This is not XML, so likely not a WMS. 
Moving on.", "pass", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "# TMS", "if", "not", "detected", ":", "try", ":", "service", "=", "TileMapService", "(", "endpoint", ",", "timeout", "=", "10", ")", "service_type", "=", "'OSGeo:TMS'", "title", "=", "service", ".", "identification", ".", "title", ",", "abstract", "=", "service", ".", "identification", ".", "abstract", "detected", "=", "True", "except", "XMLSyntaxError", "as", "e", ":", "# This is not XML, so likely not a TsMS. Moving on.", "pass", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "# WMTS", "if", "not", "detected", ":", "try", ":", "# @tomkralidis timeout is not implemented for WebMapTileService?", "service", "=", "WebMapTileService", "(", "endpoint", ")", "service_type", "=", "'OGC:WMTS'", "title", "=", "service", ".", "identification", ".", "title", ",", "abstract", "=", "service", ".", "identification", ".", "abstract", "detected", "=", "True", "except", "XMLSyntaxError", "as", "e", ":", "# This is not XML, so likely not a WMTS. Moving on.", "pass", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "# if detected, let's create the service", "if", "detected", "and", "service_type", "!=", "'OGC:CSW'", ":", "try", ":", "service", "=", "create_service_from_endpoint", "(", "endpoint", ",", "service_type", ",", "title", ",", "abstract", "=", "abstract", ",", "catalog", "=", "catalog", ")", "if", "service", "is", "not", "None", ":", "num_created", "=", "num_created", "+", "1", "except", "XMLSyntaxError", "as", "e", ":", "# This is not XML, so likely not a OGC:CSW. 
Moving on.", "pass", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "# Esri", "# a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services", "# we can safely assume the following condition (at least it is true for 1170 services)", "# we need to test this as arcrest.Folder can freeze with not esri url such as this one:", "# http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices", "if", "'/rest/services'", "in", "endpoint", ":", "if", "not", "detected", ":", "try", ":", "esri", "=", "arcrest", ".", "Folder", "(", "endpoint", ")", "service_type", "=", "'ESRI'", "detected", "=", "True", "service_to_process", ",", "folder_to_process", "=", "esri", ".", "services", ",", "esri", ".", "folders", "if", "not", "greedy_opt", ":", "folder_to_process", "=", "[", "]", "sections", "=", "service_url_parse", "(", "url", ")", "service_to_process", "=", "get_single_service", "(", "esri", ",", "sections", ")", "processed_services", "=", "process_esri_services", "(", "service_to_process", ",", "catalog", ")", "num_created", "=", "num_created", "+", "len", "(", "processed_services", ")", "for", "folder", "in", "folder_to_process", ":", "folder_services", "=", "process_esri_services", "(", "folder", ".", "services", ",", "catalog", ")", "num_created", "=", "num_created", "+", "len", "(", "folder_services", ")", "except", "Exception", "as", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "messages", ".", "append", "(", "str", "(", "e", ")", ")", "if", "detected", ":", "return", "True", ",", "'%s service/s created'", "%", "num_created", "else", ":", "m", "=", "'|'", ".", "join", "(", "messages", ")", "return", "False", ",", "'ERROR! Could not detect service type for '", "'endpoint %s or already existing. messages=(%s)'", "%", "(", "endpoint", ",", "m", ")" ]
Generate service/services from an endpoint. WMS, WMTS, TMS endpoints correspond to a single service. ESRI, CSW endpoints correspond to many services. :return: imported, message
[ "Generate", "service", "/", "services", "from", "an", "endpoint", ".", "WMS", "WMTS", "TMS", "endpoints", "correspond", "to", "a", "single", "service", ".", "ESRI", "CSW", "endpoints", "corrispond", "to", "many", "services", ".", ":", "return", ":", "imported", "message" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L85-L332
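A minimal driver sketch for create_services_from_endpoint above. This is hypothetical: it assumes a configured Django environment, a Catalog model importable from hypermap.aggregator.models, and an illustrative endpoint URL.

# Hypothetical usage; the Catalog import path and endpoint are assumptions.
from hypermap.aggregator.models import Catalog
from hypermap.aggregator.utils import create_services_from_endpoint

catalog = Catalog.objects.first()
imported, message = create_services_from_endpoint(
    'https://gis.ngdc.noaa.gov/arcgis/rest/services', catalog, greedy_opt=True)
print(message)  # e.g. '12 service/s created'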
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
service_url_parse
def service_url_parse(url):
    """
    Function that parses from url the service and folder of services.
    """
    endpoint = get_sanitized_endpoint(url)
    url_split_list = url.split(endpoint + '/')
    if len(url_split_list) != 0:
        url_split_list = url_split_list[1].split('/')
    else:
        raise Exception('Wrong url parsed')

    # Remove unnecessary items from list of the split url.
    parsed_url = [s for s in url_split_list if '?' not in s if 'Server' not in s]

    return parsed_url
python
def service_url_parse(url):
    """
    Function that parses from url the service and folder of services.
    """
    endpoint = get_sanitized_endpoint(url)
    url_split_list = url.split(endpoint + '/')
    if len(url_split_list) != 0:
        url_split_list = url_split_list[1].split('/')
    else:
        raise Exception('Wrong url parsed')

    # Remove unnecessary items from list of the split url.
    parsed_url = [s for s in url_split_list if '?' not in s if 'Server' not in s]

    return parsed_url
[ "def", "service_url_parse", "(", "url", ")", ":", "endpoint", "=", "get_sanitized_endpoint", "(", "url", ")", "url_split_list", "=", "url", ".", "split", "(", "endpoint", "+", "'/'", ")", "if", "len", "(", "url_split_list", ")", "!=", "0", ":", "url_split_list", "=", "url_split_list", "[", "1", "]", ".", "split", "(", "'/'", ")", "else", ":", "raise", "Exception", "(", "'Wrong url parsed'", ")", "# Remove unnecessary items from list of the split url.", "parsed_url", "=", "[", "s", "for", "s", "in", "url_split_list", "if", "'?'", "not", "in", "s", "if", "'Server'", "not", "in", "s", "]", "return", "parsed_url" ]
Function that parses from url the service and folder of services.
[ "Function", "that", "parses", "from", "url", "the", "service", "and", "folder", "of", "services", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L335-L349
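A quick sketch of what the parser yields for a typical ArcGIS URL (the import path is assumed; the URL is an example only):

# Hypothetical usage of service_url_parse.
from hypermap.aggregator.utils import service_url_parse

parts = service_url_parse('http://example.com/arcgis/rest/services/folder/myservice/MapServer?f=json')
print(parts)  # -> ['folder', 'myservice']; segments containing '?' or 'Server' are filtered out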
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
inverse_mercator
def inverse_mercator(xy):
    """
    Given coordinates in spherical mercator, return a lon,lat tuple.
    """
    lon = (xy[0] / 20037508.34) * 180
    lat = (xy[1] / 20037508.34) * 180
    lat = 180 / math.pi * \
        (2 * math.atan(math.exp(lat * math.pi / 180)) - math.pi / 2)
    return (lon, lat)
python
def inverse_mercator(xy):
    """
    Given coordinates in spherical mercator, return a lon,lat tuple.
    """
    lon = (xy[0] / 20037508.34) * 180
    lat = (xy[1] / 20037508.34) * 180
    lat = 180 / math.pi * \
        (2 * math.atan(math.exp(lat * math.pi / 180)) - math.pi / 2)
    return (lon, lat)
[ "def", "inverse_mercator", "(", "xy", ")", ":", "lon", "=", "(", "xy", "[", "0", "]", "/", "20037508.34", ")", "*", "180", "lat", "=", "(", "xy", "[", "1", "]", "/", "20037508.34", ")", "*", "180", "lat", "=", "180", "/", "math", ".", "pi", "*", "(", "2", "*", "math", ".", "atan", "(", "math", ".", "exp", "(", "lat", "*", "math", ".", "pi", "/", "180", ")", ")", "-", "math", ".", "pi", "/", "2", ")", "return", "(", "lon", ",", "lat", ")" ]
Given coordinates in spherical mercator, return a lon,lat tuple.
[ "Given", "coordinates", "in", "spherical", "mercator", "return", "a", "lon", "lat", "tuple", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L404-L412
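Since this record is pure math, a self-contained sanity check is easy; the standalone copy of the formula below is a sketch, not the packaged module:

import math

def inverse_mercator(xy):
    # Spherical mercator metres -> (lon, lat) degrees, same formula as above.
    lon = (xy[0] / 20037508.34) * 180
    lat = (xy[1] / 20037508.34) * 180
    lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180)) - math.pi / 2)
    return (lon, lat)

print(inverse_mercator((0.0, 0.0)))          # (0.0, 0.0), null island
print(inverse_mercator((20037508.34, 0.0)))  # (180.0, 0.0), the antimeridian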
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_wms_version_negotiate
def get_wms_version_negotiate(url, timeout=10):
    """
    OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
    """
    try:
        LOGGER.debug('Trying a WMS 1.3.0 GetCapabilities request')
        return WebMapService(url, version='1.3.0', timeout=timeout)
    except Exception as err:
        LOGGER.warning('WMS 1.3.0 support not found: %s', err)
        LOGGER.debug('Trying a WMS 1.1.1 GetCapabilities request instead')
        return WebMapService(url, version='1.1.1', timeout=timeout)
python
def get_wms_version_negotiate(url, timeout=10):
    """
    OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
    """
    try:
        LOGGER.debug('Trying a WMS 1.3.0 GetCapabilities request')
        return WebMapService(url, version='1.3.0', timeout=timeout)
    except Exception as err:
        LOGGER.warning('WMS 1.3.0 support not found: %s', err)
        LOGGER.debug('Trying a WMS 1.1.1 GetCapabilities request instead')
        return WebMapService(url, version='1.1.1', timeout=timeout)
[ "def", "get_wms_version_negotiate", "(", "url", ",", "timeout", "=", "10", ")", ":", "try", ":", "LOGGER", ".", "debug", "(", "'Trying a WMS 1.3.0 GetCapabilities request'", ")", "return", "WebMapService", "(", "url", ",", "version", "=", "'1.3.0'", ",", "timeout", "=", "timeout", ")", "except", "Exception", "as", "err", ":", "LOGGER", ".", "warning", "(", "'WMS 1.3.0 support not found: %s'", ",", "err", ")", "LOGGER", ".", "debug", "(", "'Trying a WMS 1.1.1 GetCapabilities request instead'", ")", "return", "WebMapService", "(", "url", ",", "version", "=", "'1.1.1'", ",", "timeout", "=", "timeout", ")" ]
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
[ "OWSLib", "wrapper", "function", "to", "perform", "version", "negotiation", "against", "owslib", ".", "wms", ".", "WebMapService" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L415-L426
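A usage sketch, assuming OWSLib is installed and a reachable WMS endpoint; the URL below is purely an example:

# Hypothetical call against an example WMS; falls back to 1.1.1 if 1.3.0 fails.
from hypermap.aggregator.utils import get_wms_version_negotiate

wms = get_wms_version_negotiate('https://demo.mapserver.org/cgi-bin/wms', timeout=30)
print(wms.identification.title)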
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_sanitized_endpoint
def get_sanitized_endpoint(url):
    """
    Sanitize an endpoint, such as removing unneeded parameters
    """
    # sanitize esri
    sanitized_url = url.rstrip()
    esri_string = '/rest/services'
    if esri_string in url:
        match = re.search(esri_string, sanitized_url)
        sanitized_url = url[0:(match.start(0)+len(esri_string))]
    return sanitized_url
python
def get_sanitized_endpoint(url):
    """
    Sanitize an endpoint, such as removing unneeded parameters
    """
    # sanitize esri
    sanitized_url = url.rstrip()
    esri_string = '/rest/services'
    if esri_string in url:
        match = re.search(esri_string, sanitized_url)
        sanitized_url = url[0:(match.start(0)+len(esri_string))]
    return sanitized_url
[ "def", "get_sanitized_endpoint", "(", "url", ")", ":", "# sanitize esri", "sanitized_url", "=", "url", ".", "rstrip", "(", ")", "esri_string", "=", "'/rest/services'", "if", "esri_string", "in", "url", ":", "match", "=", "re", ".", "search", "(", "esri_string", ",", "sanitized_url", ")", "sanitized_url", "=", "url", "[", "0", ":", "(", "match", ".", "start", "(", "0", ")", "+", "len", "(", "esri_string", ")", ")", "]", "return", "sanitized_url" ]
Sanitize an endpoint, such as removing unneeded parameters
[ "Sanitize", "an", "endpoint", "as", "removing", "unneeded", "parameters" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L435-L445
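Concretely, the sanitizer truncates everything after the first '/rest/services' and otherwise only strips trailing whitespace; a sketch with example URLs (the import path is assumed):

# Hypothetical usage of get_sanitized_endpoint.
from hypermap.aggregator.utils import get_sanitized_endpoint

print(get_sanitized_endpoint('http://example.com/arcgis/rest/services/foo/MapServer?f=json'))
# -> 'http://example.com/arcgis/rest/services'
print(get_sanitized_endpoint('http://example.com/geoserver/wms  '))
# -> 'http://example.com/geoserver/wms'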
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_esri_service_name
def get_esri_service_name(url):
    """
    A method to get a service name from an esri endpoint.
    For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
    Will return: myservice/mylayer
    """
    result = re.search('rest/services/(.*)/[MapServer|ImageServer]', url)
    if result is None:
        return url
    else:
        return result.group(1)
python
def get_esri_service_name(url):
    """
    A method to get a service name from an esri endpoint.
    For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
    Will return: myservice/mylayer
    """
    result = re.search('rest/services/(.*)/[MapServer|ImageServer]', url)
    if result is None:
        return url
    else:
        return result.group(1)
[ "def", "get_esri_service_name", "(", "url", ")", ":", "result", "=", "re", ".", "search", "(", "'rest/services/(.*)/[MapServer|ImageServer]'", ",", "url", ")", "if", "result", "is", "None", ":", "return", "url", "else", ":", "return", "result", ".", "group", "(", "1", ")" ]
A method to get a service name from an esri endpoint. For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json Will return: myservice/mylayer
[ "A", "method", "to", "get", "a", "service", "name", "from", "an", "esri", "endpoint", ".", "For", "example", ":", "http", ":", "//", "example", ".", "com", "/", "arcgis", "/", "rest", "/", "services", "/", "myservice", "/", "mylayer", "/", "MapServer", "/", "?f", "=", "json", "Will", "return", ":", "myservice", "/", "mylayer" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L448-L458
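One detail worth noting: [MapServer|ImageServer] in the pattern is a character class (it matches a single character), not an alternation, but combined with the greedy group it still captures the intended segment for typical ArcGIS URLs. A sketch mirroring the docstring example (import path assumed):

# Hypothetical usage of get_esri_service_name.
from hypermap.aggregator.utils import get_esri_service_name

name = get_esri_service_name('http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json')
print(name)  # -> 'myservice/mylayer'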
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_esri_extent
def get_esri_extent(esriobj):
    """
    Get the extent of an ESRI resource
    """
    extent = None
    srs = None

    if 'fullExtent' in esriobj._json_struct:
        extent = esriobj._json_struct['fullExtent']

    if 'extent' in esriobj._json_struct:
        extent = esriobj._json_struct['extent']

    try:
        srs = extent['spatialReference']['wkid']
    except KeyError, err:
        LOGGER.error(err, exc_info=True)

    return [extent, srs]
python
def get_esri_extent(esriobj):
    """
    Get the extent of an ESRI resource
    """
    extent = None
    srs = None

    if 'fullExtent' in esriobj._json_struct:
        extent = esriobj._json_struct['fullExtent']

    if 'extent' in esriobj._json_struct:
        extent = esriobj._json_struct['extent']

    try:
        srs = extent['spatialReference']['wkid']
    except KeyError, err:
        LOGGER.error(err, exc_info=True)

    return [extent, srs]
[ "def", "get_esri_extent", "(", "esriobj", ")", ":", "extent", "=", "None", "srs", "=", "None", "if", "'fullExtent'", "in", "esriobj", ".", "_json_struct", ":", "extent", "=", "esriobj", ".", "_json_struct", "[", "'fullExtent'", "]", "if", "'extent'", "in", "esriobj", ".", "_json_struct", ":", "extent", "=", "esriobj", ".", "_json_struct", "[", "'extent'", "]", "try", ":", "srs", "=", "extent", "[", "'spatialReference'", "]", "[", "'wkid'", "]", "except", "KeyError", ",", "err", ":", "LOGGER", ".", "error", "(", "err", ",", "exc_info", "=", "True", ")", "return", "[", "extent", ",", "srs", "]" ]
Get the extent of an ESRI resource
[ "Get", "the", "extent", "of", "an", "ESRI", "resource" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L461-L479
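A sketch of the return shape; the fake object below is a hypothetical stand-in for an arcrest instance and assumes nothing beyond the _json_struct attribute the function reads:

# Hypothetical stand-in for an arcrest object.
from hypermap.aggregator.utils import get_esri_extent

class FakeEsriObject(object):
    _json_struct = {
        'fullExtent': {
            'xmin': -180.0, 'ymin': -90.0, 'xmax': 180.0, 'ymax': 90.0,
            'spatialReference': {'wkid': 4326},
        }
    }

extent, srs = get_esri_extent(FakeEsriObject())
print(srs)  # -> 4326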
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
bbox2wktpolygon
def bbox2wktpolygon(bbox):
    """
    Return OGC WKT Polygon of a simple bbox list of strings
    """
    minx = float(bbox[0])
    miny = float(bbox[1])
    maxx = float(bbox[2])
    maxy = float(bbox[3])
    return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
        % (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
python
def bbox2wktpolygon(bbox):
    """
    Return OGC WKT Polygon of a simple bbox list of strings
    """
    minx = float(bbox[0])
    miny = float(bbox[1])
    maxx = float(bbox[2])
    maxy = float(bbox[3])
    return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
        % (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
[ "def", "bbox2wktpolygon", "(", "bbox", ")", ":", "minx", "=", "float", "(", "bbox", "[", "0", "]", ")", "miny", "=", "float", "(", "bbox", "[", "1", "]", ")", "maxx", "=", "float", "(", "bbox", "[", "2", "]", ")", "maxy", "=", "float", "(", "bbox", "[", "3", "]", ")", "return", "'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))'", "%", "(", "minx", ",", "miny", ",", "minx", ",", "maxy", ",", "maxx", ",", "maxy", ",", "maxx", ",", "miny", ",", "minx", ",", "miny", ")" ]
Return OGC WKT Polygon of a simple bbox list of strings
[ "Return", "OGC", "WKT", "Polygon", "of", "a", "simple", "bbox", "list", "of", "strings" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L503-L513
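A quick check of the WKT output; note the coordinates are rounded to two decimals by the %.2f format (import path assumed):

# Hypothetical usage of bbox2wktpolygon with a whole-world bbox.
from hypermap.aggregator.utils import bbox2wktpolygon

print(bbox2wktpolygon(['-180', '-90', '180', '90']))
# -> 'POLYGON((-180.00 -90.00, -180.00 90.00, 180.00 90.00, 180.00 -90.00, -180.00 -90.00))'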
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_solr_date
def get_solr_date(pydate, is_negative):
    """
    Returns a date in a valid Solr format from a string.
    """
    # check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
    try:
        if isinstance(pydate, datetime.datetime):
            solr_date = '%sZ' % pydate.isoformat()[0:19]
            if is_negative:
                LOGGER.debug('%s This layer has a negative date' % solr_date)
                solr_date = '-%s' % solr_date
            return solr_date
        else:
            return None
    except Exception, e:
        LOGGER.error(e, exc_info=True)
        return None
python
def get_solr_date(pydate, is_negative):
    """
    Returns a date in a valid Solr format from a string.
    """
    # check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
    try:
        if isinstance(pydate, datetime.datetime):
            solr_date = '%sZ' % pydate.isoformat()[0:19]
            if is_negative:
                LOGGER.debug('%s This layer has a negative date' % solr_date)
                solr_date = '-%s' % solr_date
            return solr_date
        else:
            return None
    except Exception, e:
        LOGGER.error(e, exc_info=True)
        return None
[ "def", "get_solr_date", "(", "pydate", ",", "is_negative", ")", ":", "# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ", "try", ":", "if", "isinstance", "(", "pydate", ",", "datetime", ".", "datetime", ")", ":", "solr_date", "=", "'%sZ'", "%", "pydate", ".", "isoformat", "(", ")", "[", "0", ":", "19", "]", "if", "is_negative", ":", "LOGGER", ".", "debug", "(", "'%s This layer has a negative date'", "%", "solr_date", ")", "solr_date", "=", "'-%s'", "%", "solr_date", "return", "solr_date", "else", ":", "return", "None", "except", "Exception", ",", "e", ":", "LOGGER", ".", "error", "(", "e", ",", "exc_info", "=", "True", ")", "return", "None" ]
Returns a date in a valid Solr format from a string.
[ "Returns", "a", "date", "in", "a", "valid", "Solr", "format", "from", "a", "string", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L516-L532
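A sketch of the BCE handling: the function only prepends a minus sign when is_negative is true. The module is Python 2, so the example sticks to version-neutral syntax (import path assumed):

# Hypothetical usage of get_solr_date.
import datetime
from hypermap.aggregator.utils import get_solr_date

d = datetime.datetime(500, 1, 1)
print(get_solr_date(d, False))  # -> '0500-01-01T00:00:00Z'
print(get_solr_date(d, True))   # -> '-0500-01-01T00:00:00Z'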
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
get_date
def get_date(layer):
    """
    Returns a custom date representation. A date can be detected or from metadata.
    It can be a range or a simple date in isoformat.
    """
    date = None
    sign = '+'
    date_type = 1
    layer_dates = layer.get_layer_dates()
    # we index the first date!
    if layer_dates:
        sign = layer_dates[0][0]
        date = layer_dates[0][1]
        date_type = layer_dates[0][2]
    if date is None:
        date = layer.created
    # layer date > 2300 is invalid for sure
    # TODO put this logic in date miner
    if date.year > 2300:
        date = None
    if date_type == 0:
        date_type = "Detected"
    if date_type == 1:
        date_type = "From Metadata"
    return get_solr_date(date, (sign == '-')), date_type
python
def get_date(layer):
    """
    Returns a custom date representation. A date can be detected or from metadata.
    It can be a range or a simple date in isoformat.
    """
    date = None
    sign = '+'
    date_type = 1
    layer_dates = layer.get_layer_dates()
    # we index the first date!
    if layer_dates:
        sign = layer_dates[0][0]
        date = layer_dates[0][1]
        date_type = layer_dates[0][2]
    if date is None:
        date = layer.created
    # layer date > 2300 is invalid for sure
    # TODO put this logic in date miner
    if date.year > 2300:
        date = None
    if date_type == 0:
        date_type = "Detected"
    if date_type == 1:
        date_type = "From Metadata"
    return get_solr_date(date, (sign == '-')), date_type
[ "def", "get_date", "(", "layer", ")", ":", "date", "=", "None", "sign", "=", "'+'", "date_type", "=", "1", "layer_dates", "=", "layer", ".", "get_layer_dates", "(", ")", "# we index the first date!", "if", "layer_dates", ":", "sign", "=", "layer_dates", "[", "0", "]", "[", "0", "]", "date", "=", "layer_dates", "[", "0", "]", "[", "1", "]", "date_type", "=", "layer_dates", "[", "0", "]", "[", "2", "]", "if", "date", "is", "None", ":", "date", "=", "layer", ".", "created", "# layer date > 2300 is invalid for sure", "# TODO put this logic in date miner", "if", "date", ".", "year", ">", "2300", ":", "date", "=", "None", "if", "date_type", "==", "0", ":", "date_type", "=", "\"Detected\"", "if", "date_type", "==", "1", ":", "date_type", "=", "\"From Metadata\"", "return", "get_solr_date", "(", "date", ",", "(", "sign", "==", "'-'", ")", ")", ",", "date_type" ]
Returns a custom date representation. A date can be detected or from metadata. It can be a range or a simple date in isoformat.
[ "Returns", "a", "custom", "date", "representation", ".", "A", "date", "can", "be", "detected", "or", "from", "metadata", ".", "It", "can", "be", "a", "range", "or", "a", "simple", "date", "in", "isoformat", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L535-L559
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
layer2dict
def layer2dict(layer):
    """
    Return a json representation for a layer.
    """
    category = None
    username = None

    # bbox must be valid before proceeding
    if not layer.has_valid_bbox():
        message = 'Layer id: %s has a not valid bbox' % layer.id
        return None, message

    # we can proceed safely
    bbox = [float(layer.bbox_x0), float(layer.bbox_y0), float(layer.bbox_x1), float(layer.bbox_y1)]
    minX = bbox[0]
    minY = bbox[1]
    maxX = bbox[2]
    maxY = bbox[3]
    # coords hack needed by solr
    if (minX < -180):
        minX = -180
    if (maxX > 180):
        maxX = 180
    if (minY < -90):
        minY = -90
    if (maxY > 90):
        maxY = 90
    rectangle = box(minX, minY, maxX, maxY)
    wkt = "ENVELOPE({:f},{:f},{:f},{:f})".format(minX, maxX, maxY, minY)
    halfWidth = (maxX - minX) / 2.0
    halfHeight = (maxY - minY) / 2.0
    area = (halfWidth * 2) * (halfHeight * 2)
    domain = get_domain(layer.service.url)
    if hasattr(layer, 'layerwm'):
        category = layer.layerwm.category
        username = layer.layerwm.username
    abstract = layer.abstract
    if abstract:
        abstract = strip_tags(layer.abstract)
    else:
        abstract = ''
    if layer.type == "WM":
        originator = username
    else:
        originator = domain
    layer_dict = {
        'id': layer.id,
        'uuid': str(layer.uuid),
        'type': 'Layer',
        'layer_id': layer.id,
        'name': layer.name,
        'title': layer.title,
        'layer_originator': originator,
        'service_id': layer.service.id,
        'service_type': layer.service.type,
        'layer_category': category,
        'layer_username': username,
        'url': layer.url,
        'keywords': [kw.name for kw in layer.keywords.all()],
        'reliability': layer.reliability,
        'recent_reliability': layer.recent_reliability,
        'last_status': layer.last_status,
        'is_public': layer.is_public,
        'is_valid': layer.is_valid,
        'availability': 'Online',
        'location': '{"layerInfoPage": "' + layer.get_absolute_url + '"}',
        'abstract': abstract,
        'domain_name': layer.service.get_domain
    }

    solr_date, date_type = get_date(layer)
    if solr_date is not None:
        layer_dict['layer_date'] = solr_date
        layer_dict['layer_datetype'] = date_type
    if bbox is not None:
        layer_dict['min_x'] = minX
        layer_dict['min_y'] = minY
        layer_dict['max_x'] = maxX
        layer_dict['max_y'] = maxY
        layer_dict['area'] = area
        layer_dict['bbox'] = wkt
        layer_dict['centroid_x'] = rectangle.centroid.x
        layer_dict['centroid_y'] = rectangle.centroid.y
    srs_list = [srs.encode('utf-8') for srs in layer.service.srs.values_list('code', flat=True)]
    layer_dict['srs'] = srs_list
    if layer.get_tile_url():
        layer_dict['tile_url'] = layer.get_tile_url()

    message = 'Layer %s successfully converted to json' % layer.id
    return layer_dict, message
python
def layer2dict(layer):
    """
    Return a json representation for a layer.
    """
    category = None
    username = None

    # bbox must be valid before proceeding
    if not layer.has_valid_bbox():
        message = 'Layer id: %s has a not valid bbox' % layer.id
        return None, message

    # we can proceed safely
    bbox = [float(layer.bbox_x0), float(layer.bbox_y0), float(layer.bbox_x1), float(layer.bbox_y1)]
    minX = bbox[0]
    minY = bbox[1]
    maxX = bbox[2]
    maxY = bbox[3]
    # coords hack needed by solr
    if (minX < -180):
        minX = -180
    if (maxX > 180):
        maxX = 180
    if (minY < -90):
        minY = -90
    if (maxY > 90):
        maxY = 90
    rectangle = box(minX, minY, maxX, maxY)
    wkt = "ENVELOPE({:f},{:f},{:f},{:f})".format(minX, maxX, maxY, minY)
    halfWidth = (maxX - minX) / 2.0
    halfHeight = (maxY - minY) / 2.0
    area = (halfWidth * 2) * (halfHeight * 2)
    domain = get_domain(layer.service.url)
    if hasattr(layer, 'layerwm'):
        category = layer.layerwm.category
        username = layer.layerwm.username
    abstract = layer.abstract
    if abstract:
        abstract = strip_tags(layer.abstract)
    else:
        abstract = ''
    if layer.type == "WM":
        originator = username
    else:
        originator = domain
    layer_dict = {
        'id': layer.id,
        'uuid': str(layer.uuid),
        'type': 'Layer',
        'layer_id': layer.id,
        'name': layer.name,
        'title': layer.title,
        'layer_originator': originator,
        'service_id': layer.service.id,
        'service_type': layer.service.type,
        'layer_category': category,
        'layer_username': username,
        'url': layer.url,
        'keywords': [kw.name for kw in layer.keywords.all()],
        'reliability': layer.reliability,
        'recent_reliability': layer.recent_reliability,
        'last_status': layer.last_status,
        'is_public': layer.is_public,
        'is_valid': layer.is_valid,
        'availability': 'Online',
        'location': '{"layerInfoPage": "' + layer.get_absolute_url + '"}',
        'abstract': abstract,
        'domain_name': layer.service.get_domain
    }

    solr_date, date_type = get_date(layer)
    if solr_date is not None:
        layer_dict['layer_date'] = solr_date
        layer_dict['layer_datetype'] = date_type
    if bbox is not None:
        layer_dict['min_x'] = minX
        layer_dict['min_y'] = minY
        layer_dict['max_x'] = maxX
        layer_dict['max_y'] = maxY
        layer_dict['area'] = area
        layer_dict['bbox'] = wkt
        layer_dict['centroid_x'] = rectangle.centroid.x
        layer_dict['centroid_y'] = rectangle.centroid.y
    srs_list = [srs.encode('utf-8') for srs in layer.service.srs.values_list('code', flat=True)]
    layer_dict['srs'] = srs_list
    if layer.get_tile_url():
        layer_dict['tile_url'] = layer.get_tile_url()

    message = 'Layer %s successfully converted to json' % layer.id
    return layer_dict, message
[ "def", "layer2dict", "(", "layer", ")", ":", "category", "=", "None", "username", "=", "None", "# bbox must be valid before proceeding", "if", "not", "layer", ".", "has_valid_bbox", "(", ")", ":", "message", "=", "'Layer id: %s has a not valid bbox'", "%", "layer", ".", "id", "return", "None", ",", "message", "# we can proceed safely", "bbox", "=", "[", "float", "(", "layer", ".", "bbox_x0", ")", ",", "float", "(", "layer", ".", "bbox_y0", ")", ",", "float", "(", "layer", ".", "bbox_x1", ")", ",", "float", "(", "layer", ".", "bbox_y1", ")", "]", "minX", "=", "bbox", "[", "0", "]", "minY", "=", "bbox", "[", "1", "]", "maxX", "=", "bbox", "[", "2", "]", "maxY", "=", "bbox", "[", "3", "]", "# coords hack needed by solr", "if", "(", "minX", "<", "-", "180", ")", ":", "minX", "=", "-", "180", "if", "(", "maxX", ">", "180", ")", ":", "maxX", "=", "180", "if", "(", "minY", "<", "-", "90", ")", ":", "minY", "=", "-", "90", "if", "(", "maxY", ">", "90", ")", ":", "maxY", "=", "90", "rectangle", "=", "box", "(", "minX", ",", "minY", ",", "maxX", ",", "maxY", ")", "wkt", "=", "\"ENVELOPE({:f},{:f},{:f},{:f})\"", ".", "format", "(", "minX", ",", "maxX", ",", "maxY", ",", "minY", ")", "halfWidth", "=", "(", "maxX", "-", "minX", ")", "/", "2.0", "halfHeight", "=", "(", "maxY", "-", "minY", ")", "/", "2.0", "area", "=", "(", "halfWidth", "*", "2", ")", "*", "(", "halfHeight", "*", "2", ")", "domain", "=", "get_domain", "(", "layer", ".", "service", ".", "url", ")", "if", "hasattr", "(", "layer", ",", "'layerwm'", ")", ":", "category", "=", "layer", ".", "layerwm", ".", "category", "username", "=", "layer", ".", "layerwm", ".", "username", "abstract", "=", "layer", ".", "abstract", "if", "abstract", ":", "abstract", "=", "strip_tags", "(", "layer", ".", "abstract", ")", "else", ":", "abstract", "=", "''", "if", "layer", ".", "type", "==", "\"WM\"", ":", "originator", "=", "username", "else", ":", "originator", "=", "domain", "layer_dict", "=", "{", "'id'", ":", "layer", ".", "id", ",", "'uuid'", ":", "str", "(", "layer", ".", "uuid", ")", ",", "'type'", ":", "'Layer'", ",", "'layer_id'", ":", "layer", ".", "id", ",", "'name'", ":", "layer", ".", "name", ",", "'title'", ":", "layer", ".", "title", ",", "'layer_originator'", ":", "originator", ",", "'service_id'", ":", "layer", ".", "service", ".", "id", ",", "'service_type'", ":", "layer", ".", "service", ".", "type", ",", "'layer_category'", ":", "category", ",", "'layer_username'", ":", "username", ",", "'url'", ":", "layer", ".", "url", ",", "'keywords'", ":", "[", "kw", ".", "name", "for", "kw", "in", "layer", ".", "keywords", ".", "all", "(", ")", "]", ",", "'reliability'", ":", "layer", ".", "reliability", ",", "'recent_reliability'", ":", "layer", ".", "recent_reliability", ",", "'last_status'", ":", "layer", ".", "last_status", ",", "'is_public'", ":", "layer", ".", "is_public", ",", "'is_valid'", ":", "layer", ".", "is_valid", ",", "'availability'", ":", "'Online'", ",", "'location'", ":", "'{\"layerInfoPage\": \"'", "+", "layer", ".", "get_absolute_url", "+", "'\"}'", ",", "'abstract'", ":", "abstract", ",", "'domain_name'", ":", "layer", ".", "service", ".", "get_domain", "}", "solr_date", ",", "date_type", "=", "get_date", "(", "layer", ")", "if", "solr_date", "is", "not", "None", ":", "layer_dict", "[", "'layer_date'", "]", "=", "solr_date", "layer_dict", "[", "'layer_datetype'", "]", "=", "date_type", "if", "bbox", "is", "not", "None", ":", "layer_dict", "[", "'min_x'", "]", "=", "minX", "layer_dict", "[", "'min_y'", "]", "=", "minY", 
"layer_dict", "[", "'max_x'", "]", "=", "maxX", "layer_dict", "[", "'max_y'", "]", "=", "maxY", "layer_dict", "[", "'area'", "]", "=", "area", "layer_dict", "[", "'bbox'", "]", "=", "wkt", "layer_dict", "[", "'centroid_x'", "]", "=", "rectangle", ".", "centroid", ".", "x", "layer_dict", "[", "'centroid_y'", "]", "=", "rectangle", ".", "centroid", ".", "y", "srs_list", "=", "[", "srs", ".", "encode", "(", "'utf-8'", ")", "for", "srs", "in", "layer", ".", "service", ".", "srs", ".", "values_list", "(", "'code'", ",", "flat", "=", "True", ")", "]", "layer_dict", "[", "'srs'", "]", "=", "srs_list", "if", "layer", ".", "get_tile_url", "(", ")", ":", "layer_dict", "[", "'tile_url'", "]", "=", "layer", ".", "get_tile_url", "(", ")", "message", "=", "'Layer %s successfully converted to json'", "%", "layer", ".", "id", "return", "layer_dict", ",", "message" ]
Return a json representation for a layer.
[ "Return", "a", "json", "representation", "for", "a", "layer", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L570-L661
cga-harvard/Hypermap-Registry
hypermap/aggregator/utils.py
detect_metadata_url_scheme
def detect_metadata_url_scheme(url):
    """detect whether a url is a Service type that HHypermap supports"""
    scheme = None
    url_lower = url.lower()

    if any(x in url_lower for x in ['wms', 'service=wms']):
        scheme = 'OGC:WMS'
    if any(x in url_lower for x in ['wmts', 'service=wmts']):
        scheme = 'OGC:WMTS'
    elif all(x in url for x in ['/MapServer', 'f=json']):
        scheme = 'ESRI:ArcGIS:MapServer'
    elif all(x in url for x in ['/ImageServer', 'f=json']):
        scheme = 'ESRI:ArcGIS:ImageServer'

    return scheme
python
def detect_metadata_url_scheme(url):
    """detect whether a url is a Service type that HHypermap supports"""
    scheme = None
    url_lower = url.lower()

    if any(x in url_lower for x in ['wms', 'service=wms']):
        scheme = 'OGC:WMS'
    if any(x in url_lower for x in ['wmts', 'service=wmts']):
        scheme = 'OGC:WMTS'
    elif all(x in url for x in ['/MapServer', 'f=json']):
        scheme = 'ESRI:ArcGIS:MapServer'
    elif all(x in url for x in ['/ImageServer', 'f=json']):
        scheme = 'ESRI:ArcGIS:ImageServer'

    return scheme
[ "def", "detect_metadata_url_scheme", "(", "url", ")", ":", "scheme", "=", "None", "url_lower", "=", "url", ".", "lower", "(", ")", "if", "any", "(", "x", "in", "url_lower", "for", "x", "in", "[", "'wms'", ",", "'service=wms'", "]", ")", ":", "scheme", "=", "'OGC:WMS'", "if", "any", "(", "x", "in", "url_lower", "for", "x", "in", "[", "'wmts'", ",", "'service=wmts'", "]", ")", ":", "scheme", "=", "'OGC:WMTS'", "elif", "all", "(", "x", "in", "url", "for", "x", "in", "[", "'/MapServer'", ",", "'f=json'", "]", ")", ":", "scheme", "=", "'ESRI:ArcGIS:MapServer'", "elif", "all", "(", "x", "in", "url", "for", "x", "in", "[", "'/ImageServer'", ",", "'f=json'", "]", ")", ":", "scheme", "=", "'ESRI:ArcGIS:ImageServer'", "return", "scheme" ]
detect whether a url is a Service type that HHypermap supports
[ "detect", "whether", "a", "url", "is", "a", "Service", "type", "that", "HHypermap", "supports" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L664-L679
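A sketch of the loose detection; the URLs are examples, and note that the OGC checks look at the lower-cased URL while the Esri checks are case-sensitive:

# Hypothetical usage of detect_metadata_url_scheme.
from hypermap.aggregator.utils import detect_metadata_url_scheme

print(detect_metadata_url_scheme('http://example.com/ows?service=WMS&request=GetCapabilities'))
# -> 'OGC:WMS'
print(detect_metadata_url_scheme('http://example.com/arcgis/rest/services/x/MapServer?f=json'))
# -> 'ESRI:ArcGIS:MapServer'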
cga-harvard/Hypermap-Registry
hypermap/aggregator/views.py
serialize_checks
def serialize_checks(check_set):
    """
    Serialize a check_set for raphael
    """
    check_set_list = []
    for check in check_set.all()[:25]:
        check_set_list.append(
            {
                'datetime': check.checked_datetime.isoformat(),
                'value': check.response_time,
                'success': 1 if check.success else 0
            }
        )
    return check_set_list
python
def serialize_checks(check_set):
    """
    Serialize a check_set for raphael
    """
    check_set_list = []
    for check in check_set.all()[:25]:
        check_set_list.append(
            {
                'datetime': check.checked_datetime.isoformat(),
                'value': check.response_time,
                'success': 1 if check.success else 0
            }
        )
    return check_set_list
[ "def", "serialize_checks", "(", "check_set", ")", ":", "check_set_list", "=", "[", "]", "for", "check", "in", "check_set", ".", "all", "(", ")", "[", ":", "25", "]", ":", "check_set_list", ".", "append", "(", "{", "'datetime'", ":", "check", ".", "checked_datetime", ".", "isoformat", "(", ")", ",", "'value'", ":", "check", ".", "response_time", ",", "'success'", ":", "1", "if", "check", ".", "success", "else", "0", "}", ")", "return", "check_set_list" ]
Serialize a check_set for raphael
[ "Serialize", "a", "check_set", "for", "raphael" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L45-L58
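Because the function only reads three attributes from each check, a fake queryset is enough to show the output shape; everything below is a hypothetical stand-in and assumes a configured Django environment for the import:

# Fake objects that mimic a Django check_set just enough for serialize_checks.
import datetime
from hypermap.aggregator.views import serialize_checks

class FakeCheck(object):
    checked_datetime = datetime.datetime(2016, 1, 1, 12, 0, 0)
    response_time = 0.42
    success = True

class FakeCheckSet(object):
    def all(self):
        return [FakeCheck()]

print(serialize_checks(FakeCheckSet()))
# -> [{'datetime': '2016-01-01T12:00:00', 'value': 0.42, 'success': 1}]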
cga-harvard/Hypermap-Registry
hypermap/aggregator/views.py
domains
def domains(request):
    """
    A page with number of services and layers faceted on domains.
    """
    url = ''
    query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'
    if settings.SEARCH_TYPE == 'elasticsearch':
        url = '%s/select?q=%s' % (settings.SEARCH_URL, query)
    if settings.SEARCH_TYPE == 'solr':
        url = '%s/solr/hypermap/select?q=%s' % (settings.SEARCH_URL, query)
    LOGGER.debug(url)
    response = urllib2.urlopen(url)
    data = response.read().replace('\n', '')
    # stats
    layers_count = Layer.objects.all().count()
    services_count = Service.objects.all().count()
    template = loader.get_template('aggregator/index.html')
    context = RequestContext(request, {
        'data': data,
        'layers_count': layers_count,
        'services_count': services_count,
    })
    return HttpResponse(template.render(context))
python
def domains(request):
    """
    A page with number of services and layers faceted on domains.
    """
    url = ''
    query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'
    if settings.SEARCH_TYPE == 'elasticsearch':
        url = '%s/select?q=%s' % (settings.SEARCH_URL, query)
    if settings.SEARCH_TYPE == 'solr':
        url = '%s/solr/hypermap/select?q=%s' % (settings.SEARCH_URL, query)
    LOGGER.debug(url)
    response = urllib2.urlopen(url)
    data = response.read().replace('\n', '')
    # stats
    layers_count = Layer.objects.all().count()
    services_count = Service.objects.all().count()
    template = loader.get_template('aggregator/index.html')
    context = RequestContext(request, {
        'data': data,
        'layers_count': layers_count,
        'services_count': services_count,
    })
    return HttpResponse(template.render(context))
[ "def", "domains", "(", "request", ")", ":", "url", "=", "''", "query", "=", "'*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'", "if", "settings", ".", "SEARCH_TYPE", "==", "'elasticsearch'", ":", "url", "=", "'%s/select?q=%s'", "%", "(", "settings", ".", "SEARCH_URL", ",", "query", ")", "if", "settings", ".", "SEARCH_TYPE", "==", "'solr'", ":", "url", "=", "'%s/solr/hypermap/select?q=%s'", "%", "(", "settings", ".", "SEARCH_URL", ",", "query", ")", "LOGGER", ".", "debug", "(", "url", ")", "response", "=", "urllib2", ".", "urlopen", "(", "url", ")", "data", "=", "response", ".", "read", "(", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", "# stats", "layers_count", "=", "Layer", ".", "objects", ".", "all", "(", ")", ".", "count", "(", ")", "services_count", "=", "Service", ".", "objects", ".", "all", "(", ")", ".", "count", "(", ")", "template", "=", "loader", ".", "get_template", "(", "'aggregator/index.html'", ")", "context", "=", "RequestContext", "(", "request", ",", "{", "'data'", ":", "data", ",", "'layers_count'", ":", "layers_count", ",", "'services_count'", ":", "services_count", ",", "}", ")", "return", "HttpResponse", "(", "template", ".", "render", "(", "context", ")", ")" ]
A page with number of services and layers faceted on domains.
[ "A", "page", "with", "number", "of", "services", "and", "layers", "faceted", "on", "domains", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L62-L84
cga-harvard/Hypermap-Registry
hypermap/aggregator/views.py
tasks_runner
def tasks_runner(request):
    """
    A page that lets the admin run global tasks.
    """
    # server info
    cached_layers_number = 0
    cached_layers = cache.get('layers')
    if cached_layers:
        cached_layers_number = len(cached_layers)

    cached_deleted_layers_number = 0
    cached_deleted_layers = cache.get('deleted_layers')
    if cached_deleted_layers:
        cached_deleted_layers_number = len(cached_deleted_layers)

    # task actions
    if request.method == 'POST':
        if 'check_all' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                check_all_services()
            else:
                check_all_services.delay()
        if 'index_all' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                index_all_layers()
            else:
                index_all_layers.delay()
        if 'index_cached' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                index_cached_layers()
            else:
                index_cached_layers.delay()
        if 'drop_cached' in request.POST:
            cache.set('layers', None)
            cache.set('deleted_layers', None)
        if 'clear_index' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                clear_index()
            else:
                clear_index.delay()
        if 'remove_index' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                unindex_layers_with_issues()
            else:
                unindex_layers_with_issues.delay()

    return render(
        request,
        'aggregator/tasks_runner.html',
        {
            'cached_layers_number': cached_layers_number,
            'cached_deleted_layers_number': cached_deleted_layers_number,
        }
    )
python
def tasks_runner(request):
    """
    A page that lets the admin run global tasks.
    """
    # server info
    cached_layers_number = 0
    cached_layers = cache.get('layers')
    if cached_layers:
        cached_layers_number = len(cached_layers)

    cached_deleted_layers_number = 0
    cached_deleted_layers = cache.get('deleted_layers')
    if cached_deleted_layers:
        cached_deleted_layers_number = len(cached_deleted_layers)

    # task actions
    if request.method == 'POST':
        if 'check_all' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                check_all_services()
            else:
                check_all_services.delay()
        if 'index_all' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                index_all_layers()
            else:
                index_all_layers.delay()
        if 'index_cached' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                index_cached_layers()
            else:
                index_cached_layers.delay()
        if 'drop_cached' in request.POST:
            cache.set('layers', None)
            cache.set('deleted_layers', None)
        if 'clear_index' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                clear_index()
            else:
                clear_index.delay()
        if 'remove_index' in request.POST:
            if settings.REGISTRY_SKIP_CELERY:
                unindex_layers_with_issues()
            else:
                unindex_layers_with_issues.delay()

    return render(
        request,
        'aggregator/tasks_runner.html',
        {
            'cached_layers_number': cached_layers_number,
            'cached_deleted_layers_number': cached_deleted_layers_number,
        }
    )
[ "def", "tasks_runner", "(", "request", ")", ":", "# server info", "cached_layers_number", "=", "0", "cached_layers", "=", "cache", ".", "get", "(", "'layers'", ")", "if", "cached_layers", ":", "cached_layers_number", "=", "len", "(", "cached_layers", ")", "cached_deleted_layers_number", "=", "0", "cached_deleted_layers", "=", "cache", ".", "get", "(", "'deleted_layers'", ")", "if", "cached_deleted_layers", ":", "cached_deleted_layers_number", "=", "len", "(", "cached_deleted_layers", ")", "# task actions", "if", "request", ".", "method", "==", "'POST'", ":", "if", "'check_all'", "in", "request", ".", "POST", ":", "if", "settings", ".", "REGISTRY_SKIP_CELERY", ":", "check_all_services", "(", ")", "else", ":", "check_all_services", ".", "delay", "(", ")", "if", "'index_all'", "in", "request", ".", "POST", ":", "if", "settings", ".", "REGISTRY_SKIP_CELERY", ":", "index_all_layers", "(", ")", "else", ":", "index_all_layers", ".", "delay", "(", ")", "if", "'index_cached'", "in", "request", ".", "POST", ":", "if", "settings", ".", "REGISTRY_SKIP_CELERY", ":", "index_cached_layers", "(", ")", "else", ":", "index_cached_layers", ".", "delay", "(", ")", "if", "'drop_cached'", "in", "request", ".", "POST", ":", "cache", ".", "set", "(", "'layers'", ",", "None", ")", "cache", ".", "set", "(", "'deleted_layers'", ",", "None", ")", "if", "'clear_index'", "in", "request", ".", "POST", ":", "if", "settings", ".", "REGISTRY_SKIP_CELERY", ":", "clear_index", "(", ")", "else", ":", "clear_index", ".", "delay", "(", ")", "if", "'remove_index'", "in", "request", ".", "POST", ":", "if", "settings", ".", "REGISTRY_SKIP_CELERY", ":", "unindex_layers_with_issues", "(", ")", "else", ":", "unindex_layers_with_issues", ".", "delay", "(", ")", "return", "render", "(", "request", ",", "'aggregator/tasks_runner.html'", ",", "{", "'cached_layers_number'", ":", "cached_layers_number", ",", "'cached_deleted_layers_number'", ":", "cached_deleted_layers_number", ",", "}", ")" ]
A page that lets the admin run global tasks.
[ "A", "page", "that", "let", "the", "admin", "to", "run", "global", "tasks", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L254-L307
cga-harvard/Hypermap-Registry
hypermap/aggregator/views.py
layer_mapproxy
def layer_mapproxy(request, catalog_slug, layer_uuid, path_info):
    """
    Get Layer with matching catalog and uuid
    """
    layer = get_object_or_404(Layer, uuid=layer_uuid, catalog__slug=catalog_slug)

    # for WorldMap layers we need to use the url of the layer
    if layer.service.type == 'Hypermap:WorldMap':
        layer.service.url = layer.url

    # Set up a mapproxy app for this particular layer
    mp, yaml_config = get_mapproxy(layer)

    query = request.META['QUERY_STRING']

    if len(query) > 0:
        path_info = path_info + '?' + query

    params = {}
    headers = {
        'X-Script-Name': '/registry/{0}/layer/{1}/map/'.format(catalog_slug, layer.id),
        'X-Forwarded-Host': request.META['HTTP_HOST'],
        'HTTP_HOST': request.META['HTTP_HOST'],
        'SERVER_NAME': request.META['SERVER_NAME'],
    }

    if path_info == '/config':
        response = HttpResponse(yaml_config, content_type='text/plain')
        return response

    # Get a response from MapProxy as if it was running standalone.
    mp_response = mp.get(path_info, params, headers)

    # Create a Django response from the MapProxy WSGI response.
    response = HttpResponse(mp_response.body, status=mp_response.status_int)
    for header, value in mp_response.headers.iteritems():
        response[header] = value

    return response
python
def layer_mapproxy(request, catalog_slug, layer_uuid, path_info):
    """
    Get Layer with matching catalog and uuid
    """
    layer = get_object_or_404(Layer, uuid=layer_uuid, catalog__slug=catalog_slug)
    # for WorldMap layers we need to use the url of the layer
    if layer.service.type == 'Hypermap:WorldMap':
        layer.service.url = layer.url
    # Set up a mapproxy app for this particular layer
    mp, yaml_config = get_mapproxy(layer)
    query = request.META['QUERY_STRING']
    if len(query) > 0:
        path_info = path_info + '?' + query
    params = {}
    headers = {
        'X-Script-Name': '/registry/{0}/layer/{1}/map/'.format(catalog_slug, layer.id),
        'X-Forwarded-Host': request.META['HTTP_HOST'],
        'HTTP_HOST': request.META['HTTP_HOST'],
        'SERVER_NAME': request.META['SERVER_NAME'],
    }
    if path_info == '/config':
        response = HttpResponse(yaml_config, content_type='text/plain')
        return response
    # Get a response from MapProxy as if it was running standalone.
    mp_response = mp.get(path_info, params, headers)
    # Create a Django response from the MapProxy WSGI response.
    response = HttpResponse(mp_response.body, status=mp_response.status_int)
    for header, value in mp_response.headers.items():
        response[header] = value
    return response
[ "def", "layer_mapproxy", "(", "request", ",", "catalog_slug", ",", "layer_uuid", ",", "path_info", ")", ":", "layer", "=", "get_object_or_404", "(", "Layer", ",", "uuid", "=", "layer_uuid", ",", "catalog__slug", "=", "catalog_slug", ")", "# for WorldMap layers we need to use the url of the layer", "if", "layer", ".", "service", ".", "type", "==", "'Hypermap:WorldMap'", ":", "layer", ".", "service", ".", "url", "=", "layer", ".", "url", "# Set up a mapproxy app for this particular layer", "mp", ",", "yaml_config", "=", "get_mapproxy", "(", "layer", ")", "query", "=", "request", ".", "META", "[", "'QUERY_STRING'", "]", "if", "len", "(", "query", ")", ">", "0", ":", "path_info", "=", "path_info", "+", "'?'", "+", "query", "params", "=", "{", "}", "headers", "=", "{", "'X-Script-Name'", ":", "'/registry/{0}/layer/{1}/map/'", ".", "format", "(", "catalog_slug", ",", "layer", ".", "id", ")", ",", "'X-Forwarded-Host'", ":", "request", ".", "META", "[", "'HTTP_HOST'", "]", ",", "'HTTP_HOST'", ":", "request", ".", "META", "[", "'HTTP_HOST'", "]", ",", "'SERVER_NAME'", ":", "request", ".", "META", "[", "'SERVER_NAME'", "]", ",", "}", "if", "path_info", "==", "'/config'", ":", "response", "=", "HttpResponse", "(", "yaml_config", ",", "content_type", "=", "'text/plain'", ")", "return", "response", "# Get a response from MapProxy as if it was running standalone.", "mp_response", "=", "mp", ".", "get", "(", "path_info", ",", "params", ",", "headers", ")", "# Create a Django response from the MapProxy WSGI response.", "response", "=", "HttpResponse", "(", "mp_response", ".", "body", ",", "status", "=", "mp_response", ".", "status_int", ")", "for", "header", ",", "value", "in", "mp_response", ".", "headers", ".", "iteritems", "(", ")", ":", "response", "[", "header", "]", "=", "value", "return", "response" ]
Get Layer with matching catalog and uuid
[ "Get", "Layer", "with", "matching", "catalog", "and", "uuid" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L310-L350
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_datetime
def parse_datetime(date_str): """ Parses a date string to date object. for BCE dates, only supports the year part. """ is_common_era = True date_str_parts = date_str.split("-") if date_str_parts and date_str_parts[0] == '': is_common_era = False # for now, only support BCE years # assume the datetime comes complete, but # when it comes only the year, add the missing datetime info: if len(date_str_parts) == 2: date_str = date_str + "-01-01T00:00:00Z" parsed_datetime = { 'is_common_era': is_common_era, 'parsed_datetime': None } if is_common_era: if date_str == '*': return parsed_datetime # open ended. default = datetime.datetime.now().replace( hour=0, minute=0, second=0, microsecond=0, day=1, month=1 ) parsed_datetime['parsed_datetime'] = parse(date_str, default=default) return parsed_datetime parsed_datetime['parsed_datetime'] = date_str return parsed_datetime
python
def parse_datetime(date_str): """ Parses a date string to date object. for BCE dates, only supports the year part. """ is_common_era = True date_str_parts = date_str.split("-") if date_str_parts and date_str_parts[0] == '': is_common_era = False # for now, only support BCE years # assume the datetime comes complete, but # when it comes only the year, add the missing datetime info: if len(date_str_parts) == 2: date_str = date_str + "-01-01T00:00:00Z" parsed_datetime = { 'is_common_era': is_common_era, 'parsed_datetime': None } if is_common_era: if date_str == '*': return parsed_datetime # open ended. default = datetime.datetime.now().replace( hour=0, minute=0, second=0, microsecond=0, day=1, month=1 ) parsed_datetime['parsed_datetime'] = parse(date_str, default=default) return parsed_datetime parsed_datetime['parsed_datetime'] = date_str return parsed_datetime
[ "def", "parse_datetime", "(", "date_str", ")", ":", "is_common_era", "=", "True", "date_str_parts", "=", "date_str", ".", "split", "(", "\"-\"", ")", "if", "date_str_parts", "and", "date_str_parts", "[", "0", "]", "==", "''", ":", "is_common_era", "=", "False", "# for now, only support BCE years", "# assume the datetime comes complete, but", "# when it comes only the year, add the missing datetime info:", "if", "len", "(", "date_str_parts", ")", "==", "2", ":", "date_str", "=", "date_str", "+", "\"-01-01T00:00:00Z\"", "parsed_datetime", "=", "{", "'is_common_era'", ":", "is_common_era", ",", "'parsed_datetime'", ":", "None", "}", "if", "is_common_era", ":", "if", "date_str", "==", "'*'", ":", "return", "parsed_datetime", "# open ended.", "default", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ",", "day", "=", "1", ",", "month", "=", "1", ")", "parsed_datetime", "[", "'parsed_datetime'", "]", "=", "parse", "(", "date_str", ",", "default", "=", "default", ")", "return", "parsed_datetime", "parsed_datetime", "[", "'parsed_datetime'", "]", "=", "date_str", "return", "parsed_datetime" ]
Parses a date string to a date object. For BCE dates, only the year part is supported.
[ "Parses", "a", "date", "string", "to", "date", "object", ".", "for", "BCE", "dates", "only", "supports", "the", "year", "part", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L24-L57
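A quick illustration of the three branches of parse_datetime; the return values are shown in comments, and the BCE value stays a string because no arithmetic is done on it:

parse_datetime("2013-03-01")
# -> {'is_common_era': True, 'parsed_datetime': datetime.datetime(2013, 3, 1, 0, 0)}
parse_datetime("-500")  # BCE year: "-500".split("-") gives ['', '500'], so a Jan 1 suffix is appended
# -> {'is_common_era': False, 'parsed_datetime': '-500-01-01T00:00:00Z'}
parse_datetime("*")     # open-ended bound
# -> {'is_common_era': True, 'parsed_datetime': None}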
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_solr_time_range_as_pair
def parse_solr_time_range_as_pair(time_filter): """ :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: (2013-03-01, 2013-05-01T00:00:00) """ pattern = "\\[(.*) TO (.*)\\]" matcher = re.search(pattern, time_filter) if matcher: return matcher.group(1), matcher.group(2) else: raise Exception("Regex {0} couldn't parse {1}".format(pattern, time_filter))
python
def parse_solr_time_range_as_pair(time_filter): """ :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: (2013-03-01, 2013-05-01T00:00:00) """ pattern = "\\[(.*) TO (.*)\\]" matcher = re.search(pattern, time_filter) if matcher: return matcher.group(1), matcher.group(2) else: raise Exception("Regex {0} couldn't parse {1}".format(pattern, time_filter))
[ "def", "parse_solr_time_range_as_pair", "(", "time_filter", ")", ":", "pattern", "=", "\"\\\\[(.*) TO (.*)\\\\]\"", "matcher", "=", "re", ".", "search", "(", "pattern", ",", "time_filter", ")", "if", "matcher", ":", "return", "matcher", ".", "group", "(", "1", ")", ",", "matcher", ".", "group", "(", "2", ")", "else", ":", "raise", "Exception", "(", "\"Regex {0} couldn't parse {1}\"", ".", "format", "(", "pattern", ",", "time_filter", ")", ")" ]
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: (2013-03-01, 2013-05-01T00:00:00)
[ ":", "param", "time_filter", ":", "[", "2013", "-", "03", "-", "01", "TO", "2013", "-", "05", "-", "01T00", ":", "00", ":", "00", "]", ":", "return", ":", "(", "2013", "-", "03", "-", "01", "2013", "-", "05", "-", "01T00", ":", "00", ":", "00", ")" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L60-L70
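The regex splits on the literal " TO " inside the brackets, so open bounds come back as plain asterisks:

parse_solr_time_range_as_pair("[2013-03-01 TO 2013-05-01T00:00:00]")
# -> ('2013-03-01', '2013-05-01T00:00:00')
parse_solr_time_range_as_pair("[* TO *]")
# -> ('*', '*')
# any string without the bracketed "[a TO b]" shape raises Exception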
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_datetime_range
def parse_datetime_range(time_filter): """ Parse the url param to python objects. From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. Validate in API: re.search("\\[(.*) TO (.*)\\]", value) :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0) """ if not time_filter: time_filter = "[* TO *]" start, end = parse_solr_time_range_as_pair(time_filter) start, end = parse_datetime(start), parse_datetime(end) return start, end
python
def parse_datetime_range(time_filter): """ Parse the url param to python objects. From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. Validate in API: re.search("\\[(.*) TO (.*)\\]", value) :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0) """ if not time_filter: time_filter = "[* TO *]" start, end = parse_solr_time_range_as_pair(time_filter) start, end = parse_datetime(start), parse_datetime(end) return start, end
[ "def", "parse_datetime_range", "(", "time_filter", ")", ":", "if", "not", "time_filter", ":", "time_filter", "=", "\"[* TO *]\"", "start", ",", "end", "=", "parse_solr_time_range_as_pair", "(", "time_filter", ")", "start", ",", "end", "=", "parse_datetime", "(", "start", ")", ",", "parse_datetime", "(", "end", ")", "return", "start", ",", "end" ]
Parse the url param to python objects. From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. Validate in API: re.search("\\[(.*) TO (.*)\\]", value) :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0)
[ "Parse", "the", "url", "param", "to", "python", "objects", ".", "From", "what", "time", "range", "to", "divide", "by", "a", ".", "time", ".", "gap", "into", "intervals", ".", "Defaults", "to", "q", ".", "time", "and", "otherwise", "90", "days", ".", "Validate", "in", "API", ":", "re", ".", "search", "(", "\\\\", "[", "(", ".", "*", ")", "TO", "(", ".", "*", ")", "\\\\", "]", "value", ")", ":", "param", "time_filter", ":", "[", "2013", "-", "03", "-", "01", "TO", "2013", "-", "05", "-", "01T00", ":", "00", ":", "00", "]", ":", "return", ":", "datetime", ".", "datetime", "(", "2013", "3", "1", "0", "0", ")", "datetime", ".", "datetime", "(", "2013", "5", "1", "0", "0", ")" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L73-L88
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_ISO8601
def parse_ISO8601(time_gap): """ P1D to (1, ("DAYS", isodate.Duration(days=1)). P1Y to (1, ("YEARS", isodate.Duration(years=1)). :param time_gap: ISO8601 string. :return: tuple with quantity and unit of time. """ matcher = None if time_gap.count("T"): units = { "H": ("HOURS", isodate.Duration(hours=1)), "M": ("MINUTES", isodate.Duration(minutes=1)), "S": ("SECONDS", isodate.Duration(seconds=1)) } matcher = re.search("PT(\d+)([HMS])", time_gap) if matcher: quantity = int(matcher.group(1)) unit = matcher.group(2) return quantity, units.get(unit) else: raise Exception("Does not match the pattern: {}".format(time_gap)) else: units = { "Y": ("YEARS", isodate.Duration(years=1)), "M": ("MONTHS", isodate.Duration(months=1)), "W": ("WEEKS", isodate.Duration(weeks=1)), "D": ("DAYS", isodate.Duration(days=1)) } matcher = re.search("P(\d+)([YMWD])", time_gap) if matcher: quantity = int(matcher.group(1)) unit = matcher.group(2) else: raise Exception("Does not match the pattern: {}".format(time_gap)) return quantity, units.get(unit)
python
def parse_ISO8601(time_gap): """ P1D to (1, ("DAYS", isodate.Duration(days=1)). P1Y to (1, ("YEARS", isodate.Duration(years=1)). :param time_gap: ISO8601 string. :return: tuple with quantity and unit of time. """ matcher = None if time_gap.count("T"): units = { "H": ("HOURS", isodate.Duration(hours=1)), "M": ("MINUTES", isodate.Duration(minutes=1)), "S": ("SECONDS", isodate.Duration(seconds=1)) } matcher = re.search("PT(\d+)([HMS])", time_gap) if matcher: quantity = int(matcher.group(1)) unit = matcher.group(2) return quantity, units.get(unit) else: raise Exception("Does not match the pattern: {}".format(time_gap)) else: units = { "Y": ("YEARS", isodate.Duration(years=1)), "M": ("MONTHS", isodate.Duration(months=1)), "W": ("WEEKS", isodate.Duration(weeks=1)), "D": ("DAYS", isodate.Duration(days=1)) } matcher = re.search("P(\d+)([YMWD])", time_gap) if matcher: quantity = int(matcher.group(1)) unit = matcher.group(2) else: raise Exception("Does not match the pattern: {}".format(time_gap)) return quantity, units.get(unit)
[ "def", "parse_ISO8601", "(", "time_gap", ")", ":", "matcher", "=", "None", "if", "time_gap", ".", "count", "(", "\"T\"", ")", ":", "units", "=", "{", "\"H\"", ":", "(", "\"HOURS\"", ",", "isodate", ".", "Duration", "(", "hours", "=", "1", ")", ")", ",", "\"M\"", ":", "(", "\"MINUTES\"", ",", "isodate", ".", "Duration", "(", "minutes", "=", "1", ")", ")", ",", "\"S\"", ":", "(", "\"SECONDS\"", ",", "isodate", ".", "Duration", "(", "seconds", "=", "1", ")", ")", "}", "matcher", "=", "re", ".", "search", "(", "\"PT(\\d+)([HMS])\"", ",", "time_gap", ")", "if", "matcher", ":", "quantity", "=", "int", "(", "matcher", ".", "group", "(", "1", ")", ")", "unit", "=", "matcher", ".", "group", "(", "2", ")", "return", "quantity", ",", "units", ".", "get", "(", "unit", ")", "else", ":", "raise", "Exception", "(", "\"Does not match the pattern: {}\"", ".", "format", "(", "time_gap", ")", ")", "else", ":", "units", "=", "{", "\"Y\"", ":", "(", "\"YEARS\"", ",", "isodate", ".", "Duration", "(", "years", "=", "1", ")", ")", ",", "\"M\"", ":", "(", "\"MONTHS\"", ",", "isodate", ".", "Duration", "(", "months", "=", "1", ")", ")", ",", "\"W\"", ":", "(", "\"WEEKS\"", ",", "isodate", ".", "Duration", "(", "weeks", "=", "1", ")", ")", ",", "\"D\"", ":", "(", "\"DAYS\"", ",", "isodate", ".", "Duration", "(", "days", "=", "1", ")", ")", "}", "matcher", "=", "re", ".", "search", "(", "\"P(\\d+)([YMWD])\"", ",", "time_gap", ")", "if", "matcher", ":", "quantity", "=", "int", "(", "matcher", ".", "group", "(", "1", ")", ")", "unit", "=", "matcher", ".", "group", "(", "2", ")", "else", ":", "raise", "Exception", "(", "\"Does not match the pattern: {}\"", ".", "format", "(", "time_gap", ")", ")", "return", "quantity", ",", "units", ".", "get", "(", "unit", ")" ]
P1D to (1, ("DAYS", isodate.Duration(days=1)). P1Y to (1, ("YEARS", isodate.Duration(years=1)). :param time_gap: ISO8601 string. :return: tuple with quantity and unit of time.
[ "P1D", "to", "(", "1", "(", "DAYS", "isodate", ".", "Duration", "(", "days", "=", "1", "))", ".", "P1Y", "to", "(", "1", "(", "YEARS", "isodate", ".", "Duration", "(", "years", "=", "1", "))", ".", ":", "param", "time_gap", ":", "ISO8601", "string", ".", ":", "return", ":", "tuple", "with", "quantity", "and", "unit", "of", "time", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L109-L145
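The leading "T" is what disambiguates the overloaded "M" designator, as a few calls show:

parse_ISO8601("P1D")   # -> (1, ('DAYS', isodate.Duration(days=1)))
parse_ISO8601("P1M")   # -> (1, ('MONTHS', isodate.Duration(months=1)))
parse_ISO8601("PT1M")  # -> (1, ('MINUTES', isodate.Duration(minutes=1)))
parse_ISO8601("P2W")   # -> (2, ('WEEKS', isodate.Duration(weeks=1)))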
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
compute_gap
def compute_gap(start, end, time_limit): """ Compute a gap that seems reasonable, considering natural time units and limit. # TODO: make it to be reasonable. # TODO: make it to be small unit of time sensitive. :param start: datetime :param end: datetime :param time_limit: gaps count :return: solr's format duration. """ if is_range_common_era(start, end): duration = end.get("parsed_datetime") - start.get("parsed_datetime") unit = int(math.ceil(duration.days / float(time_limit))) return "+{0}DAYS".format(unit) else: # at the moment can not do maths with BCE dates. # those dates are relatively big, so 100 years are reasonable in those cases. # TODO: calculate duration on those cases. return "+100YEARS"
python
def compute_gap(start, end, time_limit): """ Compute a gap that seems reasonable, considering natural time units and limit. # TODO: make it to be reasonable. # TODO: make it to be small unit of time sensitive. :param start: datetime :param end: datetime :param time_limit: gaps count :return: solr's format duration. """ if is_range_common_era(start, end): duration = end.get("parsed_datetime") - start.get("parsed_datetime") unit = int(math.ceil(duration.days / float(time_limit))) return "+{0}DAYS".format(unit) else: # at the moment can not do maths with BCE dates. # those dates are relatively big, so 100 years are reasonable in those cases. # TODO: calculate duration on those cases. return "+100YEARS"
[ "def", "compute_gap", "(", "start", ",", "end", ",", "time_limit", ")", ":", "if", "is_range_common_era", "(", "start", ",", "end", ")", ":", "duration", "=", "end", ".", "get", "(", "\"parsed_datetime\"", ")", "-", "start", ".", "get", "(", "\"parsed_datetime\"", ")", "unit", "=", "int", "(", "math", ".", "ceil", "(", "duration", ".", "days", "/", "float", "(", "time_limit", ")", ")", ")", "return", "\"+{0}DAYS\"", ".", "format", "(", "unit", ")", "else", ":", "# at the moment can not do maths with BCE dates.", "# those dates are relatively big, so 100 years are reasonable in those cases.", "# TODO: calculate duration on those cases.", "return", "\"+100YEARS\"" ]
Compute a gap that seems reasonable, considering natural time units and limit. # TODO: make it to be reasonable. # TODO: make it to be small unit of time sensitive. :param start: datetime :param end: datetime :param time_limit: gaps count :return: solr's format duration.
[ "Compute", "a", "gap", "that", "seems", "reasonable", "considering", "natural", "time", "units", "and", "limit", ".", "#", "TODO", ":", "make", "it", "to", "be", "reasonable", ".", "#", "TODO", ":", "make", "it", "to", "be", "small", "unit", "of", "time", "sensitive", ".", ":", "param", "start", ":", "datetime", ":", "param", "end", ":", "datetime", ":", "param", "time_limit", ":", "gaps", "count", ":", "return", ":", "solr", "s", "format", "duration", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L148-L166
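A worked example of the common-era branch: the gap is the span in days divided by the requested number of intervals, rounded up (the BCE short-circuit is shown for contrast):

start, end = parse_datetime_range("[2013-01-01 TO 2013-12-31]")
compute_gap(start, end, time_limit=100)
# span is 364 days; ceil(364 / 100.0) == 4 -> '+4DAYS'
compute_gap(parse_datetime("-500"), parse_datetime("2000"), 100)
# a BCE bound skips the date maths entirely -> '+100YEARS'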
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
gap_to_sorl
def gap_to_sorl(time_gap):
    """
    P1D to +1DAYS
    :param time_gap:
    :return: solr's format duration.
    """
    quantity, unit = parse_ISO8601(time_gap)
    if unit[0] == "WEEKS":
        return "+{0}DAYS".format(quantity * 7)
    else:
        return "+{0}{1}".format(quantity, unit[0])
python
def gap_to_sorl(time_gap):
    """
    P1D to +1DAYS
    :param time_gap:
    :return: solr's format duration.
    """
    quantity, unit = parse_ISO8601(time_gap)
    if unit[0] == "WEEKS":
        return "+{0}DAYS".format(quantity * 7)
    else:
        return "+{0}{1}".format(quantity, unit[0])
[ "def", "gap_to_sorl", "(", "time_gap", ")", ":", "quantity", ",", "unit", "=", "parse_ISO8601", "(", "time_gap", ")", "if", "unit", "[", "0", "]", "==", "\"WEEKS\"", ":", "return", "\"+{0}DAYS\"", ".", "format", "(", "quantity", "*", "7", ")", "else", ":", "return", "\"+{0}{1}\"", ".", "format", "(", "quantity", ",", "unit", "[", "0", "]", ")" ]
P1D to +1DAYS :param time_gap: :return: solr's format duration.
[ "P1D", "to", "+", "1DAY", ":", "param", "time_gap", ":", ":", "return", ":", "solr", "s", "format", "duration", "." ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L185-L195
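Since Solr has no WEEKS unit, weeks are the one case that gets rewritten:

gap_to_sorl("P1D")   # -> '+1DAYS'
gap_to_sorl("P2W")   # -> '+14DAYS'
gap_to_sorl("PT6H")  # -> '+6HOURS'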
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
request_time_facet
def request_time_facet(field, time_filter, time_gap, time_limit=100):
    """
    time facet query builder
    :param field: map the query to this field.
    :param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time
    ranges to return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
    A suggested value is 100. Note that a.time.gap effectively ignores this value. See Solr docs for more
    details on the query/response format.
    :param time_filter: From what time range to divide by a.time.gap into intervals. Defaults to q.time
    and otherwise 90 days.
    :param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit. The
    format is based on a subset of the ISO-8601 duration format.
    :return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f.
    manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
    """
    start, end = parse_datetime_range(time_filter)
    key_range_start = "f.{0}.facet.range.start".format(field)
    key_range_end = "f.{0}.facet.range.end".format(field)
    key_range_gap = "f.{0}.facet.range.gap".format(field)
    key_range_mincount = "f.{0}.facet.mincount".format(field)
    if time_gap:
        gap = gap_to_sorl(time_gap)
    else:
        gap = compute_gap(start, end, time_limit)
    value_range_start = start.get("parsed_datetime")
    if start.get("is_common_era"):
        value_range_start = start.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
    value_range_end = end.get("parsed_datetime")
    if end.get("is_common_era"):
        value_range_end = end.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
    value_range_gap = gap
    params = {
        'facet.range': field,
        key_range_start: value_range_start,
        key_range_end: value_range_end,
        key_range_gap: value_range_gap,
        key_range_mincount: 1
    }
    return params
python
def request_time_facet(field, time_filter, time_gap, time_limit=100):
    """
    time facet query builder
    :param field: map the query to this field.
    :param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time
    ranges to return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
    A suggested value is 100. Note that a.time.gap effectively ignores this value. See Solr docs for more
    details on the query/response format.
    :param time_filter: From what time range to divide by a.time.gap into intervals. Defaults to q.time
    and otherwise 90 days.
    :param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit. The
    format is based on a subset of the ISO-8601 duration format.
    :return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f.
    manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
    """
    start, end = parse_datetime_range(time_filter)
    key_range_start = "f.{0}.facet.range.start".format(field)
    key_range_end = "f.{0}.facet.range.end".format(field)
    key_range_gap = "f.{0}.facet.range.gap".format(field)
    key_range_mincount = "f.{0}.facet.mincount".format(field)
    if time_gap:
        gap = gap_to_sorl(time_gap)
    else:
        gap = compute_gap(start, end, time_limit)
    value_range_start = start.get("parsed_datetime")
    if start.get("is_common_era"):
        value_range_start = start.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
    value_range_end = end.get("parsed_datetime")
    if end.get("is_common_era"):
        value_range_end = end.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
    value_range_gap = gap
    params = {
        'facet.range': field,
        key_range_start: value_range_start,
        key_range_end: value_range_end,
        key_range_gap: value_range_gap,
        key_range_mincount: 1
    }
    return params
[ "def", "request_time_facet", "(", "field", ",", "time_filter", ",", "time_gap", ",", "time_limit", "=", "100", ")", ":", "start", ",", "end", "=", "parse_datetime_range", "(", "time_filter", ")", "key_range_start", "=", "\"f.{0}.facet.range.start\"", ".", "format", "(", "field", ")", "key_range_end", "=", "\"f.{0}.facet.range.end\"", ".", "format", "(", "field", ")", "key_range_gap", "=", "\"f.{0}.facet.range.gap\"", ".", "format", "(", "field", ")", "key_range_mincount", "=", "\"f.{0}.facet.mincount\"", ".", "format", "(", "field", ")", "if", "time_gap", ":", "gap", "=", "gap_to_sorl", "(", "time_gap", ")", "else", ":", "gap", "=", "compute_gap", "(", "start", ",", "end", ",", "time_limit", ")", "value_range_start", "=", "start", ".", "get", "(", "\"parsed_datetime\"", ")", "if", "start", ".", "get", "(", "\"is_common_era\"", ")", ":", "value_range_start", "=", "start", ".", "get", "(", "\"parsed_datetime\"", ")", ".", "isoformat", "(", ")", ".", "replace", "(", "\"+00:00\"", ",", "\"\"", ")", "+", "\"Z\"", "value_range_end", "=", "start", ".", "get", "(", "\"parsed_datetime\"", ")", "if", "end", ".", "get", "(", "\"is_common_era\"", ")", ":", "value_range_end", "=", "end", ".", "get", "(", "\"parsed_datetime\"", ")", ".", "isoformat", "(", ")", ".", "replace", "(", "\"+00:00\"", ",", "\"\"", ")", "+", "\"Z\"", "value_range_gap", "=", "gap", "params", "=", "{", "'facet.range'", ":", "field", ",", "key_range_start", ":", "value_range_start", ",", "key_range_end", ":", "value_range_end", ",", "key_range_gap", ":", "value_range_gap", ",", "key_range_mincount", ":", "1", "}", "return", "params" ]
time facet query builder :param field: map the query to this field. :param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time ranges to return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned. A suggested value is 100. Note that a.time.gap effectively ignores this value. See Solr docs for more details on the query/response format. :param time_filter: From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. :param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit. The format is based on a subset of the ISO-8601 duration format :return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f. manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
[ "time", "facet", "query", "builder", ":", "param", "field", ":", "map", "the", "query", "to", "this", "field", ".", ":", "param", "time_limit", ":", "Non", "-", "0", "triggers", "time", "/", "date", "range", "faceting", ".", "This", "value", "is", "the", "maximum", "number", "of", "time", "ranges", "to", "return", "when", "a", ".", "time", ".", "gap", "is", "unspecified", ".", "This", "is", "a", "soft", "maximum", ";", "less", "will", "usually", "be", "returned", ".", "A", "suggested", "value", "is", "100", ".", "Note", "that", "a", ".", "time", ".", "gap", "effectively", "ignores", "this", "value", ".", "See", "Solr", "docs", "for", "more", "details", "on", "the", "query", "/", "response", "format", ".", ":", "param", "time_filter", ":", "From", "what", "time", "range", "to", "divide", "by", "a", ".", "time", ".", "gap", "into", "intervals", ".", "Defaults", "to", "q", ".", "time", "and", "otherwise", "90", "days", ".", ":", "param", "time_gap", ":", "The", "consecutive", "time", "interval", "/", "gap", "for", "each", "time", "range", ".", "Ignores", "a", ".", "time", ".", "limit", ".", "The", "format", "is", "based", "on", "a", "subset", "of", "the", "ISO", "-", "8601", "duration", "format", ":", "return", ":", "facet", ".", "range", "=", "manufacturedate_dt&f", ".", "manufacturedate_dt", ".", "facet", ".", "range", ".", "start", "=", "2006", "-", "02", "-", "11T15", ":", "26", ":", "37Z&f", ".", "manufacturedate_dt", ".", "facet", ".", "range", ".", "end", "=", "2006", "-", "02", "-", "14T15", ":", "26", ":", "37Z&f", ".", "manufacturedate_dt", ".", "facet", ".", "range", ".", "gap", "=", "+", "1DAY" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L198-L244
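Putting the pieces together; layer_date is an illustrative field name, not necessarily one the schema defines:

request_time_facet("layer_date", "[2013-03-01 TO 2013-05-01]", "P1D")
# -> {'facet.range': 'layer_date',
#     'f.layer_date.facet.range.start': '2013-03-01T00:00:00Z',
#     'f.layer_date.facet.range.end': '2013-05-01T00:00:00Z',
#     'f.layer_date.facet.range.gap': '+1DAYS',
#     'f.layer_date.facet.mincount': 1}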
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_solr_geo_range_as_pair
def parse_solr_geo_range_as_pair(geo_box_str): """ :param geo_box_str: [-90,-180 TO 90,180] :return: ("-90,-180", "90,180") """ pattern = "\\[(.*) TO (.*)\\]" matcher = re.search(pattern, geo_box_str) if matcher: return matcher.group(1), matcher.group(2) else: raise Exception("Regex {0} could not parse {1}".format(pattern, geo_box_str))
python
def parse_solr_geo_range_as_pair(geo_box_str): """ :param geo_box_str: [-90,-180 TO 90,180] :return: ("-90,-180", "90,180") """ pattern = "\\[(.*) TO (.*)\\]" matcher = re.search(pattern, geo_box_str) if matcher: return matcher.group(1), matcher.group(2) else: raise Exception("Regex {0} could not parse {1}".format(pattern, geo_box_str))
[ "def", "parse_solr_geo_range_as_pair", "(", "geo_box_str", ")", ":", "pattern", "=", "\"\\\\[(.*) TO (.*)\\\\]\"", "matcher", "=", "re", ".", "search", "(", "pattern", ",", "geo_box_str", ")", "if", "matcher", ":", "return", "matcher", ".", "group", "(", "1", ")", ",", "matcher", ".", "group", "(", "2", ")", "else", ":", "raise", "Exception", "(", "\"Regex {0} could not parse {1}\"", ".", "format", "(", "pattern", ",", "geo_box_str", ")", ")" ]
:param geo_box_str: [-90,-180 TO 90,180] :return: ("-90,-180", "90,180")
[ ":", "param", "geo_box_str", ":", "[", "-", "90", "-", "180", "TO", "90", "180", "]", ":", "return", ":", "(", "-", "90", "-", "180", "90", "180", ")" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L247-L257
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
parse_geo_box
def parse_geo_box(geo_box_str): """ parses [-90,-180 TO 90,180] to a shapely.geometry.box :param geo_box_str: :return: """ from_point_str, to_point_str = parse_solr_geo_range_as_pair(geo_box_str) from_point = parse_lat_lon(from_point_str) to_point = parse_lat_lon(to_point_str) rectangle = box(from_point[0], from_point[1], to_point[0], to_point[1]) return rectangle
python
def parse_geo_box(geo_box_str): """ parses [-90,-180 TO 90,180] to a shapely.geometry.box :param geo_box_str: :return: """ from_point_str, to_point_str = parse_solr_geo_range_as_pair(geo_box_str) from_point = parse_lat_lon(from_point_str) to_point = parse_lat_lon(to_point_str) rectangle = box(from_point[0], from_point[1], to_point[0], to_point[1]) return rectangle
[ "def", "parse_geo_box", "(", "geo_box_str", ")", ":", "from_point_str", ",", "to_point_str", "=", "parse_solr_geo_range_as_pair", "(", "geo_box_str", ")", "from_point", "=", "parse_lat_lon", "(", "from_point_str", ")", "to_point", "=", "parse_lat_lon", "(", "to_point_str", ")", "rectangle", "=", "box", "(", "from_point", "[", "0", "]", ",", "from_point", "[", "1", "]", ",", "to_point", "[", "0", "]", ",", "to_point", "[", "1", "]", ")", "return", "rectangle" ]
parses [-90,-180 TO 90,180] to a shapely.geometry.box :param geo_box_str: :return:
[ "parses", "[", "-", "90", "-", "180", "TO", "90", "180", "]", "to", "a", "shapely", ".", "geometry", ".", "box", ":", "param", "geo_box_str", ":", ":", "return", ":" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L265-L276
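A sketch of the round trip, assuming parse_lat_lon (defined elsewhere in this module) returns a (lat, lon) float pair, so the box is built in that same order:

rect = parse_geo_box("[-90,-180 TO 90,180]")
rect.bounds  # -> (-90.0, -180.0, 90.0, 180.0)
rect.length  # perimeter: 2 * (180 + 360) == 1080.0, fed into the heatmap distErr heuristic below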
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
request_heatmap_facet
def request_heatmap_facet(field, hm_filter, hm_grid_level, hm_limit):
    """
    heatmap facet query builder
    :param field: map the query to this field.
    :param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
    :param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or
    coarser resolution than the most recent request. Ignores a.hm.limit.
    :param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on the number of
    cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can
    effectively ignore this value. The response heatmap contains a counts grid that can be null or contain
    null rows when all its values would be 0. See Solr docs for more details on the response format.
    :return:
    """
    if not hm_filter:
        hm_filter = '[-90,-180 TO 90,180]'
    params = {
        'facet': 'on',
        'facet.heatmap': field,
        'facet.heatmap.geom': hm_filter
    }
    if hm_grid_level:
        # note: aHmLimit is ignored in this case
        params['facet.heatmap.gridLevel'] = hm_grid_level
    else:
        # Calculate distErr that will approximate aHmLimit many cells as an upper bound
        rectangle = parse_geo_box(hm_filter)
        degrees_side_length = rectangle.length / 2
        cell_side_length = math.sqrt(float(hm_limit))
        cell_side_length_degrees = degrees_side_length / cell_side_length * 2
        params['facet.heatmap.distErr'] = str(float(cell_side_length_degrees))
    # TODO: not sure if the returned param values are correct.
    # get_params = urllib.urlencode(params)
    return params
python
def request_heatmap_facet(field, hm_filter, hm_grid_level, hm_limit):
    """
    heatmap facet query builder
    :param field: map the query to this field.
    :param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
    :param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or
    coarser resolution than the most recent request. Ignores a.hm.limit.
    :param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on the number of
    cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can
    effectively ignore this value. The response heatmap contains a counts grid that can be null or contain
    null rows when all its values would be 0. See Solr docs for more details on the response format.
    :return:
    """
    if not hm_filter:
        hm_filter = '[-90,-180 TO 90,180]'
    params = {
        'facet': 'on',
        'facet.heatmap': field,
        'facet.heatmap.geom': hm_filter
    }
    if hm_grid_level:
        # note: aHmLimit is ignored in this case
        params['facet.heatmap.gridLevel'] = hm_grid_level
    else:
        # Calculate distErr that will approximate aHmLimit many cells as an upper bound
        rectangle = parse_geo_box(hm_filter)
        degrees_side_length = rectangle.length / 2
        cell_side_length = math.sqrt(float(hm_limit))
        cell_side_length_degrees = degrees_side_length / cell_side_length * 2
        params['facet.heatmap.distErr'] = str(float(cell_side_length_degrees))
    # TODO: not sure if the returned param values are correct.
    # get_params = urllib.urlencode(params)
    return params
[ "def", "request_heatmap_facet", "(", "field", ",", "hm_filter", ",", "hm_grid_level", ",", "hm_limit", ")", ":", "if", "not", "hm_filter", ":", "hm_filter", "=", "'[-90,-180 TO 90,180]'", "params", "=", "{", "'facet'", ":", "'on'", ",", "'facet.heatmap'", ":", "field", ",", "'facet.heatmap.geom'", ":", "hm_filter", "}", "if", "hm_grid_level", ":", "# note: aHmLimit is ignored in this case", "params", "[", "'facet.heatmap.gridLevel'", "]", "=", "hm_grid_level", "else", ":", "# Calculate distErr that will approximate aHmLimit many cells as an upper bound", "rectangle", "=", "parse_geo_box", "(", "hm_filter", ")", "degrees_side_length", "=", "rectangle", ".", "length", "/", "2", "cell_side_length", "=", "math", ".", "sqrt", "(", "float", "(", "hm_limit", ")", ")", "cell_side_length_degrees", "=", "degrees_side_length", "/", "cell_side_length", "*", "2", "params", "[", "'facet.heatmap.distErr'", "]", "=", "str", "(", "float", "(", "cell_side_length_degrees", ")", ")", "# TODO: not sure about if returning correct param values.", "# get_params = urllib.urlencode(params)", "return", "params" ]
heatmap facet query builder :param field: map the query to this field. :param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world. :param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or coarser resolution than the most recent request. Ignores a.hm.limit. :param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on the number of cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can effectively ignore this value. The response heatmap contains a counts grid that can be null or contain null rows when all its values would be 0. See Solr docs for more details on the response format. :return:
[ "heatmap", "facet", "query", "builder", ":", "param", "field", ":", "map", "the", "query", "to", "this", "field", ".", ":", "param", "hm_filter", ":", "From", "what", "region", "to", "plot", "the", "heatmap", ".", "Defaults", "to", "q", ".", "geo", "or", "otherwise", "the", "world", ".", ":", "param", "hm_grid_level", ":", "To", "explicitly", "specify", "the", "grid", "level", "e", ".", "g", ".", "to", "let", "a", "user", "ask", "for", "greater", "or", "courser", "resolution", "than", "the", "most", "recent", "request", ".", "Ignores", "a", ".", "hm", ".", "limit", ".", ":", "param", "hm_limit", ":", "Non", "-", "0", "triggers", "heatmap", "/", "grid", "faceting", ".", "This", "number", "is", "a", "soft", "maximum", "on", "thenumber", "of", "cells", "it", "should", "have", ".", "There", "may", "be", "as", "few", "as", "1", "/", "4th", "this", "number", "in", "return", ".", "Note", "that", "a", ".", "hm", ".", "gridLevel", "can", "effectively", "ignore", "this", "value", ".", "The", "response", "heatmap", "contains", "a", "counts", "grid", "that", "can", "be", "null", "or", "contain", "null", "rows", "when", "all", "its", "values", "would", "be", "0", ".", "See", "Solr", "docs", "for", "more", "details", "on", "the", "response", "format", ".", ":", "return", ":" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L279-L315
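A worked example of the distErr heuristic for the whole world (field name illustrative):

request_heatmap_facet("layer_geoshape", "[-90,-180 TO 90,180]", None, 100)
# perimeter / 2 = 540 degrees; sqrt(100) = 10 cells per side;
# distErr = 540 / 10 * 2 = 108.0
# -> {'facet': 'on',
#     'facet.heatmap': 'layer_geoshape',
#     'facet.heatmap.geom': '[-90,-180 TO 90,180]',
#     'facet.heatmap.distErr': '108.0'}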
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
asterisk_to_min_max
def asterisk_to_min_max(field, time_filter, search_engine_endpoint, actual_params=None):
    """
    Translate [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
    :param field: map the stats to this field.
    :param time_filter: this is the value to be translated, e.g. "[* TO 2000]"
    :param search_engine_endpoint: solr core
    :param actual_params: (not implemented) to merge with other params.
    :return: translated time filter
    """
    if actual_params:
        raise NotImplementedError("actual_params")
    start, end = parse_solr_time_range_as_pair(time_filter)
    if start == '*' or end == '*':
        params_stats = {
            "q": "*:*",
            "rows": 0,
            "stats.field": field,
            "stats": "true",
            "wt": "json"
        }
        res_stats = requests.get(search_engine_endpoint, params=params_stats)
        if res_stats.ok:
            stats_date_field = res_stats.json()["stats"]["stats_fields"][field]
            date_min = stats_date_field["min"]
            date_max = stats_date_field["max"]
            if start != '*':
                date_min = start
            if end != '*':
                date_max = end
            time_filter = "[{0} TO {1}]".format(date_min, date_max)
    return time_filter
python
def asterisk_to_min_max(field, time_filter, search_engine_endpoint, actual_params=None):
    """
    Translate [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
    :param field: map the stats to this field.
    :param time_filter: this is the value to be translated, e.g. "[* TO 2000]"
    :param search_engine_endpoint: solr core
    :param actual_params: (not implemented) to merge with other params.
    :return: translated time filter
    """
    if actual_params:
        raise NotImplementedError("actual_params")
    start, end = parse_solr_time_range_as_pair(time_filter)
    if start == '*' or end == '*':
        params_stats = {
            "q": "*:*",
            "rows": 0,
            "stats.field": field,
            "stats": "true",
            "wt": "json"
        }
        res_stats = requests.get(search_engine_endpoint, params=params_stats)
        if res_stats.ok:
            stats_date_field = res_stats.json()["stats"]["stats_fields"][field]
            date_min = stats_date_field["min"]
            date_max = stats_date_field["max"]
            if start != '*':
                date_min = start
            if end != '*':
                date_max = end
            time_filter = "[{0} TO {1}]".format(date_min, date_max)
    return time_filter
[ "def", "asterisk_to_min_max", "(", "field", ",", "time_filter", ",", "search_engine_endpoint", ",", "actual_params", "=", "None", ")", ":", "if", "actual_params", ":", "raise", "NotImplemented", "(", "\"actual_params\"", ")", "start", ",", "end", "=", "parse_solr_time_range_as_pair", "(", "time_filter", ")", "if", "start", "==", "'*'", "or", "end", "==", "'*'", ":", "params_stats", "=", "{", "\"q\"", ":", "\"*:*\"", ",", "\"rows\"", ":", "0", ",", "\"stats.field\"", ":", "field", ",", "\"stats\"", ":", "\"true\"", ",", "\"wt\"", ":", "\"json\"", "}", "res_stats", "=", "requests", ".", "get", "(", "search_engine_endpoint", ",", "params", "=", "params_stats", ")", "if", "res_stats", ".", "ok", ":", "stats_date_field", "=", "res_stats", ".", "json", "(", ")", "[", "\"stats\"", "]", "[", "\"stats_fields\"", "]", "[", "field", "]", "date_min", "=", "stats_date_field", "[", "\"min\"", "]", "date_max", "=", "stats_date_field", "[", "\"max\"", "]", "if", "start", "!=", "'*'", ":", "date_min", "=", "start", "if", "end", "!=", "'*'", ":", "date_max", "=", "end", "time_filter", "=", "\"[{0} TO {1}]\"", ".", "format", "(", "date_min", ",", "date_max", ")", "return", "time_filter" ]
Translate [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE] :param field: map the stats to this field. :param time_filter: this is the value to be translated, e.g. "[* TO 2000]" :param search_engine_endpoint: solr core :param actual_params: (not implemented) to merge with other params. :return: translated time filter
[ "traduce", "[", "*", "TO", "*", "]", "to", "something", "like", "[", "MIN", "-", "INDEXED", "-", "DATE", "TO", "MAX", "-", "INDEXED", "-", "DATE", "]", ":", "param", "field", ":", "map", "the", "stats", "to", "this", "field", ".", ":", "param", "time_filter", ":", "this", "is", "the", "value", "to", "be", "translated", ".", "think", "in", "[", "*", "TO", "2000", "]", ":", "param", "search_engine_endpoint", ":", "solr", "core", ":", "param", "actual_params", ":", "(", "not", "implemented", ")", "to", "merge", "with", "other", "params", ".", ":", "return", ":", "translated", "time", "filter" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L322-L359
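The open bounds are resolved with a single Solr stats call; spelled out as a raw request, with the endpoint and field as placeholders:

# GET <solr-core>/select?q=*:*&rows=0&stats=true&stats.field=layer_date&wt=json
# The response's stats.stats_fields.layer_date min/max then replace the asterisks:
asterisk_to_min_max("layer_date", "[* TO 2000]", "http://localhost:8983/solr/hypermap/select")
# -> '[<indexed minimum> TO 2000]'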
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
get_service
def get_service(raw_xml):
    """
    Set a service object based on the XML metadata
    <dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
    /services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
    </dct:references>
    :param raw_xml:
    :return: Service
    """
    from pycsw.core.etree import etree
    parsed = etree.fromstring(raw_xml, etree.XMLParser(resolve_entities=False))
    # <dc:source>
    # http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
    # </dc:source>
    source_tag = parsed.find("{http://purl.org/dc/elements/1.1/}source")
    # <dc:format>OGC:WMS</dc:format>
    format_tag = parsed.find("{http://purl.org/dc/elements/1.1/}format")
    service_url = None
    service_type = None
    if hasattr(source_tag, 'text'):
        service_url = source_tag.text
    if hasattr(format_tag, 'text'):
        service_type = format_tag.text
    service, created = Service.objects.get_or_create(url=service_url,
                                                     is_monitored=False,
                                                     type=service_type)
    # TODO: don't hardcode SRS, get them from the parsed XML.
    srs, created = SpatialReferenceSystem.objects.get_or_create(code="EPSG:4326")
    service.srs.add(srs)
    return service
python
def get_service(raw_xml):
    """
    Set a service object based on the XML metadata
    <dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
    /services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
    </dct:references>
    :param raw_xml:
    :return: Service
    """
    from pycsw.core.etree import etree
    parsed = etree.fromstring(raw_xml, etree.XMLParser(resolve_entities=False))
    # <dc:source>
    # http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
    # </dc:source>
    source_tag = parsed.find("{http://purl.org/dc/elements/1.1/}source")
    # <dc:format>OGC:WMS</dc:format>
    format_tag = parsed.find("{http://purl.org/dc/elements/1.1/}format")
    service_url = None
    service_type = None
    if hasattr(source_tag, 'text'):
        service_url = source_tag.text
    if hasattr(format_tag, 'text'):
        service_type = format_tag.text
    service, created = Service.objects.get_or_create(url=service_url,
                                                     is_monitored=False,
                                                     type=service_type)
    # TODO: don't hardcode SRS, get them from the parsed XML.
    srs, created = SpatialReferenceSystem.objects.get_or_create(code="EPSG:4326")
    service.srs.add(srs)
    return service
[ "def", "get_service", "(", "raw_xml", ")", ":", "from", "pycsw", ".", "core", ".", "etree", "import", "etree", "parsed", "=", "etree", ".", "fromstring", "(", "raw_xml", ",", "etree", ".", "XMLParser", "(", "resolve_entities", "=", "False", ")", ")", "# <dc:format>OGC:WMS</dc:format>", "source_tag", "=", "parsed", ".", "find", "(", "\"{http://purl.org/dc/elements/1.1/}source\"", ")", "# <dc:source>", "# http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer", "# </dc:source>", "format_tag", "=", "parsed", ".", "find", "(", "\"{http://purl.org/dc/elements/1.1/}format\"", ")", "service_url", "=", "None", "service_type", "=", "None", "if", "hasattr", "(", "source_tag", ",", "'text'", ")", ":", "service_url", "=", "source_tag", ".", "text", "if", "hasattr", "(", "format_tag", ",", "'text'", ")", ":", "service_type", "=", "format_tag", ".", "text", "if", "hasattr", "(", "format_tag", ",", "'text'", ")", ":", "service_type", "=", "format_tag", ".", "text", "service", ",", "created", "=", "Service", ".", "objects", ".", "get_or_create", "(", "url", "=", "service_url", ",", "is_monitored", "=", "False", ",", "type", "=", "service_type", ")", "# TODO: dont hardcode SRS, get them from the parsed XML.", "srs", ",", "created", "=", "SpatialReferenceSystem", ".", "objects", ".", "get_or_create", "(", "code", "=", "\"EPSG:4326\"", ")", "service", ".", "srs", ".", "add", "(", "srs", ")", "return", "service" ]
Set a service object based on the XML metadata <dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis /services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer </dct:references> :param raw_xml: :return: Service
[ "Set", "a", "service", "object", "based", "on", "the", "XML", "metadata", "<dct", ":", "references", "scheme", "=", "OGC", ":", "WMS", ">", "http", ":", "//", "ngamaps", ".", "geointapps", ".", "org", "/", "arcgis", "/", "services", "/", "RIO", "/", "Rio_Foundation_Transportation", "/", "MapServer", "/", "WMSServer", "<", "/", "dct", ":", "references", ">", ":", "param", "instance", ":", ":", "return", ":", "Layer" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L56-L95
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.query_ids
def query_ids(self, ids): """ Query by list of identifiers """ results = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all() if len(results) == 0: # try services results = self._get_repo_filter(Service.objects).filter(uuid__in=ids).all() return results
python
def query_ids(self, ids): """ Query by list of identifiers """ results = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all() if len(results) == 0: # try services results = self._get_repo_filter(Service.objects).filter(uuid__in=ids).all() return results
[ "def", "query_ids", "(", "self", ",", "ids", ")", ":", "results", "=", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", ".", "filter", "(", "uuid__in", "=", "ids", ")", ".", "all", "(", ")", "if", "len", "(", "results", ")", "==", "0", ":", "# try services", "results", "=", "self", ".", "_get_repo_filter", "(", "Service", ".", "objects", ")", ".", "filter", "(", "uuid__in", "=", "ids", ")", ".", "all", "(", ")", "return", "results" ]
Query by list of identifiers
[ "Query", "by", "list", "of", "identifiers" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L152-L162
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.query_domain
def query_domain(self, domain, typenames, domainquerytype='list', count=False): """ Query by property domain values """ objects = self._get_repo_filter(Layer.objects) if domainquerytype == 'range': return [tuple(objects.aggregate(Min(domain), Max(domain)).values())] else: if count: return [(d[domain], d['%s__count' % domain]) for d in objects.values(domain).annotate(Count(domain))] else: return objects.values_list(domain).distinct()
python
def query_domain(self, domain, typenames, domainquerytype='list', count=False): """ Query by property domain values """ objects = self._get_repo_filter(Layer.objects) if domainquerytype == 'range': return [tuple(objects.aggregate(Min(domain), Max(domain)).values())] else: if count: return [(d[domain], d['%s__count' % domain]) for d in objects.values(domain).annotate(Count(domain))] else: return objects.values_list(domain).distinct()
[ "def", "query_domain", "(", "self", ",", "domain", ",", "typenames", ",", "domainquerytype", "=", "'list'", ",", "count", "=", "False", ")", ":", "objects", "=", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", "if", "domainquerytype", "==", "'range'", ":", "return", "[", "tuple", "(", "objects", ".", "aggregate", "(", "Min", "(", "domain", ")", ",", "Max", "(", "domain", ")", ")", ".", "values", "(", ")", ")", "]", "else", ":", "if", "count", ":", "return", "[", "(", "d", "[", "domain", "]", ",", "d", "[", "'%s__count'", "%", "domain", "]", ")", "for", "d", "in", "objects", ".", "values", "(", "domain", ")", ".", "annotate", "(", "Count", "(", "domain", ")", ")", "]", "else", ":", "return", "objects", ".", "values_list", "(", "domain", ")", ".", "distinct", "(", ")" ]
Query by property domain values
[ "Query", "by", "property", "domain", "values" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L164-L178
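Illustrative calls, where repo stands for an HHypermapRepository instance:

repo.query_domain("title", typenames=None)              # distinct values, as 1-tuples
repo.query_domain("title", typenames=None, count=True)  # [(value, occurrences), ...]
repo.query_domain("last_updated", typenames=None, domainquerytype="range")  # [(min, max)]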
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.query_insert
def query_insert(self, direction='max'): """ Query to get latest (default) or earliest update to repository """ if direction == 'min': return Layer.objects.aggregate( Min('last_updated'))['last_updated__min'].strftime('%Y-%m-%dT%H:%M:%SZ') return self._get_repo_filter(Layer.objects).aggregate( Max('last_updated'))['last_updated__max'].strftime('%Y-%m-%dT%H:%M:%SZ')
python
def query_insert(self, direction='max'): """ Query to get latest (default) or earliest update to repository """ if direction == 'min': return Layer.objects.aggregate( Min('last_updated'))['last_updated__min'].strftime('%Y-%m-%dT%H:%M:%SZ') return self._get_repo_filter(Layer.objects).aggregate( Max('last_updated'))['last_updated__max'].strftime('%Y-%m-%dT%H:%M:%SZ')
[ "def", "query_insert", "(", "self", ",", "direction", "=", "'max'", ")", ":", "if", "direction", "==", "'min'", ":", "return", "Layer", ".", "objects", ".", "aggregate", "(", "Min", "(", "'last_updated'", ")", ")", "[", "'last_updated__min'", "]", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "return", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", ".", "aggregate", "(", "Max", "(", "'last_updated'", ")", ")", "[", "'last_updated__max'", "]", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")" ]
Query to get latest (default) or earliest update to repository
[ "Query", "to", "get", "latest", "(", "default", ")", "or", "earliest", "update", "to", "repository" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L180-L188
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.query_source
def query_source(self, source): """ Query by source """ return self._get_repo_filter(Layer.objects).filter(url=source)
python
def query_source(self, source): """ Query by source """ return self._get_repo_filter(Layer.objects).filter(url=source)
[ "def", "query_source", "(", "self", ",", "source", ")", ":", "return", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", ".", "filter", "(", "url", "=", "source", ")" ]
Query by source
[ "Query", "by", "source" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L190-L194
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.query
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0): """ Query records from underlying repository """ # run the raw query and get total # we want to exclude layers which are not valid, as it is done in the search engine if 'where' in constraint: # GetRecords with constraint query = self._get_repo_filter(Layer.objects).filter( is_valid=True).extra(where=[constraint['where']], params=constraint['values']) else: # GetRecords sans constraint query = self._get_repo_filter(Layer.objects).filter(is_valid=True) total = query.count() # apply sorting, limit and offset if sortby is not None: if 'spatial' in sortby and sortby['spatial']: # spatial sort desc = False if sortby['order'] == 'DESC': desc = True query = query.all() return [str(total), sorted(query, key=lambda x: float(util.get_geometry_area(getattr(x, sortby['propertyname']))), reverse=desc, )[startposition:startposition+int(maxrecords)]] else: if sortby['order'] == 'DESC': pname = '-%s' % sortby['propertyname'] else: pname = sortby['propertyname'] return [str(total), query.order_by(pname)[startposition:startposition+int(maxrecords)]] else: # no sort return [str(total), query.all()[startposition:startposition+int(maxrecords)]]
python
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0): """ Query records from underlying repository """ # run the raw query and get total # we want to exclude layers which are not valid, as it is done in the search engine if 'where' in constraint: # GetRecords with constraint query = self._get_repo_filter(Layer.objects).filter( is_valid=True).extra(where=[constraint['where']], params=constraint['values']) else: # GetRecords sans constraint query = self._get_repo_filter(Layer.objects).filter(is_valid=True) total = query.count() # apply sorting, limit and offset if sortby is not None: if 'spatial' in sortby and sortby['spatial']: # spatial sort desc = False if sortby['order'] == 'DESC': desc = True query = query.all() return [str(total), sorted(query, key=lambda x: float(util.get_geometry_area(getattr(x, sortby['propertyname']))), reverse=desc, )[startposition:startposition+int(maxrecords)]] else: if sortby['order'] == 'DESC': pname = '-%s' % sortby['propertyname'] else: pname = sortby['propertyname'] return [str(total), query.order_by(pname)[startposition:startposition+int(maxrecords)]] else: # no sort return [str(total), query.all()[startposition:startposition+int(maxrecords)]]
[ "def", "query", "(", "self", ",", "constraint", ",", "sortby", "=", "None", ",", "typenames", "=", "None", ",", "maxrecords", "=", "10", ",", "startposition", "=", "0", ")", ":", "# run the raw query and get total", "# we want to exclude layers which are not valid, as it is done in the search engine", "if", "'where'", "in", "constraint", ":", "# GetRecords with constraint", "query", "=", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", ".", "filter", "(", "is_valid", "=", "True", ")", ".", "extra", "(", "where", "=", "[", "constraint", "[", "'where'", "]", "]", ",", "params", "=", "constraint", "[", "'values'", "]", ")", "else", ":", "# GetRecords sans constraint", "query", "=", "self", ".", "_get_repo_filter", "(", "Layer", ".", "objects", ")", ".", "filter", "(", "is_valid", "=", "True", ")", "total", "=", "query", ".", "count", "(", ")", "# apply sorting, limit and offset", "if", "sortby", "is", "not", "None", ":", "if", "'spatial'", "in", "sortby", "and", "sortby", "[", "'spatial'", "]", ":", "# spatial sort", "desc", "=", "False", "if", "sortby", "[", "'order'", "]", "==", "'DESC'", ":", "desc", "=", "True", "query", "=", "query", ".", "all", "(", ")", "return", "[", "str", "(", "total", ")", ",", "sorted", "(", "query", ",", "key", "=", "lambda", "x", ":", "float", "(", "util", ".", "get_geometry_area", "(", "getattr", "(", "x", ",", "sortby", "[", "'propertyname'", "]", ")", ")", ")", ",", "reverse", "=", "desc", ",", ")", "[", "startposition", ":", "startposition", "+", "int", "(", "maxrecords", ")", "]", "]", "else", ":", "if", "sortby", "[", "'order'", "]", "==", "'DESC'", ":", "pname", "=", "'-%s'", "%", "sortby", "[", "'propertyname'", "]", "else", ":", "pname", "=", "sortby", "[", "'propertyname'", "]", "return", "[", "str", "(", "total", ")", ",", "query", ".", "order_by", "(", "pname", ")", "[", "startposition", ":", "startposition", "+", "int", "(", "maxrecords", ")", "]", "]", "else", ":", "# no sort", "return", "[", "str", "(", "total", ")", ",", "query", ".", "all", "(", ")", "[", "startposition", ":", "startposition", "+", "int", "(", "maxrecords", ")", "]", "]" ]
Query records from underlying repository
[ "Query", "records", "from", "underlying", "repository" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L196-L231
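For orientation, a minimal usage sketch of query() as inferred from the body above; `repo` stands in for a hypothetical, already-configured HHypermapRepository instance, and the constraint/sortby shapes are read off the code rather than a documented API:

# Hypothetical call; the constraint dict carries a raw SQL WHERE fragment
# plus bound parameters, exactly as query() forwards them to QuerySet.extra().
constraint = {'where': 'title LIKE %s', 'values': ['%ocean%']}
sortby = {'propertyname': 'title', 'order': 'ASC', 'spatial': False}
total, records = repo.query(constraint, sortby=sortby, maxrecords=10, startposition=0)
print(total)            # total match count, returned as a string
for layer in records:   # at most maxrecords Layer objects
    print(layer.title)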
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.insert
def insert(self, resourcetype, source, insert_date=None):
    """
    Insert a record into the repository
    """
    caller = inspect.stack()[1][3]
    if caller == 'transaction':  # insert of Layer
        hhclass = 'Layer'
        source = resourcetype
        resourcetype = resourcetype.csw_schema
    else:  # insert of service
        hhclass = 'Service'
        if resourcetype not in HYPERMAP_SERVICE_TYPES.keys():
            raise RuntimeError('Unsupported Service Type')
    return self._insert_or_update(resourcetype, source, mode='insert', hhclass=hhclass)
python
Insert a record into the repository
[ "Insert", "a", "record", "into", "the", "repository" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L233-L249
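The caller-detection trick in insert() is easy to miss: inspect.stack()[1][3] is the name of the calling function, which the method uses to tell a CSW transaction (Layer insert) apart from a plain service insert. A self-contained demonstration of just that mechanism:

import inspect

def insert():
    # Name of the function one frame up the call stack.
    caller = inspect.stack()[1][3]
    return 'Layer path' if caller == 'transaction' else 'Service path'

def transaction():
    return insert()

print(transaction())  # -> Layer path
print(insert())       # -> Service path (caller here is '<module>')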
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository._insert_or_update
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
    """
    Insert or update a record in the repository
    """
    keywords = []
    if self.filter is not None:
        catalog = Catalog.objects.get(id=int(self.filter.split()[-1]))

    try:
        if hhclass == 'Layer':
            # TODO: better way of figuring out duplicates
            match = Layer.objects.filter(name=source.name, title=source.title,
                                         abstract=source.abstract, is_monitored=False)
            matches = match.all()
            if matches:
                if mode == 'insert':
                    raise RuntimeError('HHypermap error: Layer %d \'%s\' already exists' % (
                        matches[0].id, source.title))
                elif mode == 'update':
                    match.update(
                        name=source.name,
                        title=source.title,
                        abstract=source.abstract,
                        is_monitored=False,
                        xml=source.xml,
                        wkt_geometry=source.wkt_geometry,
                        anytext=util.get_anytext([source.title, source.abstract, source.keywords_csv])
                    )
            service = get_service(source.xml)
            res, keywords = create_layer_from_metadata_xml(resourcetype, source.xml,
                                                           monitor=False, service=service,
                                                           catalog=catalog)
            res.save()
            LOGGER.debug('Indexing layer with id %s on search engine' % res.uuid)
            index_layer(res.id, use_cache=True)
        else:
            if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
                res = Endpoint(url=source, catalog=catalog)
            else:
                res = Service(type=HYPERMAP_SERVICE_TYPES[resourcetype], url=source,
                              catalog=catalog)
            res.save()
            if keywords:
                for kw in keywords:
                    res.keywords.add(kw)
    except Exception as err:
        raise RuntimeError('HHypermap error: %s' % err)

    # return a list of ids that were inserted or updated
    ids = []
    if hhclass == 'Layer':
        ids.append({'identifier': res.uuid, 'title': res.title})
    else:
        if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
            for res in Endpoint.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.url})
        else:
            for res in Service.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.title})
    return ids
python
Insert or update a record in the repository
[ "Insert", "or", "update", "a", "record", "in", "the", "repository" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L251-L321
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository.delete
def delete(self, constraint):
    """
    Delete a record from the repository
    """
    results = self._get_repo_filter(Service.objects).extra(
        where=[constraint['where']], params=constraint['values']).all()
    deleted = len(results)
    results.delete()
    return deleted
python
Delete a record from the repository
[ "Delete", "a", "record", "from", "the", "repository" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L323-L332
cga-harvard/Hypermap-Registry
hypermap/search/pycsw_plugin.py
HHypermapRepository._get_repo_filter
def _get_repo_filter(self, query):
    """
    Apply repository wide side filter / mask query
    """
    if self.filter is not None:
        return query.extra(where=[self.filter])
    return query
python
Apply repository wide side filter / mask query
[ "Apply", "repository", "wide", "side", "filter", "/", "mask", "query" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L334-L340
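query(), delete() and _get_repo_filter() above all share one pattern: splicing a raw SQL WHERE fragment into a Django QuerySet via extra(). A minimal sketch of that pattern; the import path, WHERE fragment and parameters are illustrative assumptions, not taken from the repository:

from hypermap.aggregator.models import Layer  # import path assumed for illustration

qs = Layer.objects.filter(is_valid=True).extra(
    where=['title LIKE %s'],  # raw SQL fragment, as in constraint['where']
    params=['%ocean%'],       # bound parameters, as in constraint['values']
)
print(qs.count())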
hni14/jismesh
jismesh/utils.py
to_meshcode
def to_meshcode(lat, lon, level):
    """Compute the regional mesh code (JIS grid square code) of the specified level from latitude and longitude.

    Args:
        lat: latitude in degrees (world geodetic system)
        lon: longitude in degrees (world geodetic system)
        level: level of the regional mesh code
            1st level (80 km square): 1
            40x (40 km square): 40000
            20x (20 km square): 20000
            16x (16 km square): 16000
            2nd level (10 km square): 2
            8x (8 km square): 8000
            5x (5 km square): 5000
            4x (4 km square): 4000
            2.5x (2.5 km square): 2500
            2x (2 km square): 2000
            3rd level (1 km square): 3
            4th level (500 m square): 4
            5th level (250 m square): 5
            6th level (125 m square): 6
    Return:
        regional mesh code of the specified level
    """
    if not 0 <= lat < 66.66:
        raise ValueError('the latitude is out of bound.')
    if not 100 <= lon < 180:
        raise ValueError('the longitude is out of bound.')

    # remainder of latitude and longitude by its unit in degrees at each mesh level.
    rem_lat_lv0 = lambda lat: lat
    rem_lon_lv0 = lambda lon: lon % 100
    rem_lat_lv1 = lambda lat: rem_lat_lv0(lat) % _unit_lat_lv1()
    rem_lon_lv1 = lambda lon: rem_lon_lv0(lon) % _unit_lon_lv1()
    rem_lat_40000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_40000()
    rem_lon_40000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_40000()
    rem_lat_20000 = lambda lat: rem_lat_40000(lat) % _unit_lat_20000()
    rem_lon_20000 = lambda lon: rem_lon_40000(lon) % _unit_lon_20000()
    rem_lat_16000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_16000()
    rem_lon_16000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_16000()
    rem_lat_lv2 = lambda lat: rem_lat_lv1(lat) % _unit_lat_lv2()
    rem_lon_lv2 = lambda lon: rem_lon_lv1(lon) % _unit_lon_lv2()
    rem_lat_8000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_8000()
    rem_lon_8000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_8000()
    rem_lat_5000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_5000()
    rem_lon_5000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_5000()
    rem_lat_4000 = lambda lat: rem_lat_8000(lat) % _unit_lat_4000()
    rem_lon_4000 = lambda lon: rem_lon_8000(lon) % _unit_lon_4000()
    rem_lat_2500 = lambda lat: rem_lat_5000(lat) % _unit_lat_2500()
    rem_lon_2500 = lambda lon: rem_lon_5000(lon) % _unit_lon_2500()
    rem_lat_2000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_2000()
    rem_lon_2000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_2000()
    rem_lat_lv3 = lambda lat: rem_lat_lv2(lat) % _unit_lat_lv3()
    rem_lon_lv3 = lambda lon: rem_lon_lv2(lon) % _unit_lon_lv3()
    rem_lat_lv4 = lambda lat: rem_lat_lv3(lat) % _unit_lat_lv4()
    rem_lon_lv4 = lambda lon: rem_lon_lv3(lon) % _unit_lon_lv4()
    rem_lat_lv5 = lambda lat: rem_lat_lv4(lat) % _unit_lat_lv5()
    rem_lon_lv5 = lambda lon: rem_lon_lv4(lon) % _unit_lon_lv5()
    rem_lat_lv6 = lambda lat: rem_lat_lv5(lat) % _unit_lat_lv6()
    rem_lon_lv6 = lambda lon: rem_lon_lv5(lon) % _unit_lon_lv6()

    def meshcode_lv1(lat, lon):
        ab = int(rem_lat_lv0(lat) / _unit_lat_lv1())
        cd = int(rem_lon_lv0(lon) / _unit_lon_lv1())
        return str(ab) + str(cd)

    def meshcode_40000(lat, lon):
        e = int(rem_lat_lv1(lat) / _unit_lat_40000())*2 + int(rem_lon_lv1(lon) / _unit_lon_40000()) + 1
        return meshcode_lv1(lat, lon) + str(e)

    def meshcode_20000(lat, lon):
        f = int(rem_lat_40000(lat) / _unit_lat_20000())*2 + int(rem_lon_40000(lon) / _unit_lon_20000()) + 1
        g = 5
        return meshcode_40000(lat, lon) + str(f) + str(g)

    def meshcode_16000(lat, lon):
        e = int(rem_lat_lv1(lat) / _unit_lat_16000())*2
        f = int(rem_lon_lv1(lon) / _unit_lon_16000())*2
        g = 7
        return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)

    def meshcode_lv2(lat, lon):
        e = int(rem_lat_lv1(lat) / _unit_lat_lv2())
        f = int(rem_lon_lv1(lon) / _unit_lon_lv2())
        return meshcode_lv1(lat, lon) + str(e) + str(f)

    def meshcode_8000(lat, lon):
        e = int(rem_lat_lv1(lat) / _unit_lat_8000())
        f = int(rem_lon_lv1(lon) / _unit_lon_8000())
        g = 6
        return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)

    def meshcode_5000(lat, lon):
        g = int(rem_lat_lv2(lat) / _unit_lat_5000())*2 + int(rem_lon_lv2(lon) / _unit_lon_5000()) + 1
        return meshcode_lv2(lat, lon) + str(g)

    def meshcode_4000(lat, lon):
        h = int(rem_lat_8000(lat) / _unit_lat_4000())*2 + int(rem_lon_8000(lon) / _unit_lon_4000()) + 1
        i = 7
        return meshcode_8000(lat, lon) + str(h) + str(i)

    def meshcode_2500(lat, lon):
        h = int(rem_lat_5000(lat) / _unit_lat_2500())*2 + int(rem_lon_5000(lon) / _unit_lon_2500()) + 1
        i = 6
        return meshcode_5000(lat, lon) + str(h) + str(i)

    def meshcode_2000(lat, lon):
        g = int(rem_lat_lv2(lat) / _unit_lat_2000())*2
        h = int(rem_lon_lv2(lon) / _unit_lon_2000())*2
        i = 5
        return meshcode_lv2(lat, lon) + str(g) + str(h) + str(i)

    def meshcode_lv3(lat, lon):
        g = int(rem_lat_lv2(lat) / _unit_lat_lv3())
        h = int(rem_lon_lv2(lon) / _unit_lon_lv3())
        return meshcode_lv2(lat, lon) + str(g) + str(h)

    def meshcode_lv4(lat, lon):
        i = int(rem_lat_lv3(lat) / _unit_lat_lv4())*2 + int(rem_lon_lv3(lon) / _unit_lon_lv4()) + 1
        return meshcode_lv3(lat, lon) + str(i)

    def meshcode_lv5(lat, lon):
        j = int(rem_lat_lv4(lat) / _unit_lat_lv5())*2 + int(rem_lon_lv4(lon) / _unit_lon_lv5()) + 1
        return meshcode_lv4(lat, lon) + str(j)

    def meshcode_lv6(lat, lon):
        k = int(rem_lat_lv5(lat) / _unit_lat_lv6())*2 + int(rem_lon_lv5(lon) / _unit_lon_lv6()) + 1
        return meshcode_lv5(lat, lon) + str(k)

    if level == 1:
        return meshcode_lv1(lat, lon)
    if level == 40000:
        return meshcode_40000(lat, lon)
    if level == 20000:
        return meshcode_20000(lat, lon)
    if level == 16000:
        return meshcode_16000(lat, lon)
    if level == 2:
        return meshcode_lv2(lat, lon)
    if level == 8000:
        return meshcode_8000(lat, lon)
    if level == 5000:
        return meshcode_5000(lat, lon)
    if level == 4000:
        return meshcode_4000(lat, lon)
    if level == 2500:
        return meshcode_2500(lat, lon)
    if level == 2000:
        return meshcode_2000(lat, lon)
    if level == 3:
        return meshcode_lv3(lat, lon)
    if level == 4:
        return meshcode_lv4(lat, lon)
    if level == 5:
        return meshcode_lv5(lat, lon)
    if level == 6:
        return meshcode_lv6(lat, lon)
    raise ValueError("the level is unsupported.")
python
Compute the regional mesh code (JIS grid square code) of the specified level from latitude and longitude.

Args:
    lat: latitude in degrees (world geodetic system)
    lon: longitude in degrees (world geodetic system)
    level: level of the regional mesh code
        1st level (80 km square): 1
        40x (40 km square): 40000
        20x (20 km square): 20000
        16x (16 km square): 16000
        2nd level (10 km square): 2
        8x (8 km square): 8000
        5x (5 km square): 5000
        4x (4 km square): 4000
        2.5x (2.5 km square): 2500
        2x (2 km square): 2000
        3rd level (1 km square): 3
        4th level (500 m square): 4
        5th level (250 m square): 5
        6th level (125 m square): 6
Return:
    regional mesh code of the specified level
[ "緯度経度から指定次の地域メッシュコードを算出する。" ]
train
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L64-L238
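A quick, runnable example of to_meshcode (requires the jismesh package); the coordinates are approximately Tokyo Station, whose well-known 3rd-level mesh is 53394611:

import jismesh.utils as ju

lat, lon = 35.681236, 139.767125      # Tokyo Station, WGS84 degrees
print(ju.to_meshcode(lat, lon, 1))    # -> 5339      (80 km mesh)
print(ju.to_meshcode(lat, lon, 2))    # -> 533946    (10 km mesh)
print(ju.to_meshcode(lat, lon, 3))    # -> 53394611  (1 km mesh)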
hni14/jismesh
jismesh/utils.py
to_meshlevel
def to_meshlevel(meshcode):
    """Derive the mesh level from a regional mesh code.

    Args:
        meshcode: mesh code
    Return:
        level of the regional mesh code
        1st level (80 km square): 1
        40x (40 km square): 40000
        20x (20 km square): 20000
        16x (16 km square): 16000
        2nd level (10 km square): 2
        8x (8 km square): 8000
        5x (5 km square): 5000
        4x (4 km square): 4000
        2.5x (2.5 km square): 2500
        2x (2 km square): 2000
        3rd level (1 km square): 3
        4th level (500 m square): 4
        5th level (250 m square): 5
        6th level (125 m square): 6
    """
    length = len(str(meshcode))
    if length == 4:
        return 1
    if length == 5:
        return 40000
    if length == 6:
        return 2
    if length == 7:
        if meshcode[6:7] in ['1', '2', '3', '4']:
            return 5000
        if meshcode[6:7] == '6':
            return 8000
        if meshcode[6:7] == '5':
            return 20000
        if meshcode[6:7] == '7':
            return 16000
    if length == 8:
        return 3
    if length == 9:
        if meshcode[8:9] in ['1', '2', '3', '4']:
            return 4
        if meshcode[8:9] == '5':
            return 2000
        if meshcode[8:9] == '6':
            return 2500
        if meshcode[8:9] == '7':
            return 4000
    if length == 10:
        if meshcode[9:10] in ['1', '2', '3', '4']:
            return 5
    if length == 11:
        if meshcode[10:11] in ['1', '2', '3', '4']:
            return 6
    raise ValueError('the meshcode is unsupported.')
python
Derive the mesh level from a regional mesh code.

Args:
    meshcode: mesh code
Return:
    level of the regional mesh code
    1st level (80 km square): 1
    40x (40 km square): 40000
    20x (20 km square): 20000
    16x (16 km square): 16000
    2nd level (10 km square): 2
    8x (8 km square): 8000
    5x (5 km square): 5000
    4x (4 km square): 4000
    2.5x (2.5 km square): 2500
    2x (2 km square): 2000
    3rd level (1 km square): 3
    4th level (500 m square): 4
    5th level (250 m square): 5
    6th level (125 m square): 6
[ "メッシュコードから次数を算出する。" ]
train
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L240-L310
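to_meshlevel can be exercised directly; note that it expects the mesh code as a string, since levels sharing a length are disambiguated by slicing the identifying digit:

import jismesh.utils as ju

print(ju.to_meshlevel('5339'))      # -> 1     (4 digits: 80 km mesh)
print(ju.to_meshlevel('533946'))    # -> 2     (6 digits: 10 km mesh)
print(ju.to_meshlevel('5339467'))   # -> 16000 (7 digits ending in 7: 16 km mesh)
print(ju.to_meshlevel('53394611'))  # -> 3     (8 digits: 1 km mesh)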
hni14/jismesh
jismesh/utils.py
to_meshpoint
def to_meshpoint(meshcode, lat_multiplier, lon_multiplier):
    """Compute latitude and longitude from a regional mesh code.

    The following meshes are supported:
        1st level (80 km square): 1
        40x (40 km square): 40000
        20x (20 km square): 20000
        16x (16 km square): 16000
        2nd level (10 km square): 2
        8x (8 km square): 8000
        5x (5 km square): 5000
        4x (4 km square): 4000
        2.5x (2.5 km square): 2500
        2x (2 km square): 2000
        3rd level (1 km square): 3
        4th level (500 m square): 4
        5th level (250 m square): 5
        6th level (125 m square): 6

    Args:
        meshcode: regional mesh code of the specified level
        lat_multiplier: position of the point on the latitude axis, as a multiple of
            the mesh's unit latitude from the mesh reference point (south-west corner)
        lon_multiplier: position of the point on the longitude axis, as a multiple of
            the mesh's unit longitude from the mesh reference point (south-west corner)
    Return:
        lat: latitude in degrees (world geodetic system)
        lon: longitude in degrees (world geodetic system)
    """
    def mesh_cord(func_higher_cord, func_unit_cord, func_multiplier):
        return func_higher_cord() + func_unit_cord() * func_multiplier()

    lat_multiplier_lv = lambda: lat_multiplier
    lon_multiplier_lv = lambda: lon_multiplier
    lat_multiplier_lv1 = _functools.partial(lambda meshcode: int(meshcode[0:2]), meshcode=meshcode)
    lon_multiplier_lv1 = _functools.partial(lambda meshcode: int(meshcode[2:4]), meshcode=meshcode)
    lat_multiplier_40000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_40000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_20000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_20000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_16000 = _functools.partial(lambda meshcode: int(meshcode[4:5])/2, meshcode=meshcode)
    lon_multiplier_16000 = _functools.partial(lambda meshcode: int(meshcode[5:6])/2, meshcode=meshcode)
    lat_multiplier_lv2 = _functools.partial(lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
    lon_multiplier_lv2 = _functools.partial(lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
    lat_multiplier_8000 = _functools.partial(lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
    lon_multiplier_8000 = _functools.partial(lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
    lat_multiplier_5000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_5000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_4000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_4000 = _functools.partial(lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_2500 = _functools.partial(lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_2500 = _functools.partial(lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_2000 = _functools.partial(lambda meshcode: int(meshcode[6:7])/2, meshcode=meshcode)
    lon_multiplier_2000 = _functools.partial(lambda meshcode: int(meshcode[7:8])/2, meshcode=meshcode)
    lat_multiplier_lv3 = _functools.partial(lambda meshcode: int(meshcode[6:7]), meshcode=meshcode)
    lon_multiplier_lv3 = _functools.partial(lambda meshcode: int(meshcode[7:8]), meshcode=meshcode)
    lat_multiplier_lv4 = _functools.partial(lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_lv4 = _functools.partial(lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_lv5 = _functools.partial(lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_lv5 = _functools.partial(lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
    lat_multiplier_lv6 = _functools.partial(lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
    lon_multiplier_lv6 = _functools.partial(lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)

    mesh_lv1_default_lat = _functools.partial(mesh_cord, func_higher_cord=lambda: 0, func_unit_cord=_unit_lat_lv1, func_multiplier=lat_multiplier_lv1)
    mesh_lv1_default_lon = _functools.partial(mesh_cord, func_higher_cord=lambda: 100, func_unit_cord=_unit_lon_lv1, func_multiplier=lon_multiplier_lv1)
    mesh_40000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lat, func_unit_cord=_unit_lat_40000, func_multiplier=lat_multiplier_40000)
    mesh_40000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lon, func_unit_cord=_unit_lon_40000, func_multiplier=lon_multiplier_40000)
    mesh_20000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_40000_default_lat, func_unit_cord=_unit_lat_20000, func_multiplier=lat_multiplier_20000)
    mesh_20000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_40000_default_lon, func_unit_cord=_unit_lon_20000, func_multiplier=lon_multiplier_20000)
    mesh_16000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lat, func_unit_cord=_unit_lat_16000, func_multiplier=lat_multiplier_16000)
    mesh_16000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lon, func_unit_cord=_unit_lon_16000, func_multiplier=lon_multiplier_16000)
    mesh_lv2_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lat, func_unit_cord=_unit_lat_lv2, func_multiplier=lat_multiplier_lv2)
    mesh_lv2_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lon, func_unit_cord=_unit_lon_lv2, func_multiplier=lon_multiplier_lv2)
    mesh_8000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lat, func_unit_cord=_unit_lat_8000, func_multiplier=lat_multiplier_8000)
    mesh_8000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lon, func_unit_cord=_unit_lon_8000, func_multiplier=lon_multiplier_8000)
    mesh_5000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lat, func_unit_cord=_unit_lat_5000, func_multiplier=lat_multiplier_5000)
    mesh_5000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lon, func_unit_cord=_unit_lon_5000, func_multiplier=lon_multiplier_5000)
    mesh_4000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_8000_default_lat, func_unit_cord=_unit_lat_4000, func_multiplier=lat_multiplier_4000)
    mesh_4000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_8000_default_lon, func_unit_cord=_unit_lon_4000, func_multiplier=lon_multiplier_4000)
    mesh_2500_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_5000_default_lat, func_unit_cord=_unit_lat_2500, func_multiplier=lat_multiplier_2500)
    mesh_2500_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_5000_default_lon, func_unit_cord=_unit_lon_2500, func_multiplier=lon_multiplier_2500)
    mesh_2000_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lat, func_unit_cord=_unit_lat_2000, func_multiplier=lat_multiplier_2000)
    mesh_2000_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lon, func_unit_cord=_unit_lon_2000, func_multiplier=lon_multiplier_2000)
    mesh_lv3_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lat, func_unit_cord=_unit_lat_lv3, func_multiplier=lat_multiplier_lv3)
    mesh_lv3_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lon, func_unit_cord=_unit_lon_lv3, func_multiplier=lon_multiplier_lv3)
    mesh_lv4_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv3_default_lat, func_unit_cord=_unit_lat_lv4, func_multiplier=lat_multiplier_lv4)
    mesh_lv4_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv3_default_lon, func_unit_cord=_unit_lon_lv4, func_multiplier=lon_multiplier_lv4)
    mesh_lv5_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv4_default_lat, func_unit_cord=_unit_lat_lv5, func_multiplier=lat_multiplier_lv5)
    mesh_lv5_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv4_default_lon, func_unit_cord=_unit_lon_lv5, func_multiplier=lon_multiplier_lv5)
    mesh_lv6_default_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv5_default_lat, func_unit_cord=_unit_lat_lv6, func_multiplier=lat_multiplier_lv6)
    mesh_lv6_default_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv5_default_lon, func_unit_cord=_unit_lon_lv6, func_multiplier=lon_multiplier_lv6)

    mesh_lv1_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lat, func_unit_cord=_unit_lat_lv1, func_multiplier=lat_multiplier_lv)
    mesh_lv1_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv1_default_lon, func_unit_cord=_unit_lon_lv1, func_multiplier=lon_multiplier_lv)
    mesh_40000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_40000_default_lat, func_unit_cord=_unit_lat_40000, func_multiplier=lat_multiplier_lv)
    mesh_40000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_40000_default_lon, func_unit_cord=_unit_lon_40000, func_multiplier=lon_multiplier_lv)
    mesh_20000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_20000_default_lat, func_unit_cord=_unit_lat_20000, func_multiplier=lat_multiplier_lv)
    mesh_20000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_20000_default_lon, func_unit_cord=_unit_lon_20000, func_multiplier=lon_multiplier_lv)
    mesh_16000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_16000_default_lat, func_unit_cord=_unit_lat_16000, func_multiplier=lat_multiplier_lv)
    mesh_16000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_16000_default_lon, func_unit_cord=_unit_lon_16000, func_multiplier=lon_multiplier_lv)
    mesh_lv2_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lat, func_unit_cord=_unit_lat_lv2, func_multiplier=lat_multiplier_lv)
    mesh_lv2_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv2_default_lon, func_unit_cord=_unit_lon_lv2, func_multiplier=lon_multiplier_lv)
    mesh_8000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_8000_default_lat, func_unit_cord=_unit_lat_8000, func_multiplier=lat_multiplier_lv)
    mesh_8000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_8000_default_lon, func_unit_cord=_unit_lon_8000, func_multiplier=lon_multiplier_lv)
    mesh_5000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_5000_default_lat, func_unit_cord=_unit_lat_5000, func_multiplier=lat_multiplier_lv)
    mesh_5000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_5000_default_lon, func_unit_cord=_unit_lon_5000, func_multiplier=lon_multiplier_lv)
    mesh_4000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_4000_default_lat, func_unit_cord=_unit_lat_4000, func_multiplier=lat_multiplier_lv)
    mesh_4000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_4000_default_lon, func_unit_cord=_unit_lon_4000, func_multiplier=lon_multiplier_lv)
    mesh_2500_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_2500_default_lat, func_unit_cord=_unit_lat_2500, func_multiplier=lat_multiplier_lv)
    mesh_2500_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_2500_default_lon, func_unit_cord=_unit_lon_2500, func_multiplier=lon_multiplier_lv)
    mesh_2000_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_2000_default_lat, func_unit_cord=_unit_lat_2000, func_multiplier=lat_multiplier_lv)
    mesh_2000_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_2000_default_lon, func_unit_cord=_unit_lon_2000, func_multiplier=lon_multiplier_lv)
    mesh_lv3_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv3_default_lat, func_unit_cord=_unit_lat_lv3, func_multiplier=lat_multiplier_lv)
    mesh_lv3_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv3_default_lon, func_unit_cord=_unit_lon_lv3, func_multiplier=lon_multiplier_lv)
    mesh_lv4_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv4_default_lat, func_unit_cord=_unit_lat_lv4, func_multiplier=lat_multiplier_lv)
    mesh_lv4_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv4_default_lon, func_unit_cord=_unit_lon_lv4, func_multiplier=lon_multiplier_lv)
    mesh_lv5_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv5_default_lat, func_unit_cord=_unit_lat_lv5, func_multiplier=lat_multiplier_lv)
    mesh_lv5_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv5_default_lon, func_unit_cord=_unit_lon_lv5, func_multiplier=lon_multiplier_lv)
    mesh_lv6_lat = _functools.partial(mesh_cord, func_higher_cord=mesh_lv6_default_lat, func_unit_cord=_unit_lat_lv6, func_multiplier=lat_multiplier_lv)
    mesh_lv6_lon = _functools.partial(mesh_cord, func_higher_cord=mesh_lv6_default_lon, func_unit_cord=_unit_lon_lv6, func_multiplier=lon_multiplier_lv)

    level = to_meshlevel(meshcode)
    if level == 1:
        return mesh_lv1_lat(), mesh_lv1_lon()
    if level == 40000:
        return mesh_40000_lat(), mesh_40000_lon()
    if level == 20000:
        return mesh_20000_lat(), mesh_20000_lon()
    if level == 16000:
        return mesh_16000_lat(), mesh_16000_lon()
    if level == 2:
        return mesh_lv2_lat(), mesh_lv2_lon()
    if level == 8000:
        return mesh_8000_lat(), mesh_8000_lon()
    if level == 5000:
        return mesh_5000_lat(), mesh_5000_lon()
    if level == 4000:
        return mesh_4000_lat(), mesh_4000_lon()
    if level == 2500:
        return mesh_2500_lat(), mesh_2500_lon()
    if level == 2000:
        return mesh_2000_lat(), mesh_2000_lon()
    if level == 3:
        return mesh_lv3_lat(), mesh_lv3_lon()
    if level == 4:
        return mesh_lv4_lat(), mesh_lv4_lon()
    if level == 5:
        return mesh_lv5_lat(), mesh_lv5_lon()
    if level == 6:
        return mesh_lv6_lat(), mesh_lv6_lon()
    raise ValueError("the level is unsupported.")
python
[ "def", "to_meshpoint", "(", "meshcode", ",", "lat_multiplier", ",", "lon_multiplier", ")", ":", "def", "mesh_cord", "(", "func_higher_cord", ",", "func_unit_cord", ",", "func_multiplier", ")", ":", "return", "func_higher_cord", "(", ")", "+", "func_unit_cord", "(", ")", "*", "func_multiplier", "(", ")", "lat_multiplier_lv", "=", "lambda", ":", "lat_multiplier", "lon_multiplier_lv", "=", "lambda", ":", "lon_multiplier", "lat_multiplier_lv1", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "0", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv1", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "2", ":", "4", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_40000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "4", ":", "5", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_40000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "4", ":", "5", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_20000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "5", ":", "6", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_20000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "5", ":", "6", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_16000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "4", ":", "5", "]", ")", "/", "2", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_16000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "5", ":", "6", "]", ")", "/", "2", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_lv2", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "4", ":", "5", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv2", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "5", ":", "6", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_8000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "4", ":", "5", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_8000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "5", ":", "6", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_5000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "6", ":", "7", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_5000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", 
"[", "6", ":", "7", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_4000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_4000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_2500", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_2500", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_2000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "6", ":", "7", "]", ")", "/", "2", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_2000", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", "/", "2", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_lv3", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "6", ":", "7", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv3", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "meshcode", "[", "7", ":", "8", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_lv4", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "8", ":", "9", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv4", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "8", ":", "9", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_lv5", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "9", ":", "10", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv5", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "9", ":", "10", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lat_multiplier_lv6", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "10", ":", "11", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", 
"[", "0", ":", "1", "]", ")", ",", "meshcode", "=", "meshcode", ")", "lon_multiplier_lv6", "=", "_functools", ".", "partial", "(", "lambda", "meshcode", ":", "int", "(", "bin", "(", "int", "(", "meshcode", "[", "10", ":", "11", "]", ")", "-", "1", ")", "[", "2", ":", "]", ".", "zfill", "(", "2", ")", "[", "1", ":", "2", "]", ")", ",", "meshcode", "=", "meshcode", ")", "mesh_lv1_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "lambda", ":", "0", ",", "func_unit_cord", "=", "_unit_lat_lv1", ",", "func_multiplier", "=", "lat_multiplier_lv1", ")", "mesh_lv1_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "lambda", ":", "100", ",", "func_unit_cord", "=", "_unit_lon_lv1", ",", "func_multiplier", "=", "lon_multiplier_lv1", ")", "mesh_40000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lat", ",", "func_unit_cord", "=", "_unit_lat_40000", ",", "func_multiplier", "=", "lat_multiplier_40000", ")", "mesh_40000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lon", ",", "func_unit_cord", "=", "_unit_lon_40000", ",", "func_multiplier", "=", "lon_multiplier_40000", ")", "mesh_20000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_40000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_20000", ",", "func_multiplier", "=", "lat_multiplier_20000", ")", "mesh_20000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_40000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_20000", ",", "func_multiplier", "=", "lon_multiplier_20000", ")", "mesh_16000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lat", ",", "func_unit_cord", "=", "_unit_lat_16000", ",", "func_multiplier", "=", "lat_multiplier_16000", ")", "mesh_16000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lon", ",", "func_unit_cord", "=", "_unit_lon_16000", ",", "func_multiplier", "=", "lon_multiplier_16000", ")", "mesh_lv2_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv2", ",", "func_multiplier", "=", "lat_multiplier_lv2", ")", "mesh_lv2_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv2", ",", "func_multiplier", "=", "lon_multiplier_lv2", ")", "mesh_8000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lat", ",", "func_unit_cord", "=", "_unit_lat_8000", ",", "func_multiplier", "=", "lat_multiplier_8000", ")", "mesh_8000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lon", ",", "func_unit_cord", "=", "_unit_lon_8000", ",", "func_multiplier", "=", "lon_multiplier_8000", ")", "mesh_5000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lat", ",", "func_unit_cord", "=", "_unit_lat_5000", ",", "func_multiplier", "=", "lat_multiplier_5000", ")", "mesh_5000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", 
"mesh_lv2_default_lon", ",", "func_unit_cord", "=", "_unit_lon_5000", ",", "func_multiplier", "=", "lon_multiplier_5000", ")", "mesh_4000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_8000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_4000", ",", "func_multiplier", "=", "lat_multiplier_4000", ")", "mesh_4000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_8000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_4000", ",", "func_multiplier", "=", "lon_multiplier_4000", ")", "mesh_2500_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_5000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_2500", ",", "func_multiplier", "=", "lat_multiplier_2500", ")", "mesh_2500_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_5000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_2500", ",", "func_multiplier", "=", "lon_multiplier_2500", ")", "mesh_2000_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lat", ",", "func_unit_cord", "=", "_unit_lat_2000", ",", "func_multiplier", "=", "lat_multiplier_2000", ")", "mesh_2000_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lon", ",", "func_unit_cord", "=", "_unit_lon_2000", ",", "func_multiplier", "=", "lon_multiplier_2000", ")", "mesh_lv3_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv3", ",", "func_multiplier", "=", "lat_multiplier_lv3", ")", "mesh_lv3_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv3", ",", "func_multiplier", "=", "lon_multiplier_lv3", ")", "mesh_lv4_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv3_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv4", ",", "func_multiplier", "=", "lat_multiplier_lv4", ")", "mesh_lv4_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv3_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv4", ",", "func_multiplier", "=", "lon_multiplier_lv4", ")", "mesh_lv5_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv4_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv5", ",", "func_multiplier", "=", "lat_multiplier_lv5", ")", "mesh_lv5_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv4_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv5", ",", "func_multiplier", "=", "lon_multiplier_lv5", ")", "mesh_lv6_default_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv5_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv6", ",", "func_multiplier", "=", "lat_multiplier_lv6", ")", "mesh_lv6_default_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv5_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv6", ",", "func_multiplier", "=", "lon_multiplier_lv6", ")", "mesh_lv1_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lat", ",", "func_unit_cord", "=", 
"_unit_lat_lv1", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv1_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv1_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv1", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_40000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_40000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_40000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_40000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_40000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_40000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_20000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_20000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_20000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_20000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_20000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_20000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_16000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_16000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_16000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_16000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_16000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_16000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_lv2_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv2", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv2_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv2_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv2", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_8000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_8000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_8000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_8000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_8000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_8000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_5000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_5000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_5000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_5000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_5000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_5000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_4000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_4000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_4000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_4000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_4000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_4000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_2500_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", 
"func_higher_cord", "=", "mesh_2500_default_lat", ",", "func_unit_cord", "=", "_unit_lat_2500", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_2500_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_2500_default_lon", ",", "func_unit_cord", "=", "_unit_lon_2500", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_2000_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_2000_default_lat", ",", "func_unit_cord", "=", "_unit_lat_2000", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_2000_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_2000_default_lon", ",", "func_unit_cord", "=", "_unit_lon_2000", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_lv3_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv3_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv3", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv3_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv3_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv3", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_lv4_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv4_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv4", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv4_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv4_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv4", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_lv5_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv5_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv5", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv5_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv5_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv5", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "mesh_lv6_lat", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv6_default_lat", ",", "func_unit_cord", "=", "_unit_lat_lv6", ",", "func_multiplier", "=", "lat_multiplier_lv", ")", "mesh_lv6_lon", "=", "_functools", ".", "partial", "(", "mesh_cord", ",", "func_higher_cord", "=", "mesh_lv6_default_lon", ",", "func_unit_cord", "=", "_unit_lon_lv6", ",", "func_multiplier", "=", "lon_multiplier_lv", ")", "level", "=", "to_meshlevel", "(", "meshcode", ")", "if", "level", "==", "1", ":", "return", "mesh_lv1_lat", "(", ")", ",", "mesh_lv1_lon", "(", ")", "if", "level", "==", "40000", ":", "return", "mesh_40000_lat", "(", ")", ",", "mesh_40000_lon", "(", ")", "if", "level", "==", "20000", ":", "return", "mesh_20000_lat", "(", ")", ",", "mesh_20000_lon", "(", ")", "if", "level", "==", "16000", ":", "return", "mesh_16000_lat", "(", ")", ",", "mesh_16000_lon", "(", ")", "if", "level", "==", "2", ":", "return", "mesh_lv2_lat", "(", ")", ",", "mesh_lv2_lon", "(", ")", "if", "level", "==", "8000", ":", "return", "mesh_8000_lat", "(", ")", ",", "mesh_8000_lon", "(", ")", "if", "level", "==", "5000", ":", "return", "mesh_5000_lat", "(", ")", ",", "mesh_5000_lon", "(", ")", "if", "level", "==", "4000", ":", "return", "mesh_4000_lat", "(", ")", ",", "mesh_4000_lon", "(", ")", "if", "level", "==", "2500", ":", "return", "mesh_2500_lat", "(", 
")", ",", "mesh_2500_lon", "(", ")", "if", "level", "==", "2000", ":", "return", "mesh_2000_lat", "(", ")", ",", "mesh_2000_lon", "(", ")", "if", "level", "==", "3", ":", "return", "mesh_lv3_lat", "(", ")", ",", "mesh_lv3_lon", "(", ")", "if", "level", "==", "4", ":", "return", "mesh_lv4_lat", "(", ")", ",", "mesh_lv4_lon", "(", ")", "if", "level", "==", "5", ":", "return", "mesh_lv5_lat", "(", ")", ",", "mesh_lv5_lon", "(", ")", "if", "level", "==", "6", ":", "return", "mesh_lv6_lat", "(", ")", ",", "mesh_lv6_lon", "(", ")", "raise", "ValueError", "(", "\"the level is unsupported.\"", ")" ]
地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6

Args:
    meshcode: 指定次の地域メッシュコード
    lat_multiplier: 当該メッシュの基準点(南西端)から、緯度座標上の点の位置を当該メッシュの単位緯度の倍数で指定
    lon_multiplier: 当該メッシュの基準点(南西端)から、経度座標上の点の位置を当該メッシュの単位経度の倍数で指定
Return:
    lat: 世界測地系の緯度(度単位)
    lon: 世界測地系の経度(度単位)
[ "地域メッシュコードから緯度経度を算出する。", "下記のメッシュに対応している。", "1次", "(", "80km四方", ")", ":", "1", "40倍", "(", "40km四方", ")", ":", "40000", "20倍", "(", "20km四方", ")", ":", "20000", "16倍", "(", "16km四方", ")", ":", "16000", "2次", "(", "10km四方", ")", ":", "2", "8倍", "(", "8km四方", ")", ":", "8000", "5倍", "(", "5km四方", ")", ":", "5000", "4倍", "(", "4km四方", ")", ":", "4000", "2", ".", "5倍", "(", "2", ".", "5km四方", ")", ":", "2500", "2倍", "(", "2km四方", ")", ":", "2000", "3次", "(", "1km四方", ")", ":", "3", "4次", "(", "500m四方", ")", ":", "4", "5次", "(", "250m四方", ")", ":", "5", "6次", "(", "125m四方", ")", ":", "6" ]
train
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L312-L811
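A quick usage sketch for the record above (the meshcode value is hypothetical, and the `ju` alias is just a convention): the two multipliers pick a point inside the mesh, so (0, 0) yields the south-west corner and (0.5, 0.5) the centre.

import jismesh.utils as ju

# south-west corner of the 1km (level 3) mesh '53393599'; the meshcode is
# passed as a string because to_meshpoint slices it character-wise
sw_lat, sw_lon = ju.to_meshpoint('53393599', 0, 0)
# centre of the same mesh
c_lat, c_lon = ju.to_meshpoint('53393599', 0.5, 0.5)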
caffeinehit/django-follow
follow/views.py
check
def check(func):
    """ Check the permissions, http method and login state. """
    def iCheck(request, *args, **kwargs):
        if not request.method == "POST":
            return HttpResponseBadRequest("Must be POST request.")

        follow = func(request, *args, **kwargs)

        if request.is_ajax():
            return HttpResponse('ok')

        try:
            if 'next' in request.GET:
                return HttpResponseRedirect(request.GET.get('next'))
            if 'next' in request.POST:
                return HttpResponseRedirect(request.POST.get('next'))
            return HttpResponseRedirect(follow.target.get_absolute_url())
        except (AttributeError, TypeError):
            if 'HTTP_REFERER' in request.META:
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            if follow:
                return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
                    unicode(follow.target), follow.target.__class__))
            return HttpResponseServerError('No follow object and `next` parameter found.')
    return iCheck
python
def check(func):
    """ Check the permissions, http method and login state. """
    def iCheck(request, *args, **kwargs):
        if not request.method == "POST":
            return HttpResponseBadRequest("Must be POST request.")

        follow = func(request, *args, **kwargs)

        if request.is_ajax():
            return HttpResponse('ok')

        try:
            if 'next' in request.GET:
                return HttpResponseRedirect(request.GET.get('next'))
            if 'next' in request.POST:
                return HttpResponseRedirect(request.POST.get('next'))
            return HttpResponseRedirect(follow.target.get_absolute_url())
        except (AttributeError, TypeError):
            if 'HTTP_REFERER' in request.META:
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            if follow:
                return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
                    unicode(follow.target), follow.target.__class__))
            return HttpResponseServerError('No follow object and `next` parameter found.')
    return iCheck
[ "def", "check", "(", "func", ")", ":", "def", "iCheck", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "request", ".", "method", "==", "\"POST\"", ":", "return", "HttpResponseBadRequest", "(", "\"Must be POST request.\"", ")", "follow", "=", "func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "request", ".", "is_ajax", "(", ")", ":", "return", "HttpResponse", "(", "'ok'", ")", "try", ":", "if", "'next'", "in", "request", ".", "GET", ":", "return", "HttpResponseRedirect", "(", "request", ".", "GET", ".", "get", "(", "'next'", ")", ")", "if", "'next'", "in", "request", ".", "POST", ":", "return", "HttpResponseRedirect", "(", "request", ".", "POST", ".", "get", "(", "'next'", ")", ")", "return", "HttpResponseRedirect", "(", "follow", ".", "target", ".", "get_absolute_url", "(", ")", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "if", "'HTTP_REFERER'", "in", "request", ".", "META", ":", "return", "HttpResponseRedirect", "(", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ",", "'/'", ")", ")", "if", "follow", ":", "return", "HttpResponseServerError", "(", "'\"%s\" object of type ``%s`` has no method ``get_absolute_url()``.'", "%", "(", "unicode", "(", "follow", ".", "target", ")", ",", "follow", ".", "target", ".", "__class__", ")", ")", "return", "HttpResponseServerError", "(", "'No follow object and `next` parameter found.'", ")", "return", "iCheck" ]
Check the permissions, http method and login state.
[ "Check", "the", "permissions", "http", "method", "and", "login", "state", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/views.py#L7-L30
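To make the decorator's contract concrete, here is a hypothetical view it could wrap (the view name and `MyModel` are invented; only `check` and the `Follow` manager come from this app). The wrapped view must take a request and return a `Follow` instance:

from follow.models import Follow
from follow.views import check

@check
def follow_object(request, object_id):
    target = MyModel.objects.get(pk=object_id)  # any registered model instance
    follow, created = Follow.objects.get_or_create(request.user, target)
    return follow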
caffeinehit/django-follow
follow/utils.py
register
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
    """ This registers any model class to be follow-able. """
    if model in registry:
        return

    registry.append(model)

    if not field_name:
        field_name = 'target_%s' % model._meta.module_name
    if not related_name:
        related_name = 'follow_%s' % model._meta.module_name

    field = ForeignKey(model, related_name=related_name,
                       null=True, blank=True, db_index=True)
    field.contribute_to_class(Follow, field_name)

    setattr(model, lookup_method_name, get_followers_for_object)

    model_map[model] = [related_name, field_name]
python
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
    """ This registers any model class to be follow-able. """
    if model in registry:
        return

    registry.append(model)

    if not field_name:
        field_name = 'target_%s' % model._meta.module_name
    if not related_name:
        related_name = 'follow_%s' % model._meta.module_name

    field = ForeignKey(model, related_name=related_name,
                       null=True, blank=True, db_index=True)
    field.contribute_to_class(Follow, field_name)

    setattr(model, lookup_method_name, get_followers_for_object)

    model_map[model] = [related_name, field_name]
[ "def", "register", "(", "model", ",", "field_name", "=", "None", ",", "related_name", "=", "None", ",", "lookup_method_name", "=", "'get_follows'", ")", ":", "if", "model", "in", "registry", ":", "return", "registry", ".", "append", "(", "model", ")", "if", "not", "field_name", ":", "field_name", "=", "'target_%s'", "%", "model", ".", "_meta", ".", "module_name", "if", "not", "related_name", ":", "related_name", "=", "'follow_%s'", "%", "model", ".", "_meta", ".", "module_name", "field", "=", "ForeignKey", "(", "model", ",", "related_name", "=", "related_name", ",", "null", "=", "True", ",", "blank", "=", "True", ",", "db_index", "=", "True", ")", "field", ".", "contribute_to_class", "(", "Follow", ",", "field_name", ")", "setattr", "(", "model", ",", "lookup_method_name", ",", "get_followers_for_object", ")", "model_map", "[", "model", "]", "=", "[", "related_name", ",", "field_name", "]" ]
This registers any model class to be follow-able.
[ "This", "registers", "any", "model", "class", "to", "be", "follow", "-", "able", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L9-L30
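A sketch of how registration is typically wired up, for instance at the bottom of a models.py (the `Band` model is invented for illustration). After the call, `Follow` grows a nullable `target_band` foreign key, `Band` instances gain a `get_follows()` method, and the mapping is recorded in `model_map`:

from django.db import models
from follow import utils

class Band(models.Model):
    name = models.CharField(max_length=255)

utils.register(Band)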
caffeinehit/django-follow
follow/utils.py
follow
def follow(user, obj):
    """ Make a user follow an object """
    follow, created = Follow.objects.get_or_create(user, obj)
    return follow
python
def follow(user, obj):
    """ Make a user follow an object """
    follow, created = Follow.objects.get_or_create(user, obj)
    return follow
[ "def", "follow", "(", "user", ",", "obj", ")", ":", "follow", ",", "created", "=", "Follow", ".", "objects", ".", "get_or_create", "(", "user", ",", "obj", ")", "return", "follow" ]
Make a user follow an object
[ "Make", "a", "user", "follow", "an", "object" ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L32-L35
caffeinehit/django-follow
follow/utils.py
unfollow
def unfollow(user, obj):
    """ Make a user unfollow an object """
    try:
        follow = Follow.objects.get_follows(obj).get(user=user)
        follow.delete()
        return follow
    except Follow.DoesNotExist:
        pass
python
def unfollow(user, obj):
    """ Make a user unfollow an object """
    try:
        follow = Follow.objects.get_follows(obj).get(user=user)
        follow.delete()
        return follow
    except Follow.DoesNotExist:
        pass
[ "def", "unfollow", "(", "user", ",", "obj", ")", ":", "try", ":", "follow", "=", "Follow", ".", "objects", ".", "get_follows", "(", "obj", ")", ".", "get", "(", "user", "=", "user", ")", "follow", ".", "delete", "(", ")", "return", "follow", "except", "Follow", ".", "DoesNotExist", ":", "pass" ]
Make a user unfollow an object
[ "Make", "a", "user", "unfollow", "an", "object" ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L37-L44
caffeinehit/django-follow
follow/utils.py
toggle
def toggle(user, obj):
    """
    Toggles a follow status. Useful function if you don't want to
    perform follow checks but just toggle it on / off.
    """
    if Follow.objects.is_following(user, obj):
        return unfollow(user, obj)
    return follow(user, obj)
python
def toggle(user, obj):
    """
    Toggles a follow status. Useful function if you don't want to
    perform follow checks but just toggle it on / off.
    """
    if Follow.objects.is_following(user, obj):
        return unfollow(user, obj)
    return follow(user, obj)
[ "def", "toggle", "(", "user", ",", "obj", ")", ":", "if", "Follow", ".", "objects", ".", "is_following", "(", "user", ",", "obj", ")", ":", "return", "unfollow", "(", "user", ",", "obj", ")", "return", "follow", "(", "user", ",", "obj", ")" ]
Toggles a follow status. Useful function if you don't want to perform follow checks but just toggle it on / off.
[ "Toggles", "a", "follow", "status", ".", "Useful", "function", "if", "you", "don", "t", "want", "to", "perform", "follow", "checks", "but", "just", "toggle", "it", "on", "/", "off", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L46-L51
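Taken together, `follow`, `unfollow` and `toggle` form a small imperative API. A minimal sketch, assuming `user` is a User and `band` an instance of a registered model:

from follow.utils import follow, unfollow, toggle

follow(user, band)    # creates (or fetches) the Follow link
unfollow(user, band)  # deletes the link and returns the deleted Follow
toggle(user, band)    # user no longer follows band here, so this follows again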
cga-harvard/Hypermap-Registry
hypermap/search_api/serializers.py
SearchSerializer.validate_q_time
def validate_q_time(self, value):
    """
    Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
    Returns a valid Solr value.
    [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
    """
    if value:
        try:
            range = utils.parse_datetime_range_to_solr(value)
            return range
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
python
def validate_q_time(self, value):
    """
    Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
    Returns a valid Solr value.
    [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
    """
    if value:
        try:
            range = utils.parse_datetime_range_to_solr(value)
            return range
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
[ "def", "validate_q_time", "(", "self", ",", "value", ")", ":", "if", "value", ":", "try", ":", "range", "=", "utils", ".", "parse_datetime_range_to_solr", "(", "value", ")", "return", "range", "except", "Exception", "as", "e", ":", "raise", "serializers", ".", "ValidationError", "(", "e", ".", "message", ")", "return", "value" ]
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a valid Solr value.
[2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
[ "Would", "be", "for", "example", ":", "[", "2013", "-", "03", "-", "01", "TO", "2013", "-", "04", "-", "01T00", ":", "00", ":", "00", "]", "and", "/", "or", "[", "*", "TO", "*", "]", "Returns", "a", "valid", "sorl", "value", ".", "[", "2013", "-", "03", "-", "01T00", ":", "00", ":", "00Z", "TO", "2013", "-", "04", "-", "01T00", ":", "00", ":", "00Z", "]", "and", "/", "or", "[", "*", "TO", "*", "]" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L114-L126
cga-harvard/Hypermap-Registry
hypermap/search_api/serializers.py
SearchSerializer.validate_q_geo
def validate_q_geo(self, value):
    """
    Would be for example: [-90,-180 TO 90,180]
    """
    if value:
        try:
            rectangle = utils.parse_geo_box(value)
            return "[{0},{1} TO {2},{3}]".format(
                rectangle.bounds[0],
                rectangle.bounds[1],
                rectangle.bounds[2],
                rectangle.bounds[3],
            )
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
python
def validate_q_geo(self, value):
    """
    Would be for example: [-90,-180 TO 90,180]
    """
    if value:
        try:
            rectangle = utils.parse_geo_box(value)
            return "[{0},{1} TO {2},{3}]".format(
                rectangle.bounds[0],
                rectangle.bounds[1],
                rectangle.bounds[2],
                rectangle.bounds[3],
            )
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
[ "def", "validate_q_geo", "(", "self", ",", "value", ")", ":", "if", "value", ":", "try", ":", "rectangle", "=", "utils", ".", "parse_geo_box", "(", "value", ")", "return", "\"[{0},{1} TO {2},{3}]\"", ".", "format", "(", "rectangle", ".", "bounds", "[", "0", "]", ",", "rectangle", ".", "bounds", "[", "1", "]", ",", "rectangle", ".", "bounds", "[", "2", "]", ",", "rectangle", ".", "bounds", "[", "3", "]", ",", ")", "except", "Exception", "as", "e", ":", "raise", "serializers", ".", "ValidationError", "(", "e", ".", "message", ")", "return", "value" ]
Would be for example: [-90,-180 TO 90,180]
[ "Would", "be", "for", "example", ":", "[", "-", "90", "-", "180", "TO", "90", "180", "]" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L128-L144
cga-harvard/Hypermap-Registry
hypermap/search_api/serializers.py
SearchSerializer.validate_a_time_filter
def validate_a_time_filter(self, value):
    """
    Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
    """
    if value:
        try:
            utils.parse_datetime_range(value)
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
python
def validate_a_time_filter(self, value):
    """
    Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
    """
    if value:
        try:
            utils.parse_datetime_range(value)
        except Exception as e:
            raise serializers.ValidationError(e.message)
    return value
[ "def", "validate_a_time_filter", "(", "self", ",", "value", ")", ":", "if", "value", ":", "try", ":", "utils", ".", "parse_datetime_range", "(", "value", ")", "except", "Exception", "as", "e", ":", "raise", "serializers", ".", "ValidationError", "(", "e", ".", "message", ")", "return", "value" ]
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
[ "Would", "be", "for", "example", ":", "[", "2013", "-", "03", "-", "01", "TO", "2013", "-", "04", "-", "01", ":", "00", ":", "00", ":", "00", "]", "and", "/", "or", "[", "*", "TO", "*", "]" ]
train
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L146-L156
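A sketch of what the three validators above accept and emit; the input values are taken from the docstrings, while the exact float formatting of the geo result depends on `utils.parse_geo_box` and is an assumption:

data = {
    'q_time': '[2013-03-01 TO 2013-04-01T00:00:00]',
    # validate_q_time -> '[2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z]'
    'q_geo': '[-90,-180 TO 90,180]',
    # validate_q_geo -> '[-90.0,-180.0 TO 90.0,180.0]' (bounds re-serialized)
    'a_time_filter': '[* TO *]',
    # validate_a_time_filter only parses; the value passes through unchanged
}
serializer = SearchSerializer(data=data)
serializer.is_valid()  # any validation errors end up in serializer.errors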
caffeinehit/django-follow
follow/models.py
FollowManager.fname
def fname(self, model_or_obj_or_qs):
    """
    Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
    """
    if isinstance(model_or_obj_or_qs, QuerySet):
        _, fname = model_map[model_or_obj_or_qs.model]
    else:
        cls = model_or_obj_or_qs if inspect.isclass(model_or_obj_or_qs) else model_or_obj_or_qs.__class__
        _, fname = model_map[cls]
    return fname
python
def fname(self, model_or_obj_or_qs):
    """
    Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
    """
    if isinstance(model_or_obj_or_qs, QuerySet):
        _, fname = model_map[model_or_obj_or_qs.model]
    else:
        cls = model_or_obj_or_qs if inspect.isclass(model_or_obj_or_qs) else model_or_obj_or_qs.__class__
        _, fname = model_map[cls]
    return fname
[ "def", "fname", "(", "self", ",", "model_or_obj_or_qs", ")", ":", "if", "isinstance", "(", "model_or_obj_or_qs", ",", "QuerySet", ")", ":", "_", ",", "fname", "=", "model_map", "[", "model_or_obj_or_qs", ".", "model", "]", "else", ":", "cls", "=", "model_or_obj_or_qs", "if", "inspect", ".", "isclass", "(", "model_or_obj_or_qs", ")", "else", "model_or_obj_or_qs", ".", "__class__", "_", ",", "fname", "=", "model_map", "[", "cls", "]", "return", "fname" ]
Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
[ "Return", "the", "field", "name", "on", "the", ":", "class", ":", "Follow", "model", "for", "model_or_obj_or_qs", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L10-L19
caffeinehit/django-follow
follow/models.py
FollowManager.create
def create(self, user, obj, **kwargs):
    """
    Create a new follow link between a user and an object
    of a registered model type.
    """
    follow = Follow(user=user)
    follow.target = obj
    follow.save()
    return follow
python
def create(self, user, obj, **kwargs):
    """
    Create a new follow link between a user and an object
    of a registered model type.
    """
    follow = Follow(user=user)
    follow.target = obj
    follow.save()
    return follow
[ "def", "create", "(", "self", ",", "user", ",", "obj", ",", "*", "*", "kwargs", ")", ":", "follow", "=", "Follow", "(", "user", "=", "user", ")", "follow", ".", "target", "=", "obj", "follow", ".", "save", "(", ")", "return", "follow" ]
Create a new follow link between a user and an object of a registered model type.
[ "Create", "a", "new", "follow", "link", "between", "a", "user", "and", "an", "object", "of", "a", "registered", "model", "type", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L21-L30
caffeinehit/django-follow
follow/models.py
FollowManager.get_or_create
def get_or_create(self, user, obj, **kwargs):
    """
    Almost the same as `FollowManager.objects.create` - behaves the same
    as the normal `get_or_create` methods in django though.

    Returns a tuple with the `Follow` and either `True` or `False`
    """
    if not self.is_following(user, obj):
        return self.create(user, obj, **kwargs), True
    return self.get_follows(obj).get(user=user), False
python
def get_or_create(self, user, obj, **kwargs):
    """
    Almost the same as `FollowManager.objects.create` - behaves the same
    as the normal `get_or_create` methods in django though.

    Returns a tuple with the `Follow` and either `True` or `False`
    """
    if not self.is_following(user, obj):
        return self.create(user, obj, **kwargs), True
    return self.get_follows(obj).get(user=user), False
[ "def", "get_or_create", "(", "self", ",", "user", ",", "obj", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_following", "(", "user", ",", "obj", ")", ":", "return", "self", ".", "create", "(", "user", ",", "obj", ",", "*", "*", "kwargs", ")", ",", "True", "return", "self", ".", "get_follows", "(", "obj", ")", ".", "get", "(", "user", "=", "user", ")", ",", "False" ]
Almost the same as `FollowManager.objects.create` - behaves the same as the normal `get_or_create` methods in django though. Returns a tuple with the `Follow` and either `True` or `False`
[ "Almost", "the", "same", "as", "FollowManager", ".", "objects", ".", "create", "-", "behaves", "the", "same", "as", "the", "normal", "get_or_create", "methods", "in", "django", "though", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L32-L42
caffeinehit/django-follow
follow/models.py
FollowManager.is_following
def is_following(self, user, obj):
    """ Returns `True` or `False` """
    if isinstance(user, AnonymousUser):
        return False
    return 0 < self.get_follows(obj).filter(user=user).count()
python
def is_following(self, user, obj):
    """ Returns `True` or `False` """
    if isinstance(user, AnonymousUser):
        return False
    return 0 < self.get_follows(obj).filter(user=user).count()
[ "def", "is_following", "(", "self", ",", "user", ",", "obj", ")", ":", "if", "isinstance", "(", "user", ",", "AnonymousUser", ")", ":", "return", "False", "return", "0", "<", "self", ".", "get_follows", "(", "obj", ")", ".", "filter", "(", "user", "=", "user", ")", ".", "count", "(", ")" ]
Returns `True` or `False`
[ "Returns", "True", "or", "False" ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L44-L48
caffeinehit/django-follow
follow/models.py
FollowManager.get_follows
def get_follows(self, model_or_obj_or_qs):
    """
    Returns all the followers of a model, an object or a queryset.
    """
    fname = self.fname(model_or_obj_or_qs)

    if isinstance(model_or_obj_or_qs, QuerySet):
        return self.filter(**{'%s__in' % fname: model_or_obj_or_qs})

    if inspect.isclass(model_or_obj_or_qs):
        return self.exclude(**{fname: None})

    return self.filter(**{fname: model_or_obj_or_qs})
python
def get_follows(self, model_or_obj_or_qs):
    """
    Returns all the followers of a model, an object or a queryset.
    """
    fname = self.fname(model_or_obj_or_qs)

    if isinstance(model_or_obj_or_qs, QuerySet):
        return self.filter(**{'%s__in' % fname: model_or_obj_or_qs})

    if inspect.isclass(model_or_obj_or_qs):
        return self.exclude(**{fname: None})

    return self.filter(**{fname: model_or_obj_or_qs})
[ "def", "get_follows", "(", "self", ",", "model_or_obj_or_qs", ")", ":", "fname", "=", "self", ".", "fname", "(", "model_or_obj_or_qs", ")", "if", "isinstance", "(", "model_or_obj_or_qs", ",", "QuerySet", ")", ":", "return", "self", ".", "filter", "(", "*", "*", "{", "'%s__in'", "%", "fname", ":", "model_or_obj_or_qs", "}", ")", "if", "inspect", ".", "isclass", "(", "model_or_obj_or_qs", ")", ":", "return", "self", ".", "exclude", "(", "*", "*", "{", "fname", ":", "None", "}", ")", "return", "self", ".", "filter", "(", "*", "*", "{", "fname", ":", "model_or_obj_or_qs", "}", ")" ]
Returns all the followers of a model, an object or a queryset.
[ "Returns", "all", "the", "followers", "of", "a", "model", "an", "object", "or", "a", "queryset", "." ]
train
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L50-L62
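The manager methods above compose as follows; a sketch reusing the invented `user` and `Band`/`band` names from earlier:

Follow.objects.is_following(user, band)           # False for AnonymousUser
obj, created = Follow.objects.get_or_create(user, band)
Follow.objects.get_follows(band)                  # follows of one object
Follow.objects.get_follows(Band)                  # follows of any Band
Follow.objects.get_follows(Band.objects.all())    # follows of a queryset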
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.create_event_regressors
def create_event_regressors(self, event_times_indices, covariates = None, durations = None):
    """create_event_regressors creates the part of the design matrix corresponding to one event type.

    :param event_times_indices: indices in the resampled data, on which the events occurred.
    :type event_times_indices: numpy array, (nr_events)
    :param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally.
    :type covariates: numpy array, (nr_events)
    :param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally.
    :type durations: numpy array, (nr_events)
    :returns: This event type's part of the design matrix.
    """
    # check covariates; default to one covariate value per passed-in event
    if covariates is None:
        covariates = np.ones(event_times_indices.shape)

    # check/create durations, convert from seconds to sample time, and compute mean duration for this event type.
    if durations is None:
        durations = np.ones(event_times_indices.shape)
    else:
        durations = np.round(durations*self.deconvolution_frequency).astype(int)
    mean_duration = np.mean(durations)

    # set up output array
    regressors_for_event = np.zeros((self.deconvolution_interval_size, self.resampled_signal_size))

    # fill up output array by looping over events.
    for cov, eti, dur in zip(covariates, event_times_indices, durations):
        valid = True
        if eti < 0:
            self.logger.debug('deconv samples are starting before the data starts.')
            valid = False
        if eti+self.deconvolution_interval_size > self.resampled_signal_size:
            self.logger.debug('deconv samples are continuing after the data stops.')
            valid = False
        if eti > self.resampled_signal_size:
            self.logger.debug('event falls outside of the scope of the data.')
            valid = False

        if valid: # only incorporate sensible events.
            # calculate the design matrix that belongs to this event.
            this_event_design_matrix = (np.diag(np.ones(self.deconvolution_interval_size)) * cov)
            over_durations_dm = np.copy(this_event_design_matrix)
            if dur > 1: # if this event has a non-unity duration, duplicate the stick regressors in the time direction
                for d in np.arange(1,dur):
                    over_durations_dm[d:] += this_event_design_matrix[:-d]
                # and correct for differences in durations between different regressor types.
                over_durations_dm /= mean_duration
            # add the designmatrix for this event to the full design matrix for this type of event.
            regressors_for_event[:,eti:int(eti+self.deconvolution_interval_size)] += over_durations_dm

    return regressors_for_event
python
def create_event_regressors(self, event_times_indices, covariates = None, durations = None):
    """create_event_regressors creates the part of the design matrix corresponding to one event type.

    :param event_times_indices: indices in the resampled data, on which the events occurred.
    :type event_times_indices: numpy array, (nr_events)
    :param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally.
    :type covariates: numpy array, (nr_events)
    :param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally.
    :type durations: numpy array, (nr_events)
    :returns: This event type's part of the design matrix.
    """
    # check covariates; default to one covariate value per passed-in event
    if covariates is None:
        covariates = np.ones(event_times_indices.shape)

    # check/create durations, convert from seconds to sample time, and compute mean duration for this event type.
    if durations is None:
        durations = np.ones(event_times_indices.shape)
    else:
        durations = np.round(durations*self.deconvolution_frequency).astype(int)
    mean_duration = np.mean(durations)

    # set up output array
    regressors_for_event = np.zeros((self.deconvolution_interval_size, self.resampled_signal_size))

    # fill up output array by looping over events.
    for cov, eti, dur in zip(covariates, event_times_indices, durations):
        valid = True
        if eti < 0:
            self.logger.debug('deconv samples are starting before the data starts.')
            valid = False
        if eti+self.deconvolution_interval_size > self.resampled_signal_size:
            self.logger.debug('deconv samples are continuing after the data stops.')
            valid = False
        if eti > self.resampled_signal_size:
            self.logger.debug('event falls outside of the scope of the data.')
            valid = False

        if valid: # only incorporate sensible events.
            # calculate the design matrix that belongs to this event.
            this_event_design_matrix = (np.diag(np.ones(self.deconvolution_interval_size)) * cov)
            over_durations_dm = np.copy(this_event_design_matrix)
            if dur > 1: # if this event has a non-unity duration, duplicate the stick regressors in the time direction
                for d in np.arange(1,dur):
                    over_durations_dm[d:] += this_event_design_matrix[:-d]
                # and correct for differences in durations between different regressor types.
                over_durations_dm /= mean_duration
            # add the designmatrix for this event to the full design matrix for this type of event.
            regressors_for_event[:,eti:int(eti+self.deconvolution_interval_size)] += over_durations_dm

    return regressors_for_event
[ "def", "create_event_regressors", "(", "self", ",", "event_times_indices", ",", "covariates", "=", "None", ",", "durations", "=", "None", ")", ":", "# check covariates", "if", "covariates", "is", "None", ":", "covariates", "=", "np", ".", "ones", "(", "self", ".", "event_times_indices", ".", "shape", ")", "# check/create durations, convert from seconds to samples time, and compute mean duration for this event type.", "if", "durations", "is", "None", ":", "durations", "=", "np", ".", "ones", "(", "self", ".", "event_times_indices", ".", "shape", ")", "else", ":", "durations", "=", "np", ".", "round", "(", "durations", "*", "self", ".", "deconvolution_frequency", ")", ".", "astype", "(", "int", ")", "mean_duration", "=", "np", ".", "mean", "(", "durations", ")", "# set up output array", "regressors_for_event", "=", "np", ".", "zeros", "(", "(", "self", ".", "deconvolution_interval_size", ",", "self", ".", "resampled_signal_size", ")", ")", "# fill up output array by looping over events.", "for", "cov", ",", "eti", ",", "dur", "in", "zip", "(", "covariates", ",", "event_times_indices", ",", "durations", ")", ":", "valid", "=", "True", "if", "eti", "<", "0", ":", "self", ".", "logger", ".", "debug", "(", "'deconv samples are starting before the data starts.'", ")", "valid", "=", "False", "if", "eti", "+", "self", ".", "deconvolution_interval_size", ">", "self", ".", "resampled_signal_size", ":", "self", ".", "logger", ".", "debug", "(", "'deconv samples are continuing after the data stops.'", ")", "valid", "=", "False", "if", "eti", ">", "self", ".", "resampled_signal_size", ":", "self", ".", "logger", ".", "debug", "(", "'event falls outside of the scope of the data.'", ")", "valid", "=", "False", "if", "valid", ":", "# only incorporate sensible events.", "# calculate the design matrix that belongs to this event.", "this_event_design_matrix", "=", "(", "np", ".", "diag", "(", "np", ".", "ones", "(", "self", ".", "deconvolution_interval_size", ")", ")", "*", "cov", ")", "over_durations_dm", "=", "np", ".", "copy", "(", "this_event_design_matrix", ")", "if", "dur", ">", "1", ":", "# if this event has a non-unity duration, duplicate the stick regressors in the time direction", "for", "d", "in", "np", ".", "arange", "(", "1", ",", "dur", ")", ":", "over_durations_dm", "[", "d", ":", "]", "+=", "this_event_design_matrix", "[", ":", "-", "d", "]", "# and correct for differences in durations between different regressor types.", "over_durations_dm", "/=", "mean_duration", "# add the designmatrix for this event to the full design matrix for this type of event.", "regressors_for_event", "[", ":", ",", "eti", ":", "int", "(", "eti", "+", "self", ".", "deconvolution_interval_size", ")", "]", "+=", "over_durations_dm", "return", "regressors_for_event" ]
create_event_regressors creates the part of the design matrix corresponding to one event type. :param event_times_indices: indices in the resampled data, on which the events occurred. :type event_times_indices: numpy array, (nr_events) :param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally. :type covariates: numpy array, (nr_events) :param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally. :type durations: numpy array, (nr_events) :returns: This event type's part of the design matrix.
[ "create_event_regressors", "creates", "the", "part", "of", "the", "design", "matrix", "corresponding", "to", "one", "event", "type", "." ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L121-L172
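A minimal numpy sketch of the stick-regressor logic in create_event_regressors above; the toy sizes and event list are invented for illustration and are not part of the package's API:

import numpy as np

# invented toy dimensions: 4 deconvolution lags, 20 signal samples
interval_size, signal_size = 4, 20
events = [2, 9]                      # event onsets, in samples
durations = [1, 3]                   # event durations, in samples
mean_duration = np.mean(durations)

regressors = np.zeros((interval_size, signal_size))
for eti, dur in zip(events, durations):
    sticks = np.diag(np.ones(interval_size))     # one stick regressor per lag
    dm = sticks.copy()
    if dur > 1:
        for d in np.arange(1, dur):              # smear sticks forward over the duration
            dm[d:] += sticks[:-d]
        dm /= mean_duration                      # correct for duration differences
    regressors[:, eti:eti + interval_size] += dm

print(regressors.shape)                          # (4, 20)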
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.create_design_matrix
def create_design_matrix(self, demean = False, intercept = True): 
        """create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. 
        self.design_matrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
        """

        self.design_matrix = np.zeros((int(self.number_of_event_types*self.deconvolution_interval_size), self.resampled_signal_size))

        for i, covariate in enumerate(self.covariates.keys()):
            # document the creation of the designmatrix step by step
            self.logger.debug('creating regressor for ' + covariate)
            indices = np.arange(i*self.deconvolution_interval_size,(i+1)*self.deconvolution_interval_size, dtype = int)

            # here, we implement the dot-separated encoding of events and covariates
            if len(covariate.split('.')) > 1:
                which_event_time_indices = covariate.split('.')[0]
            else:
                which_event_time_indices = covariate
            self.design_matrix[indices] = self.create_event_regressors(
                                        self.event_times_indices[which_event_time_indices], 
                                        self.covariates[covariate], 
                                        self.durations[which_event_time_indices])
        if demean: # we expect the data to be demeaned. 
            # it's an option whether the regressors should be, too
            self.design_matrix = (self.design_matrix.T - self.design_matrix.mean(axis = -1)).T
        if intercept: # similarly, intercept is a choice.
            self.design_matrix = np.vstack((self.design_matrix, np.ones((1,self.design_matrix.shape[-1]))))

        self.logger.debug('created %s design_matrix' % (str(self.design_matrix.shape)))
python
def create_design_matrix(self, demean = False, intercept = True): 
        """create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. 
        self.design_matrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
        """

        self.design_matrix = np.zeros((int(self.number_of_event_types*self.deconvolution_interval_size), self.resampled_signal_size))

        for i, covariate in enumerate(self.covariates.keys()):
            # document the creation of the designmatrix step by step
            self.logger.debug('creating regressor for ' + covariate)
            indices = np.arange(i*self.deconvolution_interval_size,(i+1)*self.deconvolution_interval_size, dtype = int)

            # here, we implement the dot-separated encoding of events and covariates
            if len(covariate.split('.')) > 1:
                which_event_time_indices = covariate.split('.')[0]
            else:
                which_event_time_indices = covariate
            self.design_matrix[indices] = self.create_event_regressors(
                                        self.event_times_indices[which_event_time_indices], 
                                        self.covariates[covariate], 
                                        self.durations[which_event_time_indices])
        if demean: # we expect the data to be demeaned. 
            # it's an option whether the regressors should be, too
            self.design_matrix = (self.design_matrix.T - self.design_matrix.mean(axis = -1)).T
        if intercept: # similarly, intercept is a choice.
            self.design_matrix = np.vstack((self.design_matrix, np.ones((1,self.design_matrix.shape[-1]))))

        self.logger.debug('created %s design_matrix' % (str(self.design_matrix.shape)))
[ "def", "create_design_matrix", "(", "self", ",", "demean", "=", "False", ",", "intercept", "=", "True", ")", ":", "self", ".", "design_matrix", "=", "np", ".", "zeros", "(", "(", "int", "(", "self", ".", "number_of_event_types", "*", "self", ".", "deconvolution_interval_size", ")", ",", "self", ".", "resampled_signal_size", ")", ")", "for", "i", ",", "covariate", "in", "enumerate", "(", "self", ".", "covariates", ".", "keys", "(", ")", ")", ":", "# document the creation of the designmatrix step by step", "self", ".", "logger", ".", "debug", "(", "'creating regressor for '", "+", "covariate", ")", "indices", "=", "np", ".", "arange", "(", "i", "*", "self", ".", "deconvolution_interval_size", ",", "(", "i", "+", "1", ")", "*", "self", ".", "deconvolution_interval_size", ",", "dtype", "=", "int", ")", "# here, we implement the dot-separated encoding of events and covariates", "if", "len", "(", "covariate", ".", "split", "(", "'.'", ")", ")", ">", "0", ":", "which_event_time_indices", "=", "covariate", ".", "split", "(", "'.'", ")", "[", "0", "]", "else", ":", "which_event_time_indices", "=", "covariate", "self", ".", "design_matrix", "[", "indices", "]", "=", "self", ".", "create_event_regressors", "(", "self", ".", "event_times_indices", "[", "which_event_time_indices", "]", ",", "self", ".", "covariates", "[", "covariate", "]", ",", "self", ".", "durations", "[", "which_event_time_indices", "]", ")", "if", "demean", ":", "# we expect the data to be demeaned. ", "# it's an option whether the regressors should be, too", "self", ".", "design_matrix", "=", "(", "self", ".", "design_matrix", ".", "T", "-", "self", ".", "design_matrix", ".", "mean", "(", "axis", "=", "-", "1", ")", ")", ".", "T", "if", "intercept", ":", "# similarly, intercept is a choice.", "self", ".", "design_matrix", "=", "np", ".", "vstack", "(", "(", "self", ".", "design_matrix", ",", "np", ".", "ones", "(", "(", "1", ",", "self", ".", "design_matrix", ".", "shape", "[", "-", "1", "]", ")", ")", ")", ")", "self", ".", "logger", ".", "debug", "(", "'created %s design_matrix'", "%", "(", "str", "(", "self", ".", "design_matrix", ".", "shape", ")", ")", ")" ]
create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. 
        self.design_matrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
[ "create_design_matrix", "calls", "create_event_regressors", "for", "each", "of", "the", "covariates", "in", "the", "self", ".", "covariates", "dict", ".", "self", ".", "designmatrix", "is", "created", "and", "is", "shaped", "(", "nr_regressors", "self", ".", "resampled_signal", ".", "shape", "[", "-", "1", "]", ")" ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L174-L200
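The dot-separated covariate naming that create_design_matrix decodes can be sketched in isolation; the names 'stim' and 'stim.contrast' here are invented examples, not names from the package:

# a covariate named 'stim.contrast' belongs to the event type 'stim';
# a plain name like 'stim' is its own event type
for covariate in ['stim', 'stim.contrast']:
    event_name = covariate.split('.')[0] if len(covariate.split('.')) > 1 else covariate
    print(covariate, '->', event_name)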
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.add_continuous_regressors_to_design_matrix
def add_continuous_regressors_to_design_matrix(self, regressors):
        """add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. 
        One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. 
        For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix.

        :param regressors: the signal to be appended to the design matrix.
        :type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
        """
        previous_design_matrix_shape = self.design_matrix.shape
        if len(regressors.shape) == 1:
            regressors = regressors[np.newaxis, :]
        if regressors.shape[1] != self.resampled_signal.shape[1]:
            self.logger.warning('additional regressor shape %s does not conform to designmatrix shape %s' % (regressors.shape, self.resampled_signal.shape))
        # and, a vstack append
        self.design_matrix = np.vstack((self.design_matrix, regressors))

        self.logger.debug('added %s continuous regressors to %s design_matrix, shape now %s' % (str(regressors.shape), str(previous_design_matrix_shape), str(self.design_matrix.shape)))
python
def add_continuous_regressors_to_design_matrix(self, regressors):
        """add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. 
        One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. 
        For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix.

        :param regressors: the signal to be appended to the design matrix.
        :type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
        """
        previous_design_matrix_shape = self.design_matrix.shape
        if len(regressors.shape) == 1:
            regressors = regressors[np.newaxis, :]
        if regressors.shape[1] != self.resampled_signal.shape[1]:
            self.logger.warning('additional regressor shape %s does not conform to designmatrix shape %s' % (regressors.shape, self.resampled_signal.shape))
        # and, a vstack append
        self.design_matrix = np.vstack((self.design_matrix, regressors))

        self.logger.debug('added %s continuous regressors to %s design_matrix, shape now %s' % (str(regressors.shape), str(previous_design_matrix_shape), str(self.design_matrix.shape)))
[ "def", "add_continuous_regressors_to_design_matrix", "(", "self", ",", "regressors", ")", ":", "previous_design_matrix_shape", "=", "self", ".", "design_matrix", ".", "shape", "if", "len", "(", "regressors", ".", "shape", ")", "==", "1", ":", "regressors", "=", "regressors", "[", "np", ".", "newaxis", ",", ":", "]", "if", "regressors", ".", "shape", "[", "1", "]", "!=", "self", ".", "resampled_signal", ".", "shape", "[", "1", "]", ":", "self", ".", "logger", ".", "warning", "(", "'additional regressor shape %s does not conform to designmatrix shape %s'", "%", "(", "regressors", ".", "shape", ",", "self", ".", "resampled_signal", ".", "shape", ")", ")", "# and, an vstack append", "self", ".", "design_matrix", "=", "np", ".", "vstack", "(", "(", "self", ".", "design_matrix", ",", "regressors", ")", ")", "self", ".", "logger", ".", "debug", "(", "'added %s continuous regressors to %s design_matrix, shape now %s'", "%", "(", "str", "(", "regressors", ".", "shape", ")", ",", "str", "(", "previous_design_matrix_shape", ")", ",", "str", "(", "self", ".", "design_matrix", ".", "shape", ")", ")", ")" ]
add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix. :param regressors: the signal to be appended to the design matrix. :type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
[ "add_continuous_regressors_to_design_matrix", "appends", "continuously", "sampled", "regressors", "to", "the", "existing", "design", "matrix", ".", "One", "uses", "this", "addition", "to", "the", "design", "matrix", "when", "one", "expects", "the", "data", "to", "contain", "nuisance", "factors", "that", "aren", "t", "tied", "to", "the", "moments", "of", "specific", "events", ".", "For", "instance", "in", "fMRI", "analysis", "this", "allows", "us", "to", "add", "cardiac", "/", "respiratory", "regressors", "as", "well", "as", "tissue", "and", "head", "motion", "timecourses", "to", "the", "designmatrix", ".", ":", "param", "regressors", ":", "the", "signal", "to", "be", "appended", "to", "the", "design", "matrix", ".", ":", "type", "regressors", ":", "numpy", "array", "with", "shape", "equal", "to", "(", "nr_regressors", "self", ".", "resampled_signal", ".", "shape", "[", "-", "1", "]", ")" ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L202-L215
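A self-contained sketch of the append step in add_continuous_regressors_to_design_matrix, with made-up sizes; it mirrors the 1-D promotion, the shape check, and the vstack above:

import numpy as np

design_matrix = np.ones((3, 100))    # pretend event-based design matrix, toy sizes
heart_rate = np.random.randn(100)    # one continuous nuisance signal

regressors = heart_rate
if regressors.ndim == 1:             # promote a single regressor to 2-D
    regressors = regressors[np.newaxis, :]
assert regressors.shape[1] == design_matrix.shape[1], 'sample counts must match'

design_matrix = np.vstack((design_matrix, regressors))
print(design_matrix.shape)           # (4, 100)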
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.regress
def regress(self, method = 'lstsq'):
        """regress performs linear least squares regression of the designmatrix on the data. 

        :param method: method, or backend to be used for the regression analysis.
        :type method: string, one of ['lstsq', 'sm_ols']
        :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
        """

        if method == 'lstsq':
            self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T, self.resampled_signal.T)
            self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
        elif method == 'sm_ols':
            import statsmodels.api as sm

            assert self.resampled_signal.shape[0] == 1, \
                    'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
            model = sm.OLS(np.squeeze(self.resampled_signal),self.design_matrix.T)
            results = model.fit()

            # make betas and residuals that are compatible with the LA.lstsq type.
            self.betas = np.array(results.params).reshape((self.design_matrix.shape[0], self.resampled_signal.shape[0]))
            self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)

        self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
python
def regress(self, method = 'lstsq'):
        """regress performs linear least squares regression of the designmatrix on the data. 

        :param method: method, or backend to be used for the regression analysis.
        :type method: string, one of ['lstsq', 'sm_ols']
        :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
        """

        if method == 'lstsq':
            self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T, self.resampled_signal.T)
            self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
        elif method == 'sm_ols':
            import statsmodels.api as sm

            assert self.resampled_signal.shape[0] == 1, \
                    'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
            model = sm.OLS(np.squeeze(self.resampled_signal),self.design_matrix.T)
            results = model.fit()

            # make betas and residuals that are compatible with the LA.lstsq type.
            self.betas = np.array(results.params).reshape((self.design_matrix.shape[0], self.resampled_signal.shape[0]))
            self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)

        self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
[ "def", "regress", "(", "self", ",", "method", "=", "'lstsq'", ")", ":", "if", "method", "is", "'lstsq'", ":", "self", ".", "betas", ",", "residuals_sum", ",", "rank", ",", "s", "=", "LA", ".", "lstsq", "(", "self", ".", "design_matrix", ".", "T", ",", "self", ".", "resampled_signal", ".", "T", ")", "self", ".", "residuals", "=", "self", ".", "resampled_signal", "-", "self", ".", "predict_from_design_matrix", "(", "self", ".", "design_matrix", ")", "elif", "method", "is", "'sm_ols'", ":", "import", "statsmodels", ".", "api", "as", "sm", "assert", "self", ".", "resampled_signal", ".", "shape", "[", "0", "]", "==", "1", ",", "'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s'", "%", "str", "(", "self", ".", "resampled_signal", ".", "shape", ")", "model", "=", "sm", ".", "OLS", "(", "np", ".", "squeeze", "(", "self", ".", "resampled_signal", ")", ",", "self", ".", "design_matrix", ".", "T", ")", "results", "=", "model", ".", "fit", "(", ")", "# make betas and residuals that are compatible with the LA.lstsq type.", "self", ".", "betas", "=", "np", ".", "array", "(", "results", ".", "params", ")", ".", "reshape", "(", "(", "self", ".", "design_matrix", ".", "shape", "[", "0", "]", ",", "self", ".", "resampled_signal", ".", "shape", "[", "0", "]", ")", ")", "self", ".", "residuals", "=", "np", ".", "array", "(", "results", ".", "resid", ")", ".", "reshape", "(", "self", ".", "resampled_signal", ".", "shape", ")", "self", ".", "logger", ".", "debug", "(", "'performed %s regression on %s design_matrix and %s signal'", "%", "(", "method", ",", "str", "(", "self", ".", "design_matrix", ".", "shape", ")", ",", "str", "(", "self", ".", "resampled_signal", ".", "shape", ")", ")", ")" ]
regress performs linear least squares regression of the designmatrix on the data. :param method: method, or backend to be used for the regression analysis. :type method: string, one of ['lstsq', 'sm_ols'] :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
[ "regress", "performs", "linear", "least", "squares", "regression", "of", "the", "designmatrix", "on", "the", "data", "." ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L217-L239
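The 'lstsq' branch of regress reduces to one call to numpy.linalg.lstsq; a toy run with invented shapes (noiseless, so the betas are recovered exactly):

import numpy as np

rng = np.random.default_rng(0)
design_matrix = rng.standard_normal((5, 200))   # (nr_regressors, nr_samples)
true_betas = rng.standard_normal((5, 1))
signal = (design_matrix.T @ true_betas).T       # (nr_signals, nr_samples) = (1, 200)

# note the transposes: lstsq wants samples as rows
betas, _, _, _ = np.linalg.lstsq(design_matrix.T, signal.T, rcond=None)
residuals = signal - (design_matrix.T @ betas).T
print(np.allclose(betas, true_betas))           # True for noiseless data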
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.ridge_regress
def ridge_regress(self, cv = 20, alphas = None ):
        """perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. 
        For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality.
        Note: intercept is not fit, and data are not prenormalized. 

        :param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
        :type cv: int, standard = 20
        :param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV alphas argument's functionality. 
        Standard value, when parameter is None, is np.logspace(7, 0, 20)
        :type alphas: numpy array of positive floats.
        :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
        """

        if alphas is None:
            alphas = np.logspace(7, 0, 20)
        self.rcv = linear_model.RidgeCV(alphas=alphas, 
                fit_intercept=False, 
                cv=cv) 
        self.rcv.fit(self.design_matrix.T, self.resampled_signal.T)

        self.betas = self.rcv.coef_.T
        self.residuals = self.resampled_signal - self.rcv.predict(self.design_matrix.T)

        self.logger.debug('performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f' % (str(self.design_matrix.shape), str(self.resampled_signal.shape), self.rcv.alpha_))
python
def ridge_regress(self, cv = 20, alphas = None ):
        """perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. 
        For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality.
        Note: intercept is not fit, and data are not prenormalized. 

        :param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
        :type cv: int, standard = 20
        :param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV alphas argument's functionality. 
        Standard value, when parameter is None, is np.logspace(7, 0, 20)
        :type alphas: numpy array of positive floats.
        :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
        """

        if alphas is None:
            alphas = np.logspace(7, 0, 20)
        self.rcv = linear_model.RidgeCV(alphas=alphas, 
                fit_intercept=False, 
                cv=cv) 
        self.rcv.fit(self.design_matrix.T, self.resampled_signal.T)

        self.betas = self.rcv.coef_.T
        self.residuals = self.resampled_signal - self.rcv.predict(self.design_matrix.T)

        self.logger.debug('performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f' % (str(self.design_matrix.shape), str(self.resampled_signal.shape), self.rcv.alpha_))
[ "def", "ridge_regress", "(", "self", ",", "cv", "=", "20", ",", "alphas", "=", "None", ")", ":", "if", "alphas", "is", "None", ":", "alphas", "=", "np", ".", "logspace", "(", "7", ",", "0", ",", "20", ")", "self", ".", "rcv", "=", "linear_model", ".", "RidgeCV", "(", "alphas", "=", "alphas", ",", "fit_intercept", "=", "False", ",", "cv", "=", "cv", ")", "self", ".", "rcv", ".", "fit", "(", "self", ".", "design_matrix", ".", "T", ",", "self", ".", "resampled_signal", ".", "T", ")", "self", ".", "betas", "=", "self", ".", "rcv", ".", "coef_", ".", "T", "self", ".", "residuals", "=", "self", ".", "resampled_signal", "-", "self", ".", "rcv", ".", "predict", "(", "self", ".", "design_matrix", ".", "T", ")", "self", ".", "logger", ".", "debug", "(", "'performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f'", "%", "(", "str", "(", "self", ".", "design_matrix", ".", "shape", ")", ",", "str", "(", "self", ".", "resampled_signal", ".", "shape", ")", ",", "self", ".", "rcv", ".", "alpha_", ")", ")" ]
perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. 
        For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality.
        Note: intercept is not fit, and data are not prenormalized. 

        :param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
        :type cv: int, standard = 20
        :param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV alphas argument's functionality. 
        Standard value, when parameter is None, is np.logspace(7, 0, 20)
        :type alphas: numpy array of positive floats.
        :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
[ "perform", "k", "-", "folds", "cross", "-", "validated", "ridge", "regression", "on", "the", "design_matrix", ".", "To", "be", "used", "when", "the", "design", "matrix", "contains", "very", "collinear", "regressors", ".", "For", "cross", "-", "validation", "and", "ridge", "fitting", "we", "use", "sklearn", "s", "RidgeCV", "functionality", ".", "Note", ":", "intercept", "is", "not", "fit", "and", "data", "are", "not", "prenormalized", "." ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L241-L260
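A compact RidgeCV run mirroring the settings of ridge_regress (no intercept, explicit folds, the same default alpha grid); the data shapes here are invented:

import numpy as np
from sklearn import linear_model

rng = np.random.default_rng(1)
X = rng.standard_normal((200, 5))               # (nr_samples, nr_regressors)
y = X @ rng.standard_normal(5) + 0.1 * rng.standard_normal(200)

rcv = linear_model.RidgeCV(alphas=np.logspace(7, 0, 20), fit_intercept=False, cv=20)
rcv.fit(X, y)
print(rcv.alpha_)                               # selected penalization strength
betas = rcv.coef_.T
residuals = y - rcv.predict(X)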
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.betas_for_cov
def betas_for_cov(self, covariate = '0'): """betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate. :param covariate: name of covariate. :type covariate: string """ # find the index in the designmatrix of the current covariate this_covariate_index = list(self.covariates.keys()).index(covariate) return self.betas[int(this_covariate_index*self.deconvolution_interval_size):int((this_covariate_index+1)*self.deconvolution_interval_size)]
python
def betas_for_cov(self, covariate = '0'): """betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate. :param covariate: name of covariate. :type covariate: string """ # find the index in the designmatrix of the current covariate this_covariate_index = list(self.covariates.keys()).index(covariate) return self.betas[int(this_covariate_index*self.deconvolution_interval_size):int((this_covariate_index+1)*self.deconvolution_interval_size)]
[ "def", "betas_for_cov", "(", "self", ",", "covariate", "=", "'0'", ")", ":", "# find the index in the designmatrix of the current covariate", "this_covariate_index", "=", "list", "(", "self", ".", "covariates", ".", "keys", "(", ")", ")", ".", "index", "(", "covariate", ")", "return", "self", ".", "betas", "[", "int", "(", "this_covariate_index", "*", "self", ".", "deconvolution_interval_size", ")", ":", "int", "(", "(", "this_covariate_index", "+", "1", ")", "*", "self", ".", "deconvolution_interval_size", ")", "]" ]
betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate. :param covariate: name of covariate. :type covariate: string
[ "betas_for_cov", "returns", "the", "beta", "values", "(", "i", ".", "e", ".", "IRF", ")", "associated", "with", "a", "specific", "covariate", "." ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L262-L270
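betas_for_cov amounts to carving one covariate's block out of the stacked beta array; a toy sketch with invented covariate names and sizes:

import numpy as np

covariate_names = ['stim', 'response']          # invented names
interval_size = 4
betas = np.arange(8.0)[:, np.newaxis]           # (nr_covariates * interval_size, nr_signals)

idx = covariate_names.index('response')
irf = betas[idx * interval_size:(idx + 1) * interval_size]
print(irf.ravel())                              # [4. 5. 6. 7.]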
tknapen/FIRDeconvolution
src/FIRDeconvolution.py
FIRDeconvolution.betas_for_events
def betas_for_events(self):
        """betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.deconvolution_interval_size x nr_signals), 
        which holds the outcome betas per event type, in the order generated by self.covariates.keys()
        """
        self.betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, self.resampled_signal.shape[0]))
        for i, covariate in enumerate(self.covariates.keys()):
            self.betas_per_event_type[i] = self.betas_for_cov(covariate)
python
def betas_for_events(self):
        """betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.deconvolution_interval_size x nr_signals), 
        which holds the outcome betas per event type, in the order generated by self.covariates.keys()
        """
        self.betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, self.resampled_signal.shape[0]))
        for i, covariate in enumerate(self.covariates.keys()):
            self.betas_per_event_type[i] = self.betas_for_cov(covariate)
[ "def", "betas_for_events", "(", "self", ")", ":", "self", ".", "betas_per_event_type", "=", "np", ".", "zeros", "(", "(", "len", "(", "self", ".", "covariates", ")", ",", "self", ".", "deconvolution_interval_size", ",", "self", ".", "resampled_signal", ".", "shape", "[", "0", "]", ")", ")", "for", "i", ",", "covariate", "in", "enumerate", "(", "self", ".", "covariates", ".", "keys", "(", ")", ")", ":", "self", ".", "betas_per_event_type", "[", "i", "]", "=", "self", ".", "betas_for_cov", "(", "covariate", ")" ]
betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.deconvolution_interval_size x nr_signals), 
        which holds the outcome betas per event type, in the order generated by self.covariates.keys()
[ "betas_for_events", "creates", "an", "internal", "self", ".", "betas_per_event_type", "array", "of", "(", "nr_covariates", "x", "self", ".", "devonvolution_interval_size", ")", "which", "holds", "the", "outcome", "betas", "per", "event", "type", "in", "the", "order", "generated", "by", "self", ".", "covariates", ".", "keys", "()" ]
train
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L272-L278
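Under the hood betas_for_events just restacks the flat beta array into (nr_covariates, interval_size, nr_signals); the same restacking with plain numpy and toy sizes (a possible intercept row at the bottom of the beta array is ignored here):

import numpy as np

nr_covariates, interval_size, nr_signals = 2, 4, 1
betas = np.arange(nr_covariates * interval_size * nr_signals, dtype=float)
betas = betas.reshape((nr_covariates * interval_size, nr_signals))

betas_per_event_type = np.zeros((nr_covariates, interval_size, nr_signals))
for i in range(nr_covariates):
    betas_per_event_type[i] = betas[i * interval_size:(i + 1) * interval_size]
print(betas_per_event_type.shape)               # (2, 4, 1)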