sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
|---|---|---|
def i2c_master_write_read(self, i2c_address, data, length):
    """Perform a combined I2C write-then-read transaction.

    The write access is issued without a stop condition; the subsequent
    read begins with a repeated start. Useful for most addressable I2C
    devices such as EEPROMs and port expanders. This is a convenience
    wrapper around `i2c_master_write` and `i2c_master_read`.
    """
    # Suppress the stop condition so the read can use a repeated start.
    self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
    return self.i2c_master_read(i2c_address, length)
|
Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
|
entailment
|
def poll(self, timeout=None):
    """Wait for an event to occur.

    `timeout` is the time in milliseconds to wait before returning; if
    it is omitted, negative or None, the call blocks until an event
    arrives. Returns a list of pending events (empty if none).
    """
    # The underlying API uses -1 to mean "block until an event occurs".
    effective_timeout = -1 if timeout is None else timeout
    ret = api.py_aa_async_poll(self.handle, effective_timeout)
    _raise_error_if_negative(ret)
    # Decompose the returned bitmask into the individual event flags.
    known_events = (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
                    POLL_I2C_MONITOR)
    return [event for event in known_events if ret & event]
|
Wait for an event to occur.
If `timeout` is given, it specifies the length of time in milliseconds
which the function will wait for events before returning. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
|
entailment
|
def enable_i2c_slave(self, slave_address):
    """Enable I2C slave mode.

    Afterwards the device responds whenever `slave_address` is
    addressed. Wait for incoming data with :func:`poll` and fetch it
    with `i2c_slave_read`.
    """
    status = api.py_aa_i2c_slave_enable(
        self.handle, slave_address, self.BUFFER_SIZE, self.BUFFER_SIZE)
    _raise_error_if_negative(status)
|
Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
|
entailment
|
def i2c_slave_read(self):
    """Read the bytes from an I2C slave reception.

    Returns a ``(addr, data)`` tuple where `data` is a bytes object.
    """
    buf = array.array('B', bytes(self.BUFFER_SIZE))
    status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
            self.BUFFER_SIZE, buf)
    _raise_i2c_status_code_error_if_failure(status)
    # The API reports 0x80 for a general call; translate it back to the
    # actual general call address.
    if addr == 0x80:
        addr = 0x00
    return (addr, bytes(buf[:rx_len]))
|
Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
|
entailment
|
def i2c_slave_last_transmit_size(self):
    """Return the number of bytes transmitted by the slave."""
    count = api.py_aa_i2c_slave_write_stats(self.handle)
    _raise_error_if_negative(count)
    return count
|
Returns the number of bytes transmitted by the slave.
|
entailment
|
def i2c_monitor_read(self):
    """Retrieve any data fetched by the monitor.

    This function has an integrated timeout mechanism; use :func:`poll`
    to determine whether any data is available. Returns a list of data
    bytes and special symbols. There are three special symbols:
    `I2C_MONITOR_NACK`, `I2C_MONITOR_START` and `I2C_MONITOR_STOP`.
    """
    # 16-bit entries: values above 0xff encode the special symbols.
    buf = array.array('H', (0,) * self.BUFFER_SIZE)
    count = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
            buf)
    _raise_error_if_negative(count)
    return buf[:count].tolist()
|
Retrieve any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
|
entailment
|
def spi_bitrate(self):
    """SPI bitrate in kHz.

    Not every bitrate is supported by the host adapter, so the actual
    bitrate may be less than the value which was set. The slowest
    supported bitrate is 125 kHz; smaller values are rounded up to
    125 kHz. The power-on default is 1000 kHz.
    """
    # Passing 0 queries the current bitrate without changing it.
    rate = api.py_aa_spi_bitrate(self.handle, 0)
    _raise_error_if_negative(rate)
    return rate
|
SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
|
entailment
|
def spi_configure(self, polarity, phase, bitorder):
    """Configure the SPI interface (clock polarity, phase, bit order)."""
    status = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
    _raise_error_if_negative(status)
|
Configure the SPI interface.
|
entailment
|
def spi_configure_mode(self, spi_mode):
    """Configure the SPI interface using the well-known SPI mode numbers.

    Only modes 0 and 3 are supported; any other mode raises
    RuntimeError.
    """
    # Map each supported mode onto its (polarity, phase) pair.
    settings = {
        SPI_MODE_0: (SPI_POL_RISING_FALLING, SPI_PHASE_SAMPLE_SETUP),
        SPI_MODE_3: (SPI_POL_FALLING_RISING, SPI_PHASE_SETUP_SAMPLE),
    }
    if spi_mode not in settings:
        raise RuntimeError('SPI Mode not supported')
    polarity, phase = settings[spi_mode]
    self.spi_configure(polarity, phase, SPI_BITORDER_MSB)
|
Configure the SPI interface by the well known SPI modes.
|
entailment
|
def spi_write(self, data):
    """Write a stream of bytes to a SPI device.

    Returns the bytes clocked in during the transfer.
    """
    tx = array.array('B', data)
    # The receive buffer must be as large as the transmit buffer.
    rx = array.array('B', bytes(len(tx)))
    status = api.py_aa_spi_write(self.handle, len(tx), tx,
            len(rx), rx)
    _raise_error_if_negative(status)
    return bytes(rx)
|
Write a stream of bytes to a SPI device.
|
entailment
|
def spi_ss_polarity(self, polarity):
    """Change the output polarity on the SS line.

    Note that this only affects the master functions.
    """
    status = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
    _raise_error_if_negative(status)
|
Change the output polarity on the SS line.
Please note, that this only affects the master functions.
|
entailment
|
def edit_form(self, obj):
    """Customize the edit form by removing the ``spec`` field."""
    form = super(OAISetModelView, self).edit_form(obj)
    # The spec must not be editable after creation.
    del form.spec
    return form
|
Customize edit form.
|
entailment
|
def flatten_nested_hash(hash_table):
    """Flatten a nested dictionary for GET / POST / DELETE API requests.

    Nested dict keys and list indices are rendered in bracket notation,
    e.g. ``{'a': {'b': [1]}}`` becomes ``{'a[b][0]': 1}``.
    """
    def _flatten(table, brackets=True):
        flat = {}
        for key, value in table.items():
            name = '[%s]' % key if brackets else str(key)
            if isinstance(value, dict):
                nested = value
            elif isinstance(value, list):
                # Treat a list as a dict keyed by stringified indices.
                nested = {str(index): item for index, item in enumerate(value)}
            else:
                flat[name] = value
                continue
            for suffix, leaf in _flatten(nested).items():
                flat[name + suffix] = leaf
        return flat
    # Top-level keys are emitted without brackets.
    return _flatten(hash_table, False)
|
Flatten nested dictionary for GET / POST / DELETE API request
|
entailment
|
def sailthru_http_request(url, data, method, file_data=None, headers=None, request_timeout=10):
    """
    Perform an HTTP GET / POST / DELETE request against the Sailthru API.

    @param url: full request URL
    @param data: request parameters (nested structures are flattened)
    @param method: HTTP verb ('GET', 'POST' or 'DELETE')
    @param file_data: optional files mapping passed through to requests
    @param headers: optional extra headers (the Sailthru User-Agent is
        always added and takes precedence)
    @param request_timeout: timeout in seconds for the HTTP request
    """
    data = flatten_nested_hash(data)
    method = method.upper()
    # POST carries the payload in the body; other verbs use the query string.
    params, data = (None, data) if method == 'POST' else (data, None)
    sailthru_headers = {'User-Agent': 'Sailthru API Python Client %s; Python Version: %s' % ('2.3.5', platform.python_version())}
    if headers and isinstance(headers, dict):
        # Merge into a copy so the caller's dict is not mutated
        # (the original code wrote the Sailthru headers into it).
        headers = dict(headers, **sailthru_headers)
    else:
        headers = sailthru_headers
    try:
        response = requests.request(method, url, params=params, data=data, files=file_data, headers=headers, timeout=request_timeout)
        return SailthruResponse(response)
    except requests.RequestException as e:
        # requests.HTTPError is a subclass of RequestException, so one
        # handler replaces the two identical except clauses.
        raise SailthruClientError(str(e))
|
Perform an HTTP GET / POST / DELETE request
|
entailment
|
def _schema_from_verb(verb, partial=False):
    """Return an instance of the schema for the given verb."""
    from .verbs import Verbs
    # Each verb name maps to a schema class attribute on Verbs.
    schema_cls = getattr(Verbs, verb)
    return schema_cls(partial=partial)
|
Return an instance of schema for given verb.
|
entailment
|
def serialize(pagination, **kwargs):
    """Serialize a resumption token; returns None on the last page."""
    if not pagination.has_next:
        return
    token_builder = URLSafeTimedSerializer(
        current_app.config['SECRET_KEY'],
        salt=kwargs['verb'],
    )
    schema = _schema_from_verb(kwargs['verb'], partial=False)
    # The random seed makes every issued token unique.
    payload = {
        'seed': random.random(),
        'page': pagination.next_num,
        'kwargs': schema.dump(kwargs).data,
    }
    scroll_id = getattr(pagination, '_scroll_id', None)
    if scroll_id:
        payload['scroll_id'] = scroll_id
    return token_builder.dumps(payload)
|
Return resumption token serializer.
|
entailment
|
def _deserialize(self, value, attr, data):
    """Deserialize a resumption token."""
    token_builder = URLSafeTimedSerializer(
        current_app.config['SECRET_KEY'],
        salt=data['verb'],
    )
    max_age = current_app.config['OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME']
    result = token_builder.loads(value, max_age=max_age)
    # Keep the raw token and re-validate the kwargs stored inside it.
    result['token'] = value
    result['kwargs'] = self.root.load(result['kwargs'], partial=True).data
    return result
|
Deserialize a resumption token.
|
entailment
|
def load(self, data, many=None, partial=None):
    """Deserialize a data structure to an object."""
    result = super(ResumptionTokenSchema, self).load(
        data, many=many, partial=partial
    )
    # Promote the kwargs embedded in the resumption token to top level.
    token = result.data.get('resumptionToken', {})
    result.data.update(token.get('kwargs', {}))
    return result
|
Deserialize a data structure to an object.
|
entailment
|
def make_request_validator(request):
    """Validate arguments in an incoming request."""
    verb = request.values.get('verb', '', type=str)
    # Requests that carry a resumption token use a dedicated verb set.
    has_token = request.values.get('resumptionToken', None) is not None
    schema = ResumptionVerbs if has_token else Verbs
    # Unknown verbs fall back to the base OAISchema.
    return getattr(schema, verb, OAISchema)(partial=False)
|
Validate arguments in an incoming request.
|
entailment
|
def from_iso_permissive(datestring, use_dateutil=True):
    """Parse an ISO8601-formatted datetime and return a datetime object.

    Inspired by the marshmallow.utils.from_iso function, but also accepts
    datestrings that don't contain the time.

    :param datestring: ISO8601 string, with or without a time part.
    :param use_dateutil: prefer python-dateutil's parser when available.
    :raises ValueError: if the string matches none of the accepted formats.
    """
    import datetime
    try:
        from dateutil import parser
        dateutil_available = True
    except ImportError:
        dateutil_available = False
    # Use dateutil's parser if possible
    if dateutil_available and use_dateutil:
        return parser.parse(datestring)
    # Fallback parser: strip timezone info and try datetime first, then a
    # date-only format. The original code used only the datetime format,
    # so the documented date-only support failed without dateutil.
    stripped = datestring[:19]
    for fmt in ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d'):
        try:
            return datetime.datetime.strptime(stripped, fmt)
        except ValueError:
            continue
    raise ValueError('Not a valid ISO8601-formatted string: %r' % datestring)
|
Parse an ISO8601-formatted datetime and return a datetime object.
Inspired by the marshmallow.utils.from_iso function, but also accepts
datestrings that don't contain the time.
|
entailment
|
def validate(self, data):
    """Check range between dates under keys ``from_`` and ``until``."""
    # The verb must match the schema class handling this request.
    if 'verb' in data and data['verb'] != type(self).__name__:
        raise ValidationError(
            # FIXME encode data
            'This is not a valid OAI-PMH verb:{0}'.format(data['verb']),
            field_names=['verb'],
        )
    if 'from_' in data and 'until' in data \
            and data['from_'] > data['until']:
        raise ValidationError('Date "from" must be before "until".')
    # Reject any request argument that no schema field accepts.
    known = {f.load_from or f.name for f in self.fields.values()}
    if set(request.values.keys()) - known:
        raise ValidationError('You have passed too many arguments.')
|
Check range between dates under keys ``from_`` and ``until``.
|
entailment
|
def upgrade():
    """Upgrade database: create ``oaiserver_set`` and its name index."""
    columns = [
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('spec', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('search_pattern', sa.Text(), nullable=True),
    ]
    op.create_table(
        'oaiserver_set',
        *columns,
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('spec')
    )
    op.create_index(
        op.f('ix_oaiserver_set_name'), 'oaiserver_set', ['name'],
        unique=False
    )
|
Upgrade database.
|
entailment
|
def sets(self):
    """Get list of sets from the cache.

    Returns None when no cache backend is configured.
    """
    if not self.cache:
        return None
    return self.cache.get(self.app.config['OAISERVER_CACHE_KEY'])
|
Get list of sets.
|
entailment
|
def sets(self, values):
    """Set list of sets.

    The value is only persisted when a cache backend is configured.
    """
    if self.cache:
        self.cache.set(self.app.config['OAISERVER_CACHE_KEY'], values)
|
Set list of sets.
|
entailment
|
def register_signals(self):
    """Register signals."""
    from .receivers import OAIServerUpdater
    # Update OAI information whenever a record is inserted or updated.
    self.update_function = OAIServerUpdater()
    for signal in (records_signals.before_record_insert,
                   records_signals.before_record_update):
        signal.connect(self.update_function, weak=False)
    if self.app.config['OAISERVER_REGISTER_SET_SIGNALS']:
        self.register_signals_oaiset()
|
Register signals.
|
entailment
|
def register_signals_oaiset(self):
    """Register OAISet signals to update records."""
    from .models import OAISet
    from .receivers import after_insert_oai_set, \
        after_update_oai_set, after_delete_oai_set
    # Attach one receiver per SQLAlchemy lifecycle event.
    for event, handler in (('after_insert', after_insert_oai_set),
                           ('after_update', after_update_oai_set),
                           ('after_delete', after_delete_oai_set)):
        listen(OAISet, event, handler)
|
Register OAISet signals to update records.
|
entailment
|
def unregister_signals(self):
    """Unregister signals."""
    # Only disconnect when register_signals() previously ran.
    if hasattr(self, 'update_function'):
        for signal in (records_signals.before_record_insert,
                       records_signals.before_record_update):
            signal.disconnect(self.update_function)
        self.unregister_signals_oaiset()
|
Unregister signals.
|
entailment
|
def unregister_signals_oaiset(self):
    """Unregister OAISet signals."""
    from .models import OAISet
    from .receivers import after_insert_oai_set, \
        after_update_oai_set, after_delete_oai_set
    # The three listeners are registered together, so checking one of
    # them is enough before removing them all.
    if contains(OAISet, 'after_insert', after_insert_oai_set):
        for event, handler in (('after_insert', after_insert_oai_set),
                               ('after_update', after_update_oai_set),
                               ('after_delete', after_delete_oai_set)):
            remove(OAISet, event, handler)
|
Unregister signals oaiset.
|
entailment
|
def init_config(self, app):
    """Initialize configuration.

    :param app: An instance of :class:`flask.Flask`.
    """
    app.config.setdefault(
        'OAISERVER_BASE_TEMPLATE',
        app.config.get('BASE_TEMPLATE', 'invenio_oaiserver/base.html'))
    app.config.setdefault(
        'OAISERVER_REPOSITORY_NAME',
        app.config.get('THEME_SITENAME', 'Invenio-OAIServer'))
    if 'OAISERVER_ID_PREFIX' not in app.config:
        # Derive a hostname-based default and warn the user about it.
        import socket
        import warnings
        app.config.setdefault(
            'OAISERVER_ID_PREFIX',
            'oai:{0}:recid/'.format(socket.gethostname()))
        warnings.warn(
            'Please specify the OAISERVER_ID_PREFIX configuration.'
            'default value is: {0}'.format(
                app.config.get('OAISERVER_ID_PREFIX')))
    # Fill in every remaining OAISERVER_* default from the config module.
    for key in dir(config):
        if key.startswith('OAISERVER_'):
            app.config.setdefault(key, getattr(config, key))
|
Initialize configuration.
:param app: An instance of :class:`flask.Flask`.
|
entailment
|
def extract_params(params):
    """Extract the leaf values of a parameter structure.

    Recurses into nested dictionaries and lists; any other value is
    treated as a leaf and returned in a one-element list.
    """
    if isinstance(params, dict):
        return [leaf for value in params.values()
                for leaf in extract_params(value)]
    if isinstance(params, list):
        return [leaf for item in params
                for leaf in extract_params(item)]
    return [params]
|
Extracts the values of a set of parameters, recursing into nested dictionaries.
|
entailment
|
def get_signature_string(params, secret):
    """Return the unhashed signature string for an API call.

    The signature is the secret followed by the sorted string
    representations of every parameter value, UTF-8 encoded.

    @param params: dictionary values to generate signature string
    @param secret: secret string
    """
    values = sorted(str(item) for item in extract_params(params))
    return (secret + ''.join(values)).encode('utf-8')
|
Returns the unhashed signature string (secret + sorted list of param values) for an API call.
@param params: dictionary values to generate signature string
@param secret: secret string
|
entailment
|
def send(self, template, email, _vars=None, options=None, schedule_time=None, limit=None):
    """
    Remotely send an email template to a single email address.
    http://docs.sailthru.com/api/send
    @param template: template string
    @param email: Email value
    @param _vars: key/value hash of replacement vars; each var may be referenced as {varname} within the template itself
    @param options: optional dictionary to include replyto and/or test keys
    @param limit: optional dictionary to name, time, and handle conflicts of limits
    @param schedule_time: send at some point in the future instead of immediately; any date recognized by PHP's strtotime is valid, but specify a timezone or use UTC to avoid confusion
    """
    data = {
        'template': template,
        'email': email,
        'vars': _vars if _vars is not None else {},
        # Copy mutable inputs so later caller mutations don't leak in.
        'options': dict(options) if options else {},
    }
    if limit:
        data['limit'] = dict(limit)
    if schedule_time is not None:
        data['schedule_time'] = schedule_time
    return self.api_post('send', data)
|
Remotely send an email template to a single email address.
http://docs.sailthru.com/api/send
@param template: template string
@param email: Email value
@param _vars: a key/value hash of the replacement vars to use in the send. Each var may be referenced as {varname} within the template itself
@param options: optional dictionary to include replyto and/or test keys
@param limit: optional dictionary to name, time, and handle conflicts of limits
@param schedule_time: do not send the email immediately, but at some point in the future. Any date recognized by PHP's strtotime function is valid, but be sure to specify timezone or use a UTC time to avoid confusion
|
entailment
|
def multi_send(self, template, emails, _vars=None, evars=None, schedule_time=None, options=None):
    """
    Remotely send an email template to multiple email addresses.
    http://docs.sailthru.com/api/send
    @param template: template string
    @param emails: list of email values or comma separated email string
    @param _vars: key/value hash of replacement vars; each var may be referenced as {varname} within the template itself
    @param options: optional dictionary to include replyto and/or test keys
    @param schedule_time: send at some point in the future instead of immediately; any date recognized by PHP's strtotime is valid, but specify a timezone or use UTC to avoid confusion
    """
    # The API expects a single comma separated string of addresses.
    if isinstance(emails, list):
        emails = ','.join(emails)
    data = {
        'template': template,
        'email': emails,
        'vars': dict(_vars or {}),
        'evars': dict(evars or {}),
        'options': dict(options or {}),
    }
    if schedule_time is not None:
        data['schedule_time'] = schedule_time
    return self.api_post('send', data)
|
Remotely send an email template to multiple email addresses.
http://docs.sailthru.com/api/send
@param template: template string
@param emails: List with email values or comma separated email string
@param _vars: a key/value hash of the replacement vars to use in the send. Each var may be referenced as {varname} within the template itself
@param options: optional dictionary to include replyto and/or test keys
@param schedule_time: do not send the email immediately, but at some point in the future. Any date recognized by PHP's strtotime function is valid, but be sure to specify timezone or use a UTC time to avoid confusion
|
entailment
|
def set_email(self, email, _vars=None, lists=None, templates=None, verified=0, optout=None, send=None, send_vars=None):
    """
    DEPRECATED!
    Update information about one of your users, including adding and removing the user from lists.
    http://docs.sailthru.com/api/email
    """
    data = {
        'email': email,
        'vars': dict(_vars or {}),
        'lists': lists if lists is not None else [],
        'templates': templates if templates is not None else [],
        'verified': int(verified),
    }
    # Optional flags are only transmitted when explicitly provided.
    if optout is not None:
        data['optout'] = optout
    if send is not None:
        data['send'] = send
    if send_vars:
        data['send_vars'] = send_vars
    return self.api_post('email', data)
|
DEPRECATED!
Update information about one of your users, including adding and removing the user from lists.
http://docs.sailthru.com/api/email
|
entailment
|
def get_user(self, idvalue, options=None):
    """
    Get a user by a given id.
    http://getstarted.sailthru.com/api/user
    """
    data = dict(options or {})
    data['id'] = idvalue
    return self.api_get('user', data)
|
get user by a given id
http://getstarted.sailthru.com/api/user
|
entailment
|
def save_user(self, idvalue, options=None):
    """
    Save a user by a given id.
    http://getstarted.sailthru.com/api/user
    """
    data = dict(options or {})
    data['id'] = idvalue
    return self.api_post('user', data)
|
save user by a given id
http://getstarted.sailthru.com/api/user
|
entailment
|
def schedule_blast(self, name, list, schedule_time, from_name, from_email, subject, content_html, content_text, options=None):
    """
    Schedule a mass mail blast.
    http://docs.sailthru.com/api/blast
    @param name: name to give to this new blast
    @param list: mailing list name to send to
    @param schedule_time: when the blast should send. Dates in the past are scheduled for immediate delivery. Any English textual datetime format known to PHP's strtotime function is acceptable, such as 2009-03-18 23:57:22 UTC, now (immediate delivery), +3 hours (3 hours from now), or February 14, 9:30 EST. Be sure to specify a timezone if you use an exact time.
    @param from_name: name appearing in the "From" of the email
    @param from_email: email address to use as the "from" - choose from any of your verified emails
    @param subject: subject line of the email
    @param content_html: HTML format version of the email
    @param content_text: Text format version of the email
    @param options: optional parameters dictionary (blast_id, copy_blast,
        copy_template, replyto, report_email, is_link_tracking,
        is_google_analytics, is_public, suppress_list, test_vars,
        email_hour_range, abtest, test_percent, data_feed_url)
    """
    # Required fields are written last so they win over any option keys.
    data = dict(options or {})
    data.update({
        'name': name,
        'list': list,
        'schedule_time': schedule_time,
        'from_name': from_name,
        'from_email': from_email,
        'subject': subject,
        'content_html': content_html,
        'content_text': content_text,
    })
    return self.api_post('blast', data)
|
Schedule a mass mail blast
http://docs.sailthru.com/api/blast
@param name: name to give to this new blast
@param list: mailing list name to send to
@param schedule_time: when the blast should send. Dates in the past will be scheduled for immediate delivery. Any English textual datetime format known to PHP's strtotime function is acceptable, such as 2009-03-18 23:57:22 UTC, now (immediate delivery), +3 hours (3 hours from now), or February 14, 9:30 EST. Be sure to specify a timezone if you use an exact time.
@param from_name: name appearing in the "From" of the email
@param from_email: email address to use as the "from" - choose from any of your verified emails
@param subject: subject line of the email
@param content_html: HTML format version of the email
@param content_text: Text format version of the email
@param options: optional parameters dictionary
blast_id
copy_blast
copy_template
replyto
report_email
is_link_tracking
is_google_analytics
is_public
suppress_list
test_vars
email_hour_range
abtest
test_percent
data_feed_url
|
entailment
|
def schedule_blast_from_template(self, template, list_name, schedule_time, options=None):
    """
    Schedule a mass mail blast from a template.
    http://docs.sailthru.com/api/blast
    @param template: template to copy from
    @param list_name: list to send to
    @param schedule_time: when the blast should send
    @param options: additional optional params
    """
    data = dict(options or {})
    data.update({
        'copy_template': template,
        'list': list_name,
        'schedule_time': schedule_time,
    })
    return self.api_post('blast', data)
|
Schedule a mass mail blast from template
http://docs.sailthru.com/api/blast
@param template: template to copy from
@param list_name: list to send to
@param schedule_time
@param options: additional optional params
|
entailment
|
def schedule_blast_from_blast(self, blast_id, schedule_time, options=None):
    """
    Schedule a mass mail blast from a previous blast.
    http://docs.sailthru.com/api/blast
    @param blast_id: blast_id to copy from
    @param schedule_time: when the blast should send
    @param options: additional optional params
    """
    data = dict(options or {})
    data.update({
        'copy_blast': blast_id,
        'schedule_time': schedule_time,
    })
    return self.api_post('blast', data)
|
Schedule a mass mail blast from previous blast
http://docs.sailthru.com/api/blast
@param blast_id: blast_id to copy from
@param schedule_time
@param options: additional optional params
|
entailment
|
def get_list(self, list_name, options=None):
    """
    Get detailed metadata information about a list.
    """
    # Options are applied after the list name, matching the original
    # precedence (an options['list'] key would override list_name).
    data = {'list': list_name}
    data.update(options or {})
    return self.api_get('list', data)
|
Get detailed metadata information about a list.
|
entailment
|
def save_list(self, list_name, emails):
    """
    Upload a list. The list import job is queued and will happen shortly after the API request.
    http://docs.sailthru.com/api/list
    @param list_name: list name
    @param emails: list of email values or comma separated string
    """
    if isinstance(emails, list):
        emails = ','.join(emails)
    return self.api_post('list', {'list': list_name, 'emails': emails})
|
Upload a list. The list import job is queued and will happen shortly after the API request.
http://docs.sailthru.com/api/list
@param list: list name
@param emails: List of email values or comma separated string
|
entailment
|
def import_contacts(self, email, password, include_name=False):
    """
    Fetch email contacts from a user's address book on one of the major email websites. Currently supports AOL, Gmail, Hotmail, and Yahoo! Mail.
    """
    data = {'email': email, 'password': password}
    if include_name:
        # The API expects the flag as the integer 1.
        data['names'] = 1
    return self.api_post('contacts', data)
|
Fetch email contacts from a user's address book on one of the major email websites. Currently supports AOL, Gmail, Hotmail, and Yahoo! Mail.
|
entailment
|
def push_content(self, title, url,
                 images=None, date=None, expire_date=None,
                 description=None, location=None, price=None,
                 tags=None,
                 author=None, site_name=None,
                 spider=None, vars=None):
    """
    Push a new piece of content to Sailthru.
    Expected names for the `images` argument's map are "full" and "thumb"
    Expected format for `location` should be [longitude,latitude]
    @param title: title string for the content
    @param url: URL string for the content
    @param images: map of image names
    @param date: date string
    @param expire_date: date string for when the content expires
    @param description: description for the content
    @param location: location of the content
    @param price: price for the content
    @param tags: list or comma separated string values
    @param author: author for the content
    @param site_name: site name for the content
    @param spider: truthy value to force respidering content
    @param vars: replaceable vars dictionary
    """
    data = {'title': title,
            'url': url}
    # Each optional field is copied from its own argument. The original
    # code assigned `date` to expire_date and location, and `images` to
    # site_name (copy-paste bugs).
    optional_fields = (('images', images),
                       ('date', date),
                       ('expire_date', expire_date),
                       ('location', location),
                       ('price', price),
                       ('description', description),
                       ('site_name', site_name),
                       ('author', author))
    for key, value in optional_fields:
        if value is not None:
            data[key] = value
    if spider:
        data['spider'] = 1
    if tags is not None:
        data['tags'] = ",".join(tags) if isinstance(tags, list) else tags
    if vars:
        data['vars'] = vars.copy()
    return self.api_post('content', data)
|
Push a new piece of content to Sailthru.
Expected names for the `images` argument's map are "full" and "thumb"
Expected format for `location` should be [longitude,latitude]
@param title: title string for the content
@param url: URL string for the content
@param images: map of image names
@param date: date string
@param expire_date: date string for when the content expires
@param description: description for the content
@param location: location of the content
@param price: price for the content
@param tags: list or comma separated string values
@param author: author for the content
@param site_name: site name for the content
@param spider: truthy value to force respidering content
@param vars: replaceable vars dictionary
|
entailment
|
def save_alert(self, email, type, template, when=None, options=None):
    """
    Add a new alert to a user. You can add either a realtime or a summary alert (daily/weekly).
    http://docs.sailthru.com/api/alert
    Usage:
        email = 'praj@sailthru.com'
        type = 'weekly'
        template = 'default'
        when = '+5 hours'
        alert_options = {'match': {}, 'min': {}, 'max': {}, 'tags': []}
        alert_options['match']['type'] = 'shoes'
        alert_options['min']['price'] = 20000 #cents
        alert_options['tags'] = ['red', 'blue', 'green']
        response = client.save_alert(email, type, template, when, alert_options)
    @param email: Email value
    @param type: daily|weekly|realtime
    @param template: template name
    @param when: date string required for summary alert (daily/weekly)
    @param options: dictionary value for adding tags, max price, min price, match type
    """
    data = dict(options or {})
    data['email'] = email
    data['type'] = type
    data['template'] = template
    # Summary alerts (daily/weekly) additionally need a delivery time.
    if type in ('weekly', 'daily'):
        data['when'] = when
    return self.api_post('alert', data)
|
Add a new alert to a user. You can add either a realtime or a summary alert (daily/weekly).
http://docs.sailthru.com/api/alert
Usage:
email = 'praj@sailthru.com'
type = 'weekly'
template = 'default'
when = '+5 hours'
alert_options = {'match': {}, 'min': {}, 'max': {}, 'tags': []}
alert_options['match']['type'] = 'shoes'
alert_options['min']['price'] = 20000 #cents
alert_options['tags'] = ['red', 'blue', 'green']
response = client.save_alert(email, type, template, when, alert_options)
@param email: Email value
@param type: daily|weekly|realtime
@param template: template name
@param when: date string required for summary alert (daily/weekly)
@param options: dictionary value for adding tags, max price, min price, match type
|
entailment
|
def delete_alert(self, email, alert_id):
    """
    Delete a user alert.
    """
    payload = {'email': email, 'alert_id': alert_id}
    return self.api_delete('alert', payload)
|
delete user alert
|
entailment
|
def purchase(self, email, items=None, incomplete=None, message_id=None, options=None, extid=None):
    """
    Record that a user has made a purchase, or has added items to their purchase total.
    http://docs.sailthru.com/api/purchase
    @param email: Email string
    @param items: list of item dictionary with keys: id, title, price, qty, and url
    @param message_id: message_id string
    @param extid: external ID to track purchases
    @param options: other options that can be set as per the API documentation
    """
    data = dict(options or {})
    data['email'] = email
    data['items'] = items or {}
    # Optional fields are only transmitted when explicitly provided.
    for key, value in (('incomplete', incomplete),
                       ('message_id', message_id),
                       ('extid', extid)):
        if value is not None:
            data[key] = value
    return self.api_post('purchase', data)
|
Record that a user has made a purchase, or has added items to their purchase total.
http://docs.sailthru.com/api/purchase
@param email: Email string
@param items: list of item dictionary with keys: id, title, price, qty, and url
@param message_id: message_id string
@param extid: external ID to track purchases
@param options: other options that can be set as per the API documentation
|
entailment
|
def get_purchase(self, purchase_id, purchase_key='sid'):
    """
    Retrieve information about a purchase using the system's unique ID or a client's ID.
    @param purchase_id: a string that represents a unique_id or an extid.
    @param purchase_key: a string that is either 'sid' or 'extid'.
    """
    payload = {'purchase_id': purchase_id,
               'purchase_key': purchase_key}
    return self.api_get('purchase', payload)
|
Retrieve information about a purchase using the system's unique ID or a client's ID
@param id_: a string that represents a unique_id or an extid.
@param key: a string that is either 'sid' or 'extid'.
|
entailment
|
def stats_list(self, list=None, date=None, headers=None):
    """Retrieve subscriber counts for a particular list on a particular day.

    http://docs.sailthru.com/api/stat
    """
    data = {'stat': 'list'}
    # Only forward the optional filters that were actually supplied.
    for key, value in (('list', list), ('date', date)):
        if value is not None:
            data[key] = value
    return self._stats(data, headers)
|
Retrieve information about your subscriber counts on a particular list, on a particular day.
http://docs.sailthru.com/api/stat
|
entailment
|
def stats_blast(self, blast_id=None, start_date=None, end_date=None, options=None):
"""
Retrieve information about a particular blast or aggregated information from all of blasts over a specified date range.
http://docs.sailthru.com/api/stat
"""
options = options or {}
data = options.copy()
if blast_id is not None:
data['blast_id'] = blast_id
if start_date is not None:
data['start_date'] = start_date
if end_date is not None:
data['end_date'] = end_date
data['stat'] = 'blast'
return self._stats(data)
|
Retrieve information about a particular blast or aggregated information from all of blasts over a specified date range.
http://docs.sailthru.com/api/stat
|
entailment
|
def stats_send(self, template, start_date, end_date, options=None):
    """Retrieve stats for a template's transactionals over a date range.

    http://docs.sailthru.com/api/stat

    @param template: template name string
    @param start_date: start of the date range
    @param end_date: end of the date range
    @param options: other options that can be set as per the API documentation
    """
    options = options or {}
    data = options.copy()
    # Merge the required parameters into the options copy. The previous
    # version rebound ``data`` to a fresh dict here, silently discarding
    # every entry passed in ``options``.
    data.update({
        'template': template,
        'start_date': start_date,
        'end_date': end_date,
    })
    data['stat'] = 'send'
    return self._stats(data)
|
Retrieve information about a particular transactional or aggregated information
from transactionals from that template over a specified date range.
http://docs.sailthru.com/api/stat
|
entailment
|
def receive_verify_post(self, post_params):
    """Return True if the incoming request is an authenticated verify post.

    Checks required parameters, validates the request signature against
    the shared secret, then confirms via the API that the referenced send
    actually targeted the given email address.
    """
    if isinstance(post_params, dict):
        required_params = ['action', 'email', 'send_id', 'sig']
        if not self.check_for_valid_postback_actions(required_params, post_params):
            return False
    else:
        # Anything that is not a dict cannot be a valid postback payload.
        return False
    if post_params['action'] != 'verify':
        return False
    # The signature is computed over all parameters except 'sig' itself,
    # so remove it from a copy before hashing.
    sig = post_params['sig']
    post_params = post_params.copy()
    del post_params['sig']
    if sig != get_signature_hash(post_params, self.secret):
        return False
    send_response = self.get_send(post_params['send_id'])
    try:
        send_body = send_response.get_body()
        send_json = json.loads(send_body)
        # NOTE(review): this is a substring check on the raw JSON text,
        # not a key lookup on the parsed object -- presumably a cheap
        # pre-check before the exact comparison below; confirm against
        # the API response format.
        if 'email' not in send_body:
            return False
        if send_json['email'] != post_params['email']:
            return False
    except ValueError:
        # get_body() returned something that is not valid JSON.
        return False
    return True
|
Returns true if the incoming request is an authenticated verify post.
|
entailment
|
def receive_update_post(self, post_params):
    """Return True if the payload is an authenticated 'update' postback."""
    if not isinstance(post_params, dict):
        return False
    if not self.check_for_valid_postback_actions(['action', 'email', 'sig'],
                                                 post_params):
        return False
    if post_params['action'] != 'update':
        return False
    # The signature covers every parameter except 'sig' itself.
    params = dict(post_params)
    provided_sig = params.pop('sig')
    return provided_sig == get_signature_hash(params, self.secret)
|
Update postbacks
|
entailment
|
def receive_hardbounce_post(self, post_params):
    """Return True if the payload is an authenticated hard-bounce postback.

    Validates required parameters and the signature, then cross-checks the
    referenced send or blast against the API when an id is present.
    """
    if isinstance(post_params, dict):
        required_params = ['action', 'email', 'sig']
        if not self.check_for_valid_postback_actions(required_params, post_params):
            return False
    else:
        # Anything that is not a dict cannot be a valid postback payload.
        return False
    if post_params['action'] != 'hardbounce':
        return False
    # The signature is computed over all parameters except 'sig' itself.
    signature = post_params['sig']
    post_params = post_params.copy()
    del post_params['sig']
    if signature != get_signature_hash(post_params, self.secret):
        return False
    # for sends
    if 'send_id' in post_params:
        send_id = post_params['send_id']
        send_response = self.get_send(send_id)
        if not send_response.is_ok():
            return False
        send_obj = send_response.get_body()
        # NOTE(review): membership test on the body returned by
        # get_body() -- works for both dict and raw-string bodies.
        if not send_obj or 'email' not in send_obj:
            return False
    # for blasts
    if 'blast_id' in post_params:
        blast_id = post_params['blast_id']
        blast_response = self.get_blast(blast_id)
        if not blast_response.is_ok():
            return False
        blast_obj = blast_response.get_body()
        if not blast_obj:
            return False
    return True
|
Hard bounce postbacks
|
entailment
|
def check_for_valid_postback_actions(self, required_keys, post_params):
    """Return True when every required key is present in post_params."""
    return all(key in post_params for key in required_keys)
|
checks if post_params contain required keys
|
entailment
|
def api_get(self, action, data, headers=None):
    """Perform an HTTP GET request, using the shared-secret auth hash.

    @param action: API action call
    @param data: dictionary values
    @param headers: optional dict of extra HTTP headers to send
    """
    return self._api_request(action, data, 'GET', headers)
|
Perform an HTTP GET request, using the shared-secret auth hash.
@param action: API action call
@param data: dictionary values
|
entailment
|
def api_post(self, action, data, binary_data_param=None):
    """Perform an HTTP POST request, using the shared-secret auth hash.

    @param action: API action call
    @param data: dictionary values
    @param binary_data_param: optional list of keys in ``data`` whose
        values are file paths to upload as multipart attachments
    """
    # A non-empty list of binary parameters switches to a multipart post.
    if binary_data_param:
        return self.api_post_multipart(action, data, binary_data_param)
    return self._api_request(action, data, 'POST')
|
Perform an HTTP POST request, using the shared-secret auth hash.
@param action: API action call
@param data: dictionary values
|
entailment
|
def api_post_multipart(self, action, data, binary_data_param):
    """Perform an HTTP multipart POST request, using the shared-secret auth hash.

    @param action: API action call
    @param data: dictionary values
    @param binary_data_param: list of keys in ``data`` whose values are
        file paths; each file is uploaded as a multipart attachment
    """
    binary_data = {}
    data = data.copy()
    # Track handles outside the try so the finally clause can never hit
    # an unbound name, even if opening the very first file fails.
    file_handles = []
    try:
        for param in binary_data_param:
            if param in data:
                # Open in binary mode: uploads may be arbitrary bytes and
                # text mode would corrupt them (previously opened 'r').
                binary_data[param] = file_handle = open(data[param], 'rb')
                file_handles.append(file_handle)
                del data[param]
        json_payload = self._prepare_json_payload(data)
        return self._http_request(action, json_payload, "POST", binary_data)
    finally:
        for file_handle in file_handles:
            file_handle.close()
|
Perform an HTTP Multipart POST request, using the shared-secret auth hash.
@param action: API action call
@param data: dictionary values
@param: binary_data_params: array of multipart keys
|
entailment
|
def _api_request(self, action, data, request_type, headers=None):
    """Make a request to the Sailthru API with the given data, API key,
    format and signature hash.

    @param action: API action call
    @param data: dictionary values; a 'file' key is treated as a path to
        upload alongside the JSON payload
    @param request_type: HTTP method string ('GET', 'POST' or 'DELETE')
    @param headers: optional dict of extra HTTP headers
    """
    if 'file' in data:
        # NOTE(review): this handle is never explicitly closed here; it is
        # left to garbage collection after the request -- confirm whether
        # _http_request closes it.
        file_data = {'file': open(data['file'], 'rb')}
    else:
        file_data = None
    return self._http_request(action, self._prepare_json_payload(data), request_type, file_data, headers)
|
Make Request to Sailthru API with given data and api key, format and signature hash
|
entailment
|
def get_last_rate_limit_info(self, action, method):
    """Get rate limit information for the last API call.

    :param action: API endpoint
    :param method: HTTP method, GET, POST or DELETE (case-insensitive)
    :return: dict|None
    """
    per_action = self.last_rate_limit_info.get(action, {})
    return per_action.get(method.upper())
|
Get rate limit information for last API call
:param action: API endpoint
:param method: Http method, GET, POST or DELETE
:return: dict|None
|
entailment
|
def linked_form(viewset, form_id=None, link=None, link_id=None, method=None):
    """Describe a form that lives on one viewset but edits a related model.

    When models A and B are related (B has a foreign key ``parent`` to A),
    declaring ``linked_forms = {'new-b': linked_form(BViewSet,
    link='parent')}`` on A's viewset (decorated with ``@linked_forms()``)
    exposes a form definition at ``<aviewset>/pk/forms/new-b`` whose
    POST/PATCH operations target an auto-created endpoint
    ``<aviewset>/pk/linked-endpoint/new-b`` (detail route ``new_b``) and
    set the relationship to A automatically.

    :param viewset: the foreign viewset
    :param form_id: id of the form on the foreign viewset; default form if unset
    :param link: a field name on the foreign viewset, or a callable taking
        ``(foreign_instance, this_instance)``
    :param link_id: identifier used to resolve the link target
    :param method: HTTP method to expose for the linked endpoint
    :return: an internal definition of a linked form
    """
    return dict(
        viewset=viewset,
        form_id=form_id,
        link=link,
        link_id=link_id,
        method=method,
    )
|
When having foreign key or m2m relationships between models A and B (B has foreign key to A named parent),
we want to have a form that sits on A's viewset but creates/edits B and sets it relationship to A
automatically.
In order to do so, define linked_forms on A's viewset containing a call to linked_form as follows:
@linked_forms()
class AViewSet(AngularFormMixin, ...):
linked_forms = {
'new-b': linked_form(BViewSet, link='parent')
}
Then, there will be a form definition on <aviewset>/pk/forms/new-b, with POST/PATCH operations pointing
to an automatically created endpoint <aviewset>/pk/linked-endpoint/new-b and detail-route named "new_b"
:param viewset: the foreign viewset
:param form_id: id of the form on the foreign viewset. If unset, use the default form
:param link: either a field name on the foreign viewset or a callable that will get (foreign_instance, this_instance)
:return: an internal definition of a linked form
|
entailment
|
def oaiid_minter(record_uuid, data):
    """Mint an OAI record identifier.

    :param record_uuid: The record UUID.
    :param data: The record data; its ``_oai.id`` entry is set in place.
    :returns: A :class:`invenio_pidstore.models.PersistentIdentifier` instance.
    """
    pid_value = data.get('_oai', {}).get('id')
    if pid_value is None:
        # No OAI id recorded yet: derive one from the control-number PID,
        # prefixed per configuration.
        fetcher_name = current_app.config.get(
            'OAISERVER_CONTROL_NUMBER_FETCHER', 'recid')
        control_number = current_pidstore.fetchers[fetcher_name](
            record_uuid, data)
        prefix = current_app.config.get('OAISERVER_ID_PREFIX', '')
        pid_value = prefix + str(control_number.pid_value)
    provider = OAIIDProvider.create(
        object_type='rec',
        object_uuid=record_uuid,
        pid_value=str(pid_value),
    )
    data.setdefault('_oai', {})
    data['_oai']['id'] = provider.pid.pid_value
    return provider.pid
|
Mint record identifiers.
:param record_uuid: The record UUID.
:param data: The record data.
:returns: A :class:`invenio_pidstore.models.PersistentIdentifier` instance.
|
entailment
|
def validation_error(exception):
    """Return a formatted OAI-PMH validation error response tuple."""
    messages = getattr(exception, 'messages', None)
    if messages is None:
        # Fall back to messages nested under ``exception.data`` --
        # presumably to support a different validation-library version.
        messages = getattr(exception, 'data', {'messages': None})['messages']
    def extract_errors():
        """Extract (code, message) pairs from the exception."""
        if isinstance(messages, dict):
            for field, message in messages.items():
                # A bad 'verb' argument has its own OAI-PMH error code.
                if field == 'verb':
                    yield 'badVerb', '\n'.join(message)
                else:
                    yield 'badArgument', '\n'.join(message)
        else:
            for field in exception.field_names:
                if field == 'verb':
                    yield 'badVerb', '\n'.join(messages)
                else:
                    yield 'badArgument', '\n'.join(messages)
            # No named fields: report the messages as a generic argument error.
            if not exception.field_names:
                yield 'badArgument', '\n'.join(messages)
    # 422 Unprocessable Entity with an XML error body.
    return (etree.tostring(xml.error(extract_errors())),
            422,
            {'Content-Type': 'text/xml'})
|
Return formatter validation error.
|
entailment
|
def response(args):
    """Render the OAI-PMH XML response for the requested verb."""
    # Dispatch to the xml handler named after the verb (e.g. 'getrecord').
    verb_handler = getattr(xml, args['verb'].lower())
    body = etree.tostring(
        verb_handler(**args),
        pretty_print=True,
        xml_declaration=True,
        encoding='UTF-8',
    )
    result = make_response(body)
    result.headers['Content-Type'] = 'text/xml'
    return result
|
Response endpoint.
|
entailment
|
def create(cls, object_type=None, object_uuid=None, **kwargs):
    """Create a new record identifier.

    :param object_type: The object type. (Default: ``None``)
    :param object_uuid: The object UUID. (Default: ``None``)
    :param kwargs: Must include ``pid_value``; ``status`` defaults to
        the provider's default status.
    """
    assert 'pid_value' in kwargs
    kwargs.setdefault('status', cls.default_status)
    # A PID already attached to an object is immediately registered.
    if object_type and object_uuid:
        kwargs['status'] = PIDStatus.REGISTERED
    return super(OAIIDProvider, cls).create(
        object_type=object_type, object_uuid=object_uuid, **kwargs)
|
Create a new record identifier.
:param object_type: The object type. (Default: ``None``)
:param object_uuid: The object UUID. (Default: ``None``)
|
entailment
|
def _create_percolator_mapping(index, doc_type):
    """Update mappings with the percolator field.

    .. note::

        Only needed for Elasticsearch v5 and later, where percolators
        are just a special type of field inside mappings.
    """
    if ES_VERSION[0] < 5:
        return
    current_search_client.indices.put_mapping(
        index=index,
        doc_type=doc_type,
        body=PERCOLATOR_MAPPING,
        ignore=[400, 404],
    )
|
Update mappings with the percolator field.
.. note::
This is only needed from ElasticSearch v5 onwards, because percolators
are now just a special type of field inside mappings.
|
entailment
|
def _percolate_query(index, doc_type, percolator_doc_type, document):
    """Get results for a percolate query.

    Returns the list of matches; note the implicit ``None`` return for
    Elasticsearch major versions other than 2, 5 and 6.
    """
    if ES_VERSION[0] in (2, 5):
        # ES 2/5 expose a dedicated percolate API.
        results = current_search_client.percolate(
            index=index, doc_type=doc_type, allow_no_indices=True,
            ignore_unavailable=True, body={'doc': document}
        )
        return results['matches']
    elif ES_VERSION[0] == 6:
        # ES 6 replaced the percolate API with a 'percolate' query type.
        results = current_search_client.search(
            index=index, doc_type=percolator_doc_type, allow_no_indices=True,
            ignore_unavailable=True, body={
                'query': {
                    'percolate': {
                        'field': 'query',
                        'document_type': percolator_doc_type,
                        'document': document,
                    }
                }
            }
        )
        return results['hits']['hits']
|
Get results for a percolate query.
|
entailment
|
def _new_percolator(spec, search_pattern):
    """Create a new percolator associated with the new set."""
    if not (spec and search_pattern):
        return
    query = query_string_parser(search_pattern=search_pattern).to_dict()
    for index in current_search.mappings.keys():
        # Percolator queries are stored as documents of a dedicated
        # doc type inside each existing index (ES >= 5).
        # TODO: Consider doing this only once in app initialization
        percolator_doc_type = _get_percolator_doc_type(index)
        _create_percolator_mapping(index, percolator_doc_type)
        current_search_client.index(
            index=index, doc_type=percolator_doc_type,
            id='oaiset-{}'.format(spec),
            body={'query': query}
        )
|
Create new percolator associated with the new set.
|
entailment
|
def _delete_percolator(spec, search_pattern):
    """Delete the percolator associated with the given OAI set."""
    if not spec:
        return
    for index in current_search.mappings.keys():
        # Ensure the percolator doc type exists, then delete the set's
        # query document (ignoring 404 if it was never created).
        percolator_doc_type = _get_percolator_doc_type(index)
        _create_percolator_mapping(index, percolator_doc_type)
        current_search_client.delete(
            index=index, doc_type=percolator_doc_type,
            id='oaiset-{}'.format(spec), ignore=[404]
        )
|
Delete percolator associated with the new oaiset.
|
entailment
|
def _build_cache():
    """Return the cached list of pattern-less set specs, building it once."""
    sets = current_oaiserver.sets
    if sets is None:
        # Cache the specs of all sets that have no search pattern.
        sets = [
            oaiset.spec
            for oaiset in OAISet.query.filter(
                OAISet.search_pattern.is_(None)).all()
        ]
        current_oaiserver.sets = sets
    return sets
|
Build sets cache.
|
entailment
|
def get_record_sets(record):
    """Yield the specs of all OAI sets matching the given record.

    Matches come from two places: sets without a search pattern that the
    record already lists under ``_oai.sets``, and sets whose percolator
    query matches the record document.
    """
    # Sets with search_pattern == None that the record already belongs to.
    record_sets = set(record.get('_oai', {}).get('sets', []))
    for spec in _build_cache():
        if spec in record_sets:
            yield spec
    # Sets that match via percolator queries.
    index, doc_type = RecordIndexer().record_to_index(record)
    document = record.dumps()
    percolator_doc_type = _get_percolator_doc_type(index)
    _create_percolator_mapping(index, percolator_doc_type)
    results = _percolate_query(index, doc_type, percolator_doc_type, document)
    prefix = 'oaiset-'
    prefix_len = len(prefix)
    for match in results:
        set_name = match['_id']
        if set_name.startswith(prefix):
            yield set_name[prefix_len:]
    # The trailing ``raise StopIteration`` was removed: under PEP 479
    # (Python 3.7+) raising StopIteration inside a generator surfaces as
    # a RuntimeError; simply falling off the end terminates correctly.
|
Find matching sets.
|
entailment
|
def _records_commit(record_ids):
    """Fetch and commit every record in ``record_ids``."""
    for rec_id in record_ids:
        Record.get_record(rec_id).commit()
|
Commit all records.
|
entailment
|
def update_affected_records(spec=None, search_pattern=None):
    """Update all records affected by an OAISet change.

    :param spec: The set spec.
    :param search_pattern: The search pattern.
    """
    chunk_size = current_app.config['OAISERVER_CELERY_TASK_CHUNK_SIZE']
    record_ids = get_affected_records(spec=spec, search_pattern=search_pattern)
    # Fan the ids out to Celery tasks in fixed-size chunks; zip_longest
    # pads the last chunk with None, which filter(None, ...) strips.
    group(
        update_records_sets.s(list(filter(None, chunk)))
        for chunk in zip_longest(*[iter(record_ids)] * chunk_size)
    )()
|
Update all affected records by OAISet change.
:param spec: The record spec.
:param search_pattern: The search pattern.
|
entailment
|
def envelope(**kwargs):
    """Create the OAI-PMH envelope for a response.

    Returns an ``(ElementTree, OAI-PMH root element)`` pair; the request
    arguments in ``kwargs`` are reflected as attributes of the
    ``<request>`` element.
    """
    e_oaipmh = Element(etree.QName(NS_OAIPMH, 'OAI-PMH'), nsmap=NSMAP)
    e_oaipmh.set(etree.QName(NS_XSI, 'schemaLocation'),
                 '{0} {1}'.format(NS_OAIPMH, NS_OAIPMH_XSD))
    e_tree = ElementTree(element=e_oaipmh)
    # Optionally attach an XSL stylesheet processing instruction.
    if current_app.config['OAISERVER_XSL_URL']:
        e_oaipmh.addprevious(etree.ProcessingInstruction(
            'xml-stylesheet', 'type="text/xsl" href="{0}"'
            .format(current_app.config['OAISERVER_XSL_URL'])))
    e_responseDate = SubElement(
        e_oaipmh, etree.QName(
            NS_OAIPMH, 'responseDate'))
    # date should be first possible moment
    e_responseDate.text = datetime_to_datestamp(datetime.utcnow())
    e_request = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, 'request'))
    for key, value in kwargs.items():
        # Datetime arguments are serialized; the resumption token is
        # flattened to its raw token string.
        if key == 'from_' or key == 'until':
            value = datetime_to_datestamp(value)
        elif key == 'resumptionToken':
            value = value['token']
        e_request.set(key, value)
    e_request.text = url_for('invenio_oaiserver.response', _external=True)
    return e_tree, e_oaipmh
|
Create OAI-PMH envelope for response.
|
entailment
|
def error(errors):
    """Create an OAI-PMH error response tree.

    :param errors: iterable of ``(code, message)`` pairs.
    """
    e_tree, e_oaipmh = envelope()
    for code, message in errors:
        e_error = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, 'error'))
        e_error.set('code', code)
        e_error.text = message
    return e_tree
|
Create error element.
|
entailment
|
def verb(**kwargs):
    """Create an OAI-PMH envelope with a child element for the verb.

    Returns an ``(ElementTree, verb element)`` pair.
    """
    e_tree, e_oaipmh = envelope(**kwargs)
    e_element = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, kwargs['verb']))
    return e_tree, e_element
|
Create OAI-PMH envelope for response with verb.
|
entailment
|
def identify(**kwargs):
    """Create the OAI-PMH response for the Identify verb."""
    cfg = current_app.config
    e_tree, e_identify = verb(**kwargs)
    e_repositoryName = SubElement(
        e_identify, etree.QName(NS_OAIPMH, 'repositoryName'))
    e_repositoryName.text = cfg['OAISERVER_REPOSITORY_NAME']
    e_baseURL = SubElement(e_identify, etree.QName(NS_OAIPMH, 'baseURL'))
    e_baseURL.text = url_for('invenio_oaiserver.response', _external=True)
    e_protocolVersion = SubElement(e_identify,
                                   etree.QName(NS_OAIPMH, 'protocolVersion'))
    e_protocolVersion.text = cfg['OAISERVER_PROTOCOL_VERSION']
    for adminEmail in cfg['OAISERVER_ADMIN_EMAILS']:
        e = SubElement(e_identify, etree.QName(NS_OAIPMH, 'adminEmail'))
        e.text = adminEmail
    e_earliestDatestamp = SubElement(
        e_identify, etree.QName(
            NS_OAIPMH, 'earliestDatestamp'))
    # Default to the minimum representable date if no record exists or the
    # earliest record has no creation date.
    earliest_date = datetime(MINYEAR, 1, 1)
    earliest_record = OAIServerSearch(
        index=current_app.config['OAISERVER_RECORD_INDEX']).sort({
            "_created": {"order": "asc"}})[0:1].execute()
    if len(earliest_record.hits.hits) > 0:
        created_date_str = earliest_record.hits.hits[0].get(
            "_source", {}).get('_created')
        if created_date_str:
            # Normalize to naive UTC before formatting.
            earliest_date = arrow.get(
                created_date_str).to('utc').datetime.replace(tzinfo=None)
    e_earliestDatestamp.text = datetime_to_datestamp(earliest_date)
    e_deletedRecord = SubElement(e_identify,
                                 etree.QName(NS_OAIPMH, 'deletedRecord'))
    e_deletedRecord.text = 'no'
    e_granularity = SubElement(e_identify,
                               etree.QName(NS_OAIPMH, 'granularity'))
    assert cfg['OAISERVER_GRANULARITY'] in DATETIME_FORMATS
    e_granularity.text = cfg['OAISERVER_GRANULARITY']
    compressions = cfg['OAISERVER_COMPRESSIONS']
    # 'identity' means no compression, so the element is omitted entirely.
    if compressions != ['identity']:
        for compression in compressions:
            e_compression = SubElement(e_identify,
                                       etree.QName(NS_OAIPMH, 'compression'))
            e_compression.text = compression
    for description in cfg.get('OAISERVER_DESCRIPTIONS', []):
        e_description = SubElement(e_identify,
                                   etree.QName(NS_OAIPMH, 'description'))
        e_description.append(etree.fromstring(description))
    return e_tree
|
Create OAI-PMH response for verb Identify.
|
entailment
|
def resumption_token(parent, pagination, **kwargs):
    """Attach a resumptionToken element to a parent element."""
    # Do not add resumptionToken if all results fit to the first page.
    if pagination.page == 1 and not pagination.has_next:
        return
    token = serialize(pagination, **kwargs)
    e_resumptionToken = SubElement(parent, etree.QName(NS_OAIPMH,
                                                       'resumptionToken'))
    if pagination.total:
        # Advertise expiry, cursor position and total list size per the
        # OAI-PMH resumption-token specification.
        expiration_date = datetime.utcnow() + timedelta(
            seconds=current_app.config[
                'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME'
            ]
        )
        e_resumptionToken.set('expirationDate', datetime_to_datestamp(
            expiration_date
        ))
        e_resumptionToken.set('cursor', str(
            (pagination.page - 1) * pagination.per_page
        ))
        e_resumptionToken.set('completeListSize', str(pagination.total))
    # An empty token (last page) leaves the element body empty.
    if token:
        e_resumptionToken.text = token
|
Attach resumption token element to a parent.
|
entailment
|
def listsets(**kwargs):
    """Create the OAI-PMH response for the ListSets verb."""
    e_tree, e_listsets = verb(**kwargs)
    # Resume from the requested page, or start at page 1.
    page = kwargs.get('resumptionToken', {}).get('page', 1)
    size = current_app.config['OAISERVER_PAGE_SIZE']
    oai_sets = OAISet.query.paginate(page=page, per_page=size, error_out=False)
    for oai_set in oai_sets.items:
        e_set = SubElement(e_listsets, etree.QName(NS_OAIPMH, 'set'))
        e_setSpec = SubElement(e_set, etree.QName(NS_OAIPMH, 'setSpec'))
        e_setSpec.text = oai_set.spec
        e_setName = SubElement(e_set, etree.QName(NS_OAIPMH, 'setName'))
        e_setName.text = sanitize_unicode(oai_set.name)
        # An optional Dublin Core description block per set.
        if oai_set.description:
            e_setDescription = SubElement(e_set, etree.QName(NS_OAIPMH,
                                                             'setDescription'))
            e_dc = SubElement(
                e_setDescription, etree.QName(NS_OAIDC, 'dc'),
                nsmap=NSMAP_DESCRIPTION
            )
            e_dc.set(etree.QName(NS_XSI, 'schemaLocation'), NS_OAIDC)
            e_description = SubElement(e_dc, etree.QName(NS_DC, 'description'))
            e_description.text = oai_set.description
    resumption_token(e_listsets, oai_sets, **kwargs)
    return e_tree
|
Create OAI-PMH response for ListSets verb.
|
entailment
|
def listmetadataformats(**kwargs):
    """Create the OAI-PMH response for the ListMetadataFormats verb."""
    cfg = current_app.config
    e_tree, e_listmetadataformats = verb(**kwargs)
    if 'identifier' in kwargs:
        # Test that the record exists; raises if the PID is unknown.
        OAIIDProvider.get(pid_value=kwargs['identifier'])
    for prefix, metadata in cfg.get('OAISERVER_METADATA_FORMATS', {}).items():
        e_metadataformat = SubElement(
            e_listmetadataformats, etree.QName(NS_OAIPMH, 'metadataFormat')
        )
        e_metadataprefix = SubElement(
            e_metadataformat, etree.QName(NS_OAIPMH, 'metadataPrefix')
        )
        e_metadataprefix.text = prefix
        e_schema = SubElement(
            e_metadataformat, etree.QName(NS_OAIPMH, 'schema')
        )
        e_schema.text = metadata['schema']
        e_metadataNamespace = SubElement(
            e_metadataformat, etree.QName(NS_OAIPMH, 'metadataNamespace')
        )
        e_metadataNamespace.text = metadata['namespace']
    return e_tree
|
Create OAI-PMH response for ListMetadataFormats verb.
|
entailment
|
def header(parent, identifier, datestamp, sets=None, deleted=False):
    """Attach a ``<header/>`` element to a parent element and return it.

    :param parent: element to attach the header to
    :param identifier: OAI identifier string
    :param datestamp: datetime of the record's last update
    :param sets: optional iterable of set specs the record belongs to
    :param deleted: mark the record as deleted when True
    """
    e_header = SubElement(parent, etree.QName(NS_OAIPMH, 'header'))
    if deleted:
        e_header.set('status', 'deleted')
    e_identifier = SubElement(e_header, etree.QName(NS_OAIPMH, 'identifier'))
    e_identifier.text = identifier
    e_datestamp = SubElement(e_header, etree.QName(NS_OAIPMH, 'datestamp'))
    e_datestamp.text = datetime_to_datestamp(datestamp)
    for spec in sets or []:
        e = SubElement(e_header, etree.QName(NS_OAIPMH, 'setSpec'))
        e.text = spec
    return e_header
|
Attach ``<header/>`` element to a parent.
|
entailment
|
def getrecord(**kwargs):
    """Create the OAI-PMH response for the GetRecord verb."""
    record_dumper = serializer(kwargs['metadataPrefix'])
    pid = OAIIDProvider.get(pid_value=kwargs['identifier']).pid
    record = Record.get_record(pid.object_uuid)
    e_tree, e_getrecord = verb(**kwargs)
    e_record = SubElement(e_getrecord, etree.QName(NS_OAIPMH, 'record'))
    header(
        e_record,
        identifier=pid.pid_value,
        datestamp=record.updated,
        sets=record.get('_oai', {}).get('sets', []),
    )
    e_metadata = SubElement(e_record,
                            etree.QName(NS_OAIPMH, 'metadata'))
    # The dumper expects the record wrapped like an ES search hit.
    e_metadata.append(record_dumper(pid, {'_source': record}))
    return e_tree
|
Create OAI-PMH response for verb Identify.
|
entailment
|
def listidentifiers(**kwargs):
    """Create the OAI-PMH response for the ListIdentifiers verb."""
    e_tree, e_listidentifiers = verb(**kwargs)
    result = get_records(**kwargs)
    for record in result.items:
        pid = oaiid_fetcher(record['id'], record['json']['_source'])
        # Identifiers-only response: a header per record, no metadata.
        header(
            e_listidentifiers,
            identifier=pid.pid_value,
            datestamp=record['updated'],
            sets=record['json']['_source'].get('_oai', {}).get('sets', []),
        )
    resumption_token(e_listidentifiers, result, **kwargs)
    return e_tree
|
Create OAI-PMH response for verb ListIdentifiers.
|
entailment
|
def listrecords(**kwargs):
    """Create the OAI-PMH response for the ListRecords verb."""
    record_dumper = serializer(kwargs['metadataPrefix'])
    e_tree, e_listrecords = verb(**kwargs)
    result = get_records(**kwargs)
    for record in result.items:
        pid = oaiid_fetcher(record['id'], record['json']['_source'])
        e_record = SubElement(e_listrecords,
                              etree.QName(NS_OAIPMH, 'record'))
        header(
            e_record,
            identifier=pid.pid_value,
            datestamp=record['updated'],
            sets=record['json']['_source'].get('_oai', {}).get('sets', []),
        )
        e_metadata = SubElement(e_record, etree.QName(NS_OAIPMH, 'metadata'))
        e_metadata.append(record_dumper(pid, record['json']))
    resumption_token(e_listrecords, result, **kwargs)
    return e_tree
|
Create OAI-PMH response for verb ListRecords.
|
entailment
|
def oaiid_fetcher(record_uuid, data):
    """Fetch a record's OAI identifier.

    :param record_uuid: The record UUID.
    :param data: The record data.
    :returns: A :class:`invenio_pidstore.fetchers.FetchedPID` instance.
    :raises PersistentIdentifierError: if the record has no OAI id.
    """
    oai_info = data.get('_oai', {})
    pid_value = oai_info.get('id')
    if pid_value is None:
        raise PersistentIdentifierError()
    return FetchedPID(
        provider=OAIIDProvider,
        pid_type=OAIIDProvider.pid_type,
        pid_value=str(pid_value),
    )
|
Fetch a record's identifier.
:param record_uuid: The record UUID.
:param data: The record data.
:returns: A :class:`invenio_pidstore.fetchers.FetchedPID` instance.
|
entailment
|
def validate_spec(self, key, value):
    """Forbid updates of the set identifier (spec).

    Once a spec is assigned it may not be changed to a different value.
    """
    if self.spec and self.spec != value:
        raise OAISetSpecUpdateError("Updating spec is not allowed.")
    return value
|
Forbit updates of set identifier.
|
entailment
|
def add_record(self, record):
    """Add a record to the OAISet.

    :param record: Record to be added.
    :type record: `invenio_records.api.Record` or derivative.
    """
    # Ensure the nested ``_oai.sets`` list exists, then append this spec.
    oai_sets = record.setdefault('_oai', {}).setdefault('sets', [])
    assert not self.has_record(record)
    oai_sets.append(self.spec)
|
Add a record to the OAISet.
:param record: Record to be added.
:type record: `invenio_records.api.Record` or derivative.
|
entailment
|
def remove_record(self, record):
    """Remove a record from the OAISet.

    :param record: Record to be removed.
    :type record: `invenio_records.api.Record` or derivative.
    """
    assert self.has_record(record)
    # Rebuild the set list without this set's spec.
    record['_oai']['sets'] = [
        spec for spec in record['_oai']['sets'] if spec != self.spec
    ]
|
Remove a record from the OAISet.
:param record: Record to be removed.
:type record: `invenio_records.api.Record` or derivative.
|
entailment
|
def oaiserver(sets, records):
    """Initialize an OAI-PMH server fixture with test sets and records.

    :param sets: number of OAI sets to create.
    :param records: number of records to create and index.
    """
    from invenio_db import db
    from invenio_oaiserver.models import OAISet
    from invenio_records.api import Record
    # create a OAI Set
    with db.session.begin_nested():
        for i in range(sets):
            db.session.add(OAISet(
                spec='test{0}'.format(i),
                name='Test{0}'.format(i),
                description='test desc {0}'.format(i),
                search_pattern='title_statement.title:Test{0}'.format(i),
            ))
    # create a record
    schema = {
        'type': 'object',
        'properties': {
            'title_statement': {
                'type': 'object',
                'properties': {
                    'title': {
                        'type': 'string',
                    },
                },
            },
            'field': {'type': 'boolean'},
        },
    }
    # NOTE(review): ``app`` is not a parameter -- presumably a module-level
    # or fixture-provided Flask application; confirm it is in scope.
    with app.app_context():
        indexer = RecordIndexer()
        with db.session.begin_nested():
            for i in range(records):
                record_id = uuid.uuid4()
                data = {
                    'title_statement': {'title': 'Test{0}'.format(i)},
                    '$schema': schema,
                }
                # Mint both the control-number and OAI identifiers.
                recid_minter(record_id, data)
                oaiid_minter(record_id, data)
                record = Record.create(data, id_=record_id)
                indexer.index(record)
        db.session.commit()
|
Initialize OAI-PMH server.
|
entailment
|
def serializer(metadata_prefix):
    """Return an etree dumper for the given metadata prefix.

    :param metadata_prefix: One of the metadata identifiers configured in
        ``OAISERVER_METADATA_FORMATS``.
    """
    formats = current_app.config['OAISERVER_METADATA_FORMATS']
    dumper = formats[metadata_prefix]['serializer']
    # A tuple means (import path, keyword arguments to pre-bind).
    if isinstance(dumper, tuple):
        path, preset_kwargs = dumper
        return partial(import_string(path), **preset_kwargs)
    return import_string(dumper)
|
Return etree_dumper instances.
:param metadata_prefix: One of the metadata identifiers configured in
``OAISERVER_METADATA_FORMATS``.
|
entailment
|
def dumps_etree(pid, record, **kwargs):
    """Dump a MARC21-compatible record.

    :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
        instance.
    :param record: The :class:`invenio_records.api.Record` instance,
        wrapped in a dict under the ``_source`` key.
    :returns: A LXML Element instance.
    """
    # Imported lazily to avoid a hard dependency at module import time.
    from dojson.contrib.to_marc21 import to_marc21
    from dojson.contrib.to_marc21.utils import dumps_etree
    return dumps_etree(to_marc21.do(record['_source']), **kwargs)
|
Dump MARC21 compatible record.
:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
instance.
:param record: The :class:`invenio_records.api.Record` instance.
:returns: A LXML Element instance.
|
entailment
|
def eprints_description(metadataPolicy, dataPolicy,
                        submissionPolicy=None, content=None):
    """Generate the eprints element for the identify response.

    The eprints container is used by the e-print community to describe
    the content and policies of repositories.
    For the full specification and schema definition visit:
    http://www.openarchives.org/OAI/2.0/guidelines-eprints.htm
    """
    def _section(tag, mapping):
        # Build a child element whose sub-elements mirror the mapping.
        element = etree.Element(tag)
        for key, value in mapping.items():
            element.append(E(key, value))
        return element

    eprints = Element(etree.QName(NS_EPRINTS[None], 'eprints'),
                      nsmap=NS_EPRINTS)
    eprints.set(etree.QName(ns['xsi'], 'schemaLocation'),
                '{0} {1}'.format(EPRINTS_SCHEMA_LOCATION,
                                 EPRINTS_SCHEMA_LOCATION_XSD))
    if content:
        eprints.append(_section('content', content))
    eprints.append(_section('metadataPolicy', metadataPolicy))
    eprints.append(_section('dataPolicy', dataPolicy))
    if submissionPolicy:
        eprints.append(_section('submissionPolicy', submissionPolicy))
    return etree.tostring(eprints, pretty_print=True)
|
Generate the eprints element for the identify response.
The eprints container is used by the e-print community to describe
the content and policies of repositories.
For the full specification and schema definition visit:
http://www.openarchives.org/OAI/2.0/guidelines-eprints.htm
|
entailment
|
def oai_identifier_description(scheme, repositoryIdentifier,
                               delimiter, sampleIdentifier):
    """Generate the oai-identifier element for the identify response.

    The OAI identifier format is intended to provide persistent resource
    identifiers for items in repositories that implement OAI-PMH.
    For the full specification and schema definition visit:
    http://www.openarchives.org/OAI/2.0/guidelines-oai-identifier.htm
    """
    oai_identifier = Element(etree.QName(NS_OAI_IDENTIFIER[None],
                                         'oai_identifier'),
                             nsmap=NS_OAI_IDENTIFIER)
    oai_identifier.set(etree.QName(ns['xsi'], 'schemaLocation'),
                       '{0} {1}'.format(OAI_IDENTIFIER_SCHEMA_LOCATION,
                                        OAI_IDENTIFIER_SCHEMA_LOCATION_XSD))
    # The four mandatory children, in specification order.
    for tag, value in (('scheme', scheme),
                       ('repositoryIdentifier', repositoryIdentifier),
                       ('delimiter', delimiter),
                       ('sampleIdentifier', sampleIdentifier)):
        oai_identifier.append(E(tag, value))
    return etree.tostring(oai_identifier, pretty_print=True)
|
Generate the oai-identifier element for the identify response.
The OAI identifier format is intended to provide persistent resource
identifiers for items in repositories that implement OAI-PMH.
For the full specification and schema definition visit:
http://www.openarchives.org/OAI/2.0/guidelines-oai-identifier.htm
|
entailment
|
def friends_description(baseURLs):
    """Generate the friends element for the identify response.

    The friends container is recommended for use by repositories
    to list confederate repositories.
    For the schema definition visit:
    http://www.openarchives.org/OAI/2.0/guidelines-friends.htm
    """
    root = Element(
        etree.QName(NS_FRIENDS[None], 'friends'),
        nsmap=NS_FRIENDS,
    )
    root.set(
        etree.QName(ns['xsi'], 'schemaLocation'),
        '{0} {1}'.format(FRIENDS_SCHEMA_LOCATION,
                         FRIENDS_SCHEMA_LOCATION_XSD),
    )
    # One <baseURL> child per confederate repository.
    for url in baseURLs:
        root.append(E('baseURL', url))
    return etree.tostring(root, pretty_print=True)
|
Generate the friends element for the identify response.
The friends container is recommended for use by repositories
to list confederate repositories.
For the schema definition visit:
http://www.openarchives.org/OAI/2.0/guidelines-friends.htm
|
entailment
|
def after_insert_oai_set(mapper, connection, target):
    """Update records on OAISet insertion."""
    spec, pattern = target.spec, target.search_pattern
    _new_percolator(spec=spec, search_pattern=pattern)
    # NOTE(review): fixed delay presumably gives the new percolator query
    # time to become visible before records are re-evaluated — confirm.
    sleep(2)
    update_affected_records.delay(search_pattern=pattern)
|
Update records on OAISet insertion.
|
entailment
|
def after_update_oai_set(mapper, connection, target):
    """Update records on OAISet update."""
    spec, pattern = target.spec, target.search_pattern
    # Re-register the percolator query: drop the stale entry, then add
    # one reflecting the updated search pattern.
    _delete_percolator(spec=spec, search_pattern=pattern)
    _new_percolator(spec=spec, search_pattern=pattern)
    # NOTE(review): fixed delay presumably lets the percolator change
    # become visible before records are re-evaluated — confirm.
    sleep(2)
    update_affected_records.delay(spec=spec, search_pattern=pattern)
|
Update records on OAISet update.
|
entailment
|
def after_delete_oai_set(mapper, connection, target):
    """Update records on OAISet deletion."""
    spec, pattern = target.spec, target.search_pattern
    _delete_percolator(spec=spec, search_pattern=pattern)
    # NOTE(review): fixed delay presumably lets the percolator removal
    # become visible before records are re-evaluated — confirm.
    sleep(2)
    update_affected_records.delay(spec=spec)
|
Update records on OAISet deletion.
|
entailment
|
def query_string_parser(search_pattern):
    """Elasticsearch query string parser."""
    # Resolve the parser lazily and memoize it on the extension state so
    # the config lookup (and possible import) happens only once.
    if not hasattr(current_oaiserver, 'query_parser'):
        parser = current_app.config['OAISERVER_QUERY_PARSER']
        if isinstance(parser, six.string_types):
            # The config value may be a dotted import path, not a callable.
            parser = import_string(parser)
        current_oaiserver.query_parser = parser
    return current_oaiserver.query_parser('query_string', query=search_pattern)
|
Elasticsearch query string parser.
|
entailment
|
def get_affected_records(spec=None, search_pattern=None):
    """Get list of affected records.

    :param spec: The record spec.
    :param search_pattern: The search pattern.
    :returns: An iterator to lazily find results.
    """
    # spec       pattern    query
    # ---------- ---------- -------
    # None       None       None
    # None       Y          Y
    # X          None       X
    # X          ''         X
    # X          Y          X OR Y
    if spec is None and search_pattern is None:
        # PEP 479 fix: ``raise StopIteration`` inside a generator is
        # converted to RuntimeError on Python 3.7+; a bare ``return``
        # is the correct way to end iteration with no results.
        return
    queries = []
    if spec is not None:
        queries.append(Q('match', **{'_oai.sets': spec}))
    if search_pattern:
        queries.append(query_string_parser(search_pattern=search_pattern))
    search = OAIServerSearch(
        index=current_app.config['OAISERVER_RECORD_INDEX'],
    ).query(Q('bool', should=queries))
    for result in search.scan():
        yield result.meta.id
|
Get list of affected records.
:param spec: The record spec.
:param search_pattern: The search pattern.
:returns: An iterator to lazily find results.
|
entailment
|
def get_records(**kwargs):
    """Get records paginated.

    Supported keyword arguments: ``resumptionToken`` (a dict that may carry
    ``page`` and ``scroll_id``), ``set`` (OAI set spec), and ``from_`` /
    ``until`` datetime bounds on the ``_updated`` field.

    :returns: A ``Pagination`` object over the matching search hits.
    """
    page_ = kwargs.get('resumptionToken', {}).get('page', 1)
    size_ = current_app.config['OAISERVER_PAGE_SIZE']
    # Scroll context lifetime in seconds, reused as the ES ``scroll`` param.
    scroll = current_app.config['OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME']
    scroll_id = kwargs.get('resumptionToken', {}).get('scroll_id')
    if scroll_id is None:
        # First request: build a fresh scrolled search limited to the
        # requested page window.
        search = OAIServerSearch(
            index=current_app.config['OAISERVER_RECORD_INDEX'],
        ).params(
            scroll='{0}s'.format(scroll),
        ).extra(
            version=True,
        )[(page_-1)*size_:page_*size_]
        if 'set' in kwargs:
            search = search.query('match', **{'_oai.sets': kwargs['set']})
        # Optional datestamp filtering on the record's update time.
        time_range = {}
        if 'from_' in kwargs:
            time_range['gte'] = kwargs['from_']
        if 'until' in kwargs:
            time_range['lte'] = kwargs['until']
        if time_range:
            search = search.filter('range', **{'_updated': time_range})
        response = search.execute().to_dict()
    else:
        # Follow-up request: continue iterating the existing scroll context.
        response = current_search_client.scroll(
            scroll_id=scroll_id,
            scroll='{0}s'.format(scroll),
        )

    class Pagination(object):
        """Dummy pagination class."""

        # Page number and page size captured from the enclosing call.
        page = page_
        per_page = size_

        def __init__(self, response):
            """Initialize pagination."""
            self.response = response
            self.total = response['hits']['total']
            self._scroll_id = response.get('_scroll_id')
            # clean descriptor on last page
            if not self.has_next:
                current_search_client.clear_scroll(
                    scroll_id=self._scroll_id
                )
                self._scroll_id = None

        @cached_property
        def has_next(self):
            """Return True if there is next page."""
            return self.page * self.per_page <= self.total

        @cached_property
        def next_num(self):
            """Return next page number."""
            return self.page + 1 if self.has_next else None

        @property
        def items(self):
            """Return iterator."""
            from datetime import datetime
            for result in self.response['hits']['hits']:
                # Only records that carry OAI metadata are exposed.
                if '_oai' in result['_source']:
                    yield {
                        'id': result['_id'],
                        'json': result,
                        # ``_updated`` is truncated to second precision
                        # before parsing.
                        'updated': datetime.strptime(
                            result['_source']['_updated'][:19],
                            '%Y-%m-%dT%H:%M:%S'
                        ),
                    }

    return Pagination(response)
|
Get records paginated.
|
entailment
|
def get_file_path(filename, local=True, relative_to_module=None, my_dir=my_dir):
    """
    Look for an existing path matching filename.

    Try to resolve relative to the module location if the path cannot be found
    using "normal" resolution.
    """
    # A module, when supplied, overrides the default directory.
    if relative_to_module is not None:
        my_dir = os.path.dirname(relative_to_module.__file__)
    if local:
        user_path = os.path.expanduser(filename)
        candidate = os.path.abspath(user_path)
    else:
        user_path = candidate = filename
    if os.path.exists(candidate):
        # The file was found via normal resolution.
        return candidate
    # Otherwise fall back to a path relative to the module directory.
    fallback = os.path.join(my_dir, filename)
    assert os.path.exists(fallback), "no such file " + repr((filename, fallback, user_path))
    return fallback
|
Look for an existing path matching filename.
Try to resolve relative to the module location if the path cannot be found
using "normal" resolution.
|
entailment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.