_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def get_straat_by_id(self, id):
    '''
    Retrieve a `straat` by the Id.

    :param integer id: The id of the `straat`.
    :rtype: :class:`Straat`
    :raises GatewayResourceNotFoundException: when no `straat` exists \
        for the given id.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetStraatnaamWithStatusByStraatnaamId', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Straat(
            res.StraatnaamId,
            res.StraatnaamLabel,
            res.GemeenteId,
            res.StatusStraatnaam,
            res.Straatnaam,
            res.TaalCode,
            res.StraatnaamTweedeTaal,
            res.TaalCodeTweedeTaal,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['long'].is_configured:
        key = 'GetStraatnaamWithStatusByStraatnaamId#%s' % (id)
        straat = self.caches['long'].get_or_create(key, creator)
    else:
        straat = creator()
    straat.set_gateway(self)
    return straat
def list_huisnummers_by_straat(self, straat, sort=1):
    '''
    List all `huisnummers` in a `Straat`.

    :param straat: The :class:`Straat` for which the \
        `huisnummers` are wanted.
    :rtype: A :class: `list` of :class:`Huisnummer`
    '''
    # Accept either a Straat instance or a plain id.
    try:
        id = straat.id
    except AttributeError:
        id = straat

    def creator():
        res = crab_gateway_request(
            self.client, 'ListHuisnummersWithStatusByStraatnaamId',
            id, sort
        )
        try:
            return [
                Huisnummer(
                    item.HuisnummerId,
                    item.StatusHuisnummer,
                    item.Huisnummer,
                    id
                ) for item in res.HuisnummerWithStatusItem
            ]
        except AttributeError:
            # The service omits the list entirely when there are no results.
            return []

    if self.caches['short'].is_configured:
        key = 'ListHuisnummersWithStatusByStraatnaamId#%s%s' % (id, sort)
        huisnummers = self.caches['short'].get_or_create(key, creator)
    else:
        huisnummers = creator()
    for h in huisnummers:
        h.set_gateway(self)
    return huisnummers
def list_huisnummers_by_perceel(self, perceel, sort=1):
    '''
    List all `huisnummers` on a `Perceel`.

    Generally there will only be one, but multiples are possible.

    :param perceel: The :class:`Perceel` for which the \
        `huisnummers` are wanted.
    :rtype: A :class: `list` of :class:`Huisnummer`
    '''
    # Accept either a Perceel instance or a plain id.
    try:
        id = perceel.id
    except AttributeError:
        id = perceel

    def creator():
        res = crab_gateway_request(
            self.client, 'ListHuisnummersWithStatusByIdentificatorPerceel',
            id, sort
        )
        try:
            huisnummers = []
            for r in res.HuisnummerWithStatusItem:
                h = self.get_huisnummer_by_id(r.HuisnummerId)
                # Detach from the gateway before the instance is cached.
                h.clear_gateway()
                huisnummers.append(h)
            return huisnummers
        except AttributeError:
            # No results: the service omits the list entirely.
            return []

    if self.caches['short'].is_configured:
        key = 'ListHuisnummersWithStatusByIdentificatorPerceel#%s%s' % (id, sort)
        huisnummers = self.caches['short'].get_or_create(key, creator)
    else:
        huisnummers = creator()
    for h in huisnummers:
        h.set_gateway(self)
    return huisnummers
def get_huisnummer_by_id(self, id):
    '''
    Retrieve a `huisnummer` by the Id.

    :param integer id: the Id of the `huisnummer`
    :rtype: :class:`Huisnummer`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetHuisnummerWithStatusByHuisnummerId', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Huisnummer(
            res.HuisnummerId,
            res.StatusHuisnummer,
            res.Huisnummer,
            res.StraatnaamId,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetHuisnummerWithStatusByHuisnummerId#%s' % (id)
        huisnummer = self.caches['short'].get_or_create(key, creator)
    else:
        huisnummer = creator()
    huisnummer.set_gateway(self)
    return huisnummer
def get_postkanton_by_huisnummer(self, huisnummer):
    '''
    Retrieve a `postkanton` by the Huisnummer.

    :param huisnummer: The :class:`Huisnummer` for which the `postkanton` \
        is wanted.
    :rtype: :class:`Postkanton`
    :raises GatewayResourceNotFoundException: when nothing is found.
    '''
    # Accept either a Huisnummer instance or a plain id.
    try:
        id = huisnummer.id
    except AttributeError:
        id = huisnummer

    def creator():
        res = crab_gateway_request(
            self.client, 'GetPostkantonByHuisnummerId', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Postkanton(
            res.PostkantonCode
        )
    if self.caches['short'].is_configured:
        key = 'GetPostkantonByHuisnummerId#%s' % (id)
        postkanton = self.caches['short'].get_or_create(key, creator)
    else:
        postkanton = creator()
    postkanton.set_gateway(self)
    return postkanton
def get_wegobject_by_id(self, id):
    '''
    Retrieve a `Wegobject` by the Id.

    :param integer id: the Id of the `Wegobject`
    :rtype: :class:`Wegobject`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetWegobjectByIdentificatorWegobject', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Wegobject(
            res.IdentificatorWegobject,
            res.AardWegobject,
            (res.CenterX, res.CenterY),
            (res.MinimumX, res.MinimumY, res.MaximumX, res.MaximumY),
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetWegobjectByIdentificatorWegobject#%s' % (id)
        wegobject = self.caches['short'].get_or_create(key, creator)
    else:
        wegobject = creator()
    wegobject.set_gateway(self)
    return wegobject
def get_wegsegment_by_id(self, id):
    '''
    Retrieve a `wegsegment` by the Id.

    :param integer id: the Id of the `wegsegment`
    :rtype: :class:`Wegsegment`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client,
            'GetWegsegmentByIdentificatorWegsegment', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Wegsegment(
            res.IdentificatorWegsegment,
            res.StatusWegsegment,
            res.GeometriemethodeWegsegment,
            res.Geometrie,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetWegsegmentByIdentificatorWegsegment#%s' % (id)
        wegsegment = self.caches['short'].get_or_create(key, creator)
    else:
        wegsegment = creator()
    wegsegment.set_gateway(self)
    return wegsegment
def get_terreinobject_by_id(self, id):
    '''
    Retrieve a `Terreinobject` by the Id.

    :param integer id: the Id of the `Terreinobject`
    :rtype: :class:`Terreinobject`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client,
            'GetTerreinobjectByIdentificatorTerreinobject', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Terreinobject(
            res.IdentificatorTerreinobject,
            res.AardTerreinobject,
            (res.CenterX, res.CenterY),
            (res.MinimumX, res.MinimumY, res.MaximumX, res.MaximumY),
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetTerreinobjectByIdentificatorTerreinobject#%s' % (id)
        terreinobject = self.caches['short'].get_or_create(key, creator)
    else:
        terreinobject = creator()
    terreinobject.set_gateway(self)
    return terreinobject
def get_perceel_by_id(self, id):
    '''
    Retrieve a `Perceel` by the Id.

    :param string id: the Id of the `Perceel`
    :rtype: :class:`Perceel`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetPerceelByIdentificatorPerceel', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Perceel(
            res.IdentificatorPerceel,
            (res.CenterX, res.CenterY),
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetPerceelByIdentificatorPerceel#%s' % (id)
        perceel = self.caches['short'].get_or_create(key, creator)
    else:
        perceel = creator()
    perceel.set_gateway(self)
    return perceel
def get_gebouw_by_id(self, id):
    '''
    Retrieve a `Gebouw` by the Id.

    :param integer id: the Id of the `Gebouw`
    :rtype: :class:`Gebouw`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetGebouwByIdentificatorGebouw', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Gebouw(
            res.IdentificatorGebouw,
            res.AardGebouw,
            res.StatusGebouw,
            res.GeometriemethodeGebouw,
            res.Geometrie,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetGebouwByIdentificatorGebouw#%s' % (id)
        gebouw = self.caches['short'].get_or_create(key, creator)
    else:
        gebouw = creator()
    gebouw.set_gateway(self)
    return gebouw
def get_subadres_by_id(self, id):
    '''
    Retrieve a `Subadres` by the Id.

    :param integer id: the Id of the `Subadres`
    :rtype: :class:`Subadres`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetSubadresWithStatusBySubadresId', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Subadres(
            res.SubadresId,
            res.Subadres,
            res.StatusSubadres,
            res.HuisnummerId,
            res.AardSubadres,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetSubadresWithStatusBySubadresId#%s' % (id)
        subadres = self.caches['short'].get_or_create(key, creator)
    else:
        subadres = creator()
    subadres.set_gateway(self)
    return subadres
def get_adrespositie_by_id(self, id):
    '''
    Retrieve a `Adrespositie` by the Id.

    :param integer id: the Id of the `Adrespositie`
    :rtype: :class:`Adrespositie`
    :raises GatewayResourceNotFoundException: when the id is unknown.
    '''
    def creator():
        res = crab_gateway_request(
            self.client, 'GetAdrespositieByAdrespositieId', id
        )
        # A null SOAP response means the id is unknown.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Adrespositie(
            res.AdrespositieId,
            res.HerkomstAdrespositie,
            res.Geometrie,
            res.AardAdres,
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )
    if self.caches['short'].is_configured:
        key = 'GetAdrespositieByAdrespositieId#%s' % (id)
        adrespositie = self.caches['short'].get_or_create(key, creator)
    else:
        adrespositie = creator()
    adrespositie.set_gateway(self)
    return adrespositie
def postadressen(self):
    '''
    Returns the postadressen for this Perceel.

    Will only take the huisnummers with status `inGebruik` into account.

    :rtype: list
    '''
    in_gebruik = []
    # Status id '3' marks a huisnummer that is `inGebruik`.
    for huisnummer in self.huisnummers:
        if huisnummer.status.id == '3':
            in_gebruik.append(huisnummer.postadres)
    return in_gebruik
def salt_ssh(project, target, module, args=None, kwargs=None):
    """
    Execute a `salt-ssh` command
    """
    pieces = ['salt-ssh']
    pieces.extend(generate_salt_cmd(target, module, args, kwargs))
    pieces.extend([
        '--state-output=mixed',
        '--roster-file=%s' % project.roster_path,
        '--config-dir=%s' % project.salt_ssh_config_dir,
        '--ignore-host-keys',
        '--force-color',
    ])
    cmd = ' '.join(pieces)
    logger.debug('salt-ssh cmd: %s', cmd)
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    # Any stderr output is treated as a failure, not only a non-zero exit.
    if proc.returncode != 0 or err:
        raise Exception(err)
    return out + err
def salt_master(project, target, module, args=None, kwargs=None):
    """
    Execute a `salt` command in the head node
    """
    client = project.cluster.head.ssh_client
    pieces = ['salt']
    pieces.extend(generate_salt_cmd(target, module, args, kwargs))
    pieces.extend(['--timeout=300', '--state-output=mixed'])
    output = client.exec_command(' '.join(pieces), sudo=True)
    # On success return stdout, otherwise surface stderr to the caller.
    if output['exit_code'] == 0:
        return output['stdout']
    return output['stderr']
def getenv(name, **kwargs):
    """
    Retrieves environment variable by name and casts the value to desired type.

    If desired type is list or tuple - uses separator to split the value.
    """
    fallback = kwargs.pop('default', None)
    target_type = kwargs.pop('type', str)
    separator = kwargs.pop('separator', ',')

    raw = os.getenv(name, None)
    if raw is None:
        # Unset variable: hand back the fallback (None when there is none).
        return fallback if fallback is not None else None
    if target_type is bool:
        # 'false'/'0' (any case) are falsy; everything else follows bool().
        return False if raw.lower() in ('false', '0') else bool(raw)
    if target_type in (list, tuple):
        return target_type(raw.split(separator))
    if target_type is dict:
        return dict(literal_eval(raw))
    return target_type(raw)
q45416 | generate_network | train | def generate_network(user=None, reset=False):
"""
Assemble the network connections for a given user
"""
token = collect_token()
try:
gh = login(token=token)
root_user = gh.user(user)
except Exception, e:
# Failed to login using the token, github3.models.GitHubError
raise e
graph_nodes = []
graph_edges = []
username = user if user is not None else root_user.login
if not is_cached(username_to_file(username)) or reset:
graph_nodes.append(username)
# @TODO: take care of the 'rate limit exceeding' if imposed
try:
for person in gh.iter_following(username):
graph_nodes.append(str(person))
graph_edges.append((root_user.login, str(person)))
for i in range(1, root_user.following):
user = gh.user(graph_nodes[i])
user_following_edges = [(user.login, str(person)) for person in gh.iter_following(
user) if str(person) in graph_nodes]
graph_edges += user_following_edges
except Exception, e:
raise e
generate_gml(username, graph_nodes, graph_edges, True)
else:
reuse_gml(username)
return username | python | {
"resource": ""
} |
def load_xml_attrs(self):
    """
    Load XML attributes as object attributes.

    :returns: List of parsed attributes.
    :rtype: list
    """
    parsed = []
    if hasattr(self, 'xml_element'):
        for attr_name, attr_value in iter(self.xml_element.attrib.items()):
            # Strip any namespace and normalise dashes to underscores so
            # the attribute is a valid Python identifier.
            uri, tag = Element.get_namespace_and_tag(attr_name)
            tag = tag.replace('-', '_')
            parsed.append(tag)
            setattr(self, tag, attr_value)
    self.attrs = parsed
    return self.attrs
def get_namespace_and_tag(name):
    """
    Separates the namespace and tag from an element.

    :param str name: Tag, possibly in Clark notation (``{uri}tag``).
    :returns: Namespace URI (or None) and tag name (or None for
        non-string input).
    :rtype: tuple
    """
    if not isinstance(name, str):
        return None, None
    # Bug fix: ``name[0]`` raised IndexError on an empty string;
    # startswith handles it safely.
    if name.startswith("{"):
        uri, _, tag = name[1:].partition("}")
        return uri, tag
    return None, name
def write_command(cls, writer, name, buffers=()):
    """
    Write a command to the specified writer.

    :param writer: The writer to use.
    :param name: The command name.
    :param buffers: The buffers to writer.
    """
    assert len(name) < 256
    total = len(name) + 1 + sum(map(len, buffers))
    # Short frames (< 256 bytes) use a 1-byte length, long ones 8 bytes.
    if total < 256:
        header = struct.pack('!BBB', 0x04, total, len(name))
    else:
        header = struct.pack('!BQB', 0x06, total, len(name))
    writer.write(header)
    writer.write(name)
    for chunk in buffers:
        writer.write(chunk)
q45420 | Mechanism._expect_command | train | async def _expect_command(cls, reader, name):
"""
Expect a command.
:param reader: The reader to use.
:returns: The command data.
"""
size_type = struct.unpack('B', await reader.readexactly(1))[0]
if size_type == 0x04:
size = struct.unpack('!B', await reader.readexactly(1))[0]
elif size_type == 0x06:
size = struct.unpack('!Q', await reader.readexactly(8))[0]
else:
raise ProtocolError(
"Unexpected size type: %0x" % size_type,
fatal=True,
)
name_size = struct.unpack('B', await reader.readexactly(1))[0]
if name_size != len(name):
raise ProtocolError(
"Unexpected command name size: %s (expecting %s)" % (
name_size,
len(name),
),
fatal=True,
)
c_name = await reader.readexactly(name_size)
if c_name != name:
raise ProtocolError(
"Unexpected command name: %s (expecting %s)" % (c_name, name),
fatal=True,
)
return await reader.readexactly(size - name_size - 1) | python | {
"resource": ""
} |
def phase_shifted_coefficients(amplitude_coefficients, form='cos',
                               shift=0.0):
    r"""
    Converts Fourier coefficients from the amplitude form to the
    phase-shifted form, as either a sine or cosine series.

    Amplitude form:

    .. math::
        m(t) = A_0 + \sum_{k=1}^n (a_k \sin(k \omega t)
                                   + b_k \cos(k \omega t))

    Sine form:

    .. math::
        m(t) = A_0 + \sum_{k=1}^n A_k \sin(k \omega t + \Phi_k)

    Cosine form:

    .. math::
        m(t) = A_0 + \sum_{k=1}^n A_k \cos(k \omega t + \Phi_k)

    **Parameters**

    amplitude_coefficients : array-like, shape = [:math:`2n+1`]
        Array of coefficients :math:`[ A_0, a_1, b_1, \ldots a_n, b_n ]`.
    form : str, optional
        Form of output coefficients, one of 'sin' or 'cos' (default 'cos').
    shift : number, optional
        Shift to apply to light curve (default 0.0).

    **Returns**

    out : array-like, shape = [:math:`2n+1`]
        Array of coefficients :math:`[ A_0, A_1, \Phi_1, \ldots, A_n, \Phi_n ]`.
    """
    if form != 'sin' and form != 'cos':
        raise NotImplementedError(
            'Fourier series must have form sin or cos')
    # Split the interleaved coefficient vector into its parts.
    A_0 = amplitude_coefficients[0]
    sine_terms = amplitude_coefficients[1::2]
    cosine_terms = amplitude_coefficients[2::2]
    harmonics = numpy.arange(1, sine_terms.size + 1)

    # Each (a_k, b_k) pair forms the legs of a right triangle whose
    # hypotenuse is the amplitude A_k and whose angle is the phase Phi_k.
    amplitudes = numpy.sqrt(sine_terms**2 + cosine_terms**2)
    # The argument order of arctan2 differs between the two series forms;
    # the optional shift advances each harmonic's phase by 2*pi*k*shift.
    if form == 'cos':
        phases = numpy.arctan2(-sine_terms, cosine_terms) \
            + 2*pi*harmonics*shift
    else:
        phases = numpy.arctan2(cosine_terms, sine_terms) \
            + 2*pi*harmonics*shift
    # Constrain phases to [0, 2*pi).
    phases %= 2*pi

    result = numpy.empty(amplitude_coefficients.shape, dtype=float)
    result[0] = A_0
    result[1::2] = amplitudes
    result[2::2] = phases
    return result
q45422 | Repository._execute | train | def _execute(self, command, stdin=None, stdout=subprocess.PIPE):
"""Executes the specified command relative to the repository root.
Returns a tuple containing the return code and the process output.
"""
process = subprocess.Popen(command, shell=True, cwd=self.root_path, stdin=stdin, stdout=stdout)
return (process.wait(), None if stdout is not subprocess.PIPE else process.communicate()[0].decode('utf-8')) | python | {
"resource": ""
} |
def choices(klass):
    """
    Decorator to set `CHOICES` and other attributes
    """
    pairs = []
    # Each Meta attribute holds a (value, label) pair: expose the value
    # on the class and collect the pair for the CHOICES tuple.
    for attr in user_attributes(klass.Meta):
        val = getattr(klass.Meta, attr)
        setattr(klass, attr, val[0])
        pairs.append((val[0], val[1]))
    setattr(klass, 'CHOICES', tuple(pairs))
    return klass
q45424 | drop_db | train | def drop_db():
" Drop all tables. "
from ..ext import db
if prompt_bool("Are you sure? You will lose all your data!"):
db.drop_all()
# Evolution support
from flask import current_app
from flaskext.evolution import db as evolution_db
evolution_db.init_app(current_app)
evolution_db.drop_all()
print "Database clearing" | python | {
"resource": ""
} |
def clean_prefix(self):
    """
    Validates the prefix
    """
    # Fixed namespaces may not be renamed: always keep the stored prefix.
    if self.instance.fixed:
        return self.instance.prefix
    prefix = self.cleaned_data['prefix']
    # A namespace prefix must be a valid NCName.
    if not namespace.is_ncname(prefix):
        raise forms.ValidationError("This is an invalid prefix")
    return prefix
def clean_uri(self):
    """
    Validates the URI
    """
    # Fixed namespaces may not be re-pointed: always keep the stored URI.
    if self.instance.fixed:
        return self.instance.uri
    # todo: URI validation
    return self.cleaned_data['uri']
def as_dict(self, section='Main', **kwargs):
    """Return template context from configs.
    """
    # Materialise the (key, value) pairs from the parent parser as a dict.
    return dict(super(MakesiteParser, self).items(section, **kwargs))
def config_oauth(app):
    " Configure oauth support. "
    for name in PROVIDERS:
        config = app.config.get('OAUTH_%s' % name.upper())
        if not config:
            # Provider not configured for this app: skip it.
            continue
        if name in oauth.remote_apps:
            remote_app = oauth.remote_apps[name]
        else:
            remote_app = oauth.remote_app(name, **config)
        client_class = CLIENTS.get(name)
        client_class(app, remote_app)
def cached_instance(model, timeout=None, **filters):
    """ Auto cached model instance.

    :param model: Model class or dotted-string path to one.
    :param timeout: Cache timeout, forwarded to the cache backend.
    :param filters: Lookup arguments used to fetch the instance.
    """
    if isinstance(model, basestring):
        model = _str_to_model(model)
    cache_key = generate_cache_key(model, **filters)
    # Bug fix: ``timeout`` was previously accepted but never forwarded.
    return get_cached(cache_key, model.objects.select_related().get,
                      kwargs=filters, timeout=timeout)
def cached_query(qs, timeout=None):
    """ Auto cached queryset and generate results.

    :param qs: Queryset to cache.
    :param timeout: Cache timeout, forwarded to the cache backend.
    """
    cache_key = generate_cache_key(qs)
    # Bug fix: the caller-supplied ``timeout`` was previously discarded
    # (a literal ``None`` was always passed to get_cached).
    return get_cached(cache_key, list, args=(qs,), timeout=timeout)
def clean_cache(cached, **kwargs):
    " Generate cache key and clean cached value. "
    # Dotted-string model paths are resolved to the model class first.
    if isinstance(cached, basestring):
        cached = _str_to_model(cached)
    cache.delete(generate_cache_key(cached, **kwargs))
def generate_cache_key(cached, **kwargs):
    """ Auto generate cache key for model or queryset
    """
    # NOTE: Python 2 module (iteritems below).
    if isinstance(cached, QuerySet):
        key = str(cached.query)
    elif isinstance(cached, (Model, ModelBase)):
        filters = ','.join('%s=%s' % item for item in kwargs.iteritems())
        key = '%s.%s:%s' % (cached._meta.app_label,
                            cached._meta.module_name,
                            filters)
    else:
        raise AttributeError("Objects must be queryset or model.")
    if not key:
        raise Exception('Cache key cannot be empty.')
    return clean_cache_key(key)
def clean_cache_key(key):
    """ Replace spaces with '-' and hash if length is greater than 200.

    Cache backends such as memcached reject keys longer than 250
    characters; keys over 200 are truncated and suffixed with an md5
    digest of the full key to stay unique. (Docstring previously said
    250, which did not match the code.)
    """
    cache_key = re.sub(r'\s+', '-', key)
    cache_key = smart_str(cache_key)
    if len(cache_key) > 200:
        cache_key = cache_key[:150] + '-' + hashlib.md5(cache_key).hexdigest()
    return cache_key
def collect_static_files(src_map, dst):
    """
    Collect all static files and move them into a temporary location.

    This is very similar to the ``collectstatic`` command.
    """
    # NOTE: Python 2 module (iteritems below).
    for rel_path, src_path in src_map.iteritems():
        copy_file(src_path, os.path.join(dst, rel_path))
def apply_preprocessors(root, src, dst, processors):
    """
    Preprocessors operate based on the source filename, and apply to each
    file individually.
    """
    matching = [(pattern, cmds) for pattern, cmds in processors.iteritems()
                if fnmatch(src, pattern)]
    if not matching:
        return False
    params = get_format_params(dst)
    current = src
    for pattern, cmd_list in matching:
        for cmd in cmd_list:
            run_command(cmd, root=root, dst=dst, input=current, params=params)
            # After the first command runs, subsequent commands consume
            # its output (dst) instead of the original source.
            current = dst
    return True
def apply_postcompilers(root, src_list, dst, processors):
    """
    Postcompilers operate based on the destination filename. They operate on a
    collection of files, and are expected to take a list of 1+ inputs and
    generate a single output.
    """
    dst_file = os.path.join(root, dst)
    matching = [(pattern, cmds) for pattern, cmds in processors.iteritems()
                if fnmatch(dst, pattern)]
    if not matching:
        # No postcompilers apply: simply concatenate the sources into dst.
        ensure_dirs(dst_file)
        logger.info('Combining [%s] into [%s]', ' '.join(src_list), dst_file)
        with open(dst_file, 'w') as dst_fp:
            for src in src_list:
                with open(os.path.join(root, src)) as src_fp:
                    for chunk in src_fp:
                        dst_fp.write(chunk)
        return True
    params = get_format_params(dst)
    # TODO: probably doesnt play nice everywhere
    inputs = src_list
    for pattern, cmd_list in matching:
        for cmd in cmd_list:
            run_command(cmd, root=root, dst=dst, input=' '.join(inputs),
                        params=params)
            # After the first command runs, subsequent commands consume
            # its single output (dst).
            inputs = [dst]
    return True
def set_model_internal_data(model, original_data, modified_data, deleted_data):
    """
    Set internal data to model.
    """
    model.__original_data__ = original_data
    for child in model.__original_data__:
        model._prepare_child(child)
    model.__modified_data__ = modified_data
    for child in model.__modified_data__:
        model._prepare_child(child)
    model.__deleted_fields__ = deleted_data
    return model
def process_base_field(cls, field, key):
    """
    Preprocess field instances.

    :param field: Field object
    :param key: Key where field was found
    """
    if not field.name:
        field.name = key
    elif key != field.name:
        # The attribute key differs from the declared field name: record
        # the key as an alias so both spellings resolve to the same field.
        if isinstance(field.alias, list):
            field.alias.insert(0, key)
        else:
            field.alias = [key]
    setattr(cls, field.name, field)
    cls.prepare_field(field)
    if field.alias:
        for alias_name in field.alias:
            if key is not alias_name:
                setattr(cls, alias_name, field)
def set_field_value(self, name, value):
    """
    Set the value to the field modified_data
    """
    name = self.get_real_name(name)
    if not name or not self._can_write_field(name):
        return
    if name in self.__deleted_fields__:
        self.__deleted_fields__.remove(name)
    if self.__original_data__.get(name) == value:
        # Setting the original value back: drop any pending modification.
        self.__modified_data__.pop(name, None)
    else:
        self.__modified_data__[name] = value
        self._prepare_child(value)
    # Propagate the read-only flag to child models when the structure
    # declares this field read-only.
    if name in self.__structure__ and self.__structure__[name].read_only:
        try:
            value.set_read_only(True)
        except AttributeError:
            pass
def get_field_value(self, name):
    """
    Get the field value from the modified data or the original one
    """
    name = self.get_real_name(name)
    if not name or name in self.__deleted_fields__:
        return None
    # Pending modifications shadow the original value.
    value = self.__modified_data__.get(name)
    if value is not None:
        return value
    return self.__original_data__.get(name)
def delete_field_value(self, name):
    """
    Mark this field to be deleted
    """
    name = self.get_real_name(name)
    if not name or not self._can_write_field(name):
        return
    # Drop any pending modification for the field.
    self.__modified_data__.pop(name, None)
    # Only fields that exist in the original data need a delete marker.
    if name in self.__original_data__ and name not in self.__deleted_fields__:
        self.__deleted_fields__.append(name)
def reset_field_value(self, name):
    """
    Resets value of a field
    """
    name = self.get_real_name(name)
    if not name or not self._can_write_field(name):
        return
    self.__modified_data__.pop(name, None)
    if name in self.__deleted_fields__:
        self.__deleted_fields__.remove(name)
    # Recursively reset child models kept in the original data.
    try:
        self.__original_data__[name].clear_modified_data()
    except (KeyError, AttributeError):
        pass
def is_modified_field(self, name):
    """
    Returns whether a field is modified or not
    """
    name = self.get_real_name(name)
    if name in self.__modified_data__ or name in self.__deleted_fields__:
        return True
    # Fall back to asking the child model itself; missing fields or plain
    # values (no is_modified attribute) count as unmodified.
    try:
        return self.get_field_value(name).is_modified()
    except Exception:
        return False
def export_data(self):
    """
    Get the results with the modified_data
    """
    merged = self.__original_data__.copy()
    merged.update(self.__modified_data__)
    exported = {}
    for field, value in merged.items():
        if field in self.__deleted_fields__:
            continue
        # Child models export themselves; plain values pass through.
        try:
            exported[field] = value.export_data()
        except AttributeError:
            exported[field] = value
    return exported
def export_modified_data(self):
    """
    Get the modified data
    """
    # TODO: why None? Try to get a better flag
    exported = {field: None for field in self.__deleted_fields__}
    for field, value in self.__modified_data__.items():
        if field in exported:
            continue
        try:
            exported[field] = value.export_modified_data()
        except AttributeError:
            exported[field] = value
    # Unmodified-at-this-level fields may still hold modified children.
    for field, value in self.__original_data__.items():
        if field in exported:
            continue
        try:
            if value.is_modified():
                exported[field] = value.export_modified_data()
        except AttributeError:
            pass
    return exported
def export_modifications(self):
    """
    Returns model modifications.
    """
    modifications = {}
    for field, value in self.__modified_data__.items():
        try:
            modifications[field] = value.export_data()
        except AttributeError:
            modifications[field] = value
    for field, value in self.__original_data__.items():
        if field in modifications or field in self.__deleted_fields__:
            continue
        try:
            if not value.is_modified():
                continue
            child_changes = value.export_modifications()
        except AttributeError:
            continue
        # Flatten nested modifications using dotted field paths; non-dict
        # results are stored as-is.
        try:
            modifications.update({'{}.{}'.format(field, sub): val
                                  for sub, val in child_changes.items()})
        except AttributeError:
            modifications[field] = child_changes
    return modifications
def get_original_field_value(self, name):
    """
    Returns original field value or None
    """
    if name not in self.__original_data__:
        return None
    value = self.__original_data__[name]
    # Child models export their original data; plain values pass through.
    try:
        return value.export_original_data()
    except AttributeError:
        return value
def export_original_data(self):
    """
    Get the original data
    """
    return {field: self.get_original_field_value(field)
            for field in self.__original_data__.keys()}
def export_deleted_fields(self):
    """
    Returns a list with any deleted fields from original data.

    In tree models, deleted fields on children will be appended
    using dotted paths. (Docstring typos fixed.)
    """
    deleted = self.__deleted_fields__.copy()
    for field, value in self.__original_data__.items():
        if field in deleted:
            continue
        # Plain values have no export_deleted_fields and are skipped.
        try:
            child_deleted = value.export_deleted_fields()
        except AttributeError:
            continue
        deleted.extend('.'.join([field, sub]) for sub in child_deleted)
    return deleted
def flat_data(self):
    """
    Pass all the data from modified_data to original_data
    """
    def consolidate(value):
        """
        Flat field data
        """
        # Recursively flatten child models; plain values pass through.
        try:
            value.flat_data()
        except AttributeError:
            pass
        return value

    combined = self.__original_data__
    combined.update(self.__modified_data__)
    self.__original_data__ = {
        field: consolidate(value)
        for field, value in combined.items()
        if field not in self.__deleted_fields__
    }
    self.clear_modified_data()
q45451 | BaseModel.clear | train | def clear(self):
"""
Clears all the data in the object, keeping original data
"""
self.__modified_data__ = {}
self.__deleted_fields__ = [field for field in self.__original_data__.keys()] | python | {
"resource": ""
} |
q45452 | BaseModel.get_fields | train | def get_fields(self):
"""
Returns used fields of model
"""
result = [key for key in self.__original_data__.keys()
if key not in self.__deleted_fields__]
result.extend([key for key in self.__modified_data__.keys()
if key not in result and key not in self.__deleted_fields__])
return result | python | {
"resource": ""
} |
q45453 | BaseModel.is_modified | train | def is_modified(self):
"""
Returns whether model is modified or not
"""
if len(self.__modified_data__) or len(self.__deleted_fields__):
return True
for value in self.__original_data__.values():
try:
if value.is_modified():
return True
except AttributeError:
pass
return False | python | {
"resource": ""
} |
q45454 | BaseDynamicModel._get_field_type | train | def _get_field_type(self, key, value):
"""
Helper to create field object based on value type
"""
if isinstance(value, bool):
return BooleanField(name=key)
elif isinstance(value, int):
return IntegerField(name=key)
elif isinstance(value, float):
return FloatField(name=key)
elif isinstance(value, str):
return StringField(name=key)
elif isinstance(value, time):
return TimeField(name=key)
elif isinstance(value, datetime):
return DateTimeField(name=key)
elif isinstance(value, date):
return DateField(name=key)
elif isinstance(value, timedelta):
return TimedeltaField(name=key)
elif isinstance(value, Enum):
return EnumField(name=key, enum_class=type(value))
elif isinstance(value, (dict, BaseDynamicModel, Mapping)):
return ModelField(name=key, model_class=self.__dynamic_model__ or self.__class__)
elif isinstance(value, BaseModel):
return ModelField(name=key, model_class=value.__class__)
elif isinstance(value, (list, set, ListModel)):
if not len(value):
return None
field_type = self._get_field_type(None, value[0])
return ArrayField(name=key, field_type=field_type)
elif value is None:
return None
else:
raise TypeError("Invalid parameter: %s. Type not supported." % (key,)) | python | {
"resource": ""
} |
q45455 | HashMapModel.copy | train | def copy(self):
"""
Creates a copy of model
"""
return self.__class__(field_type=self.get_field_type(), data=self.export_data()) | python | {
"resource": ""
} |
q45456 | FastDynamicModel.get_current_structure | train | def get_current_structure(self):
"""
Returns a dictionary with model field objects.
:return: dict
"""
struct = self.__class__.get_structure()
struct.update(self.__field_types__)
return struct | python | {
"resource": ""
} |
q45457 | SSHClient.connect | train | def connect(self):
"""Connect to host
"""
try:
self.client.connect(self.host, username=self.username,
password=self.password, port=self.port,
pkey=self.pkey, timeout=self.timeout)
except sock_gaierror, ex:
raise Exception("Unknown host '%s'" % self.host)
except sock_error, ex:
raise Exception("Error connecting to host '%s:%s'\n%s" % (self.host, self.port, ex))
except paramiko.AuthenticationException, ex:
msg = "Host is '%s:%s'"
raise Exception("Authentication Error to host '%s'" % self.host)
except paramiko.SSHException, ex:
msg = "General SSH error - %s" % ex
raise Exception(msg) | python | {
"resource": ""
} |
q45458 | SSHClient.exec_command | train | def exec_command(self, command, sudo=False, **kwargs):
"""Wrapper to paramiko.SSHClient.exec_command
"""
channel = self.client.get_transport().open_session()
# stdin = channel.makefile('wb')
stdout = channel.makefile('rb')
stderr = channel.makefile_stderr('rb')
if sudo:
command = 'sudo -S bash -c \'%s\'' % command
else:
command = 'bash -c \'%s\'' % command
logger.debug("Running command %s on '%s'", command, self.host)
channel.exec_command(command, **kwargs)
while not (channel.recv_ready() or channel.closed or
channel.exit_status_ready()):
time.sleep(.2)
ret = {'stdout': stdout.read().strip(), 'stderr': stderr.read().strip(),
'exit_code': channel.recv_exit_status()}
return ret | python | {
"resource": ""
} |
q45459 | SSHClient.make_sftp | train | def make_sftp(self):
"""Make SFTP client from open transport"""
transport = self.client.get_transport()
transport.open_session()
return paramiko.SFTPClient.from_transport(transport) | python | {
"resource": ""
} |
q45460 | Version.str_to_time | train | def str_to_time(self):
"""
Formats a XCCDF dateTime string to a datetime object.
:returns: datetime object.
:rtype: datetime.datetime
"""
return datetime(*list(map(int, re.split(r'-|:|T', self.time)))) | python | {
"resource": ""
} |
q45461 | Version.update_xml_element | train | def update_xml_element(self):
"""
Updates the xml element contents to matches the instance contents
:returns: Updated XML element
:rtype: lxml.etree._Element
"""
if not hasattr(self, 'xml_element'):
self.xml_element = etree.Element(self.name, nsmap=NSMAP)
if hasattr(self, 'time'):
self.xml_element.set('time', self.time_to_str())
if hasattr(self, 'update'):
self.xml_element.set('update', str(self.update))
self.xml_element.text = self.text
return self.xml_element | python | {
"resource": ""
} |
q45462 | get_base_modules | train | def get_base_modules():
" Get list of installed modules. "
return sorted(filter(
lambda x: op.isdir(op.join(MOD_DIR, x)),
listdir(MOD_DIR))) | python | {
"resource": ""
} |
q45463 | print_header | train | def print_header(msg, sep='='):
" More strong message "
LOGGER.info("\n%s\n%s" % (msg, ''.join(sep for _ in msg))) | python | {
"resource": ""
} |
q45464 | which | train | def which(program):
" Check program is exists. "
head, _ = op.split(program)
if head:
if is_exe(program):
return program
else:
for path in environ["PATH"].split(pathsep):
exe_file = op.join(path, program)
if is_exe(exe_file):
return exe_file
return None | python | {
"resource": ""
} |
q45465 | call | train | def call(cmd, shell=True, **kwargs):
" Run shell command. "
LOGGER.debug("Cmd: %s" % cmd)
check_call(cmd, shell=shell, stdout=LOGFILE_HANDLER.stream, **kwargs) | python | {
"resource": ""
} |
q45466 | gen_template_files | train | def gen_template_files(path):
" Generate relative template pathes. "
path = path.rstrip(op.sep)
for root, _, files in walk(path):
for f in filter(lambda x: not x in (TPLNAME, CFGNAME), files):
yield op.relpath(op.join(root, f), path) | python | {
"resource": ""
} |
q45467 | get_config | train | def get_config( config_path=CONFIG_PATH ):
"""
Get the config
"""
parser = SafeConfigParser()
parser.read( config_path )
config_dir = os.path.dirname(config_path)
immutable_key = False
key_id = None
blockchain_id = None
hostname = socket.gethostname()
wallet = None
if parser.has_section('blockstack-file'):
if parser.has_option('blockstack-file', 'immutable_key'):
immutable_key = parser.get('blockstack-file', 'immutable_key')
if immutable_key.lower() in ['1', 'yes', 'true']:
immutable_key = True
else:
immutable_key = False
if parser.has_option('blockstack-file', 'file_id'):
key_id = parser.get('blockstack-file', 'key_id' )
if parser.has_option('blockstack-file', 'blockchain_id'):
blockchain_id = parser.get('blockstack-file', 'blockchain_id')
if parser.has_option('blockstack-file', 'hostname'):
hostname = parser.get('blockstack-file', 'hostname')
if parser.has_option('blockstack-file', 'wallet'):
wallet = parser.get('blockstack-file', 'wallet')
config = {
'immutable_key': immutable_key,
'key_id': key_id,
'blockchain_id': blockchain_id,
'hostname': hostname,
'wallet': wallet
}
return config | python | {
"resource": ""
} |
q45468 | file_key_lookup | train | def file_key_lookup( blockchain_id, index, hostname, key_id=None, config_path=CONFIG_PATH, wallet_keys=None ):
"""
Get the file-encryption GPG key for the given blockchain ID, by index.
if index == 0, then give back the current key
if index > 0, then give back an older (revoked) key.
if key_id is given, index and hostname will be ignored
Return {'status': True, 'key_data': ..., 'key_id': key_id, OPTIONAL['stale_key_index': idx]} on success
Return {'error': ...} on failure
"""
log.debug("lookup '%s' key for %s (index %s, key_id = %s)" % (hostname, blockchain_id, index, key_id))
conf = get_config( config_path )
config_dir = os.path.dirname(config_path)
proxy = blockstack_client.get_default_proxy( config_path=config_path )
immutable = conf['immutable_key']
if key_id is not None:
# we know exactly which key to get
# try each current key
hosts_listing = file_list_hosts( blockchain_id, wallet_keys=wallet_keys, config_path=config_path )
if 'error' in hosts_listing:
log.error("Failed to list hosts for %s: %s" % (blockchain_id, hosts_listing['error']))
return {'error': 'Failed to look up hosts'}
hosts = hosts_listing['hosts']
for hostname in hosts:
file_key = blockstack_gpg.gpg_app_get_key( blockchain_id, APP_NAME, hostname, immutable=immutable, key_id=key_id, config_dir=config_dir )
if 'error' not in file_key:
if key_id == file_key['key_id']:
# success!
return file_key
# check previous keys...
url = file_url_expired_keys( blockchain_id )
old_key_bundle_res = blockstack_client.data_get( url, wallet_keys=wallet_keys, proxy=proxy )
if 'error' in old_key_bundle_res:
return old_key_bundle_res
old_key_list = old_key_bundle_res['data']['old_keys']
for i in xrange(0, len(old_key_list)):
old_key = old_key_list[i]
if old_key['key_id'] == key_id:
# success!
ret = {}
ret.update( old_key )
ret['stale_key_index'] = i+1
return old_key
return {'error': 'No such key %s' % key_id}
elif index == 0:
file_key = blockstack_gpg.gpg_app_get_key( blockchain_id, APP_NAME, hostname, immutable=immutable, key_id=key_id, config_dir=config_dir )
if 'error' in file_key:
return file_key
return file_key
else:
# get the bundle of revoked keys
url = file_url_expired_keys( blockchain_id )
old_key_bundle_res = blockstack_client.data_get( url, wallet_keys=wallet_keys, proxy=proxy )
if 'error' in old_key_bundle_res:
return old_key_bundle_res
old_key_list = old_key_bundle_res['data']['old_keys']
if index >= len(old_key_list)+1:
return {'error': 'Index out of bounds: %s' % index}
return old_key_list[index-1] | python | {
"resource": ""
} |
q45469 | file_key_retire | train | def file_key_retire( blockchain_id, file_key, config_path=CONFIG_PATH, wallet_keys=None ):
"""
Retire the given key. Move it to the head of the old key bundle list
@file_key should be data returned by file_key_lookup
Return {'status': True} on success
Return {'error': ...} on error
"""
config_dir = os.path.dirname(config_path)
url = file_url_expired_keys( blockchain_id )
proxy = blockstack_client.get_default_proxy( config_path=config_path )
old_key_bundle_res = blockstack_client.data_get( url, wallet_keys=wallet_keys, proxy=proxy )
if 'error' in old_key_bundle_res:
log.warn('Failed to get old key bundle: %s' % old_key_bundle_res['error'])
old_key_list = []
else:
old_key_list = old_key_bundle_res['data']['old_keys']
for old_key in old_key_list:
if old_key['key_id'] == file_key['key_id']:
# already present
log.warning("Key %s is already retired" % file_key['key_id'])
return {'status': True}
old_key_list.insert(0, file_key )
res = blockstack_client.data_put( url, {'old_keys': old_key_list}, wallet_keys=wallet_keys, proxy=proxy )
if 'error' in res:
log.error("Failed to append to expired key bundle: %s" % res['error'])
return {'error': 'Failed to append to expired key list'}
return {'status': True} | python | {
"resource": ""
} |
q45470 | file_encrypt | train | def file_encrypt( blockchain_id, hostname, recipient_blockchain_id_and_hosts, input_path, output_path, passphrase=None, config_path=CONFIG_PATH, wallet_keys=None ):
"""
Encrypt a file for a set of recipients.
@recipient_blockchain_id_and_hosts must contain a list of (blockchain_id, hostname)
Return {'status': True, 'sender_key_id': ...} on success, and write ciphertext to output_path
Return {'error': ...} on error
"""
config_dir = os.path.dirname(config_path)
# find our encryption key
key_info = file_key_lookup( blockchain_id, 0, hostname, config_path=config_path, wallet_keys=wallet_keys )
if 'error' in key_info:
return {'error': 'Failed to lookup encryption key'}
# find the encryption key IDs for the recipients
recipient_keys = []
for (recipient_id, recipient_hostname) in recipient_blockchain_id_and_hosts:
if recipient_id == blockchain_id and recipient_hostname == hostname:
# already have it
recipient_keys.append(key_info)
continue
recipient_info = file_key_lookup( recipient_id, 0, recipient_hostname, config_path=config_path, wallet_keys=wallet_keys )
if 'error' in recipient_info:
return {'error': "Failed to look up key for '%s'" % recipient_id}
recipient_keys.append(recipient_info)
# encrypt
res = None
with open(input_path, "r") as f:
res = blockstack_gpg.gpg_encrypt( f, output_path, key_info, recipient_keys, passphrase=passphrase, config_dir=config_dir )
if 'error' in res:
log.error("Failed to encrypt: %s" % res['error'])
return {'error': 'Failed to encrypt'}
return {'status': True, 'sender_key_id': key_info['key_id']} | python | {
"resource": ""
} |
q45471 | file_sign | train | def file_sign( blockchain_id, hostname, input_path, passphrase=None, config_path=CONFIG_PATH, wallet_keys=None ):
"""
Sign a file with the current blockchain ID's host's public key.
@config_path should be for the *client*, not blockstack-file
Return {'status': True, 'sender_key_id': ..., 'sig': ...} on success, and write ciphertext to output_path
Return {'error': ...} on error
"""
config_dir = os.path.dirname(config_path)
# find our encryption key
key_info = file_key_lookup( blockchain_id, 0, hostname, config_path=config_path, wallet_keys=wallet_keys )
if 'error' in key_info:
return {'error': 'Failed to lookup encryption key'}
# sign
res = blockstack_gpg.gpg_sign( input_path, key_info, config_dir=config_dir )
if 'error' in res:
log.error("Failed to encrypt: %s" % res['error'])
return {'error': 'Failed to encrypt'}
return {'status': True, 'sender_key_id': key_info['key_id'], 'sig': res['sig']} | python | {
"resource": ""
} |
q45472 | file_verify | train | def file_verify( sender_blockchain_id, sender_key_id, input_path, sig, config_path=CONFIG_PATH, wallet_keys=None ):
"""
Verify that a file was signed with the given blockchain ID
@config_path should be for the *client*, not blockstack-file
Return {'status': True} on succes
Return {'error': ...} on error
"""
config_dir = os.path.dirname(config_path)
old_key = False
old_key_index = 0
sender_old_key_index = 0
# get the sender key
sender_key_info = file_key_lookup( sender_blockchain_id, None, None, key_id=sender_key_id, config_path=config_path, wallet_keys=wallet_keys )
if 'error' in sender_key_info:
log.error("Failed to look up sender key: %s" % sender_key_info['error'])
return {'error': 'Failed to lookup sender key'}
if 'stale_key_index' in sender_key_info.keys():
old_key = True
sender_old_key_index = sender_key_info['sender_key_index']
# attempt to verify
res = blockstack_gpg.gpg_verify( input_path, sig, sender_key_info, config_dir=config_dir )
if 'error' in res:
log.error("Failed to verify from %s.%s" % (sender_blockchain_id, sender_key_id))
return {'error': 'Failed to verify'}
return {'status': True} | python | {
"resource": ""
} |
q45473 | sanitize_latex | train | def sanitize_latex(string):
"""
Sanitize a string for input to LaTeX.
Replacements taken from `Stack Overflow
<http://stackoverflow.com/questions/2627135/how-do-i-sanitize-latex-input>`_
**Parameters**
string: str
**Returns**
sanitized_string: str
"""
sanitized_string = string
for old, new in _latex_replacements:
sanitized_string = sanitized_string.replace(old, new)
return sanitized_string | python | {
"resource": ""
} |
q45474 | Conneg.get_renderers | train | def get_renderers(self, request, context=None, template_name=None,
accept_header=None, formats=None, default_format=None, fallback_formats=None,
early=False):
"""
Returns a list of renderer functions in the order they should be tried.
Tries the format override parameter first, then the Accept header. If
neither is present, attempt to fall back to self._default_format. If
a fallback format has been specified, we try that last.
If early is true, don't test renderers to see whether they can handle
a serialization. This is useful if we're trying to find all relevant
serializers before we've built a context which they will accept.
"""
if formats:
renderers, seen_formats = [], set()
for format in formats:
if format in self.renderers_by_format and format not in seen_formats:
renderers.extend(self.renderers_by_format[format])
seen_formats.add(format)
elif accept_header:
accepts = MediaType.parse_accept_header(accept_header)
renderers = MediaType.resolve(accepts, self.renderers)
elif default_format:
renderers = self.renderers_by_format[default_format]
else:
renderers = []
fallback_formats = fallback_formats if isinstance(fallback_formats, (list, tuple)) else (fallback_formats,)
for format in fallback_formats:
for renderer in self.renderers_by_format[format]:
if renderer not in renderers:
renderers.append(renderer)
if not early and context is not None and template_name:
renderers = [r for r in renderers if r.test(request, context, template_name)]
return renderers | python | {
"resource": ""
} |
q45475 | set_secret_key | train | def set_secret_key(token):
"""
Initializes a Authentication and sets it as the new default global authentication.
It also performs some checks before saving the authentication.
:Example
>>> # Expected format for secret key:
>>> import payplug
>>> payplug.set_secret_key('sk_test_somerandomcharacters')
:param token: your secret token (live or sandbox)
:type token: string
"""
if not isinstance(token, string_types):
raise exceptions.ConfigurationError('Expected string value for token.')
config.secret_key = token | python | {
"resource": ""
} |
q45476 | Payment.retrieve | train | def retrieve(payment_id):
"""
Retrieve a payment from its id.
:param payment_id: The payment id
:type payment_id: string
:return: The payment resource
:rtype: resources.Payment
"""
http_client = HttpClient()
response, __ = http_client.get(routes.url(routes.PAYMENT_RESOURCE, resource_id=payment_id))
return resources.Payment(**response) | python | {
"resource": ""
} |
q45477 | Payment.abort | train | def abort(payment):
"""
Abort a payment from its id.
:param payment: The payment id or payment object
:type payment: string|Payment
:return: The payment resource
:rtype: resources.Payment
"""
if isinstance(payment, resources.Payment):
payment = payment.id
http_client = HttpClient()
response, __ = http_client.patch(routes.url(routes.PAYMENT_RESOURCE, resource_id=payment), {'abort': True})
return resources.Payment(**response) | python | {
"resource": ""
} |
q45478 | Payment.create | train | def create(**data):
"""
Create a Payment request.
:param data: data required to create the payment
:return: The payment resource
:rtype resources.Payment
"""
http_client = HttpClient()
response, _ = http_client.post(routes.url(routes.PAYMENT_RESOURCE), data)
return resources.Payment(**response) | python | {
"resource": ""
} |
q45479 | Payment.list | train | def list(per_page=None, page=None):
"""
List of payments. You have to handle pagination manually
:param page: the page number
:type page: int|None
:param per_page: number of payment per page. It's a good practice to increase this number if you know that you
will need a lot of payments.
:type per_page: int|None
:return A collection of payment
:rtype resources.APIResourceCollection
"""
# Comprehension dict are not supported in Python 2.6-. You can use this commented line instead of the current
# line when you drop support for Python 2.6.
# pagination = {key: value for (key, value) in [('page', page), ('per_page', per_page)] if value}
pagination = dict((key, value) for (key, value) in [('page', page), ('per_page', per_page)] if value)
http_client = HttpClient()
response, _ = http_client.get(routes.url(routes.PAYMENT_RESOURCE, pagination=pagination))
return resources.APIResourceCollection(resources.Payment, **response) | python | {
"resource": ""
} |
q45480 | Refund.retrieve | train | def retrieve(payment, refund_id):
"""
Retrieve a refund from a payment and the refund id.
:param payment: The payment id or the payment object
:type payment: resources.Payment|string
:param refund_id: The refund id
:type refund_id: string
:return: The refund resource
:rtype: resources.Refund
"""
if isinstance(payment, resources.Payment):
payment = payment.id
http_client = HttpClient()
response, _ = http_client.get(routes.url(routes.REFUND_RESOURCE, resource_id=refund_id, payment_id=payment))
return resources.Refund(**response) | python | {
"resource": ""
} |
q45481 | Refund.create | train | def create(payment, **data):
"""
Create a refund on a payment.
:param payment: Either the payment object or the payment id you want to refund.
:type payment: resources.Payment|string
:param data: data required to create the refund
:return: The refund resource
:rtype resources.Refund
"""
if isinstance(payment, resources.Payment):
payment = payment.id
http_client = HttpClient()
response, _ = http_client.post(routes.url(routes.REFUND_RESOURCE, payment_id=payment), data)
return resources.Refund(**response) | python | {
"resource": ""
} |
q45482 | Refund.list | train | def list(payment):
"""
List all the refunds for a payment.
:param payment: The payment object or the payment id
:type payment: resources.Payment|string
:return: A collection of refunds
:rtype resources.APIResourceCollection
"""
if isinstance(payment, resources.Payment):
payment = payment.id
http_client = HttpClient()
response, _ = http_client.get(routes.url(routes.REFUND_RESOURCE, payment_id=payment))
return resources.APIResourceCollection(resources.Refund, **response) | python | {
"resource": ""
} |
q45483 | Customer.retrieve | train | def retrieve(customer_id):
"""
Retrieve a customer from its id.
:param customer_id: The customer id
:type customer_id: string
:return: The customer resource
:rtype: resources.Customer
"""
http_client = HttpClient()
response, __ = http_client.get(routes.url(routes.CUSTOMER_RESOURCE, resource_id=customer_id))
return resources.Customer(**response) | python | {
"resource": ""
} |
q45484 | Customer.delete | train | def delete(customer):
"""
Delete a customer from its id.
:param customer: The customer id or object
:type customer: string|Customer
"""
if isinstance(customer, resources.Customer):
customer = customer.id
http_client = HttpClient()
http_client.delete(routes.url(routes.CUSTOMER_RESOURCE, resource_id=customer)) | python | {
"resource": ""
} |
q45485 | Customer.update | train | def update(customer, **data):
"""
Update a customer from its id.
:param customer: The customer id or object
:type customer: string|Customer
:param data: The data you want to update
:return: The customer resource
:rtype resources.Customer
"""
if isinstance(customer, resources.Customer):
customer = customer.id
http_client = HttpClient()
response, _ = http_client.patch(routes.url(routes.CUSTOMER_RESOURCE, resource_id=customer), data)
return resources.Customer(**response) | python | {
"resource": ""
} |
q45486 | Customer.create | train | def create(**data):
"""
Create a customer.
:param data: data required to create the customer
:return: The customer resource
:rtype resources.Customer
"""
http_client = HttpClient()
response, _ = http_client.post(routes.url(routes.CUSTOMER_RESOURCE), data)
return resources.Customer(**response) | python | {
"resource": ""
} |
q45487 | Card.retrieve | train | def retrieve(customer, card_id):
"""
Retrieve a card from its id.
:param customer: The customer id or object
:type customer: string|Customer
:param card_id: The card id
:type card_id: string
:return: The customer resource
:rtype: resources.Card
"""
if isinstance(customer, resources.Customer):
customer = customer.id
http_client = HttpClient()
response, __ = http_client.get(routes.url(routes.CARD_RESOURCE, resource_id=card_id, customer_id=customer))
return resources.Card(**response) | python | {
"resource": ""
} |
q45488 | Card.delete | train | def delete(customer, card):
"""
Delete a card from its id.
:param customer: The customer id or object
:type customer: string|Customer
:param card: The card id or object
:type card: string|Card
"""
if isinstance(customer, resources.Customer):
customer = customer.id
if isinstance(card, resources.Card):
card = card.id
http_client = HttpClient()
http_client.delete(routes.url(routes.CARD_RESOURCE, resource_id=card, customer_id=customer)) | python | {
"resource": ""
} |
q45489 | Card.create | train | def create(customer, **data):
"""
Create a card instance.
:param customer: the customer id or object
:type customer: string|Customer
:param data: data required to create the card
:return: The card resource
:rtype resources.Card
"""
if isinstance(customer, resources.Customer):
customer = customer.id
http_client = HttpClient()
response, _ = http_client.post(routes.url(routes.CARD_RESOURCE, customer_id=customer), data)
return resources.Card(**response) | python | {
"resource": ""
} |
q45490 | Card.list | train | def list(customer, per_page=None, page=None):
"""
List of cards. You have to handle pagination manually for the moment.
:param customer: the customer id or object
:type customer: string|Customer
:param page: the page number
:type page: int|None
:param per_page: number of customers per page. It's a good practice to increase this number if you know that you
will need a lot of payments.
:type per_page: int|None
:return A collection of cards
:rtype resources.APIResourceCollection
"""
if isinstance(customer, resources.Customer):
customer = customer.id
# Comprehension dict are not supported in Python 2.6-. You can use this commented line instead of the current
# line when you drop support for Python 2.6.
# pagination = {key: value for (key, value) in [('page', page), ('per_page', per_page)] if value}
pagination = dict((key, value) for (key, value) in [('page', page), ('per_page', per_page)] if value)
http_client = HttpClient()
response, _ = http_client.get(routes.url(routes.CARD_RESOURCE, customer_id=customer, pagination=pagination))
return resources.APIResourceCollection(resources.Card, **response) | python | {
"resource": ""
} |
q45491 | IsoDateTimeField.strptime | train | def strptime(self, value, format):
"""
By default, parse datetime with TZ.
If TZ is False, convert datetime to local time and disable TZ
"""
value = force_str(value)
if format == ISO_8601:
try:
parsed = parse_datetime(value)
if not settings.USE_TZ:
fr_tz = pytz.timezone(settings.TIME_ZONE)
parsed = parsed.astimezone(fr_tz).replace(tzinfo=None)
except:
raise APIException(
"date parsing error: since parameter use the date format ISO 8601 (ex: 2014-11-18T15:56:58Z)")
if parsed is None:
raise APIException(
"since parameter use the date format ISO 8601 (ex: 2014-11-18T15:56:58Z)")
return parsed
return super(IsoDateTimeField, self).strptime(value, format) | python | {
"resource": ""
} |
q45492 | find_data_files | train | def find_data_files(source, target, patterns):
"""
Locates the specified data-files and returns the matches
in a data_files compatible format.
source is the root of the source data tree.
Use '' or '.' for current directory.
target is the root of the target data tree.
Use '' or '.' for the distribution directory.
patterns is a sequence of glob-patterns for the
files you want to copy.
"""
if glob.has_magic(source) or glob.has_magic(target):
raise ValueError("Magic not allowed in src, target")
ret = {}
for pattern in patterns:
pattern = os.path.join(source, pattern)
for filename in glob.glob(pattern):
if os.path.isfile(filename):
targetpath = os.path.join(
target, os.path.relpath(filename, source)
)
path = os.path.dirname(targetpath)
ret.setdefault(path, []).append(filename)
return sorted(ret.items()) | python | {
"resource": ""
} |
q45493 | Entity.cache | train | def cache(self):
"""Query or return the Graph API representation of this resource."""
if not self._cache:
self._cache = self.graph.get('%s' % self.id)
return self._cache | python | {
"resource": ""
} |
q45494 | capakey_rest_gateway_request | train | def capakey_rest_gateway_request(url, headers={}, params={}):
'''
Utility function that helps making requests to the CAPAKEY REST service.
:param string url: URL to request.
:param dict headers: Headers to send with the URL.
:param dict params: Parameters to send with the URL.
:returns: Result of the call.
'''
try:
res = requests.get(url, headers=headers, params=params)
res.raise_for_status()
return res
except requests.ConnectionError as ce:
raise GatewayRuntimeException(
'Could not execute request due to connection problems:\n%s' % repr(ce),
ce
)
except requests.HTTPError as he:
raise GatewayResourceNotFoundException()
except requests.RequestException as re:
raise GatewayRuntimeException(
'Could not execute request due to:\n%s' % repr(re),
re
) | python | {
"resource": ""
} |
q45495 | CapakeyRestGateway.list_gemeenten | train | def list_gemeenten(self, sort=1):
'''
List all `gemeenten` in Vlaanderen.
:param integer sort: What field to sort on.
:rtype: A :class:`list` of :class:`Gemeente`.
'''
def creator():
url = self.base_url + '/municipality'
h = self.base_headers
p = {
'orderbyCode': sort == 1
}
res = capakey_rest_gateway_request(url, h, p).json()
return [
Gemeente(r['municipalityCode'], r['municipalityName'])
for r in res['municipalities']
]
if self.caches['permanent'].is_configured:
key = 'list_gemeenten_rest#%s' % sort
gemeente = self.caches['permanent'].get_or_create(key, creator)
else:
gemeente = creator()
for g in gemeente:
g.set_gateway(self)
return gemeente | python | {
"resource": ""
} |
q45496 | CapakeyRestGateway.list_kadastrale_afdelingen | train | def list_kadastrale_afdelingen(self):
'''
List all `kadastrale afdelingen` in Flanders.
:param integer sort: Field to sort on.
:rtype: A :class:`list` of :class:`Afdeling`.
'''
def creator():
gemeentes = self.list_gemeenten()
res = []
for g in gemeentes:
res += self.list_kadastrale_afdelingen_by_gemeente(g)
return res
if self.caches['permanent'].is_configured:
key = 'list_afdelingen_rest'
afdelingen = self.caches['permanent'].get_or_create(key, creator)
else:
afdelingen = creator()
return afdelingen | python | {
"resource": ""
} |
q45497 | CapakeyRestGateway.list_kadastrale_afdelingen_by_gemeente | train | def list_kadastrale_afdelingen_by_gemeente(self, gemeente, sort=1):
'''
List all `kadastrale afdelingen` in a `gemeente`.
:param gemeente: The :class:`Gemeente` for which the \
`afdelingen` are wanted.
:param integer sort: Field to sort on.
:rtype: A :class:`list` of :class:`Afdeling`.
'''
try:
gid = gemeente.id
except AttributeError:
gid = gemeente
gemeente = self.get_gemeente_by_id(gid)
gemeente.clear_gateway()
def creator():
url = self.base_url + '/municipality/%s/department' % gid
h = self.base_headers
p = {
'orderbyCode': sort == 1
}
res = capakey_rest_gateway_request(url, h, p).json()
return [
Afdeling(
id=r['departmentCode'],
naam=r['departmentName'],
gemeente=gemeente
) for r in res['departments']]
if self.caches['permanent'].is_configured:
key = 'list_kadastrale_afdelingen_by_gemeente_rest#%s#%s' % (gid, sort)
afdelingen = self.caches['permanent'].get_or_create(key, creator)
else:
afdelingen = creator()
for a in afdelingen:
a.set_gateway(self)
return afdelingen | python | {
"resource": ""
} |
q45498 | CapakeyRestGateway.get_kadastrale_afdeling_by_id | train | def get_kadastrale_afdeling_by_id(self, aid):
'''
Retrieve a 'kadastrale afdeling' by id.
:param aid: An id of a `kadastrale afdeling`.
:rtype: A :class:`Afdeling`.
'''
def creator():
url = self.base_url + '/department/%s' % (aid)
h = self.base_headers
p = {
'geometry': 'full',
'srs': '31370'
}
res = capakey_rest_gateway_request(url, h, p).json()
return Afdeling(
id=res['departmentCode'],
naam=res['departmentName'],
gemeente=Gemeente(res['municipalityCode'], res['municipalityName']),
centroid=self._parse_centroid(res['geometry']['center']),
bounding_box=self._parse_bounding_box(res['geometry']['boundingBox']),
shape=res['geometry']['shape']
)
if self.caches['long'].is_configured:
key = 'get_kadastrale_afdeling_by_id_rest#%s' % aid
afdeling = self.caches['long'].get_or_create(key, creator)
else:
afdeling = creator()
afdeling.set_gateway(self)
return afdeling | python | {
"resource": ""
} |
q45499 | CapakeyRestGateway.list_secties_by_afdeling | train | def list_secties_by_afdeling(self, afdeling):
'''
List all `secties` in a `kadastrale afdeling`.
:param afdeling: The :class:`Afdeling` for which the `secties` are \
wanted. Can also be the id of and `afdeling`.
:rtype: A :class:`list` of `Sectie`.
'''
try:
aid = afdeling.id
gid = afdeling.gemeente.id
except AttributeError:
aid = afdeling
afdeling = self.get_kadastrale_afdeling_by_id(aid)
gid = afdeling.gemeente.id
afdeling.clear_gateway()
def creator():
url = self.base_url + '/municipality/%s/department/%s/section' % (gid, aid)
h = self.base_headers
res = capakey_rest_gateway_request(url, h).json()
return [
Sectie(
r['sectionCode'],
afdeling
) for r in res['sections']
]
if self.caches['long'].is_configured:
key = 'list_secties_by_afdeling_rest#%s' % aid
secties = self.caches['long'].get_or_create(key, creator)
else:
secties = creator()
for s in secties:
s.set_gateway(self)
return secties | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.