_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q38600 | harvest_fundref | train | def harvest_fundref(source=None):
"""Harvest funders from FundRef and store as authority records."""
loader = LocalFundRefLoader(source=source) if source \
else RemoteFundRefLoader()
for funder_json in loader.iter_funders():
register_funder.delay(funder_json) | python | {
"resource": ""
} |
q38601 | harvest_openaire_projects | train | def harvest_openaire_projects(source=None, setspec=None):
"""Harvest grants from OpenAIRE and store as authority records."""
loader = LocalOAIRELoader(source=source) if source \
else RemoteOAIRELoader(setspec=setspec)
for grant_json in loader.iter_grants():
register_grant.delay(grant_json) | python | {
"resource": ""
} |
q38602 | harvest_all_openaire_projects | train | def harvest_all_openaire_projects():
"""Reharvest all grants from OpenAIRE.
Harvest all OpenAIRE grants in a chain to prevent OpenAIRE
overloading from multiple parallel harvesting.
"""
setspecs = current_app.config['OPENAIRE_GRANTS_SPECS']
chain(harvest_openaire_projects.s(setspec=setspec)
for setspec in setspecs).apply_async() | python | {
"resource": ""
} |
q38603 | create_or_update_record | train | def create_or_update_record(data, pid_type, id_key, minter):
"""Register a funder or grant."""
resolver = Resolver(
pid_type=pid_type, object_type='rec', getter=Record.get_record)
try:
pid, record = resolver.resolve(data[id_key])
data_c = deepcopy(data)
del data_c['remote_modified']
record_c = deepcopy(record)
del record_c['remote_modified']
# All grants on OpenAIRE are modified periodically even if nothing
# has changed. We need to check for actual differences in the metadata
if data_c != record_c:
record.update(data)
record.commit()
record_id = record.id
db.session.commit()
RecordIndexer().index_by_id(str(record_id))
except PIDDoesNotExistError:
record = Record.create(data)
record_id = record.id
minter(record.id, data)
db.session.commit()
RecordIndexer().index_by_id(str(record_id)) | python | {
"resource": ""
} |
q38604 | IS | train | def IS(instance, other): # noqa
"""
Support the `future is other` use-case.
Can't override the language so we built a function.
Will work on non-future objects too.
:param instance: future or any python object
:param other: object to compare.
:return:
"""
try:
instance = instance._redpipe_future_result # noqa
except AttributeError:
pass
try:
other = other._redpipe_future_result
except AttributeError:
pass
return instance is other | python | {
"resource": ""
} |
q38605 | ISINSTANCE | train | def ISINSTANCE(instance, A_tuple): # noqa
"""
Allows you to do isinstance checks on futures.
Really, I discourage this because duck-typing is usually better.
But this can provide you with a way to use isinstance with futures.
Works with other objects too.
:param instance:
:param A_tuple:
:return:
"""
try:
instance = instance._redpipe_future_result
except AttributeError:
pass
return isinstance(instance, A_tuple) | python | {
"resource": ""
} |
q38606 | suspendJustTabProviders | train | def suspendJustTabProviders(installation):
"""
Replace INavigableElements with facades that indicate their suspension.
"""
if installation.suspended:
raise RuntimeError("Installation already suspended")
powerups = list(installation.allPowerups)
for p in powerups:
if INavigableElement.providedBy(p):
p.store.powerDown(p, INavigableElement)
sne = SuspendedNavigableElement(store=p.store, originalNE=p)
p.store.powerUp(sne, INavigableElement)
p.store.powerUp(sne, ISuspender)
installation.suspended = True | python | {
"resource": ""
} |
q38607 | unsuspendTabProviders | train | def unsuspendTabProviders(installation):
"""
Remove suspension facades and replace them with their originals.
"""
if not installation.suspended:
raise RuntimeError("Installation not suspended")
powerups = list(installation.allPowerups)
allSNEs = list(powerups[0].store.powerupsFor(ISuspender))
for p in powerups:
for sne in allSNEs:
if sne.originalNE is p:
p.store.powerDown(sne, INavigableElement)
p.store.powerDown(sne, ISuspender)
p.store.powerUp(p, INavigableElement)
sne.deleteFromStore()
installation.suspended = False | python | {
"resource": ""
} |
q38608 | BatFrame.rows | train | def rows(self):
"""Returns a numpy array of the rows name"""
bf = self.copy()
result = bf.query.executeQuery(format="soa")
return result["_rowName"] | python | {
"resource": ""
} |
q38609 | Column.head | train | def head(self, n=5):
"""Returns first n rows"""
col = self.copy()
col.query.setLIMIT(n)
return col.toPandas() | python | {
"resource": ""
} |
q38610 | write_tsv | train | def write_tsv(output_stream, *tup, **kwargs):
"""
Write argument list in `tup` out as a tab-separeated row to the stream.
"""
encoding = kwargs.get('encoding') or 'utf-8'
value = '\t'.join([s for s in tup]) + '\n'
output_stream.write(value.encode(encoding)) | python | {
"resource": ""
} |
q38611 | iter_tsv | train | def iter_tsv(input_stream, cols=None, encoding='utf-8'):
"""
If a tuple is given in cols, use the elements as names to construct
a namedtuple.
Columns can be marked as ignored by using ``X`` or ``0`` as column name.
Example (ignore the first four columns of a five column TSV):
::
def run(self):
with self.input().open() as handle:
for row in handle.iter_tsv(cols=('X', 'X', 'X', 'X', 'iln')):
print(row.iln)
"""
if cols:
cols = [c if not c in ('x', 'X', 0, None) else random_string(length=5)
for c in cols]
Record = collections.namedtuple('Record', cols)
for line in input_stream:
yield Record._make(line.decode(encoding).rstrip('\n').split('\t'))
else:
for line in input_stream:
yield tuple(line.decode(encoding).rstrip('\n').split('\t')) | python | {
"resource": ""
} |
q38612 | is_authenticode_signed | train | def is_authenticode_signed(filename):
"""Returns True if the file is signed with authenticode"""
with open(filename, 'rb') as fp:
fp.seek(0)
magic = fp.read(2)
if magic != b'MZ':
return False
# First grab the pointer to the coff_header, which is at offset 60
fp.seek(60)
coff_header_offset = struct.unpack('<L', fp.read(4))[0]
# Check the COFF magic
fp.seek(coff_header_offset)
magic = fp.read(4)
if magic != b'PE\x00\x00':
return False
# Get the PE type
fp.seek(coff_header_offset + 0x18)
pe_type = struct.unpack('<h', fp.read(2))[0]
if pe_type == 0x10b:
# PE32 file (32-bit apps)
number_of_data_dirs_offset = coff_header_offset + 0x74
elif pe_type == 0x20b:
# PE32+ files (64-bit apps)
# PE32+ files have slightly larger fields in the header
number_of_data_dirs_offset = coff_header_offset + 0x74 + 16
else:
return False
fp.seek(number_of_data_dirs_offset)
num_data_dirs = struct.unpack('<L', fp.read(4))[0]
if num_data_dirs < 5:
# Probably shouldn't happen, but just in case
return False
cert_table_offset = number_of_data_dirs_offset + 4*8 + 4
fp.seek(cert_table_offset)
addr, size = struct.unpack('<LL', fp.read(8))
if not addr or not size:
return False
# Check that addr is inside the file
fp.seek(addr)
if fp.tell() != addr:
return False
cert = fp.read(size)
if len(cert) != size:
return False
return True | python | {
"resource": ""
} |
q38613 | _getCampaignDict | train | def _getCampaignDict():
"""Returns a dictionary specifying the details of all campaigns."""
global _campaign_dict_cache
if _campaign_dict_cache is None:
# All pointing parameters and dates are stored in a JSON file
fn = os.path.join(PACKAGEDIR, "data", "k2-campaign-parameters.json")
_campaign_dict_cache = json.load(open(fn))
return _campaign_dict_cache | python | {
"resource": ""
} |
q38614 | getFieldInfo | train | def getFieldInfo(fieldnum):
"""Returns a dictionary containing the metadata of a K2 Campaign field.
Raises a ValueError if the field number is unknown.
Parameters
----------
fieldnum : int
Campaign field number (e.g. 0, 1, 2, ...)
Returns
-------
field : dict
The dictionary contains the keys
'ra', 'dec', 'roll' (floats in decimal degrees),
'start', 'stop', (strings in YYYY-MM-DD format)
and 'comments' (free text).
"""
try:
info = _getCampaignDict()["c{0}".format(fieldnum)]
# Print warning messages if necessary
if "preliminary" in info and info["preliminary"] == "True":
logger.warning("Warning: the position of field {0} is preliminary. "
"Do not use this position for your final "
"target selection!".format(fieldnum))
return info
except KeyError:
raise ValueError("Field {0} not set in this version "
"of the code".format(fieldnum)) | python | {
"resource": ""
} |
q38615 | getKeplerFov | train | def getKeplerFov(fieldnum):
"""Returns a `fov.KeplerFov` object for a given campaign.
Parameters
----------
fieldnum : int
K2 Campaign number.
Returns
-------
fovobj : `fov.KeplerFov` object
Details the footprint of the requested K2 campaign.
"""
info = getFieldInfo(fieldnum)
ra, dec, scRoll = info["ra"], info["dec"], info["roll"]
# convert from SC roll to FOV coordinates
# do not use the fovRoll coords anywhere else
# they are internal to this script only
fovRoll = fov.getFovAngleFromSpacecraftRoll(scRoll)
# KeplerFov takes a listen of broken CCD channels as optional argument;
# these channels will be ignored during plotting and on-silicon determination.
# Modules 3 and 7 broke prior to the start of K2:
brokenChannels = [5, 6, 7, 8, 17, 18, 19, 20]
# Module 4 failed during Campaign 10
if fieldnum > 10:
brokenChannels.extend([9, 10, 11, 12])
# Hack: the Kepler field is defined as "Campaign 1000"
# and (initially) had no broken channels
if fieldnum == 1000:
brokenChannels = []
return fov.KeplerFov(ra, dec, fovRoll, brokenChannels=brokenChannels) | python | {
"resource": ""
} |
q38616 | indexer_receiver | train | def indexer_receiver(sender, json=None, record=None, index=None,
**dummy_kwargs):
"""Connect to before_record_index signal to transform record for ES."""
if index and index.startswith('grants-'):
# Generate suggest field
suggestions = [
json.get('code'),
json.get('acronym'),
json.get('title')
]
json['suggest'] = {
'input': [s for s in suggestions if s],
'output': json['title'],
'context': {
'funder': [json['funder']['doi']]
},
'payload': {
'id': json['internal_id'],
'legacy_id': (json['code'] if json.get('program') == 'FP7'
else json['internal_id']),
'code': json['code'],
'title': json['title'],
'acronym': json.get('acronym'),
'program': json.get('program'),
},
}
elif index and index.startswith('funders-'):
# Generate suggest field
suggestions = json.get('acronyms', []) + [json.get('name')]
json['suggest'] = {
'input': [s for s in suggestions if s],
'output': json['name'],
'payload': {
'id': json['doi']
},
} | python | {
"resource": ""
} |
q38617 | BusApi.get_calendar | train | def get_calendar(self, **kwargs):
"""Obtain EMT calendar for a range of dates.
Args:
start_day (int): Starting day of the month in format DD.
The number is automatically padded if it only has one digit.
start_month (int): Starting month number in format MM.
The number is automatically padded if it only has one digit.
start_year (int): Starting year number in format YYYY.
end_day (int): Ending day of the month in format DD.
The number is automatically padded if it only has one digit.
end_month (int): Ending month number in format MM.
The number is automatically padded if it only has one digit.
end_year (int): Ending year number in format YYYY.
Returns:
Status boolean and parsed response (list[CalendarItem]), or message
string in case of error.
"""
# Endpoint parameters
start_date = util.date_string(
kwargs.get('start_day', '01'),
kwargs.get('start_month', '01'),
kwargs.get('start_year', '1970')
)
end_date = util.date_string(
kwargs.get('end_day', '01'),
kwargs.get('end_month', '01'),
kwargs.get('end_year', '1970')
)
params = {'SelectDateBegin': start_date, 'SelectDateEnd': end_date}
# Request
result = self.make_request('bus', 'get_calendar', **params)
if not util.check_result(result):
return False, result.get('resultDescription', 'UNKNOWN ERROR')
# Parse
values = util.response_list(result, 'resultValues')
return True, [emtype.CalendarItem(**a) for a in values] | python | {
"resource": ""
} |
q38618 | BusApi.get_nodes_lines | train | def get_nodes_lines(self, **kwargs):
"""Obtain stop IDs, coordinates and line information.
Args:
nodes (list[int] | int): nodes to query, may be empty to get
all nodes.
Returns:
Status boolean and parsed response (list[NodeLinesItem]), or message
string in case of error.
"""
# Endpoint parameters
params = {'Nodes': util.ints_to_string(kwargs.get('nodes', []))}
# Request
result = self.make_request('bus', 'get_nodes_lines', **params)
if not util.check_result(result):
return False, result.get('resultDescription', 'UNKNOWN ERROR')
# Parse
values = util.response_list(result, 'resultValues')
return True, [emtype.NodeLinesItem(**a) for a in values] | python | {
"resource": ""
} |
q38619 | MantissaLivePage.beforeRender | train | def beforeRender(self, ctx):
"""
Before rendering, retrieve the hostname from the request being
responded to and generate an URL which will serve as the root for
all JavaScript modules to be loaded.
"""
request = IRequest(ctx)
root = self.webSite.rootURL(request)
self._moduleRoot = root.child('__jsmodule__') | python | {
"resource": ""
} |
q38620 | StaticSite.installSite | train | def installSite(self):
"""
Not using the dependency system for this class because it's only
installed via the command line, and multiple instances can be
installed.
"""
for iface, priority in self.__getPowerupInterfaces__([]):
self.store.powerUp(self, iface, priority) | python | {
"resource": ""
} |
q38621 | StylesheetFactory.makeStylesheetResource | train | def makeStylesheetResource(self, path, registry):
"""
Return a resource for the css at the given path with its urls rewritten
based on self.rootURL.
"""
return StylesheetRewritingResourceWrapper(
File(path), self.installedOfferingNames, self.rootURL) | python | {
"resource": ""
} |
q38622 | StylesheetRewritingRequestWrapper._replace | train | def _replace(self, url):
"""
Change URLs with absolute paths so they are rooted at the correct
location.
"""
segments = url.split('/')
if segments[0] == '':
root = self.rootURL(self.request)
if segments[1] == 'Mantissa':
root = root.child('static').child('mantissa-base')
segments = segments[2:]
elif segments[1] in self.installedOfferingNames:
root = root.child('static').child(segments[1])
segments = segments[2:]
for seg in segments:
root = root.child(seg)
return str(root)
return url | python | {
"resource": ""
} |
q38623 | StylesheetRewritingRequestWrapper.finish | train | def finish(self):
"""
Parse the buffered response body, rewrite its URLs, write the result to
the wrapped request, and finish the wrapped request.
"""
stylesheet = ''.join(self._buffer)
parser = CSSParser()
css = parser.parseString(stylesheet)
replaceUrls(css, self._replace)
self.request.write(css.cssText)
return self.request.finish() | python | {
"resource": ""
} |
q38624 | WebSite.cleartextRoot | train | def cleartextRoot(self, hostname=None):
"""
Return a string representing the HTTP URL which is at the root of this
site.
@param hostname: An optional unicode string which, if specified, will
be used as the hostname in the resulting URL, regardless of the
C{hostname} attribute of this item.
"""
warnings.warn(
"Use ISiteURLGenerator.rootURL instead of WebSite.cleartextRoot.",
category=DeprecationWarning,
stacklevel=2)
if self.store.parent is not None:
generator = ISiteURLGenerator(self.store.parent)
else:
generator = ISiteURLGenerator(self.store)
return generator.cleartextRoot(hostname) | python | {
"resource": ""
} |
q38625 | WebSite.rootURL | train | def rootURL(self, request):
"""
Simple utility function to provide a root URL for this website which is
appropriate to use in links generated in response to the given request.
@type request: L{twisted.web.http.Request}
@param request: The request which is being responded to.
@rtype: L{URL}
@return: The location at which the root of the resource hierarchy for
this website is available.
"""
warnings.warn(
"Use ISiteURLGenerator.rootURL instead of WebSite.rootURL.",
category=DeprecationWarning,
stacklevel=2)
if self.store.parent is not None:
generator = ISiteURLGenerator(self.store.parent)
else:
generator = ISiteURLGenerator(self.store)
return generator.rootURL(request) | python | {
"resource": ""
} |
q38626 | APIKey.getKeyForAPI | train | def getKeyForAPI(cls, siteStore, apiName):
"""
Get the API key for the named API, if one exists.
@param siteStore: The site store.
@type siteStore: L{axiom.store.Store}
@param apiName: The name of the API.
@type apiName: C{unicode} (L{APIKey} constant)
@rtype: L{APIKey} or C{NoneType}
"""
return siteStore.findUnique(
cls, cls.apiName == apiName, default=None) | python | {
"resource": ""
} |
q38627 | APIKey.setKeyForAPI | train | def setKeyForAPI(cls, siteStore, apiName, apiKey):
"""
Set the API key for the named API, overwriting any existing key.
@param siteStore: The site store to install the key in.
@type siteStore: L{axiom.store.Store}
@param apiName: The name of the API.
@type apiName: C{unicode} (L{APIKey} constant)
@param apiKey: The key for accessing the API.
@type apiKey: C{unicode}
@rtype: L{APIKey}
"""
existingKey = cls.getKeyForAPI(siteStore, apiName)
if existingKey is None:
return cls(store=siteStore, apiName=apiName, apiKey=apiKey)
existingKey.apiKey = apiKey
return existingKey | python | {
"resource": ""
} |
q38628 | SiteConfiguration.rootURL | train | def rootURL(self, request):
"""
Return the URL for the root of this website which is appropriate to use
in links generated in response to the given request.
@type request: L{twisted.web.http.Request}
@param request: The request which is being responded to.
@rtype: L{URL}
@return: The location at which the root of the resource hierarchy for
this website is available.
"""
host = request.getHeader('host') or self.hostname
if ':' in host:
host = host.split(':', 1)[0]
for domain in [self.hostname] + getDomainNames(self.store):
if (host == domain or
host.startswith('www.') and host[len('www.'):] == domain):
return URL(scheme='', netloc='', pathsegs=[''])
if request.isSecure():
return self.encryptedRoot(self.hostname)
else:
return self.cleartextRoot(self.hostname) | python | {
"resource": ""
} |
q38629 | UnguardedWrapper.child_static | train | def child_static(self, context):
"""
Serve a container page for static content for Mantissa and other
offerings.
"""
offeringTech = IOfferingTechnician(self.siteStore)
installedOfferings = offeringTech.getInstalledOfferings()
offeringsWithContent = dict([
(offering.name, offering.staticContentPath)
for offering
in installedOfferings.itervalues()
if offering.staticContentPath])
# If you wanted to do CSS rewriting for all CSS files served beneath
# /static/, you could do it by passing a processor for ".css" here.
# eg:
#
# website = IResource(self.store)
# factory = StylesheetFactory(
# offeringsWithContent.keys(), website.rootURL)
# StaticContent(offeringsWithContent, {
# ".css": factory.makeStylesheetResource})
return StaticContent(offeringsWithContent, {}) | python | {
"resource": ""
} |
q38630 | UnguardedWrapper.locateChild | train | def locateChild(self, context, segments):
"""
Return a statically defined child or a child defined by a sessionless
site root plugin or an avatar from guard.
"""
shortcut = getattr(self, 'child_' + segments[0], None)
if shortcut:
res = shortcut(context)
if res is not None:
return res, segments[1:]
req = IRequest(context)
for plg in self.siteStore.powerupsFor(ISessionlessSiteRootPlugin):
spr = getattr(plg, 'sessionlessProduceResource', None)
if spr is not None:
childAndSegments = spr(req, segments)
else:
childAndSegments = plg.resourceFactory(segments)
if childAndSegments is not None:
return childAndSegments
return self.guardedRoot.locateChild(context, segments) | python | {
"resource": ""
} |
q38631 | SecuringWrapper.locateChild | train | def locateChild(self, context, segments):
"""
Unwrap the wrapped resource if HTTPS is already being used, otherwise
wrap it in a helper which will preserve the wrapping all the way down
to the final resource.
"""
request = IRequest(context)
if request.isSecure():
return self.wrappedResource, segments
return _SecureWrapper(self.urlGenerator, self.wrappedResource), segments | python | {
"resource": ""
} |
q38632 | SecuringWrapper.renderHTTP | train | def renderHTTP(self, context):
"""
Render the wrapped resource if HTTPS is already being used, otherwise
invoke a helper which may generate a redirect.
"""
request = IRequest(context)
if request.isSecure():
renderer = self.wrappedResource
else:
renderer = _SecureWrapper(self.urlGenerator, self.wrappedResource)
return renderer.renderHTTP(context) | python | {
"resource": ""
} |
q38633 | _SecureWrapper.locateChild | train | def locateChild(self, context, segments):
"""
Delegate child lookup to the wrapped resource but wrap whatever results
in another instance of this wrapper.
"""
childDeferred = maybeDeferred(
self.wrappedResource.locateChild, context, segments)
def childLocated((resource, segments)):
if (resource, segments) == NotFound:
return NotFound
return _SecureWrapper(self.urlGenerator, resource), segments
childDeferred.addCallback(childLocated)
return childDeferred | python | {
"resource": ""
} |
q38634 | _SecureWrapper.renderHTTP | train | def renderHTTP(self, context):
"""
Check to see if the wrapped resource wants to be rendered over HTTPS
and generate a redirect if this is so, if HTTPS is available, and if
the request is not already over HTTPS.
"""
if getattr(self.wrappedResource, 'needsSecure', False):
request = IRequest(context)
url = self.urlGenerator.encryptedRoot()
if url is not None:
for seg in request.prepath:
url = url.child(seg)
return url
return self.wrappedResource.renderHTTP(context) | python | {
"resource": ""
} |
q38635 | RemoteService.handle_single_request | train | def handle_single_request(self, request_object):
"""
Handles a single request object and returns the raw response
:param request_object:
"""
if not isinstance(request_object, (MethodCall, Notification)):
raise TypeError("Invalid type for request_object")
method_name = request_object.method_name
params = request_object.params
req_id = request_object.id
request_body = self.build_request_body(method_name, params, id=req_id)
http_request = self.build_http_request_obj(request_body)
try:
response = urllib.request.urlopen(http_request)
except urllib.request.HTTPError as e:
raise CalledServiceError(e)
if not req_id:
return
response_body = json.loads(response.read().decode())
return response_body | python | {
"resource": ""
} |
q38636 | RemoteService.notify | train | def notify(self, method_name_or_object, params=None):
"""
Sends a notification to the service by calling the ``method_name``
method with the ``params`` parameters. Does not wait for a response, even
if the response triggers an error.
:param method_name_or_object: the name of the method to be called or a ``Notification``
instance
:param params: a list of dict representing the parameters for the call
:return: None
"""
if isinstance(method_name_or_object, Notification):
req_obj = method_name_or_object
else:
req_obj = Notification(method_name_or_object, params)
self.handle_single_request(req_obj) | python | {
"resource": ""
} |
q38637 | TornadoJsonRpcHandler.call_method | train | def call_method(self, method):
"""
Calls a blocking method in an executor, in order to preserve the non-blocking behaviour
If ``method`` is a coroutine, yields from it and returns, no need to execute in
in an executor.
:param method: The method or coroutine to be called (with no arguments).
:return: the result of the method call
"""
if self._method_is_async_generator(method):
result = yield method()
else:
result = yield self.executor.submit(method)
return result | python | {
"resource": ""
} |
q38638 | cygpath | train | def cygpath(filename):
"""Convert a cygwin path into a windows style path"""
if sys.platform == 'cygwin':
proc = Popen(['cygpath', '-am', filename], stdout=PIPE)
return proc.communicate()[0].strip()
else:
return filename | python | {
"resource": ""
} |
q38639 | convertPath | train | def convertPath(srcpath, dstdir):
"""Given `srcpath`, return a corresponding path within `dstdir`"""
bits = srcpath.split("/")
bits.pop(0)
# Strip out leading 'unsigned' from paths like unsigned/update/win32/...
if bits[0] == 'unsigned':
bits.pop(0)
return os.path.join(dstdir, *bits) | python | {
"resource": ""
} |
q38640 | finddirs | train | def finddirs(root):
"""Return a list of all the directories under `root`"""
retval = []
for root, dirs, files in os.walk(root):
for d in dirs:
retval.append(os.path.join(root, d))
return retval | python | {
"resource": ""
} |
q38641 | ThemeCache._realGetAllThemes | train | def _realGetAllThemes(self):
"""
Collect themes from all available offerings.
"""
l = []
for offering in getOfferings():
l.extend(offering.themes)
l.sort(key=lambda o: o.priority)
l.reverse()
return l | python | {
"resource": ""
} |
q38642 | ThemeCache._realGetInstalledThemes | train | def _realGetInstalledThemes(self, store):
"""
Collect themes from all offerings installed on this store.
"""
l = []
for offering in getInstalledOfferings(store).itervalues():
l.extend(offering.themes)
l.sort(key=lambda o: o.priority)
l.reverse()
return l | python | {
"resource": ""
} |
q38643 | XHTMLDirectoryTheme.getDocFactory | train | def getDocFactory(self, fragmentName, default=None):
"""
For a given fragment, return a loaded Nevow template.
@param fragmentName: the name of the template (can include relative
paths).
@param default: a default loader; only used if provided and the
given fragment name cannot be resolved.
@return: A loaded Nevow template.
@type return: L{nevow.loaders.xmlfile}
"""
if fragmentName in self.cachedLoaders:
return self.cachedLoaders[fragmentName]
segments = fragmentName.split('/')
segments[-1] += '.html'
file = self.directory
for segment in segments:
file = file.child(segment)
if file.exists():
loader = xmlfile(file.path)
self.cachedLoaders[fragmentName] = loader
return loader
return default | python | {
"resource": ""
} |
q38644 | unpackexe | train | def unpackexe(exefile, destdir):
"""Unpack the given exefile into destdir, using 7z"""
nullfd = open(os.devnull, "w")
exefile = cygpath(os.path.abspath(exefile))
try:
check_call([SEVENZIP, 'x', exefile], cwd=destdir,
stdout=nullfd, preexec_fn=_noumask)
except Exception:
log.exception("Error unpacking exe %s to %s", exefile, destdir)
raise
nullfd.close() | python | {
"resource": ""
} |
q38645 | packexe | train | def packexe(exefile, srcdir):
"""Pack the files in srcdir into exefile using 7z.
Requires that stub files are available in checkouts/stubs"""
exefile = cygpath(os.path.abspath(exefile))
appbundle = exefile + ".app.7z"
# Make sure that appbundle doesn't already exist
# We don't want to risk appending to an existing file
if os.path.exists(appbundle):
raise OSError("%s already exists" % appbundle)
files = os.listdir(srcdir)
SEVENZIP_ARGS = ['-r', '-t7z', '-mx', '-m0=BCJ2', '-m1=LZMA:d27',
'-m2=LZMA:d19:mf=bt2', '-m3=LZMA:d19:mf=bt2', '-mb0:1', '-mb0s1:2',
'-mb0s2:3', '-m1fb=128', '-m1lc=4']
# First, compress with 7z
stdout = tempfile.TemporaryFile()
try:
check_call([SEVENZIP, 'a'] + SEVENZIP_ARGS + [appbundle] + files,
cwd=srcdir, stdout=stdout, preexec_fn=_noumask)
except Exception:
stdout.seek(0)
data = stdout.read()
log.error(data)
log.exception("Error packing exe %s from %s", exefile, srcdir)
raise
stdout.close()
# Then prepend our stubs onto the compressed 7z data
o = open(exefile, "wb")
parts = [
'checkouts/stubs/7z/7zSD.sfx.compressed',
'checkouts/stubs/tagfile/app.tag',
appbundle
]
for part in parts:
i = open(part)
while True:
block = i.read(4096)
if not block:
break
o.write(block)
i.close()
o.close()
os.unlink(appbundle) | python | {
"resource": ""
} |
q38646 | bunzip2 | train | def bunzip2(filename):
"""Uncompress `filename` in place"""
log.debug("Uncompressing %s", filename)
tmpfile = "%s.tmp" % filename
os.rename(filename, tmpfile)
b = bz2.BZ2File(tmpfile)
f = open(filename, "wb")
while True:
block = b.read(512 * 1024)
if not block:
break
f.write(block)
f.close()
b.close()
shutil.copystat(tmpfile, filename)
shutil.copymode(tmpfile, filename)
os.unlink(tmpfile) | python | {
"resource": ""
} |
q38647 | unpackmar | train | def unpackmar(marfile, destdir):
"""Unpack marfile into destdir"""
marfile = cygpath(os.path.abspath(marfile))
nullfd = open(os.devnull, "w")
try:
check_call([MAR, '-x', marfile], cwd=destdir,
stdout=nullfd, preexec_fn=_noumask)
except Exception:
log.exception("Error unpacking mar file %s to %s", marfile, destdir)
raise
nullfd.close() | python | {
"resource": ""
} |
q38648 | packmar | train | def packmar(marfile, srcdir):
"""Create marfile from the contents of srcdir"""
nullfd = open(os.devnull, "w")
files = [f[len(srcdir) + 1:] for f in findfiles(srcdir)]
marfile = cygpath(os.path.abspath(marfile))
try:
check_call(
[MAR, '-c', marfile] + files, cwd=srcdir, preexec_fn=_noumask)
except Exception:
log.exception("Error packing mar file %s from %s", marfile, srcdir)
raise
nullfd.close() | python | {
"resource": ""
} |
q38649 | unpacktar | train | def unpacktar(tarfile, destdir):
""" Unpack given tarball into the specified dir """
nullfd = open(os.devnull, "w")
tarfile = cygpath(os.path.abspath(tarfile))
log.debug("unpack tar %s into %s", tarfile, destdir)
try:
check_call([TAR, '-xzf', tarfile], cwd=destdir,
stdout=nullfd, preexec_fn=_noumask)
except Exception:
log.exception("Error unpacking tar file %s to %s", tarfile, destdir)
raise
nullfd.close() | python | {
"resource": ""
} |
q38650 | tar_dir | train | def tar_dir(tarfile, srcdir):
""" Pack a tar file using all the files in the given srcdir """
files = os.listdir(srcdir)
packtar(tarfile, files, srcdir) | python | {
"resource": ""
} |
q38651 | packtar | train | def packtar(tarfile, files, srcdir):
""" Pack the given files into a tar, setting cwd = srcdir"""
nullfd = open(os.devnull, "w")
tarfile = cygpath(os.path.abspath(tarfile))
log.debug("pack tar %s from folder %s with files ", tarfile, srcdir)
log.debug(files)
try:
check_call([TAR, '-czf', tarfile] + files, cwd=srcdir,
stdout=nullfd, preexec_fn=_noumask)
except Exception:
log.exception("Error packing tar file %s to %s", tarfile, srcdir)
raise
nullfd.close() | python | {
"resource": ""
} |
q38652 | unpackfile | train | def unpackfile(filename, destdir):
"""Unpack a mar or exe into destdir"""
if filename.endswith(".mar"):
return unpackmar(filename, destdir)
elif filename.endswith(".exe"):
return unpackexe(filename, destdir)
elif filename.endswith(".tar") or filename.endswith(".tar.gz") \
or filename.endswith(".tgz"):
return unpacktar(filename, destdir)
else:
raise ValueError("Unknown file type: %s" % filename) | python | {
"resource": ""
} |
q38653 | packfile | train | def packfile(filename, srcdir):
"""Package up srcdir into filename, archived with 7z for exes or mar for
mar files"""
if filename.endswith(".mar"):
return packmar(filename, srcdir)
elif filename.endswith(".exe"):
return packexe(filename, srcdir)
elif filename.endswith(".tar"):
return tar_dir(filename, srcdir)
else:
raise ValueError("Unknown file type: %s" % filename) | python | {
"resource": ""
} |
q38654 | _reorderForPreference | train | def _reorderForPreference(themeList, preferredThemeName):
"""
Re-order the input themeList according to the preferred theme.
Returns None.
"""
for theme in themeList:
if preferredThemeName == theme.themeName:
themeList.remove(theme)
themeList.insert(0, theme)
return | python | {
"resource": ""
} |
q38655 | upgradePrivateApplication4to5 | train | def upgradePrivateApplication4to5(old):
"""
Install the newly required powerup.
"""
new = old.upgradeVersion(
PrivateApplication.typeName, 4, 5,
preferredTheme=old.preferredTheme,
privateKey=old.privateKey,
website=old.website,
customizedPublicPage=old.customizedPublicPage,
authenticationApplication=old.authenticationApplication,
preferenceAggregator=old.preferenceAggregator,
defaultPreferenceCollection=old.defaultPreferenceCollection,
searchAggregator=old.searchAggregator)
new.store.powerUp(new, IWebViewer)
return new | python | {
"resource": ""
} |
q38656 | _ShellRenderingMixin.render_startmenu | train | def render_startmenu(self, ctx, data):
    """
    Nevow renderer: add start-menu style navigation to the given tag,
    built from this shell's navigation configuration.
    @see: L{xmantissa.webnav.startMenu}
    """
    return startMenu(
        self.translator, self.pageComponents.navigation, ctx.tag) | python | {
"resource": ""
} |
q38657 | _ShellRenderingMixin.render_settingsLink | train | def render_settingsLink(self, ctx, data):
    """
    Nevow renderer: add the URL of the settings page to the given tag.
    @see: L{xmantissa.webnav.settingsLink}
    """
    return settingsLink(
        self.translator, self.pageComponents.settings, ctx.tag) | python | {
"resource": ""
} |
q38658 | _ShellRenderingMixin.render_applicationNavigation | train | def render_applicationNavigation(self, ctx, data):
    """
    Nevow renderer: add primary application navigation to the given tag.
    @see: L{xmantissa.webnav.applicationNavigation}
    """
    return applicationNavigation(
        ctx, self.translator, self.pageComponents.navigation) | python | {
"resource": ""
} |
q38659 | _PrivateRootPage.childFactory | train | def childFactory(self, ctx, name):
"""
Return a shell page wrapped around the Item model described by the
webID, or return None if no such item can be found.
"""
try:
o = self.webapp.fromWebID(name)
except _WebIDFormatException:
return None
if o is None:
return None
return self.webViewer.wrapModel(o) | python | {
"resource": ""
} |
q38660 | PrivateApplication.getDocFactory | train | def getDocFactory(self, fragmentName, default=None):
"""
Retrieve a Nevow document factory for the given name.
@param fragmentName: a short string that names a fragment template.
@param default: value to be returned if the named template is not
found.
"""
themes = self._preferredThemes()
for t in themes:
fact = t.getDocFactory(fragmentName, None)
if fact is not None:
return fact
return default | python | {
"resource": ""
} |
q38661 | Domain.fetch | train | def fetch(self):
    """
    Fetch & return a new `Domain` object representing the domain's current
    state
    :rtype: Domain
    :raises DOAPIError: if the API endpoint replies with an error (e.g., if
        the domain no longer exists)
    """
    # Re-query the API and construct a fresh Domain from the response.
    api = self.doapi_manager
    return api._domain(api.request(self.url)["domain"]) | python | {
"resource": ""
} |
q38662 | Domain.fetch_all_records | train | def fetch_all_records(self):
    r"""
    Returns a generator that yields all of the DNS records for the domain
    :rtype: generator of `DomainRecord`\ s
    :raises DOAPIError: if the API endpoint replies with an error
    """
    api = self.doapi_manager
    # Lazily page through the API, wrapping each raw dict in a DomainRecord.
    return map(self._record, api.paginate(self.record_url, 'domain_records')) | python | {
"resource": ""
} |
q38663 | Domain.create_record | train | def create_record(self, type, name, data, priority=None, port=None,
                  weight=None, **kwargs):
    # pylint: disable=redefined-builtin
    """
    Add a new DNS record to the domain
    :param str type: the type of DNS record to add (``"A"``, ``"CNAME"``,
        etc.)
    :param str name: the name (hostname, alias, etc.) of the new record
    :param str data: the value of the new record
    :param int priority: the priority of the new record (SRV and MX records
        only)
    :param int port: the port that the service is accessible on (SRV
        records only)
    :param int weight: the weight of records with the same priority (SRV
        records only)
    :param kwargs: additional fields to include in the API request
    :return: the new domain record
    :rtype: DomainRecord
    :raises DOAPIError: if the API endpoint replies with an error
    """
    api = self.doapi_manager
    # ``data`` is rebound to the request payload; the original argument
    # value is captured inside the new dict before the name is reused.
    data = {
        "type": type,
        "name": name,
        "data": data,
        "priority": priority,
        "port": port,
        "weight": weight,
    }
    data.update(kwargs)
    return self._record(api.request(self.record_url, method='POST',
                                    data=data)["domain_record"]) | python | {
"resource": ""
} |
q38664 | DomainRecord.fetch | train | def fetch(self):
    """
    Fetch & return a new `DomainRecord` object representing the domain
    record's current state
    :rtype: DomainRecord
    :raises DOAPIError: if the API endpoint replies with an error (e.g., if
        the domain record no longer exists)
    """
    # Re-query the API; the parent domain wraps the raw dict for us.
    return self.domain._record(self.doapi_manager.request(self.url)\
                               ["domain_record"]) | python | {
"resource": ""
} |
q38665 | Projection.labelAxes | train | def labelAxes(self, numLines=(5,5)):
    """Put labels on axes
    :param numLines: tuple (nx, ny) -- number of tick labels to place on
        the x (RA) and y (Dec) axes.
    Note: I should do better than this by picking round numbers
    as the places to put the labels.
    Note: If I ever do rotated projections, this simple approach
    will fail.
    """
    x1, x2, y1, y2 = mp.axis()
    # Sky coordinates of the lower-left axis corner anchor the tick rows.
    ra1, dec0 = self.pixToSky(x1, y1)
    raRange, decRange = self.getRaDecRanges(numLines)
    ax = mp.gca()
    # Convert the chosen RA/Dec tick values back into pixel positions.
    x_ticks = self.skyToPix(raRange, dec0)[0]
    y_ticks = self.skyToPix(ra1, decRange)[1]
    ax.xaxis.set_ticks(x_ticks)
    ax.xaxis.set_ticklabels([str(int(i)) for i in raRange])
    mp.xlabel("Right Ascension (deg)")
    ax.yaxis.set_ticks(y_ticks)
    ax.yaxis.set_ticklabels([str(int(i)) for i in decRange])
    mp.ylabel("Declination (deg)") | python | {
"resource": ""
} |
q38666 | Projection.getRaDecRanges | train | def getRaDecRanges(self, numLines):
    """Pick suitable values for ra and dec ticks
    Used by plotGrid and labelAxes
    :param numLines: tuple (nx, ny) of requested tick counts.
    :returns: (raRange, decRange) arrays of tick values in degrees,
        with RA wrapped into [0, 360).
    """
    x1, x2, y1, y2 = mp.axis()
    ra0, dec0 = self.pixToSky(x1, y1)
    ra1, dec1 = self.pixToSky(x2, y2)
    #Deal with the case where ra range straddles 0.
    #Different code for case where ra increases left to right, or decreases.
    if self.isPositiveMap():
        if ra1 < ra0:
            ra1 += 360
    else:
        if ra0 < ra1:
            ra0 += 360
    raMid = .5*(ra0+ra1)
    decMid = .5*(dec0+dec1)
    xNum, yNum = numLines
    # Round step sizes to whole degrees, then centre the resulting
    # tick span on the midpoint of the visible range.
    stepX = round((ra1 - ra0) / xNum)
    stepY = round((dec1 - dec0) / yNum)
    rangeX = stepX * (xNum - 1)
    rangeY = stepY * (yNum - 1)
    raStart = np.round(raMid - rangeX/2.)
    decStart = np.round(decMid - rangeY/2.)
    raRange = np.arange(raStart, raStart + stepX*xNum, stepX)
    decRange = np.arange(decStart, decStart + stepY*yNum, stepY)
    # Wrap RA back into [0, 360) after any straddle adjustment above.
    raRange = np.fmod(raRange, 360.)
    return raRange, decRange | python | {
"resource": ""
} |
q38667 | KVStorage.get | train | def get(self, key, **kwargs):
'''
Fetch value at the given key
kwargs can hold `recurse`, `wait` and `index` params
'''
return self._get('/'.join([self._endpoint, key]), payload=kwargs) | python | {
"resource": ""
} |
q38668 | KVStorage.set | train | def set(self, key, value, **kwargs):
    '''
    Store a new value at the given key
    kwargs can hold `cas` and `flags` params
    '''
    # PUT straight against the consul HTTP API; the value travels as the
    # raw request body and optional params become the query string.
    return requests.put(
        '{}/{}/kv/{}'.format(
            self.master, pyconsul.__consul_api_version__, key),
        data=value,
        params=kwargs
    ) | python | {
"resource": ""
} |
q38669 | Consul.health | train | def health(self, **kwargs):
    '''
    Query a consul health endpoint.  Exactly one keyword argument is
    expected, naming the resource type: `node`, `service`, `check` or
    `state`.  Raises ValueError when no resource is provided.
    '''
    if not len(kwargs):
        raise ValueError('no resource provided')
    # NOTE(review): the loop returns on its first iteration, so only one
    # resource is ever queried; if several keyword arguments are passed,
    # an arbitrary one wins.  `iteritems` also pins this to Python 2.
    for resource, name in kwargs.iteritems():
        endpoint = 'health/{}/{}'.format(resource, name)
        return self._get(endpoint) | python | {
"resource": ""
} |
q38670 | Route.routes | train | def routes(cls, application=None):
    """
    Method for adding the routes to the `tornado.web.Application`.
    When ``application`` is given, each collected route is registered on
    it (grouped by host) and nothing is returned; otherwise the list of
    route specs is returned for manual registration.
    """
    if application:
        for route in cls._routes:
            application.add_handlers(route['host'], route['spec'])
    else:
        return [route['spec'] for route in cls._routes] | python | {
"resource": ""
} |
q38671 | SR7230.start_asweep | train | def start_asweep(self, start=None, stop=None, step=None):
"""Starts a amplitude sweep.
:param start: Sets the start frequency.
:param stop: Sets the target frequency.
:param step: Sets the frequency step.
"""
if start:
self.amplitude_start = start
if stop:
self.amplitude_stop = stop
if step:
self.amplitude_step = step
self._write(('SWEEP', Integer), 2) | python | {
"resource": ""
} |
q38672 | SR7230.start_fsweep | train | def start_fsweep(self, start=None, stop=None, step=None):
"""Starts a frequency sweep.
:param start: Sets the start frequency.
:param stop: Sets the target frequency.
:param step: Sets the frequency step.
"""
if start:
self.frequency_start = start
if stop:
self.frequency_stop = stop
if step:
self.frequency_step = step
self._write(('SWEEP', Integer), 1) | python | {
"resource": ""
} |
q38673 | SR7230.take_data_triggered | train | def take_data_triggered(self, trigger, edge, stop):
    """Configures data acquisition to start on various trigger conditions.
    :param trigger: The trigger condition, either 'curve' or 'point'.
        ======= =======================================================
        Value   Description
        ======= =======================================================
        'curve' Each trigger signal starts a curve acquisition.
        'point' A point is stored for each trigger signal. The max
                trigger frequency in this mode is 1 kHz.
        ======= =======================================================
    :param edge: Defines whether a 'rising' or 'falling' edge is
        interpreted as a trigger signal.
    :param stop: The stop condition. Valid are 'buffer', 'halt' and
        'trigger'.
        ========= ==========================================================
        Value     Description
        ========= ==========================================================
        'buffer'  Data acquisition stops when the number of point
                  specified in :attr:`~.Buffer.length` is acquired.
        'halt'    Data acquisition stops when the halt command is issued.
        'trigger' Takes data for the period of a trigger event. If edge is
                  'rising' then the acquisition starts on the rising edge of
                  the trigger signal and stops on the falling edge and vice
                  versa
        ========= ==========================================================
    """
    # Map each valid (trigger, edge, stop) combination onto the integer
    # code expected by the instrument's TDT command; an unsupported
    # combination raises KeyError.
    param = {
        ('curve', 'rising', 'buffer'): 0,
        ('point', 'rising', 'buffer'): 1,
        ('curve', 'falling', 'buffer'): 2,
        ('point', 'falling', 'buffer'): 3,
        ('curve', 'rising', 'halt'): 4,
        ('point', 'rising', 'halt'): 5,
        ('curve', 'falling', 'halt'): 6,
        ('point', 'falling', 'halt'): 7,
        ('curve', 'rising', 'trigger'): 8,
        ('curve', 'falling', 'trigger'): 9,
    }
    self._write(('TDT', Integer), param[(trigger, edge, stop)]) | python | {
"resource": ""
} |
q38674 | RecordAttribute._decompose | train | def _decompose(self, value):
"""
Decompose an instance of our record type into a dictionary mapping
attribute names to values.
@param value: an instance of self.recordType
@return: L{dict} containing the keys declared on L{record}.
"""
data = {}
for n, attr in zip(self.recordType.__names__, self.attrs):
data[attr.attrname] = getattr(value, n)
return data | python | {
"resource": ""
} |
q38675 | WithRecordAttributes.create | train | def create(cls, **kw):
    """
    Create an instance of this class, first expanding any keyword
    argument whose class attribute is a L{RecordAttribute} into its
    constituent attribute values.
    @return: an instance of C{cls}
    """
    # NOTE(review): popping from kw while iterating kw.items() is safe on
    # Python 2 (items() returns a list); on Python 3 it would raise --
    # presumably this code targets Python 2.
    for k, v in kw.items():
        attr = getattr(cls, k, None)
        if isinstance(attr, RecordAttribute):
            kw.pop(k)
            kw.update(attr._decompose(v))
    return cls(**kw) | python | {
"resource": ""
} |
q38676 | HBaseDAM.__rowResultToQuote | train | def __rowResultToQuote(self, row):
    ''' convert rowResult from Hbase to Quote'''
    keyValues = row.columns
    for field in QUOTE_FIELDS:
        key = "%s:%s" % (HBaseDAM.QUOTE, field)
        # Every field except the timestamp is numeric; coerce in place
        # (empty values are left untouched).
        if 'time' != field and keyValues[key].value:
            keyValues[key].value = float(keyValues[key].value)
    return Quote(*[keyValues["%s:%s" % (HBaseDAM.QUOTE, field)].value for field in QUOTE_FIELDS]) | python | {
"resource": ""
} |
q38677 | HBaseDAM.__rowResultToTick | train | def __rowResultToTick(self, row):
    ''' convert rowResult from Hbase to Tick'''
    keyValues = row.columns
    for field in TICK_FIELDS:
        key = "%s:%s" % (HBaseDAM.TICK, field)
        # Every field except the timestamp is numeric; coerce in place
        # (empty values are left untouched).
        if 'time' != field and keyValues[key].value:
            keyValues[key].value = float(keyValues[key].value)
    return Tick(*[keyValues["%s:%s" % (HBaseDAM.TICK, field)].value for field in TICK_FIELDS]) | python | {
"resource": ""
} |
q38678 | inMicrolensRegion_main | train | def inMicrolensRegion_main(args=None):
    """Exposes K2visible to the command line.
    Parses an (ra, dec) pair in decimal degrees and prints whether the
    coordinate lies inside the K2C9 microlensing superstamp.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Check if a celestial coordinate is "
                    "inside the K2C9 microlensing superstamp.")
    parser.add_argument('ra', nargs=1, type=float,
                        help="Right Ascension in decimal degrees (J2000).")
    parser.add_argument('dec', nargs=1, type=float,
                        help="Declination in decimal degrees (J2000).")
    args = parser.parse_args(args)
    # nargs=1 yields single-element lists, hence the [0] indexing.
    if inMicrolensRegion(args.ra[0], args.dec[0]):
        print("Yes! The coordinate is inside the K2C9 superstamp.")
    else:
        print("Sorry, the coordinate is NOT inside the K2C9 superstamp.") | python | {
"resource": ""
} |
q38679 | inMicrolensRegion | train | def inMicrolensRegion(ra_deg, dec_deg, padding=0):
    """Returns `True` if the given sky coordinate falls on the K2C9 superstamp.
    Parameters
    ----------
    ra_deg : float
        Right Ascension (J2000) in decimal degrees.
    dec_deg : float
        Declination (J2000) in decimal degrees.
    padding : float
        Target must be at least `padding` pixels away from the edge of the
        superstamp. (Note that CCD boundaries are not considered as edges
        in this case.)
    Returns
    -------
    onMicrolensRegion : bool
        `True` if the given coordinate is within the K2C9 microlens superstamp.
    """
    fov = getKeplerFov(9)
    try:
        ch, col, row = fov.getChannelColRow(ra_deg, dec_deg,
                                            allowIllegalReturnValues=False)
        return maskInMicrolensRegion(ch, col, row, padding=padding)
    except ValueError:
        # Raised when the coordinate does not fall on silicon at all.
        return False | python | {
"resource": ""
} |
q38680 | pixelInMicrolensRegion | train | def pixelInMicrolensRegion(ch, col, row):
    """Returns `True` if the given pixel falls inside the K2C9 superstamp.
    The superstamp is used for microlensing experiment and is an almost
    contiguous area of 2.8e6 pixels.
    """
    # First try the superstamp polygons, one per contributing channel.
    try:
        vertices_col = SUPERSTAMP["channels"][str(int(ch))]["vertices_col"]
        vertices_row = SUPERSTAMP["channels"][str(int(ch))]["vertices_row"]
        # The point is in one of 5 channels which constitute the superstamp
        # so check if it falls inside the polygon for this channel
        if isPointInsidePolygon(col, row, vertices_col, vertices_row):
            return True
    except KeyError:  # Channel does not appear in file
        pass
    # Then try the late target masks, which may live on other channels.
    for mask in LATE_TARGETS["masks"]:
        if mask["channel"] == ch:
            vertices_col = mask["vertices_col"]
            vertices_row = mask["vertices_row"]
            if isPointInsidePolygon(col, row, vertices_col, vertices_row):
                return True
    return False | python | {
"resource": ""
} |
q38681 | maskInMicrolensRegion | train | def maskInMicrolensRegion(ch, col, row, padding=0):
    """Is a target in the K2C9 superstamp, including padding?
    This function is identical to pixelInMicrolensRegion, except it takes
    the extra `padding` argument. The coordinate must be within the K2C9
    superstamp by at least `padding` number of pixels on either side of the
    coordinate. (Note that this function does not check whether something is
    close to the CCD boundaries, it only checks whether something is close
    to the edge of stamp.)
    """
    if padding == 0:
        return pixelInMicrolensRegion(ch, col, row)
    # Probe four points, one `padding` pixels away in each direction.
    combinations = [[col - padding, row],
                    [col + padding, row],
                    [col, row - padding],
                    [col, row + padding]]
    for col, row in combinations:
        # Clamp each probe point onto the science pixel area:
        # columns 12 - 1111, rows 20 - 1043.
        if col < 12:
            col = 12
        if col > 1111:
            col = 1111
        if row < 20:
            row = 20
        if row > 1043:
            row = 1043
        # All four probes must land inside the superstamp.
        if not pixelInMicrolensRegion(ch, col, row):
            return False
    return True | python | {
"resource": ""
} |
q38682 | isPointInsidePolygon | train | def isPointInsidePolygon(x, y, vertices_x, vertices_y):
    """Check if a given point is inside a polygon.
    Parameters vertices_x[] and vertices_y[] define the polygon.
    The number of array elements is equal to number of vertices of the polygon.
    This function works for convex and concave polygons.
    Parameters
    ----------
    vertices_x, vertices_y : lists or arrays of floats
        Vertices that define the polygon.
    x, y : float
        Coordinates of the point to check.
    Returns
    -------
    inside : bool
        `True` if the point is inside the polygon.
    """
    inside = False
    # Ray casting: walk every edge (j is the previous vertex, wrapping
    # around via index -1) and toggle the parity flag for each edge the
    # test ray crosses.  Odd number of crossings == inside.
    for i in range(len(vertices_x)):
        j = i - 1
        if ((vertices_x[i] > x) != (vertices_x[j] > x)):
            if (y < (x - vertices_x[i]) *
                    (vertices_y[i] - vertices_y[j]) /
                    (vertices_x[i] - vertices_x[j]) +
                    vertices_y[i]):
                inside = not inside
    return inside | python | {
"resource": ""
} |
q38683 | K2FootprintPlot.plot_campaign | train | def plot_campaign(self, campaign=0, annotate_channels=True, **kwargs):
"""Plot all the active channels of a campaign."""
fov = getKeplerFov(campaign)
corners = fov.getCoordsOfChannelCorners()
for ch in np.arange(1, 85, dtype=int):
if ch in fov.brokenChannels:
continue # certain channel are no longer used
idx = np.where(corners[::, 2] == ch)
mdl = int(corners[idx, 0][0][0])
out = int(corners[idx, 1][0][0])
ra = corners[idx, 3][0]
if campaign == 1002: # Concept Engineering Test overlapped the meridian
ra[ra < 180] += 360
dec = corners[idx, 4][0]
self.ax.fill(np.concatenate((ra, ra[:1])),
np.concatenate((dec, dec[:1])), **kwargs)
if annotate_channels:
txt = "K2C{0}\n{1}.{2}\n#{3}".format(campaign, mdl, out, ch)
txt = "{1}.{2}\n#{3}".format(campaign, mdl, out, ch)
self.ax.text(np.mean(ra), np.mean(dec), txt,
ha="center", va="center",
zorder=91, fontsize=10,
color="#000000", clip_on=True) | python | {
"resource": ""
} |
q38684 | AssetHelper.generate_static | train | def generate_static(self, path):
"""
This method generates a valid path to the public folder of the running project
"""
if not path:
return ""
if path[0] == '/':
return "%s?v=%s" % (path, self.version)
return "%s/%s?v=%s" % (self.static, path, self.version) | python | {
"resource": ""
} |
q38685 | _parse_values | train | def _parse_values(values, extra=None):
"""
Utility function to flatten out args.
For internal use only.
:param values: list, tuple, or str
:param extra: list or None
:return: list
"""
coerced = list(values)
if coerced == values:
values = coerced
else:
coerced = tuple(values)
if coerced == values:
values = list(values)
else:
values = [values]
if extra:
values.extend(extra)
return values | python | {
"resource": ""
} |
q38686 | Keyspace.redis_key | train | def redis_key(cls, key):
"""
Get the key we pass to redis.
If no namespace is declared, it will use the class name.
:param key: str the name of the redis key
:return: str
"""
keyspace = cls.keyspace
tpl = cls.keyspace_template
key = "%s" % key if keyspace is None else tpl % (keyspace, key)
return cls.keyparse.encode(key) | python | {
"resource": ""
} |
q38687 | Keyspace.super_pipe | train | def super_pipe(self):
"""
Creates a mechanism for us to internally bind two different
operations together in a shared pipeline on the class.
This will temporarily set self._pipe to be this new pipeline,
during this context and then when it leaves the context
reset self._pipe to its original value.
Example:
def get_set(self, key, val)
with self.super_pipe as pipe:
res = self.get(key)
self.set(key, val)
return res
This will have the effect of using only one network round trip if no
pipeline was passed to the constructor.
This method is still considered experimental and we are working out
the details, so don't use it unless you feel confident you have a
legitimate use-case for using this.
"""
orig_pipe = self._pipe
def exit_handler():
self._pipe = orig_pipe
self._pipe = autoexec(orig_pipe, name=self.connection,
exit_handler=exit_handler)
return self._pipe | python | {
"resource": ""
} |
q38688 | Keyspace.delete | train | def delete(self, *names):
"""
Remove the key from redis
:param names: tuple of strings - The keys to remove from redis.
:return: Future()
"""
names = [self.redis_key(n) for n in names]
with self.pipe as pipe:
return pipe.delete(*names) | python | {
"resource": ""
} |
q38689 | Keyspace.expire | train | def expire(self, name, time):
"""
Allow the key to expire after ``time`` seconds.
:param name: str the name of the redis key
:param time: time expressed in seconds.
:return: Future()
"""
with self.pipe as pipe:
return pipe.expire(self.redis_key(name), time) | python | {
"resource": ""
} |
q38690 | Keyspace.exists | train | def exists(self, name):
"""
does the key exist in redis?
:param name: str the name of the redis key
:return: Future()
"""
with self.pipe as pipe:
return pipe.exists(self.redis_key(name)) | python | {
"resource": ""
} |
q38691 | Keyspace.eval | train | def eval(self, script, numkeys, *keys_and_args):
"""
Run a lua script against the key.
Doesn't support multi-key lua operations because
we wouldn't be able to know what argument to namespace.
Also, redis cluster doesn't really support multi-key operations.
:param script: str A lua script targeting the current key.
:param numkeys: number of keys passed to the script
:param keys_and_args: list of keys and args passed to script
:return: Future()
"""
with self.pipe as pipe:
keys_and_args = [a if i >= numkeys else self.redis_key(a) for i, a
in enumerate(keys_and_args)]
return pipe.eval(script, numkeys, *keys_and_args) | python | {
"resource": ""
} |
q38692 | Keyspace.dump | train | def dump(self, name):
"""
get a redis RDB-like serialization of the object.
:param name: str the name of the redis key
:return: Future()
"""
with self.pipe as pipe:
return pipe.dump(self.redis_key(name)) | python | {
"resource": ""
} |
q38693 | Keyspace.ttl | train | def ttl(self, name):
"""
get the number of seconds until the key's expiration
:param name: str the name of the redis key
:return: Future()
"""
with self.pipe as pipe:
return pipe.ttl(self.redis_key(name)) | python | {
"resource": ""
} |
q38694 | Keyspace.persist | train | def persist(self, name):
"""
clear any expiration TTL set on the object
:param name: str the name of the redis key
:return: Future()
"""
with self.pipe as pipe:
return pipe.persist(self.redis_key(name)) | python | {
"resource": ""
} |
q38695 | Keyspace.pttl | train | def pttl(self, name):
"""
Returns the number of milliseconds until the key ``name`` will expire
:param name: str the name of the redis key
:return:
"""
with self.pipe as pipe:
return pipe.pttl(self.redis_key(name)) | python | {
"resource": ""
} |
q38696 | Keyspace.object | train | def object(self, infotype, key):
"""
get the key's info stats
:param name: str the name of the redis key
:param subcommand: REFCOUNT | ENCODING | IDLETIME
:return: Future()
"""
with self.pipe as pipe:
return pipe.object(infotype, self.redis_key(key)) | python | {
"resource": ""
} |
q38697 | String.setnx | train | def setnx(self, name, value):
"""
Set the value as a string in the key only if the key doesn't exist.
:param name: str the name of the redis key
:param value:
:return: Future()
"""
with self.pipe as pipe:
return pipe.setnx(self.redis_key(name),
self.valueparse.encode(value)) | python | {
"resource": ""
} |
q38698 | String.setex | train | def setex(self, name, value, time):
"""
Set the value of key to ``value`` that expires in ``time``
seconds. ``time`` can be represented by an integer or a Python
timedelta object.
:param name: str the name of the redis key
:param value: str
:param time: secs
:return: Future()
"""
with self.pipe as pipe:
return pipe.setex(self.redis_key(name),
value=self.valueparse.encode(value),
time=time) | python | {
"resource": ""
} |
q38699 | String.append | train | def append(self, name, value):
"""
Appends the string ``value`` to the value at ``key``. If ``key``
doesn't already exist, create it with a value of ``value``.
Returns the new length of the value at ``key``.
:param name: str the name of the redis key
:param value: str
:return: Future()
"""
with self.pipe as pipe:
return pipe.append(self.redis_key(name),
self.valueparse.encode(value)) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.