| sentence1 | sentence2 | label |
|---|---|---|
def upgradeParentHook1to2(oldHook):
"""
Add the scheduler attribute to the given L{_SubSchedulerParentHook}.
"""
newHook = oldHook.upgradeVersion(
oldHook.typeName, 1, 2,
loginAccount=oldHook.loginAccount,
scheduledAt=oldHook.scheduledAt,
scheduler=oldHook.store.findFirst(Scheduler))
return newHook
|
Add the scheduler attribute to the given L{_SubSchedulerParentHook}.
|
entailment
|
def upgradeParentHook3to4(old):
"""
Copy C{loginAccount} to C{subStore} and remove the installation marker.
"""
new = old.upgradeVersion(
old.typeName, 3, 4, subStore=old.loginAccount)
uninstallFrom(new, new.store)
return new
|
Copy C{loginAccount} to C{subStore} and remove the installation marker.
|
entailment
|
def invokeRunnable(self):
"""
Run my runnable, and reschedule or delete myself based on its result.
Must be run in a transaction.
"""
runnable = self.runnable
if runnable is None:
self.deleteFromStore()
else:
try:
self.running = True
newTime = runnable.run()
finally:
self.running = False
self._rescheduleFromRun(newTime)
|
Run my runnable, and reschedule or delete myself based on its result.
Must be run in a transaction.
|
entailment
|
def handleError(self, now, failureObj):
""" An error occurred running my runnable. Check my runnable for an
error-handling method called 'timedEventErrorHandler' that will take
the given failure as an argument, and execute that if available:
otherwise, create a TimedEventFailureLog with information about what
happened to this event.
Must be run in a transaction.
"""
errorHandler = getattr(self.runnable, 'timedEventErrorHandler', None)
if errorHandler is not None:
self._rescheduleFromRun(errorHandler(self, failureObj))
else:
self._defaultErrorHandler(now, failureObj)
|
An error occurred running my runnable. Check my runnable for an
error-handling method called 'timedEventErrorHandler' that will take
the given failure as an argument, and execute that if available;
otherwise, create a TimedEventFailureLog with information about what
happened to this event.
Must be run in a transaction.
|
entailment
|
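A hypothetical sketch of the 'timedEventErrorHandler' hook described above (the class and helper names are illustrative, not from the source). Whatever the handler returns is fed to _rescheduleFromRun, just like the return value of run():

import datetime
from epsilon.extime import Time

def do_work():
    # Hypothetical work that may fail.
    raise RuntimeError("transient failure")

class RetryingTask(object):
    """Illustrative runnable: retries five minutes after a failure."""
    def run(self):
        do_work()

    def timedEventErrorHandler(self, timedEvent, failureObj):
        # Called with the TimedEvent and the Twisted Failure.
        failureObj.printTraceback()
        return Time() + datetime.timedelta(minutes=5)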
def unscheduleFirst(self, runnable):
"""
Remove the given item from the schedule.
If runnable is scheduled to run multiple times, only the temporally first
is removed.
"""
for evt in self.store.query(TimedEvent, TimedEvent.runnable == runnable, sort=TimedEvent.time.ascending):
evt.deleteFromStore()
break
|
Remove the given item from the schedule.
If runnable is scheduled to run multiple times, only the temporally first
is removed.
|
entailment
|
def scheduledTimes(self, runnable):
"""
Return an iterable of the times at which the given item is scheduled to
run.
"""
events = self.store.query(
TimedEvent, TimedEvent.runnable == runnable)
return (event.time for event in events if not event.running)
|
Return an iterable of the times at which the given item is scheduled to
run.
|
entailment
|
def startService(self):
"""
Start calling persistent timed events whose time has come.
"""
super(_SiteScheduler, self).startService()
self._transientSchedule(self.now(), self.now())
|
Start calling persistent timed events whose time has come.
|
entailment
|
def stopService(self):
"""
Stop calling persistent timed events.
"""
super(_SiteScheduler, self).stopService()
if self.timer is not None:
self.timer.cancel()
self.timer = None
|
Stop calling persistent timed events.
|
entailment
|
def _transientSchedule(self, when, now):
"""
If the service is currently running, schedule a tick to happen no
later than C{when}.
@param when: The time at which to tick.
@type when: L{epsilon.extime.Time}
@param now: The current time.
@type now: L{epsilon.extime.Time}
"""
if not self.running:
return
if self.timer is not None:
if self.timer.getTime() < when.asPOSIXTimestamp():
return
self.timer.cancel()
delay = when.asPOSIXTimestamp() - now.asPOSIXTimestamp()
# reactor.callLater allows only positive delay values. The scheduler
# may want to have scheduled things in the past and that's OK, since we
# are dealing with Time() instances it's impossible to predict what
# they are relative to the current time from user code anyway.
delay = max(_EPSILON, delay)
self.timer = self.callLater(delay, self.tick)
self.nextEventAt = when
|
If the service is currently running, schedule a tick to happen no
later than C{when}.
@param when: The time at which to tick.
@type when: L{epsilon.extime.Time}
@param now: The current time.
@type now: L{epsilon.extime.Time}
|
entailment
|
def _transientSchedule(self, when, now):
"""
If this service's store is attached to its parent, ask the parent to
schedule this substore to tick at the given time.
@param when: The time at which to tick.
@type when: L{epsilon.extime.Time}
@param now: Present for signature compatibility with
L{_SiteScheduler._transientSchedule}, but ignored otherwise.
"""
if self.store.parent is not None:
subStore = self.store.parent.getItemByID(self.store.idInParent)
hook = self.store.parent.findOrCreate(
_SubSchedulerParentHook,
subStore=subStore)
hook._schedule(when)
|
If this service's store is attached to its parent, ask the parent to
schedule this substore to tick at the given time.
@param when: The time at which to tick.
@type when: L{epsilon.extime.Time}
@param now: Present for signature compatibility with
L{_SiteScheduler._transientSchedule}, but ignored otherwise.
|
entailment
|
def migrateDown(self):
"""
Remove the components in the site store for this SubScheduler.
"""
subStore = self.store.parent.getItemByID(self.store.idInParent)
ssph = self.store.parent.findUnique(
_SubSchedulerParentHook,
_SubSchedulerParentHook.subStore == subStore,
default=None)
if ssph is not None:
te = self.store.parent.findUnique(TimedEvent,
TimedEvent.runnable == ssph,
default=None)
if te is not None:
te.deleteFromStore()
ssph.deleteFromStore()
|
Remove the components in the site store for this SubScheduler.
|
entailment
|
def migrateUp(self):
"""
Recreate the hooks in the site store to trigger this SubScheduler.
"""
te = self.store.findFirst(TimedEvent, sort=TimedEvent.time.descending)
if te is not None:
self._transientSchedule(te.time, None)
|
Recreate the hooks in the site store to trigger this SubScheduler.
|
entailment
|
def activate(self):
"""
Whenever L{Scheduler} or L{SubScheduler} is created, either newly or
when loaded from a database, emit a deprecation warning referring
people to L{IScheduler}.
"""
# This is unfortunate. Perhaps it is the best thing which works (it is
# the first I found). -exarkun
if '_axiom_memory_dummy' in vars(self):
stacklevel = 7
else:
stacklevel = 5
warnings.warn(
self.__class__.__name__ + " is deprecated since Axiom 0.5.32. "
"Just adapt stores to IScheduler.",
category=PendingDeprecationWarning,
stacklevel=stacklevel)
|
Whenever L{Scheduler} or L{SubScheduler} is created, either newly or
when loaded from a database, emit a deprecation warning referring
people to L{IScheduler}.
|
entailment
|
def _schedule(self, when):
"""
Ensure that this hook is scheduled to run at or before C{when}.
"""
sched = IScheduler(self.store)
for scheduledAt in sched.scheduledTimes(self):
if when < scheduledAt:
sched.reschedule(self, scheduledAt, when)
break
else:
sched.schedule(self, when)
|
Ensure that this hook is scheduled to run at or before C{when}.
|
entailment
|
def build_graph(path, term_depth=1000, skim_depth=10,
d_weights=False, **kwargs):
"""
Tokenize a text, index a term matrix, and build out a graph.
Args:
path (str): The file path.
term_depth (int): Consider the N most frequent terms.
skim_depth (int): Connect each word to the N closest siblings.
d_weights (bool): If true, give "close" nodes low weights.
Returns:
Skimmer: The indexed graph.
"""
# Tokenize text.
click.echo('\nTokenizing text...')
t = Text.from_file(path)
click.echo('Extracted %d tokens' % len(t.tokens))
m = Matrix()
# Index the term matrix.
click.echo('\nIndexing terms:')
m.index(t, t.most_frequent_terms(term_depth), **kwargs)
g = Skimmer()
# Construct the network.
click.echo('\nGenerating graph:')
g.build(t, m, skim_depth, d_weights)
return g
|
Tokenize a text, index a term matrix, and build out a graph.
Args:
path (str): The file path.
term_depth (int): Consider the N most frequent terms.
skim_depth (int): Connect each word to the N closest siblings.
d_weights (bool): If true, give "close" nodes low weights.
Returns:
Skimmer: The indexed graph.
|
entailment
|
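A usage sketch for build_graph (the path is a placeholder):

g = build_graph('war-and-peace.txt', term_depth=500, skim_depth=5)
g.draw_spring()  # visualize the result; see draw_spring below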
def draw_spring(self, **kwargs):
"""
Render a spring layout.
"""
nx.draw_spring(
self.graph,
with_labels=True,
font_size=10,
edge_color='#dddddd',
node_size=0,
**kwargs
)
plt.show()
|
Render a spring layout.
|
entailment
|
def build(self, text, matrix, skim_depth=10, d_weights=False):
"""
1. For each term in the passed matrix, score its KDE similarity with
all other indexed terms.
2. With the ordered stack of similarities in hand, skim off the top X
pairs and add them as edges.
Args:
text (Text): The source text instance.
matrix (Matrix): An indexed term matrix.
skim_depth (int): The number of siblings for each term.
d_weights (bool): If true, give "close" words low edge weights.
"""
for anchor in bar(matrix.keys):
n1 = text.unstem(anchor)
# Heaviest pair scores:
pairs = matrix.anchored_pairs(anchor).items()
for term, weight in list(pairs)[:skim_depth]:
# If edges represent distance, use the complement of the raw
# score, so that similar words are connected by "short" edges.
if d_weights: weight = 1-weight
n2 = text.unstem(term)
# NetworkX does not handle numpy types when writing graphml,
# so we cast the weight to a regular float.
self.graph.add_edge(n1, n2, weight=float(weight))
|
1. For each term in the passed matrix, score its KDE similarity with
all other indexed terms.
2. With the ordered stack of similarities in hand, skim off the top X
pairs and add them as edges.
Args:
text (Text): The source text instance.
matrix (Matrix): An indexed term matrix.
skim_depth (int): The number of siblings for each term.
d_weights (bool): If true, give "close" words low edge weights.
|
entailment
|
def get_settings(self, section=None, defaults=None):
"""
Gets a named section from the configuration source.
:param section: a :class:`str` representing the section you want to
retrieve from the configuration source. If ``None`` this will
fallback to the :attr:`plaster.PlasterURL.fragment`.
:param defaults: a :class:`dict` that will get passed to
:class:`configparser.ConfigParser` and will populate the
``DEFAULT`` section.
:return: A :class:`plaster_pastedeploy.ConfigDict` of key/value pairs.
"""
# This is a partial reimplementation of
# ``paste.deploy.loadwsgi.ConfigLoader:get_context`` which supports
# "set" and "get" options and filters out any other globals
section = self._maybe_get_default_name(section)
if self.filepath is None:
return {}
parser = self._get_parser(defaults)
defaults = parser.defaults()
try:
raw_items = parser.items(section)
except NoSectionError:
return {}
local_conf = OrderedDict()
get_from_globals = {}
for option, value in raw_items:
if option.startswith("set "):
name = option[4:].strip()
defaults[name] = value
elif option.startswith("get "):
name = option[4:].strip()
get_from_globals[name] = value
# insert a value into local_conf to preserve the order
local_conf[name] = None
else:
# annoyingly pastedeploy filters out all defaults unless
# "get foo" is used to pull it in
if option in defaults:
continue
local_conf[option] = value
for option, global_option in get_from_globals.items():
local_conf[option] = defaults[global_option]
return ConfigDict(local_conf, defaults, self)
|
Gets a named section from the configuration source.
:param section: a :class:`str` representing the section you want to
retrieve from the configuration source. If ``None`` this will
fallback to the :attr:`plaster.PlasterURL.fragment`.
:param defaults: a :class:`dict` that will get passed to
:class:`configparser.ConfigParser` and will populate the
``DEFAULT`` section.
:return: A :class:`plaster_pastedeploy.ConfigDict` of key/value pairs.
|
entailment
|
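A sketch of the "set"/"get" semantics handled above, against a hypothetical INI file (names and values are made up):

# development.ini:
#   [DEFAULT]
#   domain = example.com
#   [app:main]
#   set debug = false
#   get host = domain
#   port = 8080
import plaster

loader = plaster.get_loader('development.ini', protocols=['wsgi'])
settings = loader.get_settings('app:main')
# settings == {'host': 'example.com', 'port': '8080'}: "set debug"
# lands in the defaults, "get host" pulls 'domain' in from the
# globals, and plain globals such as 'domain' itself are filtered out.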
def get_wsgi_app(self, name=None, defaults=None):
"""
Reads the configuration source and finds and loads a WSGI
application defined by the entry with name ``name`` per the
PasteDeploy configuration format and loading mechanism.
:param name: The named WSGI app to find, load and return. Defaults to
``None`` which becomes ``main`` inside
:func:`paste.deploy.loadapp`.
:param defaults: The ``global_conf`` that will be used during app
instantiation.
:return: A WSGI application.
"""
name = self._maybe_get_default_name(name)
defaults = self._get_defaults(defaults)
return loadapp(
self.pastedeploy_spec,
name=name,
relative_to=self.relative_to,
global_conf=defaults,
)
|
Reads the configuration source and finds and loads a WSGI
application defined by the entry with name ``name`` per the
PasteDeploy configuration format and loading mechanism.
:param name: The named WSGI app to find, load and return. Defaults to
``None`` which becomes ``main`` inside
:func:`paste.deploy.loadapp`.
:param defaults: The ``global_conf`` that will be used during app
instantiation.
:return: A WSGI application.
|
entailment
|
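A usage sketch (the URI is hypothetical); the fragment after '#' supplies the name via _maybe_get_default_name, falling back to 'main' inside loadapp:

import plaster

loader = plaster.get_loader('development.ini#main', protocols=['wsgi'])
app = loader.get_wsgi_app()  # loads the [app:main] section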
def get_wsgi_server(self, name=None, defaults=None):
"""
Reads the configuration source and finds and loads a WSGI server
defined by the server entry with the name ``name`` per the PasteDeploy
configuration format and loading mechanism.
:param name: The named WSGI server to find, load and return. Defaults
to ``None`` which becomes ``main`` inside
:func:`paste.deploy.loadserver`.
:param defaults: The ``global_conf`` that will be used during server
instantiation.
:return: A WSGI server runner callable which accepts a WSGI app.
"""
name = self._maybe_get_default_name(name)
defaults = self._get_defaults(defaults)
return loadserver(
self.pastedeploy_spec,
name=name,
relative_to=self.relative_to,
global_conf=defaults,
)
|
Reads the configuration source and finds and loads a WSGI server
defined by the server entry with the name ``name`` per the PasteDeploy
configuration format and loading mechanism.
:param name: The named WSGI server to find, load and return. Defaults
to ``None`` which becomes ``main`` inside
:func:`paste.deploy.loadserver`.
:param defaults: The ``global_conf`` that will be used during server
instantiation.
:return: A WSGI server runner callable which accepts a WSGI app.
|
entailment
|
def get_wsgi_filter(self, name=None, defaults=None):
"""Reads the configuration soruce and finds and loads a WSGI filter
defined by the filter entry with the name ``name`` per the PasteDeploy
configuration format and loading mechanism.
:param name: The named WSGI filter to find, load and return. Defaults
to ``None`` which becomes ``main`` inside
:func:`paste.deploy.loadfilter`.
:param defaults: The ``global_conf`` that will be used during filter
instantiation.
:return: A callable that can filter a WSGI application.
"""
name = self._maybe_get_default_name(name)
defaults = self._get_defaults(defaults)
return loadfilter(
self.pastedeploy_spec,
name=name,
relative_to=self.relative_to,
global_conf=defaults,
)
|
Reads the configuration source and finds and loads a WSGI filter
defined by the filter entry with the name ``name`` per the PasteDeploy
configuration format and loading mechanism.
:param name: The named WSGI filter to find, load and return. Defaults
to ``None`` which becomes ``main`` inside
:func:`paste.deploy.loadfilter`.
:param defaults: The ``global_conf`` that will be used during filter
instantiation.
:return: A callable that can filter a WSGI application.
|
entailment
|
def get_wsgi_app_settings(self, name=None, defaults=None):
"""
Return an :class:`collections.OrderedDict` representing the
application config for a WSGI application named ``name`` in the
PasteDeploy config file specified by ``self.uri``.
``defaults``, if passed, should be a dictionary used as variable
assignments like ``{'http_port': 8080}``. This is useful if e.g.
``%(http_port)s`` is used in the config file.
If the ``name`` is None, this will attempt to parse the name from
the ``config_uri`` string expecting the format ``inifile#name``.
If no name is found, the name will default to "main".
:param name: The named WSGI app for which to find the settings.
Defaults to ``None`` which becomes ``main``
inside :func:`paste.deploy.loadapp`.
:param defaults: The ``global_conf`` that will be used during settings
generation.
:return: A :class:`plaster_pastedeploy.ConfigDict` of key/value pairs.
"""
name = self._maybe_get_default_name(name)
defaults = self._get_defaults(defaults)
conf = appconfig(
self.pastedeploy_spec,
name=name,
relative_to=self.relative_to,
global_conf=defaults,
)
return ConfigDict(conf.local_conf, conf.global_conf, self)
|
Return an :class:`collections.OrderedDict` representing the
application config for a WSGI application named ``name`` in the
PasteDeploy config file specified by ``self.uri``.
``defaults``, if passed, should be a dictionary used as variable
assignments like ``{'http_port': 8080}``. This is useful if e.g.
``%(http_port)s`` is used in the config file.
If the ``name`` is None, this will attempt to parse the name from
the ``config_uri`` string expecting the format ``inifile#name``.
If no name is found, the name will default to "main".
:param name: The named WSGI app for which to find the settings.
Defaults to ``None`` which becomes ``main``
inside :func:`paste.deploy.loadapp`.
:param defaults: The ``global_conf`` that will be used during settings
generation.
:return: A :class:`plaster_pastedeploy.ConfigDict` of key/value pairs.
|
entailment
|
def setup_logging(self, defaults=None):
"""
Set up logging via :func:`logging.config.fileConfig`.
Defaults are specified for the special ``__file__`` and ``here``
variables, similar to PasteDeploy config loading. Extra defaults can
optionally be specified as a dict in ``defaults``.
:param defaults: The defaults that will be used when passed to
:func:`logging.config.fileConfig`.
:return: ``None``.
"""
if "loggers" in self.get_sections():
defaults = self._get_defaults(defaults)
fileConfig(self.uri.path, defaults, disable_existing_loggers=False)
else:
logging.basicConfig()
|
Set up logging via :func:`logging.config.fileConfig`.
Defaults are specified for the special ``__file__`` and ``here``
variables, similar to PasteDeploy config loading. Extra defaults can
optionally be specified as a dict in ``defaults``.
:param defaults: The defaults that will be used when passed to
:func:`logging.config.fileConfig`.
:return: ``None``.
|
entailment
|
def _maybe_get_default_name(self, name):
"""Checks a name and determines whether to use the default name.
:param name: The current name to check.
:return: Either None or a :class:`str` representing the name.
"""
if name is None and self.uri.fragment:
name = self.uri.fragment
return name
|
Checks a name and determines whether to use the default name.
:param name: The current name to check.
:return: Either None or a :class:`str` representing the name.
|
entailment
|
def set(self, hue):
"""Set cursor position on the color corresponding to the hue value."""
x = hue / 360. * self.winfo_width()
self.coords('cursor', x, 0, x, self.winfo_height())
self._variable.set(hue)
|
Set cursor position on the color corresponding to the hue value.
|
entailment
|
def makeService(cls, options):
"""
Create an L{IService} for the database specified by the given
configuration.
"""
from axiom.store import Store
jm = options['journal-mode']
if jm is not None:
jm = jm.decode('ascii')
store = Store(options['dbdir'], debug=options['debug'], journalMode=jm)
service = IService(store)
_CheckSystemVersion(store).setServiceParent(service)
return service
|
Create an L{IService} for the database specified by the given
configuration.
|
entailment
|
def pageNames(matching=False,workbooks=True,graphs=True):
"""
Returns the names of everything (books, notes, graphs, etc.) in the project.
Args:
matching (str, optional): if given, only return names with this string in it
workbooks (bool): if True, return workbooks
graphs (bool): if True, return graphs
Returns:
A list of the names of what you requested
"""
# first collect the pages we want
pages=[]
if workbooks:
pages.extend(PyOrigin.WorksheetPages())
if graphs:
pages.extend(PyOrigin.GraphPages())
# then turn them into a list of strings
pages = [x.GetName() for x in pages]
# do our string matching if it's needed
if matching:
pages=[x for x in pages if matching in x]
return pages
|
Returns the names of everything (books, notes, graphs, etc.) in the project.
Args:
matching (str, optional): if given, only return names with this string in it
workbooks (bool): if True, return workbooks
graphs (bool): if True, return graphs
Returns:
A list of the names of what you requested
|
entailment
|
def getPageType(name,number=False):
"""Returns the type of the page with that name.
If that name doesn't exist, None is returned.
Args:
name (str): name of the page to get the folder from
number (bool): if True, return numbers (i.e., a graph will be 3)
if False, return words where appropriate (i.e, "graph")
Returns:
string of the type of object the page is
"""
if not name in pageNames():
return None
pageType=PyOrigin.Pages(name).GetType()
if number:
return str(pageType)
if pageType==1:
return "matrix"
if pageType==2:
return "book"
if pageType==3:
return "graph"
if pageType==4:
return "layout"
if pageType==5:
return "notes"
|
Returns the type of the page with that name.
If that name doesn't exist, None is returned.
Args:
name (str): name of the page to get the folder from
number (bool): if True, return numbers (i.e., a graph will be 3)
if False, return words where appropriate (i.e, "graph")
Returns:
string of the type of object the page is
|
entailment
|
def listEverything(matching=False):
"""Prints every page in the project to the console.
Args:
matching (str, optional): if given, only return names with this string in it
"""
pages=pageNames()
if matching:
pages=[x for x in pages if matching in x]
for i,page in enumerate(pages):
pages[i]="%s%s (%s)"%(pageFolder(page),page,getPageType(page))
print("\n".join(sorted(pages)))
|
Prints every page in the project to the console.
Args:
matching (str, optional): if given, only return names with this string in it
|
entailment
|
def sheetNames(book=None):
"""return sheet names of a book.
Args:
book (str, optional): If a book is given, pull names from
that book. Otherwise, try the active one
Returns:
list of sheet names (typical case).
None if book has no sheets.
False if book doesn't exist.
"""
if book:
if not book.lower() in [x.lower() for x in bookNames()]:
return False
else:
book=activeBook()
if not book:
return False
poBook=PyOrigin.WorksheetPages(book)
if not len(poBook):
return None
return [x.GetName() for x in poBook.Layers()]
|
return sheet names of a book.
Args:
book (str, optional): If a book is given, pull names from
that book. Otherwise, try the active one
Returns:
list of sheet names (typical case).
None if book has no sheets.
False if book doesn't exist.
|
entailment
|
def getSheet(book=None,sheet=None):
"""returns the pyorigin object for a sheet."""
# figure out what book to use
if book and not book.lower() in [x.lower() for x in bookNames()]:
print("book %s doesn't exist"%book)
return
if book is None:
book=activeBook().lower()
if book is None:
print("no book given or selected")
return
# figure out what sheet to use
if sheet and not sheet.lower() in [x.lower() for x in sheetNames(book)]:
print("sheet %s doesn't exist"%sheet)
return
if sheet is None:
sheet=activeSheet().lower()
if sheet is None:
return("no sheet given or selected")
print
# by now, we know the book/sheet exists and can be found
for poSheet in PyOrigin.WorksheetPages(book).Layers():
if poSheet.GetName().lower()==sheet.lower():
return poSheet
return False
|
returns the pyorigin object for a sheet.
|
entailment
|
def sheetDelete(book=None,sheet=None):
"""
Delete a sheet from a book. If either isn't given, use the active one.
"""
if book is None:
book=activeBook()
if sheet is None:
sheet=activeSheet()
if sheet in sheetNames(book):
PyOrigin.WorksheetPages(book).Layers(sheetNames(book).index(sheet)).Destroy()
|
Delete a sheet from a book. If either isn't given, use the active one.
|
entailment
|
def sheetDeleteEmpty(bookName=None):
"""Delete all sheets which contain no data"""
if bookName is None:
bookName = activeBook()
if not bookName.lower() in [x.lower() for x in bookNames()]:
print("can't clean up a book that doesn't exist:",bookName)
return
poBook=PyOrigin.WorksheetPages(bookName)
namesToKill=[]
for i,poSheet in enumerate([poSheet for poSheet in poBook.Layers()]):
poFirstCol=poSheet.Columns(0)
if poFirstCol.GetLongName()=="" and poFirstCol.GetData()==[]:
namesToKill.append(poSheet.GetName())
for sheetName in namesToKill:
print("deleting empty sheet",sheetName)
sheetDelete(bookName,sheetName)
|
Delete all sheets which contain no data
|
entailment
|
def pickle_load(fname):
"""return the contents of a pickle file"""
assert type(fname) is str and os.path.exists(fname)
print("loaded",fname)
return pickle.load(open(fname,"rb"))
|
return the contents of a pickle file
|
entailment
|
def pickle_save(thing,fname=None):
"""save something to a pickle file"""
if fname is None:
fname=os.path.expanduser("~")+"/%d.pkl"%time.time()
assert type(fname) is str and os.path.isdir(os.path.dirname(fname))
pickle.dump(thing, open(fname,"wb"),pickle.HIGHEST_PROTOCOL)
print("saved",fname)
|
save something to a pickle file
|
entailment
|
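A roundtrip sketch using the two helpers above (the file name is a placeholder):

import os

data = {"trace": [1, 2, 3]}
fname = os.path.join(os.path.expanduser("~"), "demo.pkl")
pickle_save(data, fname)
assert pickle_load(fname) == data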
def sheetToHTML(sheet):
"""
Put 2d numpy data into a temporary HTML file.
This is a hack, copy/pasted from an earlier version of this software.
It is very messy, but works great! Good enough for me.
"""
assert "SHEET" in str(type(sheet))
#data,names=None,units=None,bookName=None,sheetName=None,xCol=None
#sheet=OR.SHEET()
data=sheet.data
names=sheet.colDesc
units=sheet.colUnits
bookName=sheet.bookName
sheetName=sheet.sheetName
def htmlListToTR(l,trClass=None,tdClass=None,td1Class=None):
"""
turns a list into a <tr><td>something</td></tr>
call this when generating HTML tables dynamically.
"""
html="<tr>"
for item in l:
html+="<td>%s</td>"%item
html+="</tr>"
if trClass:
html=html.replace("<tr>",'<tr class="%s">'%trClass)
if td1Class:
html=html.replace("<td>",'<td class="%s">'%td1Class,1)
if tdClass:
html=html.replace("<td>",'<td class="%s">'%tdClass)
return html
htmlFname = os.path.expanduser("~")+"/WKS-%s.%s.html"%(bookName,sheetName)
html="""<body>
<style>
body {
background-color: #ababab;
padding:20px;
}
table {
font-size:12px;
border-spacing: 0;
border-collapse: collapse;
}
.name {background-color:#fafac8;text-align:center;}
.units {background-color:#fafac8;text-align:center;}
.data0 {background-color:#FFFFFF;font-family: monospace;text-align:center;}
.data1 {background-color:#FAFAFA;font-family: monospace;text-align:center;}
.labelRow {background-color:#e0dfe4; text-align:right;border:1px solid #000000;}
.labelCol {background-color:#e0dfe4; text-align:center;border:1px solid #000000; padding-left: 20px; padding-right: 20px;}
td {
border:1px solid #c0c0c0; padding:5px;
font-family: Arial, Helvetica, sans-serif;
}
</style>
<html>"""
html+="<h1>FauxRigin</h1>"
if bookName or sheetName:
html+='<code><b>%s / %s</b></code><br><br>'%(bookName,sheetName)
html+="<table>"
colNames=['']
for i in range(len(units)):
shortName=chr(i%26+ord('A'))
if i>=26:
shortName=chr(int(i/26-1)+ord('A'))+shortName
label="%s(%s)"%(shortName,"X" if sheet.colTypes[i]==3 else "Y")
colNames.append(label)
html+=htmlListToTR(colNames,'labelCol','labelCol')
html+=htmlListToTR(['Long Name']+list(names),'name',td1Class='labelRow')
html+=htmlListToTR(['Units']+list(units),'units',td1Class='labelRow')
cutOff=False
for y in range(len(data)):
html+=htmlListToTR([y+1]+list(data[y]),trClass='data%d'%(y%2),td1Class='labelRow')
if y>=200:
cutOff=True
break
html+="</table>"
html=html.replace(">nan<",">--<")
html=html.replace(">None<","><")
if cutOff:
html+="<h3>... showing only %d of %d rows ...</h3>"%(y,len(data))
html+="</body></html>"
with open(htmlFname,'w') as f:
f.write(html)
import webbrowser
webbrowser.open(htmlFname)
return
|
Put 2d numpy data into a temporary HTML file.
This is a hack, copy/pasted from an earlier version of this software.
It is very messy, but works great! Good enough for me.
|
entailment
|
def getCodeBlocks():
"""return a dict with the code for each function"""
raw=open("examples.py").read()
d={}
for block in raw.split("if __name__")[0].split("\ndef "):
title=block.split("\n")[0].split("(")[0]
if not title.startswith("demo_"):
continue
code=[x[4:] for x in block.split("\n")[1:] if x.startswith(" ")]
d[title]="\n".join(code).strip()
return d
|
return a dict with the code for each function
|
entailment
|
def getOutputBlocks():
"""return a dict with the output of each function"""
raw=open("output.txt").read()
d={}
for block in raw.split("\n####### ")[1:]:
title=block.split("\n")[0].split("(")[0]
block=block.split("\n",1)[1].strip()
d[title]=block.split("\nfinished in ")[0]
return d
|
return a dict with the output of each function
|
entailment
|
def decodeCommandLine(self, cmdline):
"""Turn a byte string from the command line into a unicode string.
"""
codec = getattr(sys.stdin, 'encoding', None) or sys.getdefaultencoding()
return unicode(cmdline, codec)
|
Turn a byte string from the command line into a unicode string.
|
entailment
|
def tokenize(text):
"""
Yield tokens.
Args:
text (str): The original text.
Yields:
dict: The next token.
"""
stem = PorterStemmer().stem
tokens = re.finditer('[a-z]+', text.lower())
for offset, match in enumerate(tokens):
# Get the raw token.
unstemmed = match.group(0)
yield { # Emit the token.
'stemmed': stem(unstemmed),
'unstemmed': unstemmed,
'offset': offset
}
|
Yield tokens.
Args:
text (str): The original text.
Yields:
dict: The next token.
|
entailment
|
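A quick sketch of the emitted tokens (the stems assume NLTK's PorterStemmer behavior):

for token in tokenize("The ponies were running"):
    print(token)
# {'stemmed': 'the', 'unstemmed': 'the', 'offset': 0}
# {'stemmed': 'poni', 'unstemmed': 'ponies', 'offset': 1}
# {'stemmed': 'were', 'unstemmed': 'were', 'offset': 2}
# {'stemmed': 'run', 'unstemmed': 'running', 'offset': 3}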
def sort_dict(d, desc=True):
"""
Sort an ordered dictionary by value, descending.
Args:
d (OrderedDict): An ordered dictionary.
desc (bool): If true, sort desc.
Returns:
OrderedDict: The sorted dictionary.
"""
sort = sorted(d.items(), key=lambda x: x[1], reverse=desc)
return OrderedDict(sort)
|
Sort an ordered dictionary by value, descending.
Args:
d (OrderedDict): An ordered dictionary.
desc (bool): If true, sort desc.
Returns:
OrderedDict: The sorted dictionary.
|
entailment
|
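A usage sketch:

from collections import OrderedDict

d = OrderedDict([('a', 1), ('b', 3), ('c', 2)])
print(list(sort_dict(d).items()))               # [('b', 3), ('c', 2), ('a', 1)]
print(list(sort_dict(d, desc=False).items()))   # [('a', 1), ('c', 2), ('b', 3)]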
def window(seq, n=2):
"""
Yield a sliding window over an iterable.
Args:
seq (iter): The sequence.
n (int): The window width.
Yields:
tuple: The next window.
"""
it = iter(seq)
result = tuple(islice(it, n))
if len(result) == n:
yield result
for token in it:
result = result[1:] + (token,)
yield result
|
Yield a sliding window over an iterable.
Args:
seq (iter): The sequence.
n (int): The window width.
Yields:
tuple: The next window.
|
entailment
|
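A usage sketch:

print(list(window([1, 2, 3, 4], n=2)))  # [(1, 2), (2, 3), (3, 4)]
print(list(window('abcd', n=3)))        # [('a', 'b', 'c'), ('b', 'c', 'd')]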
def insertUserStore(siteStore, userStorePath):
"""
Move the SubStore at the indicated location into the given site store's
directory and then hook it up to the site store's authentication database.
@type siteStore: C{Store}
@type userStorePath: C{FilePath}
"""
# The following may, but does not need to be in a transaction, because it
# is merely an attempt to guess a reasonable filesystem name to use for
# this avatar. The user store being operated on is expected to be used
# exclusively by this process.
ls = siteStore.findUnique(LoginSystem)
unattachedSubStore = Store(userStorePath)
for lm in unattachedSubStore.query(LoginMethod,
LoginMethod.account == unattachedSubStore.findUnique(LoginAccount),
sort=LoginMethod.internal.descending):
if ls.accountByAddress(lm.localpart, lm.domain) is None:
localpart, domain = lm.localpart, lm.domain
break
else:
raise AllNamesConflict()
unattachedSubStore.close()
insertLocation = siteStore.newFilePath('account', domain, localpart + '.axiom')
insertParentLoc = insertLocation.parent()
if not insertParentLoc.exists():
insertParentLoc.makedirs()
if insertLocation.exists():
raise DatabaseDirectoryConflict()
userStorePath.moveTo(insertLocation)
ss = SubStore(store=siteStore, storepath=insertLocation)
attachedStore = ss.open()
# migrateUp() manages its own transactions because it interacts with two
# different stores.
attachedStore.findUnique(LoginAccount).migrateUp()
|
Move the SubStore at the indicated location into the given site store's
directory and then hook it up to the site store's authentication database.
@type siteStore: C{Store}
@type userStorePath: C{FilePath}
|
entailment
|
def extractUserStore(userAccount, extractionDestination, legacySiteAuthoritative=True):
"""
Move the SubStore for the given user account out of the given site store
completely. Place the user store's database directory into the given
destination directory.
@type userAccount: C{LoginAccount}
@type extractionDestination: C{FilePath}
@type legacySiteAuthoritative: C{bool}
@param legacySiteAuthoritative: before moving the user store, clear its
authentication information, copy that which is associated with it in the
site store rather than trusting its own. Currently this flag is necessary
(and defaults to true) because things like the ClickChronicle
password-changer gizmo still operate on the site store.
"""
if legacySiteAuthoritative:
# migrateDown() manages its own transactions, since it is copying items
# between two different stores.
userAccount.migrateDown()
av = userAccount.avatars
av.open().close()
def _():
# We're separately deleting several Items from the site store, then
# we're moving some files. If we cannot move the files, we don't want
# to delete the items.
# There is one unaccounted failure mode here: if the destination of the
# move is on a different mount point, the moveTo operation will fall
# back to a non-atomic copy; if all of the copying succeeds, but then
# part of the deletion of the source files fails, we will be left
# without a complete store in this site store's files directory, but
# the account Items will remain. This will cause odd errors on login
# and at other unpredictable times. The database is only one file, so
# we will either remove it all or none of it. Resolving this requires
# manual intervention currently: delete the substore's database
# directory and the account items (LoginAccount and LoginMethods)
# manually.
# However, this failure is extremely unlikely, as it would almost
# certainly indicate a misconfiguration of the permissions on the site
# store's files area. As described above, a failure of the call to
# os.rename(), if the platform's rename is atomic (which it generally
# is assumed to be) will not move any files and will cause a revert of
# the transaction which would have deleted the accompanying items.
av.deleteFromStore()
userAccount.deleteLoginMethods()
userAccount.deleteFromStore()
av.storepath.moveTo(extractionDestination)
userAccount.store.transact(_)
|
Move the SubStore for the given user account out of the given site store
completely. Place the user store's database directory into the given
destination directory.
@type userAccount: C{LoginAccount}
@type extractionDestination: C{FilePath}
@type legacySiteAuthoritative: C{bool}
@param legacySiteAuthoritative: before moving the user store, clear its
authentication information, copy that which is associated with it in the
site store rather than trusting its own. Currently this flag is necessary
(and defaults to true) because things like the ClickChronicle
password-changer gizmo still operate on the site store.
|
entailment
|
def getLoginMethods(store, protocol=None):
"""
Retrieve L{LoginMethod} items from store C{store}, optionally constraining
them by protocol.
"""
if protocol is not None:
comp = OR(LoginMethod.protocol == u'*',
LoginMethod.protocol == protocol)
else:
comp = None
return store.query(LoginMethod, comp)
|
Retrieve L{LoginMethod} items from store C{store}, optionally constraining
them by protocol.
|
entailment
|
def getAccountNames(store, protocol=None):
"""
Retrieve account name information about the given database.
@param store: An Axiom Store representing a user account. It must
have been opened through the store which contains its account
information.
@return: A generator of two-tuples of (username, domain) which
refer to the given store.
"""
return ((meth.localpart, meth.domain) for meth
in getLoginMethods(store, protocol))
|
Retrieve account name information about the given database.
@param store: An Axiom Store representing a user account. It must
have been opened through the store which contains its account
information.
@return: A generator of two-tuples of (username, domain) which
refer to the given store.
|
entailment
|
def getDomainNames(store):
"""
Retrieve a list of all local domain names represented in the given store.
"""
domains = set()
domains.update(store.query(
LoginMethod,
AND(LoginMethod.internal == True,
LoginMethod.domain != None)).getColumn("domain").distinct())
return sorted(domains)
|
Retrieve a list of all local domain names represented in the given store.
|
entailment
|
def migrateDown(self):
"""
Assuming that self.avatars is a SubStore which should contain *only*
the LoginAccount for the user I represent, remove all LoginAccounts and
LoginMethods from that store and copy all methods from the site store
down into it.
"""
ss = self.avatars.open()
def _():
oldAccounts = ss.query(LoginAccount)
oldMethods = ss.query(LoginMethod)
for x in list(oldAccounts) + list(oldMethods):
x.deleteFromStore()
self.cloneInto(ss, ss)
IScheduler(ss).migrateDown()
ss.transact(_)
|
Assuming that self.avatars is a SubStore which should contain *only*
the LoginAccount for the user I represent, remove all LoginAccounts and
LoginMethods from that store and copy all methods from the site store
down into it.
|
entailment
|
def migrateUp(self):
"""
Copy this LoginAccount and all associated LoginMethods from my store
(which is assumed to be a SubStore, most likely a user store) into the
site store which contains it.
"""
siteStore = self.store.parent
def _():
# No convenience method for the following because needing to do it is
# *rare*. It *should* be ugly; 99% of the time if you need to do this
# you're making a mistake. -glyph
siteStoreSubRef = siteStore.getItemByID(self.store.idInParent)
self.cloneInto(siteStore, siteStoreSubRef)
IScheduler(self.store).migrateUp()
siteStore.transact(_)
|
Copy this LoginAccount and all associated LoginMethods from my store
(which is assumed to be a SubStore, most likely a user store) into the
site store which contains it.
|
entailment
|
def cloneInto(self, newStore, avatars):
"""
Create a copy of this LoginAccount and all associated LoginMethods in a different Store.
Return the copied LoginAccount.
"""
la = LoginAccount(store=newStore,
password=self.password,
avatars=avatars,
disabled=self.disabled)
for siteMethod in self.store.query(LoginMethod,
LoginMethod.account == self):
LoginMethod(store=newStore,
localpart=siteMethod.localpart,
domain=siteMethod.domain,
internal=siteMethod.internal,
protocol=siteMethod.protocol,
verified=siteMethod.verified,
account=la)
return la
|
Create a copy of this LoginAccount and all associated LoginMethods in a different Store.
Return the copied LoginAccount.
|
entailment
|
def addLoginMethod(self, localpart, domain, protocol=ANY_PROTOCOL, verified=False, internal=False):
"""
Add a login method to this account, propagating up or down as necessary
to site store or user store to maintain consistency.
"""
# Out takes you west or something
if self.store.parent is None:
# West takes you in
otherStore = self.avatars.open()
peer = otherStore.findUnique(LoginAccount)
else:
# In takes you east
otherStore = self.store.parent
subStoreItem = self.store.parent.getItemByID(self.store.idInParent)
peer = otherStore.findUnique(LoginAccount,
LoginAccount.avatars == subStoreItem)
# Up and down take you home
for store, account in [(otherStore, peer), (self.store, self)]:
store.findOrCreate(LoginMethod,
account=account,
localpart=localpart,
domain=domain,
protocol=protocol,
verified=verified,
internal=internal)
|
Add a login method to this account, propagating up or down as necessary
to site store or user store to maintain consistency.
|
entailment
|
def replacePassword(self, currentPassword, newPassword):
"""
Set this account's password if the current password matches.
@param currentPassword: The password to match against the current one.
@param newPassword: The new password.
@return: A deferred firing when the password has been changed.
@raise BadCredentials: If the current password did not match.
"""
if unicode(currentPassword) != self.password:
return fail(BadCredentials())
return self.setPassword(newPassword)
|
Set this account's password if the current password matches.
@param currentPassword: The password to match against the current one.
@param newPassword: The new password.
@return: A deferred firing when the password has been changed.
@raise BadCredentials: If the current password did not match.
|
entailment
|
def addAccount(self, username, domain, password, avatars=None,
protocol=u'email', disabled=0, internal=False,
verified=True):
"""
Create a user account, add it to this LoginBase, and return it.
This method must be called within a transaction in my store.
@param username: the user's name.
@param domain: the domain part of the user's name [XXX TODO: this
really ought to say something about whether it's a Q2Q domain, a SIP
domain, an HTTP realm, or an email address domain - right now the
assumption is generally that it's an email address domain, but not
always]
@param password: A shared secret.
@param avatars: (Optional). A SubStore which, if passed, will be used
by cred as the target of all adaptations for this user. By default, I
will create a SubStore, and plugins can be installed on that substore
using the powerUp method to provide implementations of cred client
interfaces.
@raise DuplicateUniqueItem: if the 'avatars' argument already contains
a LoginAccount.
@return: an instance of a LoginAccount, with all attributes filled out
as they are passed in, stored in my store.
"""
# unicode(None) == u'None', kids.
if username is not None:
username = unicode(username)
if domain is not None:
domain = unicode(domain)
if password is not None:
password = unicode(password)
if self.accountByAddress(username, domain) is not None:
raise DuplicateUser(username, domain)
if avatars is None:
avatars = self.makeAvatars(domain, username)
subStore = avatars.open()
# create this unconditionally; as the docstring says, we must be run
# within a transaction, so if something goes wrong in the substore
# transaction this item's creation will be reverted...
la = LoginAccount(store=self.store,
password=password,
avatars=avatars,
disabled=disabled)
def createSubStoreAccountObjects():
LoginAccount(store=subStore,
password=password,
disabled=disabled,
avatars=subStore)
la.addLoginMethod(localpart=username,
domain=domain,
protocol=protocol,
internal=internal,
verified=verified)
subStore.transact(createSubStoreAccountObjects)
return la
|
Create a user account, add it to this LoginBase, and return it.
This method must be called within a transaction in my store.
@param username: the user's name.
@param domain: the domain part of the user's name [XXX TODO: this
really ought to say something about whether it's a Q2Q domain, a SIP
domain, an HTTP realm, or an email address domain - right now the
assumption is generally that it's an email address domain, but not
always]
@param password: A shared secret.
@param avatars: (Optional). A SubStore which, if passed, will be used
by cred as the target of all adaptations for this user. By default, I
will create a SubStore, and plugins can be installed on that substore
using the powerUp method to provide implementations of cred client
interfaces.
@raise DuplicateUniqueItem: if the 'avatars' argument already contains
a LoginAccount.
@return: an instance of a LoginAccount, with all attributes filled out
as they are passed in, stored in my store.
|
entailment
|
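A usage sketch (store setup elided; the account values are illustrative). Per the docstring above, the call is wrapped in a transaction on the containing store:

from axiom.userbase import LoginSystem

ls = siteStore.findUnique(LoginSystem)  # siteStore: an open axiom Store
account = siteStore.transact(
    ls.addAccount, u'alice', u'example.com', u'secret')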
def identifySQLError(self, sql, args, e):
"""
Identify an appropriate SQL error object for the given message for the
supported versions of sqlite.
@return: an SQLError
"""
message = e.args[0]
if message.startswith("table") and message.endswith("already exists"):
return errors.TableAlreadyExists(sql, args, e)
return errors.SQLError(sql, args, e)
|
Identify an appropriate SQL error object for the given message for the
supported versions of sqlite.
@return: an SQLError
|
entailment
|
def createCacheRemoveCallback(cacheRef, key, finalizer):
"""
Construct a callable to be used as a weakref callback for cache entries.
The callable will invoke the provided finalizer, as well as removing the
cache entry if the cache still exists and contains an entry for the given
key.
@type cacheRef: L{weakref.ref} to L{FinalizingCache}
@param cacheRef: A weakref to the cache in which the corresponding cache
item was stored.
@param key: The key for which this value is cached.
@type finalizer: callable taking 0 arguments
@param finalizer: A user-provided callable that will be called when the
weakref callback runs.
"""
def remove(reference):
# Weakref callbacks cannot raise exceptions or DOOM ensues
try:
finalizer()
except:
logErrorNoMatterWhat()
try:
cache = cacheRef()
if cache is not None:
if key in cache.data:
if cache.data[key] is reference:
del cache.data[key]
except:
logErrorNoMatterWhat()
return remove
|
Construct a callable to be used as a weakref callback for cache entries.
The callable will invoke the provided finalizer, as well as removing the
cache entry if the cache still exists and contains an entry for the given
key.
@type cacheRef: L{weakref.ref} to L{FinalizingCache}
@param cacheRef: A weakref to the cache in which the corresponding cache
item was stored.
@param key: The key for which this value is cached.
@type finalizer: callable taking 0 arguments
@param finalizer: A user-provided callable that will be called when the
weakref callback runs.
|
entailment
|
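A minimal sketch of the plain-stdlib weakref-callback mechanism this relies on:

import weakref

class Entry(object):
    pass

def on_break(ref):
    print("weakref broken: %r" % ref)

e = Entry()
r = weakref.ref(e, on_break)  # like self._ref(value, callback) in cache() below
del e  # on CPython the callback fires here; on PyPy it may be deferred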
def cache(self, key, value):
"""
Add an entry to the cache.
A weakref to the value is stored, rather than a direct reference. The
value must have a C{__finalizer__} method that returns a callable which
will be invoked when the weakref is broken.
@param key: The key identifying the cache entry.
@param value: The value for the cache entry.
"""
fin = value.__finalizer__()
try:
# It's okay if there's already a cache entry for this key as long
# as the weakref has already been broken. See the comment in
# get() for an explanation of why this might happen.
if self.data[key]() is not None:
raise CacheInconsistency(
"Duplicate cache key: %r %r %r" % (
key, value, self.data[key]))
except KeyError:
pass
callback = createCacheRemoveCallback(self._ref(self), key, fin)
self.data[key] = self._ref(value, callback)
return value
|
Add an entry to the cache.
A weakref to the value is stored, rather than a direct reference. The
value must have a C{__finalizer__} method that returns a callable which
will be invoked when the weakref is broken.
@param key: The key identifying the cache entry.
@param value: The value for the cache entry.
|
entailment
|
def uncache(self, key, value):
"""
Remove a key from the cache.
As a sanity check, if the specified key is present in the cache, it
must have the given value.
@param key: The key to remove.
@param value: The expected value for the key.
"""
try:
assert self.get(key) is value
del self.data[key]
except KeyError:
# If the entry has already been removed from the cache, this will
# result in KeyError which we ignore. If the entry is still in the
# cache, but the weakref has been broken, this will result in
# CacheFault (a KeyError subclass) which we also ignore. See the
# comment in get() for an explanation of why this might happen.
pass
|
Remove a key from the cache.
As a sanity check, if the specified key is present in the cache, it
must have the given value.
@param key: The key to remove.
@param value: The expected value for the key.
|
entailment
|
def get(self, key):
"""
Get an entry from the cache by key.
@raise KeyError: if the given key is not present in the cache.
@raise CacheFault: (a L{KeyError} subclass) if the given key is present
in the cache, but the value it points to is gone.
"""
o = self.data[key]()
if o is None:
# On CPython, the weakref callback will always(?) run before any
# other code has a chance to observe that the weakref is broken;
# and since the callback removes the item from the dict, this
# branch of code should never run. However, on PyPy (and possibly
# other Python implementations), the weakref callback does not run
# immediately, thus we may be able to observe this intermediate
# state. Should this occur, we remove the dict item ourselves,
# and raise CacheFault (which is a KeyError subclass).
del self.data[key]
raise CacheFault(
"FinalizingCache has %r but its value is no more." % (key,))
log.msg(interface=iaxiom.IStatEvent, stat_cache_hits=1, key=key)
return o
|
Get an entry from the cache by key.
@raise KeyError: if the given key is not present in the cache.
@raise CacheFault: (a L{KeyError} subclass) if the given key is present
in the cache, but the value it points to is gone.
|
entailment
|
def parse_question_xml(root):
"""
Parse <question> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <question> node in the tree.
Returns:
dict, a deserialized representation of a question. E.g.
{
'text': 'What is the answer to life, the universe and everything?',
'image_url': '',
'image_position': 'below',
'image_show_fields': 0,
'image_alt': 'description'
}
Raises:
ValidationError: The XML definition is invalid.
"""
question_dict = dict()
question_prompt_el = root.find('text')
if question_prompt_el is not None:
question_dict['text'] = _safe_get_text(question_prompt_el)
else:
raise ValidationError(_('Question must have text element.'))
# optional image element
question_dict.update(parse_image_xml(root))
return question_dict
|
Parse <question> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <question> node in the tree.
Returns:
dict, a deserialized representation of a question. E.g.
{
'text': 'What is the answer to life, the universe and everything?',
'image_url': '',
'image_position': 'below',
'image_show_fields': 0,
'image_alt': 'description'
}
Raises:
ValidationError: The XML definition is invalid.
|
entailment
|
def parse_options_xml(root):
"""
Parse <options> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <options> node in the tree.
Returns:
a list of deserialized representation of options. E.g.
[{
'text': 'Option 1',
'image_url': '',
'image_position': 'below',
'image_show_fields': 0,
'image_alt': ''
},
{....
}]
Raises:
ValidationError: The XML definition is invalid.
"""
options = []
correct_option = None
rationale = None
for option_el in root.findall('option'):
option_dict = dict()
option_prompt_el = option_el.find('text')
if option_prompt_el is not None:
option_dict['text'] = _safe_get_text(option_prompt_el)
else:
raise ValidationError(_('Option must have text element.'))
# optional image element
option_dict.update(parse_image_xml(option_el))
if 'correct' in option_el.attrib and _parse_boolean(option_el.attrib['correct']):
if correct_option is None:
correct_option = len(options)
rationale_el = option_el.find('rationale')
if rationale_el is not None:
rationale = {'text': _safe_get_text(rationale_el)}
else:
raise ValidationError(_('Missing rationale for correct answer.'))
else:
raise ValidationError(_('Only one correct answer can be defined in options.'))
options.append(option_dict)
if correct_option is None or rationale is None:
raise ValidationError(_('Correct answer and rationale are required and have to be defined in one of the options.'))
return options, correct_option, rationale
|
Parse <options> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <options> node in the tree.
Returns:
a list of deserialized representation of options. E.g.
[{
'text': 'Option 1',
'image_url': '',
'image_position': 'below',
'image_show_fields': 0,
'image_alt': ''
},
{....
}]
Raises:
ValidationError: The XML definition is invalid.
|
entailment
|
def parse_seeds_xml(root):
"""
Parse <seeds> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <seeds> node in the tree.
Returns:
a list of deserialized representation of seeds. E.g.
[{
'answer': 0, # zero-based option index (the XML 'option' attribute starts at one)
'rationale': 'This is a seeded answer',
},
{....
}]
Raises:
ValidationError: The XML definition is invalid.
"""
seeds = []
for seed_el in root.findall('seed'):
seed_dict = dict()
seed_dict['rationale'] = _safe_get_text(seed_el)
if 'option' in seed_el.attrib:
seed_dict['answer'] = int(seed_el.attrib['option']) - 1
else:
raise ValidationError(_('Seed element must have an option attribute.'))
seeds.append(seed_dict)
return seeds
|
Parse <seeds> element in the UBCPI XBlock's content XML.
Args:
root (lxml.etree.Element): The root of the <seeds> node in the tree.
Returns:
a list of deserialized representation of seeds. E.g.
[{
'answer': 0, # zero-based option index (the XML 'option' attribute starts at one)
'rationale': 'This is a seeded answer',
},
{....
}]
Raises:
ValidationError: The XML definition is invalid.
|
entailment
|
def parse_from_xml(root):
"""
Update the UBCPI XBlock's content from an XML definition.
We need to be strict about the XML we accept, to avoid setting
the XBlock to an invalid state (which will then be persisted).
Args:
root (lxml.etree.Element): The XML definition of the XBlock's content.
Returns:
A dictionary of all of the XBlock's content.
Raises:
UpdateFromXmlError: The XML definition is invalid
"""
# Check that the root has the correct tag
if root.tag != 'ubcpi':
raise UpdateFromXmlError(_('Every peer instruction tool must contain an "ubcpi" element.'))
display_name_el = root.find('display_name')
if display_name_el is None:
raise UpdateFromXmlError(_('Every peer instruction tool must contain a "display_name" element.'))
else:
display_name = _safe_get_text(display_name_el)
rationale_size_min = int(root.attrib['rationale_size_min']) if 'rationale_size_min' in root.attrib else None
rationale_size_max = int(root.attrib['rationale_size_max']) if 'rationale_size_max' in root.attrib else None
question_el = root.find('question')
if question_el is None:
raise UpdateFromXmlError(_('Every peer instruction tool must contain a "question" element.'))
else:
question = parse_question_xml(question_el)
options_el = root.find('options')
if options_el is None:
raise UpdateFromXmlError(_('Every peer instruction tool must contain an "options" element.'))
else:
options, correct_answer, correct_rationale = parse_options_xml(options_el)
seeds_el = root.find('seeds')
if seeds_el is None:
raise UpdateFromXmlError(_('Every peer instruction tool must contain a "seeds" element.'))
else:
seeds = parse_seeds_xml(seeds_el)
algo = unicode(root.attrib['algorithm']) if 'algorithm' in root.attrib else None
num_responses = unicode(root.attrib['num_responses']) if 'num_responses' in root.attrib else None
return {
'display_name': display_name,
'question_text': question,
'options': options,
'rationale_size': {'min': rationale_size_min, 'max': rationale_size_max},
'correct_answer': correct_answer,
'correct_rationale': correct_rationale,
'seeds': seeds,
'algo': {"name": algo, 'num_responses': num_responses}
}
|
Update the UBCPI XBlock's content from an XML definition.
We need to be strict about the XML we accept, to avoid setting
the XBlock to an invalid state (which will then be persisted).
Args:
root (lxml.etree.Element): The XML definition of the XBlock's content.
Returns:
A dictionary of all of the XBlock's content.
Raises:
UpdateFromXmlError: The XML definition is invalid
|
entailment
|
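A hypothetical minimal document accepted by parse_from_xml, assembled from the element and attribute names used above (the content values are made up; correct="True" matches what serialize_options writes, the question omits the optional image element, and this assumes parse_image_xml and _parse_boolean behave as the optional-image comments suggest):

from lxml import etree

UBCPI_XML = (
    '<ubcpi rationale_size_min="10" rationale_size_max="500" algorithm="simple">'
    '<display_name>Peer Instruction</display_name>'
    '<question><text>What is 6 x 7?</text></question>'
    '<options>'
    '<option correct="True"><text>42</text>'
    '<rationale>Six sevens are forty-two.</rationale></option>'
    '<option><text>41</text></option>'
    '</options>'
    '<seeds><seed option="1">Six sevens make forty-two.</seed></seeds>'
    '</ubcpi>')

content = parse_from_xml(etree.fromstring(UBCPI_XML))
# content['correct_answer'] == 0; content['seeds'][0]['answer'] == 0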
def serialize_options(options, block):
"""
Serialize the options in peer instruction XBlock to xml
Args:
options (lxml.etree.Element): The <options> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None
"""
for index, option_dict in enumerate(block.options):
option = etree.SubElement(options, 'option')
# set correct option and rationale
if index == block.correct_answer:
option.set('correct', u'True')
if hasattr(block, 'correct_rationale'):
rationale = etree.SubElement(option, 'rationale')
rationale.text = block.correct_rationale['text']
text = etree.SubElement(option, 'text')
text.text = option_dict.get('text', '')
serialize_image(option_dict, option)
|
Serialize the options in peer instruction XBlock to xml
Args:
options (lxml.etree.Element): The <options> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None
|
entailment
|
def serialize_seeds(seeds, block):
"""
Serialize the seeds in peer instruction XBlock to xml
Args:
seeds (lxml.etree.Element): The <seeds> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None
"""
for seed_dict in block.seeds:
seed = etree.SubElement(seeds, 'seed')
# options in xml starts with 1
seed.set('option', unicode(seed_dict.get('answer', 0) + 1))
seed.text = seed_dict.get('rationale', '')
|
Serialize the seeds in peer instruction XBlock to xml
Args:
seeds (lxml.etree.Element): The <seeds> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None
|
entailment
|
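Note the off-by-one: answers are stored zero-based but serialized one-based. A quick sketch (FakeBlock again a hypothetical stand-in):

from lxml import etree

class FakeBlock(object):  # hypothetical stand-in
    seeds = [{'answer': 0, 'rationale': 'First option seed.'}]

seeds = etree.Element('seeds')
serialize_seeds(seeds, FakeBlock())
print etree.tostring(seeds)  # -> <seeds><seed option="1">First option seed.</seed></seeds>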
def serialize_to_xml(root, block):
"""
Serialize the Peer Instruction XBlock's content to XML.
Args:
block (PeerInstructionXBlock): The peer instruction block to serialize.
root (etree.Element): The XML root node to update.
Returns:
None. The root node is modified in place.
"""
root.tag = 'ubcpi'
if block.rationale_size is not None:
if block.rationale_size.get('min'):
root.set('rationale_size_min', unicode(block.rationale_size.get('min')))
if block.rationale_size.get('max'):
root.set('rationale_size_max', unicode(block.rationale_size['max']))
if block.algo:
if block.algo.get('name'):
root.set('algorithm', block.algo.get('name'))
if block.algo.get('num_responses'):
root.set('num_responses', unicode(block.algo.get('num_responses')))
display_name = etree.SubElement(root, 'display_name')
display_name.text = block.display_name
question = etree.SubElement(root, 'question')
question_text = etree.SubElement(question, 'text')
question_text.text = block.question_text['text']
serialize_image(block.question_text, question)
options = etree.SubElement(root, 'options')
serialize_options(options, block)
seeds = etree.SubElement(root, 'seeds')
serialize_seeds(seeds, block)
|
Serialize the Peer Instruction XBlock's content to XML.
Args:
block (PeerInstructionXBlock): The peer instruction block to serialize.
root (etree.Element): The XML root node to update.
Returns:
None. The root node is modified in place.
|
entailment
|
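A hedged end-to-end sketch tying the serializers together; every attribute on FakeBlock is an illustrative stand-in for the real XBlock fields, and serialize_image is again assumed to tolerate dicts without image fields:

from lxml import etree

class FakeBlock(object):  # hypothetical stand-in for PeerInstructionXBlock
    display_name = 'Demo PI'
    question_text = {'text': 'Which answer is correct?'}
    options = [{'text': 'A'}, {'text': 'B'}]
    correct_answer = 0
    correct_rationale = {'text': 'Because A.'}
    seeds = [{'answer': 0, 'rationale': 'Seed.'}]
    rationale_size = {'min': 10, 'max': 500}
    algo = {'name': 'simple', 'num_responses': '#'}

root = etree.Element('placeholder')
serialize_to_xml(root, FakeBlock())
print etree.tostring(root)  # root.tag has been rewritten to 'ubcpi'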
def open(self):
"""
Obtains the lvm and vg_t handle. Usually you would never need to use this method
unless you are doing operations using the ctypes function wrappers in conversion.py
*Raises:*
* HandleError
"""
if not self.handle:
self.lvm.open()
self.__vgh = lvm_vg_open(self.lvm.handle, self.name, self.mode)
if not bool(self.__vgh):
raise HandleError("Failed to initialize VG Handle.")
|
Obtains the lvm and vg_t handle. Usually you would never need to use this method
unless you are doing operations using the ctypes function wrappers in conversion.py
*Raises:*
* HandleError
|
entailment
|
def close(self):
"""
Closes the lvm and vg_t handle. Usually you would never need to use this method
unless you are doing operations using the ctypes function wrappers in conversion.py
*Raises:*
* HandleError
"""
if self.handle:
cl = lvm_vg_close(self.handle)
if cl != 0:
raise HandleError("Failed to close VG handle after init check.")
self.__vgh = None
self.lvm.close()
|
Closes the lvm and vg_t handle. Usually you would never need to use this method
unless you are doing operations using the ctypes function wrappers in conversion.py
*Raises:*
* HandleError
|
entailment
|
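Most VolumeGroup methods below call open() and close() for you; a hedged sketch of manual pairing for direct ctypes work ("myvg" is an assumed VG name, and lvm_vg_get_uuid is one of the raw wrappers from conversion.py):

from lvm2py import *

lvm = LVM()
vg = lvm.get_vg("myvg")
vg.open()                              # acquire the lvm and vg_t handles
try:
    uuid = lvm_vg_get_uuid(vg.handle)  # raw ctypes wrapper call
finally:
    vg.close()                         # always release the handles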
def uuid(self):
"""
Returns the volume group uuid.
"""
self.open()
uuid = lvm_vg_get_uuid(self.handle)
self.close()
return uuid
|
Returns the volume group uuid.
|
entailment
|
def extent_count(self):
"""
Returns the volume group extent count.
"""
self.open()
count = lvm_vg_get_extent_count(self.handle)
self.close()
return count
|
Returns the volume group extent count.
|
entailment
|
def free_extent_count(self):
"""
Returns the volume group free extent count.
"""
self.open()
count = lvm_vg_get_free_extent_count(self.handle)
self.close()
return count
|
Returns the volume group free extent count.
|
entailment
|
def pv_count(self):
"""
Returns the physical volume count.
"""
self.open()
count = lvm_vg_get_pv_count(self.handle)
self.close()
return count
|
Returns the physical volume count.
|
entailment
|
def max_pv_count(self):
"""
Returns the maximum allowed physical volume count.
"""
self.open()
count = lvm_vg_get_max_pv(self.handle)
self.close()
return count
|
Returns the maximum allowed physical volume count.
|
entailment
|
def max_lv_count(self):
"""
Returns the maximum allowed logical volume count.
"""
self.open()
count = lvm_vg_get_max_lv(self.handle)
self.close()
return count
|
Returns the maximum allowed logical volume count.
|
entailment
|
def is_clustered(self):
"""
Returns True if the VG is clustered, False otherwise.
"""
self.open()
clust = lvm_vg_is_clustered(self.handle)
self.close()
return bool(clust)
|
Returns True if the VG is clustered, False otherwise.
|
entailment
|
def is_exported(self):
"""
Returns True if the VG is exported, False otherwise.
"""
self.open()
exp = lvm_vg_is_exported(self.handle)
self.close()
return bool(exp)
|
Returns True if the VG is exported, False otherwise.
|
entailment
|
def is_partial(self):
"""
Returns True if the VG is partial, False otherwise.
"""
self.open()
part = lvm_vg_is_partial(self.handle)
self.close()
return bool(part)
|
Returns True if the VG is partial, False otherwise.
|
entailment
|
def sequence(self):
"""
Returns the volume group sequence number. This number increases
every time the volume group is modified.
"""
self.open()
seq = lvm_vg_get_seqno(self.handle)
self.close()
return seq
|
Returns the volume group sequence number. This number increases
every time the volume group is modified.
|
entailment
|
def size(self, units="MiB"):
"""
Returns the volume group size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
"""
self.open()
size = lvm_vg_get_size(self.handle)
self.close()
return size_convert(size, units)
|
Returns the volume group size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
|
entailment
|
def free_size(self, units="MiB"):
"""
Returns the volume group free size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
"""
self.open()
size = lvm_vg_get_free_size(self.handle)
self.close()
return size_convert(size, units)
|
Returns the volume group free size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
|
entailment
|
def extent_size(self, units="MiB"):
"""
Returns the volume group extent size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
"""
self.open()
size = lvm_vg_get_extent_size(self.handle)
self.close()
return size_convert(size, units)
|
Returns the volume group extent size in the given units. Default units are MiB.
*Args:*
* units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
|
entailment
|
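Each accessor above opens the handle, reads a single attribute, and closes again, so they can be called ad hoc; a short usage sketch ("myvg" assumed to exist):

from lvm2py import *

lvm = LVM()
vg = lvm.get_vg("myvg")
print vg.uuid()
print vg.pv_count(), "PVs,", vg.extent_count(), "extents"
print vg.size("GiB"), "GiB total,", vg.free_size("GiB"), "GiB free"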
def add_pv(self, device):
"""
Initializes a device as a physical volume and adds it to the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.add_pv("/dev/sdbX")
*Args:*
* device (str): An existing device.
*Raises:*
* ValueError, CommitError, HandleError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
"""
if not os.path.exists(device):
raise ValueError("%s does not exist." % device)
self.open()
ext = lvm_vg_extend(self.handle, device)
if ext != 0:
self.close()
raise CommitError("Failed to extend Volume Group.")
self._commit()
self.close()
return PhysicalVolume(self, name=device)
|
Initializes a device as a physical volume and adds it to the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.add_pv("/dev/sdbX")
*Args:*
* device (str): An existing device.
*Raises:*
* ValueError, CommitError, HandleError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
|
entailment
|
def get_pv(self, device):
"""
Returns the physical volume associated with the given device::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.get_pv("/dev/sdb1")
*Args:*
* device (str): An existing device.
*Raises:*
* ValueError, HandleError
"""
if not os.path.exists(device):
raise ValueError("%s does not exist." % device)
return PhysicalVolume(self, name=device)
|
Returns the physical volume associated with the given device::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.get_pv("/dev/sdb1")
*Args:*
* device (str): An existing device.
*Raises:*
* ValueError, HandleError
|
entailment
|
def remove_pv(self, pv):
"""
Removes a physical volume from the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
pv = vg.pvscan()[0]
vg.remove_pv(pv)
*Args:*
* pv (obj): A PhysicalVolume instance.
*Raises:*
* HandleError, CommitError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised. Also, when removing the last physical volume, the volume
group is deleted in lvm, leaving the instance with a null handle.
"""
name = pv.name
self.open()
rm = lvm_vg_reduce(self.handle, name)
if rm != 0:
self.close()
raise CommitError("Failed to remove %s." % name)
self._commit()
self.close()
|
Removes a physical volume from the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
pv = vg.pvscan()[0]
vg.remove_pv(pv)
*Args:*
* pv (obj): A PhysicalVolume instance.
*Raises:*
* HandleError, CommitError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised. Also, when removing the last physical volume, the volume
group is deleted in lvm, leaving the instance with a null handle.
|
entailment
|
def pvscan(self):
"""
Probes the volume group for physical volumes and returns a list of
PhysicalVolume instances::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg")
pvs = vg.pvscan()
*Raises:*
* HandleError
"""
self.open()
pv_list = []
pv_handles = lvm_vg_list_pvs(self.handle)
if not bool(pv_handles):
self.close()
return pv_list
pvh = dm_list_first(pv_handles)
while pvh:
c = cast(pvh, POINTER(lvm_pv_list))
pv = PhysicalVolume(self, pvh=c.contents.pv)
pv_list.append(pv)
if dm_list_end(pv_handles, pvh):
# end of linked list
break
pvh = dm_list_next(pv_handles, pvh)
self.close()
return pv_list
|
Probes the volume group for physical volumes and returns a list of
PhysicalVolume instances::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg")
pvs = vg.pvscan()
*Raises:*
* HandleError
|
entailment
|
def lvscan(self):
"""
Probes the volume group for logical volumes and returns a list of
LogicalVolume instances::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg")
lvs = vg.lvscan()
*Raises:*
* HandleError
"""
self.open()
lv_list = []
lv_handles = lvm_vg_list_lvs(self.handle)
if not bool(lv_handles):
self.close()
return lv_list
lvh = dm_list_first(lv_handles)
while lvh:
c = cast(lvh, POINTER(lvm_lv_list))
lv = LogicalVolume(self, lvh=c.contents.lv)
lv_list.append(lv)
if dm_list_end(lv_handles, lvh):
# end of linked list
break
lvh = dm_list_next(lv_handles, lvh)
self.close()
return lv_list
|
Probes the volume group for logical volumes and returns a list of
LogicalVolume instances::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg")
lvs = vg.lvscan()
*Raises:*
* HandleError
|
entailment
|
def create_lv(self, name, length, units):
"""
Creates a logical volume and returns the LogicalVolume instance associated with
the lv_t handle::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
lv = vg.create_lv("mylv", 40, "MiB")
*Args:*
* name (str): The desired logical volume name.
* length (int): The desired size.
* units (str): The size units.
*Raises:*
* HandleError, CommitError, ValueError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
"""
if units != "%":
size = size_units[units] * length
else:
if not (0 < length <= 100) or type(length) is float:
raise ValueError("Length not supported.")
size = (self.size("B") / 100) * length
self.open()
lvh = lvm_vg_create_lv_linear(self.handle, name, c_ulonglong(size))
if not bool(lvh):
self.close()
raise CommitError("Failed to create LV.")
lv = LogicalVolume(self, lvh=lvh)
self.close()
return lv
|
Creates a logical volume and returns the LogicalVolume instance associated with
the lv_t handle::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
lv = vg.create_lv("mylv", 40, "MiB")
*Args:*
* name (str): The desired logical volume name.
* length (int): The desired size.
* units (str): The size units.
*Raises:*
* HandleError, CommitError, ValueError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
|
entailment
|
def remove_lv(self, lv):
"""
Removes a logical volume from the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
lv = vg.lvscan()[0]
vg.remove_lv(lv)
*Args:*
* lv (obj): A LogicalVolume instance.
*Raises:*
* HandleError, CommitError, ValueError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
"""
lv.open()
rm = lvm_vg_remove_lv(lv.handle)
lv.close()
if rm != 0:
raise CommitError("Failed to remove LV.")
|
Removes a logical volume from the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
lv = vg.lvscan()[0]
vg.remove_lv(lv)
*Args:*
* lv (obj): A LogicalVolume instance.
*Raises:*
* HandleError, CommitError, ValueError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
|
entailment
|
def remove_all_lvs(self):
"""
Removes all logical volumes from the volume group.
*Raises:*
* HandleError, CommitError
"""
lvs = self.lvscan()
for lv in lvs:
self.remove_lv(lv)
|
Removes all logical volumes from the volume group.
*Raises:*
* HandleError, CommitError
|
entailment
|
def set_extent_size(self, length, units):
"""
Sets the volume group extent size in the given units::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.set_extent_size(2, "MiB")
*Args:*
* length (int): The desired length size.
* units (str): The desired units ("MiB", "GiB", etc...).
*Raises:*
* HandleError, CommitError, KeyError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
"""
size = length * size_units[units]
self.open()
ext = lvm_vg_set_extent_size(self.handle, c_ulong(size))
if ext != 0:
self.close()
raise CommitError("Failed to set extent size.")
self._commit()
self.close()
|
Sets the volume group extent size in the given units::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.set_extent_size(2, "MiB")
*Args:*
* length (int): The desired length size.
* units (str): The desired units ("MiB", "GiB", etc...).
*Raises:*
* HandleError, CommitError, KeyError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
|
entailment
|
def set_pair(self, term1, term2, value, **kwargs):
"""
Set the value for a pair of terms.
Args:
term1 (str)
term2 (str)
value (mixed)
"""
key = self.key(term1, term2)
self.keys.update([term1, term2])
self.pairs[key] = value
|
Set the value for a pair of terms.
Args:
term1 (str)
term2 (str)
value (mixed)
|
entailment
|
def get_pair(self, term1, term2):
"""
Get the value for a pair of terms.
Args:
term1 (str)
term2 (str)
Returns:
The stored value.
"""
key = self.key(term1, term2)
return self.pairs.get(key, None)
|
Get the value for a pair of terms.
Args:
term1 (str)
term2 (str)
Returns:
The stored value.
|
entailment
|
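Since both methods derive the storage key from self.key(term1, term2), lookups are assumed to be order-insensitive; a small sketch (Matrix is the assumed class name for this container):

m = Matrix()                       # assumed constructor
m.set_pair('whale', 'ship', 0.8)
print m.get_pair('ship', 'whale')  # -> 0.8, if key() normalizes term order
print m.get_pair('whale', 'ahab')  # -> None for unindexed pairs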
def index(self, text, terms=None, **kwargs):
"""
Index all term pair distances.
Args:
text (Text): The source text.
terms (list): Terms to index.
"""
self.clear()
# By default, use all terms.
terms = terms or text.terms.keys()
pairs = combinations(terms, 2)
count = comb(len(terms), 2)
for t1, t2 in bar(pairs, expected_size=count, every=1000):
# Set the Bray-Curtis distance.
score = text.score_braycurtis(t1, t2, **kwargs)
self.set_pair(t1, t2, score)
|
Index all term pair distances.
Args:
text (Text): The source text.
terms (list): Terms to index.
|
entailment
|
def anchored_pairs(self, anchor):
"""
Get distances between an anchor term and all other terms.
Args:
anchor (str): The anchor term.
Returns:
OrderedDict: The distances, in descending order.
"""
pairs = OrderedDict()
for term in self.keys:
score = self.get_pair(anchor, term)
if score: pairs[term] = score
return utils.sort_dict(pairs)
|
Get distances between an anchor term and all other terms.
Args:
anchor (str): The anchor term.
Returns:
OrderedDict: The distances, in descending order.
|
entailment
|
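Putting the container to work end to end; the module path and Text.from_file constructor are assumptions about the surrounding codebase:

from textplot.text import Text    # assumed module path

t = Text.from_file('corpus.txt')  # assumed constructor
m = Matrix()
m.index(t, terms=['whale', 'sea', 'ship'])
for term, score in m.anchored_pairs('whale').items():
    print term, score             # highest Bray-Curtis scores first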
def from_rgb(r, g=None, b=None):
"""
Return the nearest xterm 256 color code from rgb input.
"""
c = r if isinstance(r, list) else [r, g, b]
best = {}
for index, item in enumerate(colors):
d = __distance(item, c)
if not best or d <= best['distance']:
best = {'distance': d, 'index': index}
if 'index' in best:
return best['index']
else:
return 1
|
Return the nearest xterm 256 color code from rgb input.
|
entailment
|
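For example (exact indices depend on the palette table this module ships with):

print from_rgb(255, 0, 0)     # e.g. 196, a red cell of the 6x6x6 color cube
print from_rgb([0, 95, 135])  # list input works too, e.g. 24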
def entry():
"""Parse command line arguments and run utilities."""
parser = argparse.ArgumentParser()
parser.add_argument(
'action', help='Action to take',
choices=['from_hex', 'to_rgb', 'to_hex'],
)
parser.add_argument(
'value', help='Value for the action',
)
parsed = parser.parse_args()
if parsed.action != "from_hex":
try:
parsed.value = int(parsed.value)
except ValueError:
parser.error("Value for this action should be an integer")
print(globals()[parsed.action](parsed.value))
|
Parse command line arguments and run utilities.
|
entailment
|
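A hedged invocation sketch, assuming the entry point is exposed as a console script (the script name xterm-colors is illustrative):

$ xterm-colors to_rgb 196
$ xterm-colors to_hex 24
$ xterm-colors from_hex "#ff0000"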
def makeSoftwareVersion(store, version, systemVersion):
"""
Return the SoftwareVersion object from store corresponding to the
version object, creating it if it doesn't already exist.
"""
return store.findOrCreate(SoftwareVersion,
systemVersion=systemVersion,
package=unicode(version.package),
version=unicode(version.short()),
major=version.major,
minor=version.minor,
micro=version.micro)
|
Return the SoftwareVersion object from store corresponding to the
version object, creating it if it doesn't already exist.
|
entailment
|
def listVersionHistory(store):
"""
List the software package version history of store.
"""
q = store.query(SystemVersion, sort=SystemVersion.creation.descending)
return [sv.longWindedRepr() for sv in q]
|
List the software package version history of store.
|
entailment
|
def checkSystemVersion(s, versions=None):
"""
Check if the current version is different from the previously recorded
version. If it is, or if there is no previously recorded version,
create a version matching the current config.
"""
if versions is None:
versions = getSystemVersions()
currentVersionMap = dict([(v.package, v) for v in versions])
mostRecentSystemVersion = s.findFirst(SystemVersion,
sort=SystemVersion.creation.descending)
mostRecentVersionMap = dict([(v.package, v.asVersion()) for v in
s.query(SoftwareVersion,
(SoftwareVersion.systemVersion ==
mostRecentSystemVersion))])
if mostRecentVersionMap != currentVersionMap:
currentSystemVersion = SystemVersion(store=s, creation=Time())
for v in currentVersionMap.itervalues():
makeSoftwareVersion(s, v, currentSystemVersion)
|
Check if the current version is different from the previously recorded
version. If it is, or if there is no previously recorded version,
create a version matching the current config.
|
entailment
|
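A minimal sketch of wiring this into startup; the in-memory Store is illustrative, since real deployments would pass an existing application store:

from axiom.store import Store

s = Store()              # illustrative; normally an existing on-disk store
checkSystemVersion(s)    # records a new SystemVersion only if packages changed
for line in listVersionHistory(s):
    print line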
def asVersion(self):
"""
Convert the version data in this item to a
L{twisted.python.versions.Version}.
"""
return versions.Version(self.package, self.major, self.minor, self.micro)
|
Convert the version data in this item to a
L{twisted.python.versions.Version}.
|
entailment
|
def reset(self):
"""clears all columns"""
self.colNames, self.colDesc, self.colUnits, self.colComments, \
self.colTypes, self.colData = [], [], [], [], [], []
|
clears all columns
|
entailment
|