Search is not available for this dataset
text stringlengths 75 104k |
|---|
def p_navigation_step_2(self, p):
    # PLY parser action: the docstring below is the grammar production, not
    # prose documentation, and must not be edited as documentation.
    '''navigation_step : ARROW identifier LSQBR identifier DOT phrase RSQBR'''
    # Builds an AST node for a step of the form: -> KeyLetter[Rnn.'phrase']
    p[0] = NavigationStepNode(key_letter=p[2],
                              rel_id=p[4],
                              phrase=p[6])
def p_implicit_invocation(self, p):
    # PLY parser action: the docstring below is the grammar production.
    '''implicit_invocation : namespace DOUBLECOLON identifier LPAREN parameter_list RPAREN'''
    # Builds an AST node for: namespace::action_name(parameter_list)
    p[0] = ImplicitInvocationNode(namespace=p[1],
                                  action_name=p[3],
                                  parameter_list=p[5])
def p_operation_invocation_1(self, p):
    # PLY parser action: the docstring below is the grammar production.
    # NOTE(review): the function name says "operation_invocation" but the rule
    # reduces to "instance_invocation" -- confirm this is intentional.
    '''instance_invocation : structure DOT identifier LPAREN parameter_list RPAREN'''
    # Builds an AST node for: <handle>.action_name(parameter_list)
    p[0] = InstanceInvocationNode(handle=p[1],
                                  action_name=p[3],
                                  parameter_list=p[5])
def p_arithmetic_expression(self, p):
    # PLY parser action covering the binary arithmetic operators; the
    # docstring below is the grammar, not documentation.
    '''
    expression : expression PLUS expression
               | expression MINUS expression
               | expression TIMES expression
               | expression DIV expression
               | expression MOD expression
    '''
    # p[2] is the matched operator token.
    p[0] = BinaryOperationNode(left=p[1],
                               operator=p[2],
                               right=p[3])
def p_boolean_expression(self, p):
    # PLY parser action covering the comparison and logical operators; the
    # docstring below is the grammar, not documentation.
    '''
    expression : expression LE expression
               | expression LESSTHAN expression
               | expression DOUBLEEQUAL expression
               | expression NOTEQUAL expression
               | expression GE expression
               | expression GT expression
               | expression AND expression
               | expression OR expression
    '''
    # p[2] is the matched operator token.
    p[0] = BinaryOperationNode(left=p[1],
                               operator=p[2],
                               right=p[3])
def wrap(cls, message_parts):
    """Wraps exceptions in the context with :exc:`MalformedMessage`.

    Generator-based context-manager body: any exception raised inside the
    ``with`` block is re-raised as ``cls(exception, message_parts)`` with the
    original traceback attached.  Presumably decorated with
    ``@contextmanager`` at the (not visible) definition site -- confirm.
    """
    try:
        yield
    except BaseException as exception:
        __, __, tb = sys.exc_info()
        # Re-raise the wrapped exception while preserving the traceback
        # (py2/3 compatibility helper).
        reraise(cls, cls(exception, message_parts), tb)
async def api_crime(request):
    """
    Gets the crime nearby to a given postcode.

    The special postcode "random" selects a random postcode; anything else is
    looked up verbatim.

    :param request: The aiohttp request.
    :return: A json representation of the crimes near the postcode.
    """
    postcode: Optional[str] = request.match_info.get('postcode', None)
    try:
        coroutine = get_postcode_random() if postcode == "random" else get_postcode(postcode)
        postcode: Optional[Postcode] = await coroutine
    except CachingError as e:
        return web.Response(body=e.status, status=500)
    try:
        crime = await fetch_crime(postcode.lat, postcode.long)
    except (ApiError, CircuitBreakerError):
        # FIX: was a placeholder-less f-string; a plain literal is identical
        # at runtime and avoids the misleading f prefix.
        raise web.HTTPInternalServerError(body="Requested crime is not cached, and can't be retrieved.")
    if crime is None:
        return web.HTTPNotFound(body="No Police Data")
    else:
        return str_json_response(crime)
async def api_neighbourhood(request):
    """
    Gets police data about a neighbourhood.

    :param request: The aiohttp request.
    :return: The police data for that post code.
    :raise HTTPInternalServerError: when the postcode cache fails.
    :raise HTTPNotFound: when no neighbourhood data exists for the postcode.
    """
    postcode: Optional[str] = request.match_info.get('postcode', None)
    try:
        # "random" is replaced by a randomly selected postcode string.
        postcode = (await get_postcode_random()) if postcode == "random" else postcode
        neighbourhood = await get_neighbourhood(postcode)
    except CachingError as e:
        raise web.HTTPInternalServerError(text=e.status)
    if neighbourhood is None:
        raise web.HTTPNotFound(text="No Police Data")
    else:
        return str_json_response(neighbourhood.serialize())
def create_queue(self, name, strict=True, auto_delete=False, auto_delete_timeout=0):
    """Create message content and properties to create queue with QMFv2

    :param name: Name of queue to create
    :type name: str
    :param strict: Whether command should fail when unrecognized properties are provided
        Not used by QMFv2
        Default: True
    :type strict: bool
    :param auto_delete: Whether queue should be auto deleted
        Default: False
    :type auto_delete: bool
    :param auto_delete_timeout: Timeout in seconds for auto deleting queue
        Default: 0 (the docstring previously said 10, but the signature
        default is 0)
    :type auto_delete_timeout: int
    :returns: Tuple containing content and method properties
    """
    content = {"_object_id": {"_object_name": self.object_name},
               "_method_name": "create",
               "_arguments": {"type": "queue",
                              "name": name,
                              "strict": strict,
                              "properties": {"auto-delete": auto_delete,
                                             "qpid.auto_delete_timeout": auto_delete_timeout}}}
    logger.debug("Message content -> {0}".format(content))
    return content, self.method_properties
def delete_queue(self, name):
    """Build QMFv2 message content and properties for deleting a queue.

    :param name: Name of queue to delete
    :type name: str
    :returns: Tuple containing content and method properties
    """
    arguments = {"type": "queue",
                 "name": name,
                 # "A nested map with the key options. This is presently unused."
                 "options": dict()}
    content = {
        "_object_id": {"_object_name": self.object_name},
        "_method_name": "delete",
        "_arguments": arguments,
    }
    logger.debug("Message content -> {0}".format(content))
    return content, self.method_properties
def list_queues(self):
    """Build QMFv2 query content and properties listing every queue.

    :returns: Tuple containing content and query properties
    """
    content = {
        "_what": "OBJECT",
        "_schema_id": {"_class_name": "queue"},
    }
    logger.debug("Message content -> {0}".format(content))
    return content, self.query_properties
def list_exchanges(self):
    """Build QMFv2 query content and properties listing every exchange.

    :returns: Tuple containing content and query properties
    """
    content = {
        "_what": "OBJECT",
        "_schema_id": {"_class_name": "exchange"},
    }
    logger.debug("Message content -> {0}".format(content))
    return content, self.query_properties
def purge_queue(self, name):
    """Build QMFv2 message content and properties for purging a queue.

    :param name: Name of queue to purge
    :type name: str
    :returns: Tuple containing content and method properties
    """
    # Purge addresses the queue object directly by its broker object name.
    object_name = "org.apache.qpid.broker:queue:{0}".format(name)
    content = {
        "_object_id": {"_object_name": object_name},
        "_method_name": "purge",
        "_arguments": {"type": "queue",
                       "name": name,
                       "filter": dict()},
    }
    logger.debug("Message content -> {0}".format(content))
    return content, self.method_properties
def _create_msg(self, to, subject, msgHtml, msgPlain, attachments=None):
'''
attachments should be a list of paths
'''
sender = self.sender
if attachments and isinstance(attachments, str):
attachments = [attachments]
else:
attachments = list(attachments or [])
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = to
msg.attach(MIMEText(msgPlain, 'plain'))
msg.attach(MIMEText(msgHtml, 'html'))
# append attachments if any
for path in attachments:
_attachment = self._prep_attachment(path)
msg.attach(_attachment)
raw = base64.urlsafe_b64encode(msg.as_bytes()).decode()
#raw = raw.decode()
body = {'raw': raw}
return body |
def set_remote_exception(self, remote_exc_info):
    """Raises an exception as a :exc:`RemoteException`.

    *remote_exc_info* is a tuple of at least
    ``(exc_type, exc_str, filename, lineno)``; an optional fifth element
    carries exception state restored via ``__setstate__``.
    """
    exc_type, exc_str, filename, lineno = remote_exc_info[:4]
    # Wrap the original exception type in a RemoteException-derived type.
    exc_type = RemoteException.compose(exc_type)
    exc = exc_type(exc_str, filename, lineno, self.worker_info)
    if len(remote_exc_info) > 4:
        state = remote_exc_info[4]
        exc.__setstate__(state)
    self.set_exception(exc)
def read(self):
    """
    Returns the text from an image at a given url.

    Re-runs OCR only when the remote image has changed; otherwise the cached
    text is returned.  NOTE(review): assumes ``self.text_cache`` is
    initialised elsewhere before the first unchanged read -- confirm.
    """
    # Only download the image if it has changed
    if self.connection.has_changed():
        image_path = self.connection.download_image()
        image = Image.open(image_path)
        self.text_cache = pytesseract.image_to_string(image)
        image.close()
    return self.text_cache
def text_visible(self):
    """
    Returns true or false based on if the OCR process has read
    actual words. This is needed to prevent non-words from being
    added to the queue since the ocr process can sometimes return
    values that are not meaningful.

    A word counts as "actual" when it is either a numeric value (optional
    leading minus, at most one decimal point) or an alphabetic string of
    2 to 20 letters.
    """
    # Split the input string at points with any amount of whitespace.
    words = self.read().split()
    for word in words:
        # If the word is a numeric value.
        if word.lstrip('-').replace('.', '', 1).isdigit():
            return True
        # If the word contains only letters with a length from 2 to 20.
        # BUG FIX: the original used `or`, which made the length check
        # always true; `and` enforces the documented 2..20 range.
        if word.isalpha() and 1 < len(word) <= 20:
            return True
    return False
def get_most_recent_bike() -> Optional['Bike']:
    """
    Gets the most recently cached bike from the database.

    :return: The bike that was cached most recently, or ``None`` when the
        table holds no rows.
    """
    try:
        return Bike.select().order_by(Bike.cached_date.desc()).get()
    except pw.DoesNotExist:
        # peewee raises DoesNotExist when no row matches the query.
        return None
def main():
    '''
    Parse command line options and launch the interpreter
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path..]",
                                   version=xtuml.version.complete_string,
                                   formatter=optparse.TitledHelpFormatter())
    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      default=1, help="increase debug logging level")
    parser.add_option("-f", "--function", dest='function', action="store",
                      help="invoke function named NAME", metavar='NAME')
    parser.add_option("-c", "--component", dest='component', action="store",
                      help="look for the function in a component named NAME",
                      metavar='NAME', default=None)
    (opts, args) = parser.parse_args()
    # Both a model path and a function name are mandatory.
    if len(args) == 0 or not opts.function:
        parser.print_help()
        sys.exit(1)
    # Map the -v count to a log level; counts above 3 fall back to DEBUG.
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))
    # bridgepoint is imported only after argument validation succeeds.
    from bridgepoint import ooaofooa
    mm = ooaofooa.load_metamodel(args)
    c_c = mm.select_any('C_C', where(Name=opts.component))
    domain = ooaofooa.mk_component(mm, c_c, derived_attributes=False)
    func = domain.find_symbol(opts.function)
    return func()
def main():
    '''
    Parse argv for options and arguments, and start schema generation.
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path...]",
                                   version=xtuml.version.complete_string,
                                   formatter=optparse.TitledHelpFormatter())
    parser.set_description(__doc__.strip())
    parser.add_option("-c", "--component", dest="component", metavar="NAME",
                      help="export sql schema for the component named NAME",
                      action="store", default=None)
    parser.add_option("-d", "--derived-attributes", dest="derived",
                      help="include derived attributes in the schema",
                      action="store_true", default=False)
    parser.add_option("-o", "--output", dest='output', metavar="PATH",
                      help="save sql schema to PATH (required)",
                      action="store", default=None)
    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      help="increase debug logging level", default=2)
    (opts, args) = parser.parse_args()
    # At least one model path and the -o/--output option are mandatory.
    if len(args) == 0 or opts.output is None:
        parser.print_help()
        sys.exit(1)
    # Map the -v count to a log level; counts above 3 fall back to DEBUG.
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))
    loader = ooaofooa.Loader()
    for filename in args:
        loader.filename_input(filename)
    c = loader.build_component(opts.component, opts.derived)
    xtuml.persist_database(c, opts.output)
def get_token(client_id, client_secret, client_access_token, page=None):
    """
    Fetch a Facebook OAuth access token for the given app credentials.

    See: http://nodotcom.org/python-facebook-tutorial.html
    """
    payload = {
        'grant_type': 'client_credentials',
        'client_id': client_id,
        'client_secret': client_secret,
    }
    if client_access_token:
        # Exchange an existing token for a long-lived one instead.
        payload.update(grant_type='fb_exchange_token',
                       fb_exchange_token=client_access_token)
    # response {"access_token":" ... ", "token_type":"bearer", "expires_in":..}
    response = requests.post(
        'https://graph.facebook.com/oauth/access_token?',
        params=payload)
    return response.json()['access_token']
def get_page_api(client_access_token, page_id):
    """
    Return a GraphAPI client authorised with the given page's access token.

    You can also skip the above if you get a page token:
    http://stackoverflow.com/questions/8231877
    and make that long-lived token as in Step 3
    """
    graph = GraphAPI(client_access_token)
    # Get page token to post as the page. You can skip
    # the following if you want to post as yourself.
    accounts = graph.get('me/accounts')
    page_access_token = next(
        (page['access_token'] for page in accounts['data'] if page['id'] == page_id),
        None)
    return GraphAPI(page_access_token)
def serialize_value(value, ty):
    '''
    Serialize a value from an xtuml metamodel instance.

    *ty* is the metamodel type name (case-insensitive).  ``None`` values are
    replaced by the type's null representation before formatting.
    '''
    ty = ty.upper()
    if value is None:
        # Null substitutes, one per supported metamodel type.
        value = {'BOOLEAN': False,
                 'INTEGER': 0,
                 'REAL': 0.0,
                 'STRING': '',
                 'UNIQUE_ID': 0}[ty]
    formatters = {
        'BOOLEAN': lambda v: '%d' % int(v),
        'INTEGER': lambda v: '%d' % v,
        'REAL': lambda v: '%f' % v,
        # SQL-style quoting: embedded quotes are doubled.
        'STRING': lambda v: "'%s'" % v.replace("'", "''"),
        'UNIQUE_ID': lambda v: '"%s"' % uuid.UUID(int=v),
    }
    return formatters[ty](value)
def serialize_instance(instance):
    '''
    Serialize an *instance* from a metamodel as an INSERT statement.
    '''
    metaclass = xtuml.get_metaclass(instance)
    total = len(metaclass.attributes)
    parts = []
    for position, (name, ty) in enumerate(metaclass.attributes, start=1):
        value = serialize_value(getattr(instance, name), ty)
        # Every value but the last is followed by a comma; each line carries
        # a trailing "-- name : type" comment.
        delimiter = ',' if position < total else ''
        parts.append('\n    %s%s -- %s : %s' % (value, delimiter, name, ty))
    return 'INSERT INTO %s VALUES (%s\n);\n' % (metaclass.kind, ''.join(parts))
def serialize_instances(metamodel):
    '''
    Serialize all instances in a *metamodel*.
    '''
    return ''.join(serialize_instance(inst) for inst in metamodel.instances)
def serialize_association(ass):
    '''
    Serialize an xtuml metamodel association as a CREATE ROP statement.
    '''
    def endpoint(link, keys, phrase):
        # "<cardinality> <kind> (<key list>)"; the optional phrase comes from
        # the link in the opposite direction.
        text = '%s %s (%s)' % (link.cardinality,
                               link.to_metaclass.kind,
                               ', '.join(keys))
        if phrase:
            text += " PHRASE '%s'" % phrase
        return text

    from_part = endpoint(ass.source_link, ass.source_keys, ass.target_link.phrase)
    to_part = endpoint(ass.target_link, ass.target_keys, ass.source_link.phrase)
    return 'CREATE ROP REF_ID %s FROM %s TO %s;\n' % (ass.rel_id, from_part, to_part)
def serialize_class(Cls):
    '''
    Serialize an xtUML metamodel class as a CREATE TABLE statement.
    '''
    metaclass = xtuml.get_metaclass(Cls)
    columns = ',\n    '.join('%s %s' % (name, ty.upper())
                             for name, ty in metaclass.attributes)
    return 'CREATE TABLE %s (\n    %s\n);\n' % (metaclass.kind, columns)
def serialize_schema(metamodel):
    '''
    Serialize all class and association definitions in a *metamodel*.
    '''
    # Classes first (sorted by kind), then associations (sorted by rel_id).
    classes = ''.join(serialize_class(metamodel.metaclasses[kind].clazz)
                      for kind in sorted(metamodel.metaclasses.keys()))
    associations = ''.join(serialize_association(ass)
                           for ass in sorted(metamodel.associations,
                                             key=lambda a: a.rel_id))
    return classes + associations
def serialize_database(metamodel):
    '''
    Serialize all instances, class definitions, association definitions, and
    unique identifiers in a *metamodel*.
    '''
    # Schema first, then the data, then the unique identifier definitions.
    return (serialize_schema(metamodel)
            + serialize_instances(metamodel)
            + serialize_unique_identifiers(metamodel))
def serialize(resource):
    '''
    Serialize some xtuml *resource*, e.g. an instance or a complete metamodel.

    Returns ``None`` for unrecognised resource types.
    '''
    # Order matters: the metamodel check must precede the class/instance
    # checks, and the class-object check must precede the instance check.
    if isinstance(resource, xtuml.MetaModel):
        return serialize_database(resource)
    if isinstance(resource, type) and issubclass(resource, xtuml.Class):
        return serialize_class(resource)
    if isinstance(resource, xtuml.Association):
        return serialize_association(resource)
    if isinstance(resource, xtuml.Class):
        return serialize_instance(resource)
def persist_instances(metamodel, path, mode='w'):
    '''
    Persist all instances in a *metamodel* by serializing them and saving to a
    *path* on disk.
    '''
    with open(path, mode) as f:
        f.writelines(serialize_instance(inst) for inst in metamodel.instances)
def persist_schema(metamodel, path, mode='w'):
    '''
    Persist all class and association definitions in a *metamodel* by
    serializing them and saving to a *path* on disk.
    '''
    with open(path, mode) as f:
        # Classes in kind order, then associations in rel_id order.
        for kind in sorted(metamodel.metaclasses.keys()):
            f.write(serialize_class(metamodel.metaclasses[kind].clazz))
        for ass in sorted(metamodel.associations, key=lambda a: a.rel_id):
            f.write(serialize_association(ass))
def persist_unique_identifiers(metamodel, path, mode='w'):
    '''
    Persist all unique identifiers in a *metamodel* by serializing them and
    saving to a *path* on disk.
    '''
    template = 'CREATE UNIQUE INDEX %s ON %s (%s);\n'
    with open(path, mode) as f:
        for metaclass in metamodel.metaclasses.values():
            # One CREATE UNIQUE INDEX statement per declared index.
            for index_name, attribute_names in metaclass.indices.items():
                f.write(template % (index_name,
                                    metaclass.kind,
                                    ', '.join(attribute_names)))
def persist_database(metamodel, path, mode='w'):
    '''
    Persist all instances, class definitions and association definitions in a
    *metamodel* by serializing them and saving to a *path* on disk.
    '''
    with open(path, mode) as f:
        # Each table followed immediately by its unique indices, in
        # deterministic kind order.
        for kind in sorted(metamodel.metaclasses.keys()):
            metaclass = metamodel.metaclasses[kind]
            f.write(serialize_class(metaclass.clazz))
            for index_name, attribute_names in metaclass.indices.items():
                f.write('CREATE UNIQUE INDEX %s ON %s (%s);\n'
                        % (index_name,
                           metaclass.kind,
                           ', '.join(attribute_names)))
        # Then the associations, then the instance data.
        for ass in sorted(metamodel.associations, key=lambda a: a.rel_id):
            f.write(serialize_association(ass))
        for inst in metamodel.instances:
            f.write(serialize_instance(inst))
def save(variable, filename):
    """Save a variable to *filename* using Pickle.

    Args:
        variable: what to save
        filename (str): path of the output file
    """
    # FIX: a context manager guarantees the file is closed even when
    # pickling raises (the original leaked the handle on error).
    with open(filename, 'wb') as file_obj:
        pickle.dump(variable, file_obj)
def load(filename):
    """Load a variable from a Pickle file.

    Args:
        filename (str): path of the file to load

    Returns:
        The deserialized object read from *filename*.
    """
    # NOTE: pickle is unsafe on untrusted input; only load trusted files.
    # FIX: a context manager guarantees the file is closed even when
    # unpickling raises (the original leaked the handle on error).
    with open(filename, 'rb') as file_obj:
        return pickle.load(file_obj)
def main():
    """Function for command line execution"""
    parser = ArgumentParser(description="search files using n-grams")
    # NOTE(review): nargs=1 makes args.path a list when supplied, but the
    # default is a plain string (getcwd()) -- Files() receives either type;
    # confirm Files handles both.
    parser.add_argument('--path', dest='path', help="where to search", nargs=1, action="store", default=getcwd())
    # NOTE(review): store_true with default=True means --update can never be
    # disabled from the command line -- confirm intended.
    parser.add_argument('--update', dest='update', help="update the index", action='store_true', default=True)
    parser.add_argument('--filetype', dest='filetype', help="any, images, documents, code, audio, video", nargs=1, action="store", default=["any"])
    parser.add_argument('--verbose', dest='verbose', help="extended output", action='store_true', default=False)
    parser.add_argument('--results', dest='results', help="number of results to display", action="store", default=10)
    parser.add_argument('query', nargs='+', help="what to search", action="store")
    args = parser.parse_args()
    if args.verbose:
        verbose = 2
        pprint(args)
    else:
        verbose = 0
    # Re-join the shell-split query words into a single query string.
    query = args.query[0]
    for arg in args.query[1:]:
        query = query + " " + arg
    # The shortest query word bounds the n-gram size used by the index.
    slb = min([len(w) for w in query.split(" ")])
    files = Files(path=args.path, filetype=args.filetype[0], exclude=[], update=args.update, verbose=verbose)
    index = Index(files, slb=slb, verbose=verbose)
    results = index.search(query, verbose=verbose)
    Handler(results, results_number=int(args.results))
def search(self, query, verbose=0):
    """Search indexed files satisfying *query*.

    The query is decomposed into n-grams; every document sharing at least one
    n-gram with the query is scored by its accumulated n-gram weight, and the
    matches are returned best-first.

    Args:
        query (str): what to search;
        verbose (int): print progress when greater than 0 (default: 0)
    """
    if verbose > 0:
        print("searching " + query)
    query = query.lower()
    # Collect every indexed n-gram bucket that the query touches.
    matched_buckets = set()
    for gram in ng(query, self.slb):
        for bucket in self.ngrams.get(gram, ()):
            matched_buckets.add(bucket)
    self.qocument = matched_buckets
    # Accumulate per-document scores from each matched bucket.
    scores = {}
    for bucket in matched_buckets:
        for doc, weight in self.D[bucket].items():
            scores[doc] = scores.get(doc, 0) + weight
    ranking = sorted(scores.items(), key=operator.itemgetter(1), reverse=True)
    return [self.elements[doc] for doc, _ in ranking]
def partition(condition, collection) -> Tuple[List, List]:
    """Partitions a list into two based on a condition.

    Returns ``(matching, non_matching)`` preserving input order.
    """
    matching: List = []
    non_matching: List = []
    for item in collection:
        target = matching if condition(item) else non_matching
        target.append(item)
    return matching, non_matching
async def cli(location_strings: Tuple[str], random_postcodes_count: int, *,
              bikes: bool = False, crime: bool = False,
              nearby: bool = False, as_json: bool = False):
    """
    Runs the CLI app.

    Tries to execute as many steps as possible to give the user
    the best understanding of the errors (if there are any).

    :param location_strings: A list of desired postcodes or coordinates.
    :param random_postcodes_count: A number of random postcodes to fetch.
    :param bikes: A flag to include bikes.
    :param crime: A flag to include crime.
    :param nearby: A flag to include nearby.
    :param as_json: A flag to make json output.
    """
    # Instantiate the first getter class that recognises the location value.
    def match_getter(location) -> Optional[PostcodeGetter]:
        for getter in getters:
            if getter.can_provide(location):
                return getter(location)
        else:
            return None
    # Resolve a getter's postcodes; on failure record a message and
    # implicitly return None.
    async def handle_getter(exception_list, getter):
        try:
            return await getter.get_postcodes()
        except (CachingError, ApiError):
            # NOTE(review): exception_list is annotated List[Exception] but
            # receives plain strings here -- confirm intended.
            exception_list.append(f"Could not get data for {getter}")
    async def handle_datas(exception_list, postcode):
        postcode_data, new_exceptions = await get_postcode_data(postcode, bikes, crime, nearby)
        exception_list += new_exceptions
        return postcode_data
    exception_list: List[Exception] = []
    # Bind the shared error accumulator into both helpers.
    handle_getter = partial(handle_getter, exception_list)
    handle_datas = partial(handle_datas, exception_list)
    # A positive random_postcodes_count is treated as one extra "location".
    postcode_getters = {location: match_getter(location) for location in
                        set(location_strings) | ({random_postcodes_count} if random_postcodes_count > 0 else set())}
    matched, unmatched = partition(lambda k_v: k_v[1] is not None, postcode_getters.items())
    for location, getter in unmatched:
        echo(f"Invalid input for {location}")
    postcodes_collection = [await handle_getter(getter) for location, getter in matched]
    # Abort with exit code 1 if any getter failed before per-postcode fetches.
    if len(exception_list) > 0:
        for f in exception_list:
            echo(str(f))
        return 1
    postcode_datas = [await handle_datas(postcode) for entry in postcodes_collection for postcode in entry]
    serializer = (PostcodeSerializerJSON if as_json else PostcodeSerializerHuman)(postcode_datas)
    echo(serializer.serialize())
async def fetch_neighbourhood(lat: float, long: float) -> Optional[dict]:
    """
    Gets the neighbourhood from the fetch that is associated with the given postcode.

    Performs two requests: a locate-neighbourhood lookup for the coordinates,
    then a fetch of the force/neighbourhood details that lookup names.

    :return: A neighbourhood object parsed from the fetch, or None on a 404.
    :raise ApiError: When there was an error connecting to the API.
    """
    lookup_url = f"https://data.police.uk/api/locate-neighbourhood?q={lat},{long}"
    async with ClientSession() as session:
        try:
            async with session.get(lookup_url) as request:
                if request.status == 404:
                    # No neighbourhood covers these coordinates.
                    return None
                neighbourhood = await request.json()
        except ClientConnectionError as con_err:
            logger.debug(f"Could not connect to {con_err.host}")
            raise ApiError(f"Could not connect to {con_err.host}")
        except JSONDecodeError as dec_err:
            logger.error(f"Could not decode data: {dec_err}")
            raise ApiError(f"Could not decode data: {dec_err}")
        neighbourhood_url = f"https://data.police.uk/api/{neighbourhood['force']}/{neighbourhood['neighbourhood']}"
        # NOTE(review): this second block catches builtin ConnectionError and
        # reads con_err.args[0].pool.host, unlike the aiohttp
        # ClientConnectionError handling above -- confirm which exception type
        # and attribute path are intended.
        try:
            async with session.get(neighbourhood_url) as request:
                neighbourhood_data = await request.json()
        except ConnectionError as con_err:
            logger.debug(f"Could not connect to {con_err.args[0].pool.host}")
            raise ApiError(f"Could not connect to {con_err.args[0].pool.host}")
        except JSONDecodeError as dec_err:
            logger.error(f"Could not decode data: {dec_err}")
            raise ApiError(f"Could not decode data: {dec_err}")
    return neighbourhood_data
async def fetch_crime(lat: float, long: float) -> List[Dict]:
    """
    Gets crime for a given lat and long.

    :return: The decoded JSON list of street-level crimes for the area.
    :raise ApiError: When there was an error connecting to the API.
    todo cache
    """
    crime_lookup = f"https://data.police.uk/api/crimes-street/all-crime?lat={lat}&lng={long}"
    async with ClientSession() as session:
        try:
            async with session.get(crime_lookup) as request:
                crime_request = await request.json()
        except ClientConnectionError as con_err:
            # NOTE(review): ClientConnectionError may not carry
            # args[0].pool.host -- confirm this attribute path.
            logger.debug(f"Could not connect to {con_err.args[0].pool.host}")
            raise ApiError(f"Could not connect to {con_err.args[0].pool.host}")
        except JSONDecodeError as dec_err:
            logger.error(f"Could not decode data: {dec_err}")
            raise ApiError(f"Could not decode data: {dec_err}")
        else:
            return crime_request
def run(locations, random, bikes, crime, nearby, json, update_bikes, api_server, cross_origin, host, port, db_path,
        verbose):
    """
    Runs the program. Takes a list of postcodes or coordinates and
    returns various information about them. If using the cli, make
    sure to update the bikes database with the -u command.

    Locations can be either a specific postcode, or a pair of coordinates.
    Coordinates are passed in the form "55.948824,-3.196425".

    :param locations: The list of postcodes or coordinates to search.
    :param random: The number of random postcodes to include.
    :param bikes: Includes a list of stolen bikes in that area.
    :param crime: Includes a list of committed crimes in that area.
    :param nearby: Includes a list of wikipedia articles in that area.
    :param json: Returns the data in json format.
    :param update_bikes: Whether to force update bikes.
    :param api_server: If given, the program will instead run a rest api.
    :param cross_origin: When truthy, enables cross-origin support on the rest api.
    :param host: The interface the rest api binds to.
    :param port: Defines the port to run the rest api on.
    :param db_path: The path to the sqlite db to use.
    :param verbose: The verbosity.
    """
    # Verbosity is capped at DEBUG (index 2).
    log_levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=log_levels[min(verbose, 2)])
    initialize_database(db_path)
    loop = get_event_loop()
    if update_bikes:
        logger.info("Force updating bikes.")
        loop.run_until_complete(util.update_bikes())
    if api_server:
        if cross_origin:
            enable_cross_origin(app)
        try:
            web.run_app(app, host=host, port=port)
        except CancelledError as e:
            # A __context__ means startup failed (e.g. the port was taken);
            # errno 48 is EADDRINUSE on macOS -- NOTE(review): Linux uses 98.
            if e.__context__ is not None:
                click.echo(Fore.RED + (
                    f"Could not bind to address {host}:{port}" if e.__context__.errno == 48 else e.__context__))
                exit(1)
            else:
                click.echo("Goodbye!")
    elif len(locations) > 0 or random > 0:
        exit(loop.run_until_complete(cli(locations, random, bikes=bikes, crime=crime, nearby=nearby, as_json=json)))
    else:
        click.echo(Fore.RED + "Either include a post code, or the --api-server flag.")
def bidi(request):
    """Adds to the context BiDi related variables

    LANGUAGE_DIRECTION -- Direction of current language ('ltr' or 'rtl')
    LANGUAGE_START -- Start of language layout ('right' for rtl, 'left'
        for 'ltr')
    LANGUAGE_END -- End of language layout ('left' for rtl, 'right'
        for 'ltr')
    LANGUAGE_MARKER -- Language marker entity ('&rlm;' for rtl, '&lrm;'
        for ltr)
    """
    from django.utils import translation
    from django.utils.safestring import mark_safe
    # Pick the layout constants for the active language's text direction.
    # NOTE(review): the mark_safe arguments below may contain invisible
    # RLM/LRM Unicode characters -- preserve them exactly when editing.
    if translation.get_language_bidi():
        extra_context = {
            'LANGUAGE_DIRECTION':'rtl',
            'LANGUAGE_START':'right',
            'LANGUAGE_END':'left',
            'LANGUAGE_MARKER': mark_safe('‏'),
        }
    else:
        extra_context = {
            'LANGUAGE_DIRECTION':'ltr',
            'LANGUAGE_START':'left',
            'LANGUAGE_END':'right',
            'LANGUAGE_MARKER': mark_safe('‎'),
        }
    return extra_context
def _is_null(instance, name):
    '''
    Determine if an attribute of an *instance* with a specific *name*
    is null.
    '''
    # Read the raw __dict__ first so locally stored values are used without
    # triggering descriptors; fall back to normal attribute access.
    if name in instance.__dict__:
        value = instance.__dict__[name]
    else:
        value = getattr(instance, name)
    if value:
        # Any truthy value is by definition not null.
        return False
    elif value is None:
        return True
    # Falsy but not None (0, '', False, ...): consult the attribute's
    # metamodel type to decide whether that value denotes null.
    name = name.upper()
    metaclass = get_metaclass(instance)
    for attr_name, attr_ty in metaclass.attributes:
        if attr_name.upper() != name:
            continue
        attr_ty = attr_ty.upper()
        if attr_ty == 'UNIQUE_ID':
            # UUID(int=0) is reserved for null
            return value == 0
        elif attr_ty == 'STRING':
            # empty string is reserved for null
            return len(value) == 0
        else:
            #null-values for integer, boolean and real are not supported
            return False
def apply_query_operators(iterable, ops):
    '''
    Apply a series of query operators to a sequence of instances, e.g.
    where_eq(), order_by() or filter functions.
    '''
    for op in ops:
        if isinstance(op, (WhereEqual, OrderBy)):
            # Query operator objects are callables over the whole sequence.
            iterable = op(iterable)
        elif isinstance(op, dict):
            # A bare dict is shorthand for a WhereEqual query.
            iterable = WhereEqual(op)(iterable)
        else:
            # Anything else is treated as a per-instance predicate.
            iterable = filter(op, iterable)
    return iterable
def navigate_subtype(supertype, rel_id):
    '''
    Perform a navigation from *supertype* to its subtype across *rel_id*. The
    navigated association must be modeled as a subtype-supertype association.
    The return value will be an instance or None.
    '''
    if not supertype:
        return
    # Accept a bare integer rel_id, e.g. 42 -> 'R42'.
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id
    metaclass = get_metaclass(supertype)
    # Probe every link on the relation; only the actual subtype yields a hit.
    for kind, rel_id_candidate, _ in metaclass.links:
        if rel_id != rel_id_candidate:
            continue
        subtype = navigate_one(supertype).nav(kind, rel_id)()
        if subtype:
            return subtype
def sort_reflexive(set_of_instances, rel_id, phrase):
    '''
    Sort a *set of instances* in the order they appear across a conditional and
    reflexive association. The first instance in the resulting ordered set is
    **not** associated to an instance across the given *phrase*.

    :raise MetaException: if *set_of_instances* is not a QuerySet.
    :raise UnknownLinkException: if no reflexive link with a different phrase
        matches *rel_id*.
    '''
    if not isinstance(set_of_instances, QuerySet):
        raise MetaException('The collection to sort must be a QuerySet')
    if not set_of_instances.first:
        return QuerySet()
    # Accept a bare integer rel_id, e.g. 42 -> 'R42'.
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id
    # Figure out the phrase in the other direction
    metaclass = get_metaclass(set_of_instances.first)
    for link in metaclass.links.values():
        if link.to_metaclass != metaclass:
            continue
        if link.rel_id != rel_id:
            continue
        if link.phrase == phrase:
            continue
        other_phrase = link.phrase
        break
    else:
        raise UnknownLinkException(metaclass.kind, metaclass.kind, rel_id, phrase)
    # Chain heads: instances with no predecessor across *phrase*.
    first_filt = lambda sel: not navigate_one(sel).nav(metaclass.kind, rel_id, phrase)()
    first_instances = list(filter(first_filt, set_of_instances))
    if not first_instances:
        #the instance sequence is recursive, start anywhere
        first_instances = [set_of_instances.first]
    # Walk each chain forward via *other_phrase*, yielding only members of the
    # input set; a cycle terminates when it returns to its starting instance.
    def sequence_generator():
        for first in first_instances:
            inst = first
            while inst:
                if inst in set_of_instances:
                    yield inst
                inst = navigate_one(inst).nav(metaclass.kind, rel_id, other_phrase)()
                if inst is first:
                    break
    return QuerySet(sequence_generator())
def _find_link(inst1, inst2, rel_id, phrase):
    '''
    Find the association that links *inst1* to *inst2* across *rel_id* with
    the given *phrase*.

    Returns the pair ordered as (source-side instance, target-side instance,
    association); raises UnknownLinkException when nothing matches.
    '''
    metaclass1 = get_metaclass(inst1)
    metaclass2 = get_metaclass(inst2)
    # Accept a bare integer rel_id, e.g. 42 -> 'R42'.
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id

    def matches(link):
        # A link matches when it runs from inst1's kind to inst2's kind with
        # the requested phrase.
        return (link.from_metaclass.kind == metaclass1.kind and
                link.to_metaclass.kind == metaclass2.kind and
                link.phrase == phrase)

    for ass in metaclass1.metamodel.associations:
        if ass.rel_id != rel_id:
            continue
        if matches(ass.source_link):
            return inst1, inst2, ass
        if matches(ass.target_link):
            # Matched in the opposite direction: swap the instance order.
            return inst2, inst1, ass
    raise UnknownLinkException(metaclass1.kind, metaclass2.kind, rel_id, phrase)
def relate(from_instance, to_instance, rel_id, phrase=''):
    '''
    Relate *from_instance* to *to_instance* across *rel_id*. For reflexive
    associations, a *phrase* indicating the direction must also be provided.

    The two instances are related to each other by copying the identifying
    attributes from the instance on the TO side of the association to the
    instance on the FROM side.  Updated values which affect existing
    associations are propagated.  Returns ``False`` when either instance is
    ``None``, ``True`` on success.

    :raise RelateException: when either side of the link refuses to connect.
    '''
    if None in (from_instance, to_instance):
        return False
    source, target, ass = _find_link(from_instance, to_instance, rel_id, phrase)
    # Both link directions must accept the connection; `and` short-circuits
    # so the second connect is skipped when the first fails.
    connected = (ass.source_link.connect(source, target) and
                 ass.target_link.connect(target, source))
    if not connected:
        raise RelateException(from_instance, to_instance, rel_id, phrase)
    return True
def unrelate(from_instance, to_instance, rel_id, phrase=''):
    '''
    Unrelate *from_instance* from *to_instance* across *rel_id*. For reflexive
    associations, a *phrase* indicating the direction must also be provided.

    The two instances are unrelated from each other by resetting the
    identifying attributes on the FROM side of the association.  Updated
    values which affect existing associations are propagated.  Returns
    ``False`` when either instance is ``None``, ``True`` on success.

    :raise UnrelateException: when either side of the link refuses to
        disconnect.
    '''
    if None in (from_instance, to_instance):
        return False
    source, target, ass = _find_link(from_instance, to_instance, rel_id, phrase)
    # Both link directions must accept the disconnect; `and` short-circuits
    # so the second disconnect is skipped when the first fails.
    detached = (ass.source_link.disconnect(source, target) and
                ass.target_link.disconnect(target, source))
    if not detached:
        raise UnrelateException(from_instance, to_instance, rel_id, phrase)
    return True
def get_metaclass(class_or_instance):
    '''
    Get the metaclass for a *class_or_instance*.

    :raise MetaException: when the argument is neither an xtuml class nor an
        instance of one.
    '''
    if isinstance(class_or_instance, Class):
        return class_or_instance.__metaclass__
    # BUG FIX: guard with isinstance(..., type) -- issubclass() raises a
    # TypeError for non-class arguments (e.g. plain ints), which previously
    # pre-empted the intended MetaException below.
    elif isinstance(class_or_instance, type) and issubclass(class_or_instance, Class):
        return class_or_instance.__metaclass__
    raise MetaException("the provided argument is not an xtuml class or instance")
def delete(instance, disconnect=True):
    '''
    Delete an *instance* from its metaclass instance pool and optionally
    *disconnect* it from any links it might be connected to.

    :raise DeleteException: when *instance* is not an xtuml instance.
    '''
    # Only true xtuml instances carry a metaclass pool to delete from.
    if not isinstance(instance, Class):
        raise DeleteException("the provided argument is not an xtuml instance")
    metaclass = get_metaclass(instance)
    return metaclass.delete(instance, disconnect)
def formalize(self):
    '''
    Formalize the association and expose referential attributes
    on instances.

    Referential attributes on the source class are replaced by properties
    whose getter navigates across the target link; the setter raises, so
    the derived values cannot be assigned directly.
    '''
    source_class = self.source_link.to_metaclass
    target_class = self.target_link.to_metaclass
    source_class.referential_attributes |= set(self.source_keys)
    target_class.identifying_attributes |= set(self.target_keys)
    # getter: derive the value by navigating to the related instance;
    # fall back to a previously installed property (if any)
    def fget(inst, ref_name, alt_prop):
        other_inst = self.target_link.navigate_one(inst)
        if other_inst is None and alt_prop:
            return alt_prop.fget(inst)
        return getattr(other_inst, ref_name, None)
    # setter: referential attributes are derived and must not be assigned
    def fset(inst, value, name, ref_name, alt_prop):
        kind = get_metaclass(inst).kind
        raise MetaException('%s.%s is a referential attribute '\
                            'and cannot be assigned directly'% (kind, name))
        #other_inst = self.target_link.navigate_one(inst)
        #if other_inst is None and alt_prop:
        #    return alt_prop.fset(inst, value)
        #
        #elif other_inst:
        #    return setattr(other_inst, ref_name, value)
    # install one derived property per referential attribute, chaining to
    # any property already present on the class
    for ref_key, primary_key in zip(self.source_keys, self.target_keys):
        prop = getattr(source_class.clazz, ref_key, None)
        prop = property(partial(fget, ref_name=primary_key, alt_prop=prop),
                        partial(fset, name=ref_key, ref_name=primary_key, alt_prop=prop))
        setattr(source_class.clazz, ref_key, prop)
def cardinality(self):
    '''
    Obtain the cardinality string.
    Example: '1C' for a conditional link with a single instance [0..1]
             'MC' for a conditional link with any number of instances [0..*]
             'M'  for a link with more than one instance [1..*]
             '1'  for a link with exactly one instance [1]
    '''
    if self.many:
        s = 'M'
    else:
        s = '1'
    if self.conditional:
        s += 'C'
    return s
def connect(self, instance, another_instance, check=True):
    '''
    Connect an *instance* to *another_instance*.
    Optionally, disable any cardinality *check* that would prevent the two
    instances from being connected. Returns True on success (or when the
    instances are already connected), False when the check fails.
    '''
    if instance not in self:
        self[instance] = xtuml.OrderedSet()
    bucket = self[instance]
    # already connected - nothing to do
    if another_instance in bucket:
        return True
    # a single-valued link may not hold more than one instance
    if check and bucket and not self.many:
        return False
    bucket.add(another_instance)
    return True
def disconnect(self, instance, another_instance):
    '''
    Disconnect an *instance* from *another_instance*.
    Returns True if the connection was removed, False if it did not exist.
    '''
    if instance not in self or another_instance not in self[instance]:
        return False
    self[instance].remove(another_instance)
    return True
def compute_lookup_key(self, from_instance):
    '''
    Compute the lookup key for an instance, i.e. a foreign key that
    can be used to identify an instance at the end of the link.
    Returns None when any of the key attributes is null.
    '''
    kwargs = {}
    for attr, other_attr in self.key_map.items():
        if _is_null(from_instance, attr):
            return None
        # read the raw value stored on the instance when present,
        # otherwise fall back to ordinary attribute lookup
        try:
            kwargs[other_attr] = from_instance.__dict__[attr]
        except KeyError:
            kwargs[other_attr] = getattr(from_instance, attr)
    return frozenset(kwargs.items())
def compute_index_key(self, to_instance):
    '''
    Compute the index key that can be used to identify an instance
    on the link. Returns None when any of the key attributes is null.
    '''
    kwargs = {}
    for attr in self.key_map.values():
        if _is_null(to_instance, attr):
            return None
        # read the raw value stored on the instance when present,
        # otherwise fall back to ordinary attribute lookup
        try:
            kwargs[attr] = to_instance.__dict__[attr]
        except KeyError:
            kwargs[attr] = getattr(to_instance, attr)
    return frozenset(kwargs.items())
def attribute_type(self, attribute_name):
    '''
    Obtain the type of an attribute, or None when no attribute with the
    given name exists. The lookup is case insensitive.
    '''
    target = attribute_name.upper()
    for name, ty in self.attributes:
        if name.upper() == target:
            return ty
    return None
def add_link(self, metaclass, rel_id, phrase, conditional, many):
    '''
    Add a new link from *self* to *metaclass*, indexed by the target kind,
    association id and phrase.
    '''
    link = Link(self, rel_id, metaclass, phrase, conditional, many)
    self.links[(metaclass.kind.upper(), rel_id, phrase)] = link
    return link
def append_attribute(self, name, type_name):
    '''
    Append an attribute with a given *name* and *type name* at the end of
    the list of attributes.
    '''
    self.attributes.append((name, type_name))
def insert_attribute(self, index, name, type_name):
    '''
    Insert an attribute with a given *name* and *type name* at some *index*
    in the list of attributes.
    '''
    self.attributes.insert(index, (name, type_name))
def delete_attribute(self, name):
    '''
    Delete the first attribute with a given *name* from the list of
    attributes. Does nothing when no such attribute exists.
    '''
    for idx, (attr_name, _) in enumerate(self.attributes):
        if attr_name == name:
            del self.attributes[idx]
            return
def default_value(self, type_name):
    '''
    Obtain the default value for some *type name*.
    Raises MetaException for unknown type names.
    '''
    uname = type_name.upper()
    if uname == 'UNIQUE_ID':
        # unique ids are drawn from the metamodel id generator when available
        return next(self.metamodel.id_generator) if self.metamodel else None
    defaults = {'BOOLEAN': False, 'INTEGER': 0, 'REAL': 0.0, 'STRING': ''}
    if uname not in defaults:
        raise MetaException("Unknown type named '%s'" % type_name)
    return defaults[uname]
def new(self, *args, **kwargs):
    '''
    Create and return a new instance.

    Positional arguments are assigned to attributes in the order in which
    the attributes appear in the metaclass; keyword arguments are assigned
    by name. Values supplied for referential attributes are not assigned
    directly; instead, matching instances are looked up and related so the
    values become derived through the association.
    '''
    inst = self.clazz()
    self.storage.append(inst)
    # set all attributes with an initial default value
    referential_attributes = dict()
    for name, ty in self.attributes:
        if name not in self.referential_attributes:
            value = self.default_value(ty)
            setattr(inst, name, value)
    # set all positional arguments
    for attr, value in zip(self.attributes, args):
        name, ty = attr
        if name not in self.referential_attributes:
            setattr(inst, name, value)
        else:
            referential_attributes[name] = value
    # set all named arguments
    for name, value in kwargs.items():
        if name not in self.referential_attributes:
            setattr(inst, name, value)
        else:
            referential_attributes[name] = value
    if not referential_attributes:
        return inst
    # batch relate referential attributes
    for link in self.links.values():
        # skip links whose foreign keys are not fully covered by the
        # provided referential values
        if set(link.key_map.values()) - set(referential_attributes.keys()):
            continue
        kwargs = dict()
        for key, value in link.key_map.items():
            kwargs[key] = referential_attributes[value]
        if not kwargs:
            continue
        for other_inst in link.to_metaclass.query(kwargs):
            relate(other_inst, inst, link.rel_id, link.phrase)
    # warn about referential values that did not end up being derived
    for name, value in referential_attributes.items():
        if getattr(inst, name) != value:
            logger.warning('unable to assign %s to %s', name, inst)
    return inst
def clone(self, instance):
    '''
    Create a shallow clone of an *instance*.
    **Note:** the clone and the original instance **does not** have to be
    part of the same metaclass.
    '''
    values = [getattr(instance, name)
              for name, _ in get_metaclass(instance).attributes]
    return self.new(*values)
def delete(self, instance, disconnect=True):
    '''
    Delete an *instance* from the instance pool and optionally *disconnect*
    it from any links it might be connected to. If the *instance* is not
    part of the metaclass, a *DeleteException* is thrown.
    '''
    if instance not in self.storage:
        raise DeleteException("Instance not found in the instance pool")
    self.storage.remove(instance)
    if not disconnect:
        return
    for link in self.links.values():
        if instance not in link:
            continue
        # iterate over a snapshot: unrelate() disconnects the link, which
        # mutates the underlying set while we are iterating it
        for other in list(link[instance]):
            unrelate(instance, other, link.rel_id, link.phrase)
def select_one(self, *args):
    '''
    Select a single instance from the instance pool. Query operators such as
    where_eq(), order_by() or filter functions may be passed as optional
    arguments. Returns None when the selection is empty.
    '''
    selection = apply_query_operators(self.storage, args)
    for inst in selection:
        return inst
    return None
def select_many(self, *args):
    '''
    Select several instances from the instance pool. Query operators such as
    where_eq(), order_by() or filter functions may be passed as optional
    arguments. The result is always wrapped in a QuerySet.
    '''
    selection = apply_query_operators(self.storage, args)
    return selection if isinstance(selection, QuerySet) else QuerySet(selection)
def navigate(self, inst, kind, rel_id, phrase=''):
    '''
    Navigate across a link with some *rel_id* and *phrase* that yields
    instances of some *kind*.
    '''
    link = self.links.get((kind.upper(), rel_id, phrase))
    if link is not None:
        return link.navigate(inst)
    # no direct link; hop across the association in two steps
    link1, link2 = self._find_assoc_links(kind, rel_id, phrase)
    result = xtuml.OrderedSet()
    for intermediate in link1.navigate(inst):
        result |= link2.navigate(intermediate)
    return result
def instances(self):
    '''
    Obtain a sequence of all instances in the metamodel.
    '''
    for metaclass in self.metaclasses.values():
        yield from metaclass.storage
def define_class(self, kind, attributes, doc=''):
    '''
    Define a new class in the metamodel, and return its metaclass.
    Raises MetaModelException when the kind is already defined.
    '''
    ukind = kind.upper()
    if ukind in self.metaclasses:
        raise MetaModelException('A class with the name %s is already defined' % kind)
    metaclass = MetaClass(kind, self)
    for attribute in attributes:
        metaclass.append_attribute(*attribute)
    self.metaclasses[ukind] = metaclass
    return metaclass
def find_metaclass(self, kind):
    '''
    Find a metaclass of some *kind* in the metamodel.
    The lookup is case insensitive; unknown kinds raise
    UnknownClassException.
    '''
    try:
        return self.metaclasses[kind.upper()]
    except KeyError:
        raise UnknownClassException(kind)
def new(self, kind, *args, **kwargs):
    '''
    Create and return a new instance in the metamodel of some *kind*.
    Optionally, initial attribute values may be assigned to the new instance
    by passing them as positional or keyword arguments. Positional arguments
    are assigned in the order in which they appear in the metaclass.
    '''
    return self.find_metaclass(kind).new(*args, **kwargs)
def clone(self, instance):
    '''
    Create a shallow clone of an *instance*.
    **Note:** the clone and the original instance **does not** have to be
    part of the same metaclass.
    '''
    # resolve the metaclass of the same kind within this metamodel
    kind = get_metaclass(instance).kind
    return self.find_metaclass(kind).clone(instance)
def define_association(self, rel_id, source_kind, source_keys, source_many,
                       source_conditional, source_phrase, target_kind,
                       target_keys, target_many, target_conditional,
                       target_phrase):
    '''
    Define and return an association from one kind of class (the source
    kind) to some other kind of class (the target kind).
    '''
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id
    source_metaclass = self.find_metaclass(source_kind)
    target_metaclass = self.find_metaclass(target_kind)
    # each side of the association is a link on the opposite metaclass
    source_link = target_metaclass.add_link(source_metaclass, rel_id,
                                            many=source_many,
                                            phrase=target_phrase,
                                            conditional=source_conditional)
    target_link = source_metaclass.add_link(target_metaclass, rel_id,
                                            many=target_many,
                                            phrase=source_phrase,
                                            conditional=target_conditional)
    # foreign-key mappings between the two sides
    source_link.key_map = dict(zip(source_keys, target_keys))
    target_link.key_map = dict(zip(target_keys, source_keys))
    association = Association(rel_id,
                              source_keys, source_link,
                              target_keys, target_link)
    self.associations.append(association)
    return association
def define_unique_identifier(self, kind, name, *named_attributes):
    '''
    Define a unique identifier for some *kind* of class based on its
    *named attributes*. Integer names are canonicalized to 'I<n>'.
    Does nothing when no attributes are given.
    '''
    if not named_attributes:
        return
    if isinstance(name, int):
        name = 'I%d' % name
    metaclass = self.find_metaclass(kind)
    metaclass.indices[name] = tuple(named_attributes)
    metaclass.identifying_attributes |= set(named_attributes)
def select_many(self, kind, *args):
    '''
    Query the metamodel for a set of instances of some *kind*. Query
    operators such as where_eq(), order_by() or filter functions may be
    passed as optional arguments.
    Usage example:
    >>> m = xtuml.load_metamodel('db.sql')
    >>> inst_set = m.select_many('My_Class', lambda sel: sel.number > 5)
    '''
    return self.find_metaclass(kind).select_many(*args)
def select_one(self, kind, *args):
    '''
    Query the metamodel for a single instance of some *kind*. Query
    operators such as where_eq(), order_by() or filter functions may be
    passed as optional arguments.
    Usage example:
    >>> m = xtuml.load_metamodel('db.sql')
    >>> inst = m.select_one('My_Class', lambda sel: sel.name == 'Test')
    '''
    return self.find_metaclass(kind).select_one(*args)
async def api_twitter(request):
    """
    Gets the twitter feed from a given handle.

    :param request: the aiohttp request carrying the handle in the url.
    :return: The feed in json format.
    """
    handle = request.match_info.get('handle')
    if handle is None:
        raise web.HTTPNotFound(body="Not found.")
    try:
        posts = await fetch_twitter(handle)
    except ApiError as e:
        # surface upstream api failures as a 500 with the api status
        raise web.HTTPInternalServerError(body=e.status)
    return str_json_response(posts)
def send(socket, header, payload, topics=(), flags=0):
    """Sends header, payload, and topics through a ZeroMQ socket.
    :param socket: a zmq socket.
    :param header: a list of byte strings which represent a message header.
    :param payload: the serialized byte string of a payload.
    :param topics: a chain of topics.
    :param flags: zmq flags to send messages.
    """
    # frame layout: topics, SEAM separator, header frames, then payload
    frames = list(topics)
    frames.append(SEAM)
    frames.extend(header)
    frames.append(payload)
    return eintr_retry_zmq(socket.send_multipart, frames, flags)
def recv(socket, flags=0, capture=(lambda msgs: None)):
    """Receives header, payload, and topics through a ZeroMQ socket.
    :param socket: a zmq socket.
    :param flags: zmq flags to receive messages.
    :param capture: a function to capture received messages.
    """
    frames = eintr_retry_zmq(socket.recv_multipart, flags)
    # let the caller observe the raw frames before parsing
    capture(frames)
    return parse(frames)
def dead_code():
    """
    Runs vulture to report dead (unused) code - this also finds code you
    are working on today! Exits with a non-zero status when the report
    exceeds the allowed number of lines.
    """
    with safe_cd(SRC):
        # on CI the tool is invoked through the plain interpreter,
        # locally through pipenv
        runner = PYTHON if IS_TRAVIS else PIPENV
        command = "{0} vulture {1}".format(runner, PROJECT_NAME).strip().split()
        output_file_name = "dead_code.txt"
        with open(output_file_name, "w") as outfile:
            env = config_pythonpath()
            subprocess.call(command, stdout=outfile, env=env)
        cutoff = 20
        # use a context manager so the report handle is not leaked
        with open(output_file_name) as report:
            num_lines = sum(1 for line in report if line)
        if num_lines > cutoff:
            print("Too many lines of dead code : {0}, max {1}".format(num_lines, cutoff))
            exit(-1)
def parse_emails(values):
    '''
    Take a string or list of strings and try to extract all the emails
    '''
    # normalize a single string into a list of strings
    if isinstance(values, str):
        values = [values]
    # the third group of each regex match holds the address itself
    return [match[2]
            for value in values
            for match in re_emails.findall(value)]
def rpc(f=None, **kwargs):
    """Marks a method as RPC.

    Usable bare, with options, or with a string positional that becomes
    the ``name`` option.
    """
    if f is None:
        return functools.partial(_rpc, **kwargs)
    if isinstance(f, six.string_types):
        # rpc('name', ...) - the positional string is the rpc name
        if 'name' in kwargs:
            raise ValueError('name option duplicated')
        kwargs['name'] = f
        return functools.partial(_rpc, **kwargs)
    # rpc(func) with options - apply the configured decorator to func
    return rpc(**kwargs)(f)
def rpc_spec_table(app):
    """Collects methods which are speced as RPC.

    Returns a mapping of rpc name to (method, spec) pairs.
    """
    table = {}
    for attr, member in inspect.getmembers(app):
        spec = get_rpc_spec(member, default=None)
        if spec is not None:
            table[spec.name] = (member, spec)
    return table
async def normalize_postcode_middleware(request, handler):
    """
    If there is a postcode in the url it validates and normalizes it.

    Invalid postcodes get a 404; non-canonical ones are redirected to the
    canonical (upper-case, space-free) form of the same route.
    """
    postcode = request.match_info.get('postcode', None)
    # nothing to normalize, or the special "random" placeholder
    if postcode is None or postcode == "random":
        return await handler(request)
    if not is_uk_postcode(postcode):
        raise web.HTTPNotFound(text="Invalid Postcode")
    normalized = postcode.upper().replace(" ", "")
    if normalized == postcode:
        return await handler(request)
    # rebuild the same route with the canonical postcode and redirect
    url_name = request.match_info.route.name
    url = request.app.router[url_name]
    params = dict(request.match_info)
    params['postcode'] = normalized
    raise web.HTTPMovedPermanently(str(url.url_for(**params)))
def make_repr(obj, params=None, keywords=None, data=None, name=None,
              reprs=None):
    """Generates a string of object initialization code style. It is useful
    for custom __repr__ methods::
        class Example(object):
            def __init__(self, param, keyword=None):
                self.param = param
                self.keyword = keyword
            def __repr__(self):
                return make_repr(self, ['param'], ['keyword'])
    See the representation of example object::
        >>> Example('hello', keyword='world')
        Example('hello', keyword='world')
    """
    opts = []
    if params is not None:
        rendered = [_repr_attr(obj, attr, data, reprs) for attr in params]
        opts.append(', '.join(rendered))
    if keywords is not None:
        rendered = ['%s=%s' % (attr, _repr_attr(obj, attr, data, reprs))
                    for attr in keywords]
        opts.append(', '.join(rendered))
    if name is None:
        name = class_name(obj)
    return '%s(%s)' % (name, ', '.join(opts))
def eintr_retry(exc_type, f, *args, **kwargs):
    """Calls a function. If an error of the given exception type with
    interrupted system call (EINTR) occurs calls the function again.

    :param exc_type: the exception type that carries an ``errno`` attribute.
    :param f: the function to call.
    :returns: whatever *f* returns.
    :raises: any caught exception whose ``errno`` is not ``EINTR``.
    """
    while True:
        try:
            return f(*args, **kwargs)
        except exc_type as exc:
            # retry only interrupted system calls; re-raise anything else.
            # (the original had an unreachable try/else 'break' clause -
            # the try body always returns, so the else never executed)
            if exc.errno != EINTR:
                raise
def eintr_retry_zmq(f, *args, **kwargs):
    """The specialization of :func:`eintr_retry` by :exc:`zmq.ZMQError`.

    Calls *f* and retries it while it fails with an EINTR-carrying
    :exc:`zmq.ZMQError`.
    """
    return eintr_retry(zmq.ZMQError, f, *args, **kwargs)
def next(self):
    '''
    Progress to the next identifier, and return the current one.
    '''
    current, self._current = self._current, self.readfunc()
    return current
def enter(self, node):
    '''
    Tries to invoke a method matching the pattern *enter_<type name>*, where
    <type name> is the name of the type of the *node*. Falls back to
    *default_enter* when no such method exists.
    '''
    handler = getattr(self, 'enter_' + type(node).__name__, self.default_enter)
    handler(node)
def leave(self, node):
    '''
    Tries to invoke a method matching the pattern *leave_<type name>*, where
    <type name> is the name of the type of the *node*. Falls back to
    *default_leave* when no such method exists.
    '''
    handler = getattr(self, 'leave_' + type(node).__name__, self.default_leave)
    handler(node)
def accept(self, node, **kwargs):
    '''
    Invoke the visitors before and after descending down the tree.
    The walker will also try to invoke a method matching the pattern
    *accept_<type name>*, where <type name> is the name of the accepted
    *node*. None nodes are ignored.
    '''
    if node is None:
        return
    for visitor in self.visitors:
        visitor.enter(node)
    handler = getattr(self, 'accept_' + type(node).__name__,
                      self.default_accept)
    result = handler(node, **kwargs)
    for visitor in self.visitors:
        visitor.leave(node)
    return result
def default_accept(self, node, **kwargs):
    '''
    The default accept behaviour is to descend into the iterable member
    *node.children* (if available).
    '''
    for child in getattr(node, 'children', ()):
        self.accept(child, **kwargs)
def render(self, node):
    '''
    Try to invoke a method matching the pattern *render_<type name>*, where
    <type name> is the name of the rendering *node*. Falls back to
    *default_render* when no such method exists.
    '''
    handler = getattr(self, 'render_' + type(node).__name__,
                      self.default_render)
    return handler(node)
def accept_S_SYS(self, inst):
    '''
    A System Model contains top-level packages
    '''
    packages = many(inst).EP_PKG[1401]()
    for package in packages:
        self.accept(package)
def accept_C_C(self, inst):
    '''
    A Component contains packageable elements
    '''
    elements = many(inst).PE_PE[8003]()
    for element in elements:
        self.accept(element)
def accept_EP_PKG(self, inst):
    '''
    A Package contains packageable elements
    '''
    elements = many(inst).PE_PE[8000]()
    for element in elements:
        self.accept(element)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.