    Returns:
        The betacode equivalent of the inputted text where applicable.
    """
    u = _UNICODE_MAP
    transform = []
    for ch in text:
        try:
            conv = u[ch]
        except KeyError:
            conv = ch
        transform.append(conv)
    converted = ''.join(transform)
    return converted"

743,"def __calculate_order(self, node_dict):
|
""""""
|
Determine a valid ordering of the nodes in which a node is not called before all of it's dependencies.
|
Raise an error if there is a cycle, or nodes are missing.
|
""""""
|
if len(node_dict.keys()) != len(set(node_dict.keys())):
|
raise DependencyTreeException(""Duplicate Keys Exist in node dictionary!"")
|
valid_order = [node for node, dependencies in node_dict.items() if len(dependencies) == 0]
|
remaining_nodes = [node for node in node_dict.keys() if node not in valid_order]
|
while len(remaining_nodes) > 0:
|
node_added = False
|
for node in remaining_nodes:
|
dependencies = [d for d in node_dict[node] if d not in valid_order]
|
if len(dependencies) == 0:
|
valid_order.append(node)
|
remaining_nodes.remove(node)
|
node_added = True
|
if not node_added:
|
# the tree must be invalid, as it was not possible to remove a node.
|
# it's hard to find all the errors, so just spit out the first one you can find.
|
invalid_node = remaining_nodes[0]
|
invalid_dependency = ', '.join(node_dict[invalid_node])
|
if invalid_dependency not in remaining_nodes:
|
raise DependencyTreeException(
|
""Missing dependency! One or more of ({dependency}) are missing for {dependant}."".format(
|
dependant=invalid_node, dependency=invalid_dependency))
|
else:
|
raise DependencyTreeException(""The dependency %s is cyclic or dependent on a cyclic dependency"" % invalid_dependency)
|
return valid_order"
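
For illustration, a minimal sketch of the input this ordering routine expects and the result it produces; the node names and the assumption that the private method is reachable through some public wrapper are made up for this example, not part of the row above.

# Hypothetical input: each node maps to the list of nodes it depends on.
node_dict = {
    'config': [],                    # no dependencies, so it comes first
    'database': ['config'],
    'api': ['config', 'database'],
}
# A valid order returned by the routine above places every node after all of
# its dependencies, e.g. ['config', 'database', 'api'].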

744,"def read_input(self, filename, has_header=True):
    """
    filename is any filename, or something on which open() can be called
    for example:
        csv_input = CSVInput()
        csv_input.read_input("csvfile.csv")
    """
    stream = open(filename)
    reader = csv.reader(stream)
    csv_data = []
    for (i, row) in enumerate(reader):
        if i == 0:
            if not has_header:
                # no header in the file: synthesise one from the column indices
                csv_data.append([str(i) for i in xrange(0, len(row))])
        csv_data.append(row)
    self.data = csv_data"

745,"def pprint_out(dct: Dict):
    """
    Utility method to pretty-print a dictionary that is typically outputted by parsyfiles (an ordered dict)

    :param dct:
    :return:
    """
    for name, val in dct.items():
        print(name + ':')
        pprint(val, indent=4)"

746,"def warn_import_error(type_of_obj_support: str, caught: ImportError):
    """
    Utility method to print a warning message about failed import of some modules

    :param type_of_obj_support:
    :param caught:
    :return:
    """
    msg = StringIO()
    msg.writelines('Import Error while trying to add support for ' + type_of_obj_support + '. You may continue but '
                   'the associated parsers and converters wont be available : \n')
    traceback.print_tb(caught.__traceback__, file=msg)
    msg.writelines(str(caught.__class__.__name__) + ' : ' + str(caught) + '\n')
    warn(msg.getvalue())"
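
For context, a hedged sketch of how a helper like this is typically called around an optional import; the module name below is invented for illustration and is not taken from the row above.

try:
    import some_optional_module  # hypothetical optional dependency
except ImportError as e:
    # downgrade the failed import to a warning instead of aborting start-up
    warn_import_error('some_optional_module collections', e)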

747,"def create_parser_options(lazy_mfcollection_parsing: bool = False) -> Dict[str, Dict[str, Any]]:
    """
    Utility method to create a default options structure with the lazy parsing inside

    :param lazy_mfcollection_parsing:
    :return: the options structure filled with lazyparsing option (for the MultifileCollectionParser)
    """
    return {MultifileCollectionParser.__name__: {'lazy_parsing': lazy_mfcollection_parsing}}"
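
A small usage sketch based only on the return statement in the row above; the resulting dictionary shape is exactly what that one-liner builds.

# Enable lazy parsing for the MultifileCollectionParser.
opts = create_parser_options(lazy_mfcollection_parsing=True)
# opts == {'MultifileCollectionParser': {'lazy_parsing': True}}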

748,"def add_parser_options(options: Dict[str, Dict[str, Any]], parser_id: str, parser_options: Dict[str, Dict[str, Any]],
                       overwrite: bool = False):
    """
    Utility method to add options for a given parser, to the provided options structure

    :param options: