if mma:
    self._init_table_from_dfa(mma)
else:
    self._init_table()

logging.info('Generating a closed and consistent observation table.')
while True:
    closed = False
    # Make sure that the table is closed
    while not closed:
        logging.debug('Checking if table is closed.')
        closed, string = self.observation_table.is_closed()
        if not closed:
            logging.debug('Closing table.')
            self._ot_make_closed(string)
        else:
            logging.debug('Table closed.')
    # Create conjecture
    dfa = self.get_dfa_conjecture()
    logging.info('Generated conjecture machine with %d states.', len(list(dfa.states)))
    # Check correctness
    logging.debug('Running equivalence query.')
    found, counter_example = self._equivalence_query(dfa)
    # Are we done?
    if found:
        logging.info('No counterexample found. Hypothesis is correct!')
        break
    # Add the new experiments into the table to reiterate the learning loop
    logging.info('Processing counterexample %s with length %d.', counter_example, len(counter_example))
    self._process_counter_example(dfa, counter_example)
logging.info('Learning complete.')
logging.info('Learned em_vector table is the following:')
logging.info(self.observation_table.em_vector)
return '', dfa
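The loop above follows Angluin-style L* learning: close the observation table, conjecture a DFA, then refine with counterexamples from equivalence queries until none remain. As a minimal, self-contained sketch of what a closedness check like observation_table.is_closed() computes, here the table maps each string to its row of membership-query answers; this table/prefixes/alphabet representation is an assumption for illustration, not this class's actual API:

def is_closed(table, prefixes, alphabet):
    # The table is closed when every one-letter extension of a known prefix
    # has a row that already appears among the rows of the prefixes themselves.
    upper_rows = {table[p] for p in prefixes}
    for p in prefixes:
        for a in alphabet:
            if table[p + a] not in upper_rows:
                return False, p + a  # offending extension, to be promoted to a prefix
    return True, None

table = {'': (0,), 'a': (1,), 'b': (0,)}
print(is_closed(table, prefixes={''}, alphabet={'a', 'b'}))  # -> (False, 'a')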
1233,"def print_error_to_io_stream(err: Exception, io: TextIOBase, print_big_traceback : bool = True):
""""""
Utility method to print an exception's content to a stream
:param err:
:param io:
:param print_big_traceback:
:return:
""""""
if print_big_traceback:
traceback.print_tb(err.__traceback__, file=io, limit=-GLOBAL_CONFIG.multiple_errors_tb_limit)
else:
traceback.print_tb(err.__traceback__, file=io, limit=-1)
io.writelines(' ' + str(err.__class__.__name__) + ' : ' + str(err))"
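A quick usage sketch for print_error_to_io_stream: StringIO is a TextIOBase, so an in-memory buffer works as the target stream (the surrounding try/except is illustrative):

from io import StringIO

try:
    raise ValueError('invalid input')
except ValueError as err:
    buf = StringIO()
    print_error_to_io_stream(err, buf, print_big_traceback=False)
    print(buf.getvalue())  # last traceback frame, then ' ValueError : invalid input'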
1234,"def should_hide_traceback(e):
"""""" Returns True if we can hide the error traceback in the warnings messages """"""
if type(e) in {WrongTypeCreatedError, CascadeError, TypeInformationRequiredError}:
return True
elif type(e).__name__ in {'InvalidAttributeNameForConstructorError', 'MissingMandatoryAttributeFiles'}:
return True
else:
return False"
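should_hide_traceback evidently gates how much detail a caught error contributes to a warning. A hedged sketch of such a call site, combining it with print_error_to_io_stream above; the warn wiring and do_parse are hypothetical, not the library's actual code:

import warnings
from io import StringIO

try:
    do_parse()  # hypothetical parsing call that may fail
except Exception as e:
    if should_hide_traceback(e):
        # short form: class name and message only
        warnings.warn('%s : %s' % (type(e).__name__, e))
    else:
        # long form: include the traceback via the utility above
        buf = StringIO()
        print_error_to_io_stream(e, buf)
        warnings.warn(buf.getvalue())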
1235,"def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any],
logger: Logger) -> Dict[str, Any]:
""""""
Implementation of AnyParser API
""""""
raise Exception('This should never happen, since this parser relies on underlying parsers')"
1236,"def _parse_multifile(self, desired_type: Type[T], obj: PersistedObject,
parsing_plan_for_children: Dict[str, AnyParser._RecursiveParsingPlan],
logger: Logger, options: Dict[str, Dict[str, Any]]) -> T:
""""""
Implementation of AnyParser API
""""""
raise Exception('This should never happen, since this parser relies on underlying parsers')"
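Both multifile hooks raise on purpose: a cascade never parses anything itself. A hypothetical sketch of the delegation that makes these hooks unreachable; the _parsers_list attribute and the CascadeError constructor usage are assumptions, only the CascadeError name comes from the code above:

def parse(self, desired_type, obj, logger, options):
    # Try each underlying parser in registration order; the first success wins,
    # so the cascade's own _parse_* implementations are never invoked.
    caught = []
    for parser in self._parsers_list:  # hypothetical attribute
        try:
            return parser.parse(desired_type, obj, logger, options)
        except Exception as e:
            caught.append(e)
    raise CascadeError('all parsers failed: %s' % caught)  # constructor usage is an assumption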
1237,"def add_parser_to_cascade(self, parser: AnyParser, typ: Type = None):
""""""
Adds the provided parser to this cascade. If this is the first parser, it will configure the cascade according
to the parser capabilities (single and multifile support, extensions).
Subsequent parsers will have to support the same capabilities at least, to be added.
:param parser:
:param typ:
:return:
""""""
# the first parser added will configure the cascade
if not self.configured:
self.supported_exts = parser.supported_exts
self.supported_types = parser.supported_types
# check if new parser is compliant with previous ones
if self.supports_singlefile():
if not parser.supports_singlefile():
raise ValueError(
'Cannot add this parser to this parsing cascade : it does not match the rest of the cascades '
'configuration (singlefile support)')