| signature (string, lengths 8–3.44k) | body (string, lengths 0–1.41M) | docstring (string, lengths 1–122k) | id (string, lengths 5–17) |
|---|---|---|---|
def message(self, message, source, point, ln):
|
if message is None:<EOL><INDENT>message = "<STR_LIT>"<EOL><DEDENT>if ln is not None:<EOL><INDENT>message += "<STR_LIT>" + str(ln) + "<STR_LIT:)>"<EOL><DEDENT>if source:<EOL><INDENT>if point is None:<EOL><INDENT>message += "<STR_LIT:\n>" + "<STR_LIT:U+0020>" * taberrfmt + clean(source)<EOL><DEDENT>else:<EOL><INDENT>part = clean(source.splitlines()[lineno(point, source) - <NUM_LIT:1>], False).lstrip()<EOL>point -= len(source) - len(part) <EOL>part = part.rstrip() <EOL>message += "<STR_LIT:\n>" + "<STR_LIT:U+0020>" * taberrfmt + part<EOL>if point > <NUM_LIT:0>:<EOL><INDENT>if point >= len(part):<EOL><INDENT>point = len(part) - <NUM_LIT:1><EOL><DEDENT>message += "<STR_LIT:\n>" + "<STR_LIT:U+0020>" * (taberrfmt + point) + "<STR_LIT>"<EOL><DEDENT><DEDENT><DEDENT>return message<EOL>
|
Creates a SyntaxError-like message.
|
f11278:c1:m1
|
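Note that the dataset masks string and number literals as `<STR_LIT>`/`<NUM_LIT>` tokens, so the bodies are not directly runnable. Below is a minimal runnable sketch of the message builder in f11278:c1:m1, with every masked literal (the `(line N)` suffix, the `^` caret, the default message) filled in by assumption and the multi-line `lineno()` lookup simplified to a single-line source:

```python
# Hedged reconstruction of f11278:c1:m1; all concrete strings are assumed,
# since the dataset replaces literals with <STR_LIT>/<NUM_LIT> tokens.
taberrfmt = 2  # assumed indent width for the echoed source line

def format_err(message, source=None, point=None, ln=None):
    if message is None:
        message = "parsing failed"
    if ln is not None:
        message += " (line " + str(ln) + ")"
    if source:
        if point is None:
            message += "\n" + " " * taberrfmt + source.strip()
        else:
            part = source.lstrip()
            point -= len(source) - len(part)  # keep the caret aligned
            part = part.rstrip()
            message += "\n" + " " * taberrfmt + part
            if point > 0:
                point = min(point, len(part) - 1)
                message += "\n" + " " * (taberrfmt + point) + "^"
    return message

print(format_err("unexpected token", "  x = (1 +", point=9, ln=3))
```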
def syntax_err(self):
|
args = self.args[:<NUM_LIT:2>] + (None, None) + self.args[<NUM_LIT:4>:]<EOL>err = SyntaxError(self.message(*args))<EOL>err.offset = args[<NUM_LIT:2>]<EOL>err.lineno = args[<NUM_LIT:3>]<EOL>return err<EOL>
|
Creates a SyntaxError.
|
f11278:c1:m2
|
def message(self, message, source, point, ln):
|
message += "<STR_LIT>"<EOL>return super(CoconutStyleError, self).message(message, source, point, ln)<EOL>
|
Creates the --strict Coconut error message.
|
f11278:c2:m0
|
def __init__(self, message, source=None, point=None, ln=None, target=None):
|
self.args = (message, source, point, ln, target)<EOL>
|
Creates the --target Coconut error.
|
f11278:c3:m0
|
def message(self, message, source, point, ln, target):
|
if target is not None:<EOL><INDENT>message += "<STR_LIT>" + target + "<STR_LIT>"<EOL><DEDENT>return super(CoconutTargetError, self).message(message, source, point, ln)<EOL>
|
Creates the --target Coconut error message.
|
f11278:c3:m1
|
def __init__(self, message=None, source=None, point=None, ln=None):
|
self.args = (message, source, point, ln)<EOL>
|
Creates the ParseError.
|
f11278:c4:m0
|
def message(self, message, item, extra):
|
return (<EOL>super(CoconutInternalException, self).message(message, item, extra)<EOL>+ "<STR_LIT:U+0020>" + report_this_text<EOL>)<EOL>
|
Creates the Coconut internal exception message.
|
f11278:c7:m0
|
def __init__(self, message, loc):
|
self.args = (message, loc)<EOL>
|
Creates the Coconut exception.
|
f11278:c8:m0
|
def message(self, message, loc):
|
return message<EOL>
|
Uses arguments to create the message.
|
f11278:c8:m1
|
def cmd(args, interact=False):
|
if isinstance(args, (str, bytes)):<EOL><INDENT>args = args.split()<EOL><DEDENT>return CLI.cmd(args=args, interact=interact)<EOL>
|
Process command-line arguments.
|
f11279:m0
|
def version(which="<STR_LIT>"):
|
if which in VERSIONS:<EOL><INDENT>return VERSIONS[which]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutException(<EOL>"<STR_LIT>" + ascii(which),<EOL>extra="<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(VERSIONS),<EOL>)<EOL><DEDENT>
|
Get the Coconut version.
|
f11279:m1
|
def parse(code="<STR_LIT>", mode="<STR_LIT>"):
|
if CLI.comp is None:<EOL><INDENT>setup()<EOL><DEDENT>if mode in PARSERS:<EOL><INDENT>return PARSERS[mode](CLI.comp)(code)<EOL><DEDENT>else:<EOL><INDENT>raise CoconutException(<EOL>"<STR_LIT>" + ascii(mode),<EOL>extra="<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(PARSERS),<EOL>)<EOL><DEDENT>
|
Compile Coconut code.
|
f11279:m2
|
def auto_compilation(on=True):
|
if on:<EOL><INDENT>if coconut_importer not in sys.meta_path:<EOL><INDENT>sys.meta_path.insert(<NUM_LIT:0>, coconut_importer)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>sys.meta_path.remove(coconut_importer)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>
|
Turn automatic compilation of Coconut files on or off.
|
f11279:m3
|
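`auto_compilation` (f11279:m3) toggles automatic compilation by editing `sys.meta_path`. A self-contained sketch of the same insert-or-remove pattern; the finder below is a stand-in for `coconut_importer`:

```python
# Toggle an import hook on sys.meta_path, tolerating double-removal.
import sys

class StubFinder:
    """Stand-in for coconut_importer; a real finder would compile sources."""
    def find_module(self, fullname, path=None):
        return None

stub_finder = StubFinder()

def auto_compilation(on=True):
    if on:
        if stub_finder not in sys.meta_path:
            sys.meta_path.insert(0, stub_finder)  # front of the list, so it wins
    else:
        try:
            sys.meta_path.remove(stub_finder)
        except ValueError:
            pass  # already off

auto_compilation(True)
assert stub_finder in sys.meta_path
auto_compilation(False)
assert stub_finder not in sys.meta_path
```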
@staticmethod<EOL><INDENT>def run_compiler(path):<DEDENT>
|
cmd([path] + list(coconut_import_hook_args))<EOL>
|
Run the Coconut compiler on the given path.
|
f11279:c0:m0
|
def find_module(self, fullname, path=None):
|
basepaths = ["<STR_LIT>"] + list(sys.path)<EOL>if fullname.startswith("<STR_LIT:.>"):<EOL><INDENT>if path is None:<EOL><INDENT>return None<EOL><DEDENT>fullname = fullname[<NUM_LIT:1>:]<EOL>basepaths.insert(<NUM_LIT:0>, path)<EOL><DEDENT>fullpath = os.path.join(*fullname.split("<STR_LIT:.>"))<EOL>for head in basepaths:<EOL><INDENT>path = os.path.join(head, fullpath)<EOL>filepath = path + self.ext<EOL>dirpath = os.path.join(path, "<STR_LIT>" + self.ext)<EOL>if os.path.exists(filepath):<EOL><INDENT>self.run_compiler(filepath)<EOL>return None<EOL><DEDENT>if os.path.exists(dirpath):<EOL><INDENT>self.run_compiler(path)<EOL>return None<EOL><DEDENT><DEDENT>return None<EOL>
|
Searches for a Coconut file of the given name and compiles it.
|
f11279:c0:m1
|
def patched_nested_parse(self, *args, **kwargs):
|
kwargs["<STR_LIT>"] = True<EOL>return self.stored_nested_parse(*args, **kwargs)<EOL>
|
Sets match_titles then calls stored_nested_parse.
|
f11281:c0:m0
|
def auto_code_block(self, *args, **kwargs):
|
self.stored_nested_parse = self.state_machine.state.nested_parse<EOL>self.state_machine.state.nested_parse = self.patched_nested_parse<EOL>try:<EOL><INDENT>return super(PatchedAutoStructify, self).auto_code_block(*args, **kwargs)<EOL><DEDENT>finally:<EOL><INDENT>self.state_machine.state.nested_parse = self.stored_nested_parse<EOL><DEDENT>
|
Modified auto_code_block that patches nested_parse.
|
f11281:c0:m1
|
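f11281:c0:m1 shows the temporary monkey-patch idiom: save the original method, swap in a wrapper, and restore the original in a `finally` block. A generic hedged sketch of the same pattern:

```python
# Swap a method for a wrapper, call through, and always restore it.
class State:
    def nested_parse(self, text):
        return text.upper()

state = State()
stored = state.nested_parse  # keep the original bound method

def patched(text):
    # the real patch forces match_titles=True before delegating
    return "patched:" + stored(text)

state.nested_parse = patched
try:
    print(state.nested_parse("hello"))  # patched:HELLO
finally:
    state.nested_parse = stored  # restore even if the call raised
```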
def user_to_request(handler):
|
@wraps(handler)<EOL>async def decorator(*args):<EOL><INDENT>request = _get_request(args)<EOL>request[cfg.REQUEST_USER_KEY] = await get_cur_user(request)<EOL>return await handler(*args)<EOL><DEDENT>return decorator<EOL>
|
Add the user to the request if the user is logged in
|
f11298:m1
|
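f11298:m1 wraps an async handler so the resolved user rides along on the request. A runnable sketch; `get_cur_user`, the key name, and the single-argument request shape are stand-ins for the masked originals (the real code extracts the request from `*args`):

```python
import asyncio
from functools import wraps

REQUEST_USER_KEY = "user"  # assumed key name

async def get_cur_user(request):
    return "alice" if request.get("token") else None  # stand-in lookup

def user_to_request(handler):
    @wraps(handler)
    async def decorator(request):
        request[REQUEST_USER_KEY] = await get_cur_user(request)
        return await handler(request)
    return decorator

@user_to_request
async def handler(request):
    return request[REQUEST_USER_KEY]

print(asyncio.run(handler({"token": "t"})))  # alice
```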
def find_one_sql(table, filter, fields=None):
|
keys, values = _split_dict(filter)<EOL>fields = '<STR_LIT:U+002CU+0020>'.join(fields) if fields else '<STR_LIT:*>'<EOL>where = _pairs(keys)<EOL>sql = '<STR_LIT>'.format(fields, table, where)<EOL>return sql, values<EOL>
|
>>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'})
('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10])
>>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar'])
('SELECT foo, bar FROM tbl WHERE id=$1', [10])
|
f11299:m1
|
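The doctests pin down the masked format strings, so f11299:m1 and its helpers can be reconstructed with reasonable confidence; treat the literals as inferred, not verbatim:

```python
def _split_dict(dic):
    keys = sorted(dic.keys())
    return keys, [dic[k] for k in keys]

def _pairs(keys, *, start=1, sep=' AND '):
    return sep.join('{}=${}'.format(k, i) for i, k in enumerate(keys, start))

def find_one_sql(table, filter, fields=None):
    keys, values = _split_dict(filter)
    fields = ', '.join(fields) if fields else '*'
    where = _pairs(keys)
    return 'SELECT {} FROM {} WHERE {}'.format(fields, table, where), values

assert find_one_sql('tbl', {'foo': 10, 'bar': 'baz'}) == \
    ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10])
```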
def insert_sql(table, data, returning='<STR_LIT:id>'):
|
keys, values = _split_dict(data)<EOL>sql = '<STR_LIT>'.format(<EOL>table,<EOL>'<STR_LIT:U+002CU+0020>'.join(keys),<EOL>'<STR_LIT:U+002CU+0020>'.join(_placeholders(data)),<EOL>'<STR_LIT>'.format(returning) if returning else '<STR_LIT>')<EOL>return sql, values<EOL>
|
>>> insert_sql('tbl', {'foo': 'bar', 'id': 1})
('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1])
>>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning=None)
('INSERT INTO tbl (foo, id) VALUES ($1, $2)', ['bar', 1])
>>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk')
('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1])
|
f11299:m3
|
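Likewise for f11299:m3; the `INSERT`/`RETURNING` strings below are inferred from the doctests:

```python
def _split_dict(dic):
    keys = sorted(dic.keys())
    return keys, [dic[k] for k in keys]

def _placeholders(variables):
    return ['${}'.format(i) for i, _ in enumerate(variables, 1)]

def insert_sql(table, data, returning='id'):
    keys, values = _split_dict(data)
    sql = 'INSERT INTO {} ({}) VALUES ({}){}'.format(
        table, ', '.join(keys), ', '.join(_placeholders(data)),
        ' RETURNING {}'.format(returning) if returning else '')
    return sql, values

assert insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning=None) == \
    ('INSERT INTO tbl (foo, id) VALUES ($1, $2)', ['bar', 1])
```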
def update_sql(table, filter, updates):
|
where_keys, where_vals = _split_dict(filter)<EOL>up_keys, up_vals = _split_dict(updates)<EOL>changes = _pairs(up_keys, sep='<STR_LIT:U+002CU+0020>')<EOL>where = _pairs(where_keys, start=len(up_keys) + <NUM_LIT:1>)<EOL>sql = '<STR_LIT>'.format(<EOL>table, changes, where)<EOL>return sql, up_vals + where_vals<EOL>
|
>>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'})
('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a'])
|
f11299:m5
|
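And f11299:m5, whose only subtlety is continuing the placeholder numbering past the SET clause; `delete_sql` below follows the same `_pairs` pattern:

```python
def _split_dict(dic):
    keys = sorted(dic.keys())
    return keys, [dic[k] for k in keys]

def _pairs(keys, *, start=1, sep=' AND '):
    return sep.join('{}=${}'.format(k, i) for i, k in enumerate(keys, start))

def update_sql(table, filter, updates):
    where_keys, where_vals = _split_dict(filter)
    up_keys, up_vals = _split_dict(updates)
    changes = _pairs(up_keys, sep=', ')
    where = _pairs(where_keys, start=len(up_keys) + 1)  # continue $N numbering
    return ('UPDATE {} SET {} WHERE {}'.format(table, changes, where),
            up_vals + where_vals)

assert update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'}) == \
    ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a'])
```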
def delete_sql(table, filter):
|
keys, values = _split_dict(filter)<EOL>where = _pairs(keys)<EOL>sql = '<STR_LIT>'.format(table, where)<EOL>return sql, values<EOL>
|
>>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'})
('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10])
|
f11299:m7
|
def _pairs(keys, *, start=<NUM_LIT:1>, sep='<STR_LIT>'):
|
return sep.join('<STR_LIT>'.format(k, i) for i, k in enumerate(keys, start))<EOL>
|
>>> _pairs(['foo', 'bar', 'baz'], sep=', ')
'foo=$1, bar=$2, baz=$3'
>>> _pairs(['foo', 'bar', 'baz'], start=2)
'foo=$2 AND bar=$3 AND baz=$4'
|
f11299:m8
|
def _placeholders(variables):
|
return ['<STR_LIT>'.format(i) for i, _ in enumerate(variables, <NUM_LIT:1>)]<EOL>
|
Return a numbered placeholder for each variable
>>> _placeholders(['foo', 'bar', 1])
['$1', '$2', '$3']
|
f11299:m9
|
def _split_dict(dic):
|
keys = sorted(dic.keys())<EOL>return keys, [dic[k] for k in keys]<EOL>
|
Split a dict into sorted keys and their corresponding values
>>> _split_dict({'b': 2, 'a': 1})
(['a', 'b'], [1, 2])
|
f11299:m10
|
def main():
|
parser = argparse.ArgumentParser(<EOL>description=__doc__,<EOL>formatter_class=argparse.RawDescriptionHelpFormatter)<EOL>parser.add_argument('<STR_LIT:source>', help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', help="<STR_LIT>")<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', help="<STR_LIT>",<EOL>type=int, default=<NUM_LIT>)<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', help="<STR_LIT>",<EOL>type=int, default=<NUM_LIT>)<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', help="<STR_LIT>",<EOL>action='<STR_LIT:store_true>', default=False)<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', default=False, action='<STR_LIT:store_true>',<EOL>help="<STR_LIT>")<EOL>args = parser.parse_args()<EOL>config_logging(args.debug)<EOL>source = Path(args.source)<EOL>destination = Path(args.destination)<EOL>nb_test = args.nb_test_files<EOL>nb_learn = args.nb_learn_files<EOL>remove = args.remove<EOL>repos = _find_repos(source)<EOL>split_repos = _split_repos(repos, nb_test, nb_learn)<EOL>split_files = _find_files(*split_repos, nb_test, nb_learn, remove)<EOL>_unzip_all(*split_files, destination)<EOL>LOGGER.info("<STR_LIT>", destination)<EOL>LOGGER.debug("<STR_LIT>")<EOL>
|
File extractor command line
|
f11304:m0
|
def main():
|
parser = argparse.ArgumentParser(description=__doc__)<EOL>parser.add_argument('<STR_LIT>', help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', help="<STR_LIT>")<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', type=int, default=<NUM_LIT>,<EOL>help="<STR_LIT>")<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', default=False, action='<STR_LIT:store_true>',<EOL>help="<STR_LIT>")<EOL>args = parser.parse_args()<EOL>config_logging(args.debug)<EOL>learn_path = Path(args.learn)<EOL>keywords_path = Path(args.keywords)<EOL>nb_keywords = args.nbkeywords<EOL>languages = config_dict('<STR_LIT>')<EOL>exts = {ext: lang for lang, exts in languages.items() for ext in exts}<EOL>term_count = Counter()<EOL>document_count = Counter()<EOL>pos = <NUM_LIT:0><EOL>LOGGER.info("<STR_LIT>", learn_path)<EOL>for pos, path in enumerate(Path(learn_path).glob('<STR_LIT>'), <NUM_LIT:1>):<EOL><INDENT>if pos % STEP == <NUM_LIT:0>:<EOL><INDENT>LOGGER.debug("<STR_LIT>", pos)<EOL>gc.collect() <EOL><DEDENT>if not path.is_file() or not exts.get(path.suffix.lstrip('<STR_LIT:.>')):<EOL><INDENT>continue<EOL><DEDENT>counter = _extract(path)<EOL>term_count.update(counter)<EOL>document_count.update(counter.keys())<EOL><DEDENT>nb_terms = sum(term_count.values())<EOL>nb_documents = pos - <NUM_LIT:1><EOL>if not nb_documents:<EOL><INDENT>LOGGER.error("<STR_LIT>", learn_path)<EOL>raise RuntimeError('<STR_LIT>'.format(learn_path))<EOL><DEDENT>LOGGER.info("<STR_LIT>", len(term_count))<EOL>terms = _most_frequent(<EOL>(term_count, nb_terms), (document_count, nb_documents), nb_keywords)<EOL>keywords = {<EOL>token: int(hashlib.sha1(token.encode()).hexdigest(), <NUM_LIT:16>)<EOL>for token in terms<EOL>}<EOL>with keywords_path.open('<STR_LIT:w>') as keywords_file:<EOL><INDENT>json.dump(keywords, keywords_file, indent=<NUM_LIT:2>, sort_keys=True)<EOL><DEDENT>LOGGER.info("<STR_LIT>", len(keywords), keywords_path)<EOL>LOGGER.debug("<STR_LIT>")<EOL>
|
Keyword generator command line
|
f11305:m0
|
def main():
|
parser = argparse.ArgumentParser(description=__doc__)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', help="<STR_LIT>")<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', help="<STR_LIT>",<EOL>type=int, default=<NUM_LIT:1000>)<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', default=False, action='<STR_LIT:store_true>',<EOL>help="<STR_LIT>")<EOL>args = parser.parse_args()<EOL>config_logging(args.debug)<EOL>destination = Path(args.destination)<EOL>nb_repos = args.nbrepo<EOL>token = args.githubtoken<EOL>languages = config_dict('<STR_LIT>')<EOL>destination.mkdir(exist_ok=True)<EOL>for pos, language in enumerate(sorted(languages), <NUM_LIT:1>):<EOL><INDENT>LOGGER.info("<STR_LIT>", <NUM_LIT:100> * pos / len(languages), language)<EOL>LOGGER.info("<STR_LIT>", nb_repos, language)<EOL>repos = _retrieve_repo_details(language, nb_repos, token)<EOL>LOGGER.info("<STR_LIT>", len(repos))<EOL>_download_repos(language, repos, destination)<EOL>LOGGER.info("<STR_LIT>", language)<EOL><DEDENT>LOGGER.debug("<STR_LIT>")<EOL>
|
GitHub repositories downloader command line
|
f11306:m0
|
def retry(default=None):
|
def decorator(func):<EOL><INDENT>"""<STR_LIT>"""<EOL>@functools.wraps(func)<EOL>def _wrapper(*args, **kw):<EOL><INDENT>for pos in range(<NUM_LIT:1>, MAX_RETRIES):<EOL><INDENT>try:<EOL><INDENT>return func(*args, **kw)<EOL><DEDENT>except (RuntimeError, requests.ConnectionError) as error:<EOL><INDENT>LOGGER.warning("<STR_LIT>", type(error), error)<EOL><DEDENT>for _ in range(pos):<EOL><INDENT>_rest()<EOL><DEDENT><DEDENT>LOGGER.warning("<STR_LIT>")<EOL>return default<EOL><DEDENT>return _wrapper<EOL><DEDENT>return decorator<EOL>
|
Retry functions after failures
|
f11306:m4
|
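f11306:m4 is a retry decorator with a linearly growing backoff: `pos` rest periods after the `pos`-th failure. A runnable sketch; `_rest()`, the constants, and the log strings are masked, so `time.sleep`, `MAX_RETRIES`, and the messages are assumptions, and `requests.ConnectionError` is dropped to keep it dependency-free:

```python
import functools
import time

MAX_RETRIES = 3  # assumed; the real constant is masked

def retry(default=None):
    def decorator(func):
        @functools.wraps(func)
        def _wrapper(*args, **kw):
            for pos in range(1, MAX_RETRIES):
                try:
                    return func(*args, **kw)
                except RuntimeError as error:
                    print("attempt {} failed: {}".format(pos, error))
                for _ in range(pos):  # back off longer after each failure
                    time.sleep(0.01)
            return default
        return _wrapper
    return decorator

@retry(default=-1)
def flaky():
    raise RuntimeError("boom")

print(flaky())  # -1 after exhausting the retries
```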
def main():
|
parser = argparse.ArgumentParser(description=__doc__)<EOL>parser.add_argument(<EOL>'<STR_LIT>', type=argparse.FileType('<STR_LIT:r>'),<EOL>help="<STR_LIT>")<EOL>parser.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>', default=False, action='<STR_LIT:store_true>',<EOL>help="<STR_LIT>")<EOL>args = parser.parse_args()<EOL>config_logging(args.debug)<EOL>report = json.load(args.reportfile)<EOL>graph_data = _build_graph(report)<EOL>index_path = _prepare_resources(graph_data)<EOL>webbrowser.open(str(index_path))<EOL>
|
Report graph creator command line
|
f11307:m0
|
def config_logging(debug: bool = False) -> None:
|
if debug:<EOL><INDENT>level = '<STR_LIT>'<EOL>tf_level = tf.logging.INFO<EOL><DEDENT>else:<EOL><INDENT>level = '<STR_LIT>'<EOL>tf_level = tf.logging.ERROR<EOL><DEDENT>logging_config = config_dict('<STR_LIT>')<EOL>for logger in logging_config['<STR_LIT>'].values():<EOL><INDENT>logger['<STR_LIT>'] = level<EOL><DEDENT>logging.config.dictConfig(logging_config)<EOL>tf.logging.set_verbosity(tf_level)<EOL>
|
Set up application and `tensorflow` logging.
:param debug: show or hide debug messages
|
f11313:m0
|
def config_dict(name: str) -> Dict[str, Any]:
|
try:<EOL><INDENT>content = resource_string(PACKAGE, DATADIR.format(name)).decode()<EOL><DEDENT>except DistributionNotFound as error:<EOL><INDENT>LOGGER.warning("<STR_LIT>", name, error)<EOL>content = DATA_FALLBACK.joinpath(name).read_text()<EOL><DEDENT>return cast(Dict[str, Any], json.loads(content))<EOL>
|
Load a JSON configuration dict from the Guesslang config directory.
:param name: the JSON file name.
:return: configuration
|
f11313:m1
|
def model_info(model_dir: Optional[str] = None) -> Tuple[str, bool]:
|
if model_dir is None:<EOL><INDENT>try:<EOL><INDENT>model_dir = resource_filename(PACKAGE, DATADIR.format('<STR_LIT>'))<EOL><DEDENT>except DistributionNotFound as error:<EOL><INDENT>LOGGER.warning("<STR_LIT>", error)<EOL>model_dir = str(DATA_FALLBACK.joinpath('<STR_LIT>').absolute())<EOL><DEDENT>is_default_model = True<EOL><DEDENT>else:<EOL><INDENT>is_default_model = False<EOL><DEDENT>model_path = Path(model_dir)<EOL>model_path.mkdir(exist_ok=True)<EOL>LOGGER.debug("<STR_LIT>", model_path, is_default_model)<EOL>return (model_dir, is_default_model)<EOL>
|
Retrieve the Guesslang model directory name and tell whether it is the default model.
:param model_dir: model location; if `None`, the default model is selected
:return: selected model directory with an indication
that the model is the default or not
|
f11313:m2
|
def format(self, record: logging.LogRecord) -> str:
|
if platform.system() != '<STR_LIT>': <EOL><INDENT>return super().format(record)<EOL><DEDENT>record.msg = (<EOL>self.STYLE[record.levelname] + record.msg + self.STYLE['<STR_LIT>'])<EOL>record.levelname = (<EOL>self.STYLE['<STR_LIT>'] + record.levelname + self.STYLE['<STR_LIT>'])<EOL>return super().format(record)<EOL>
|
Format log records to produce colored messages.
:param record: log record
:return: log message
|
f11313:c0:m0
|
def language_name(self, text: str) -> str:
|
values = extract(text)<EOL>input_fn = _to_func(([values], []))<EOL>pos: int = next(self._classifier.predict_classes(input_fn=input_fn))<EOL>LOGGER.debug("<STR_LIT>", pos)<EOL>return sorted(self.languages)[pos]<EOL>
|
Predict the programming language name of the given source code.
:param text: source code.
:return: language name
|
f11314:c0:m1
|
def scores(self, text: str) -> Dict[str, float]:
|
values = extract(text)<EOL>input_fn = _to_func(([values], []))<EOL>prediction = self._classifier.predict_proba(input_fn=input_fn)<EOL>probabilities = next(prediction).tolist()<EOL>sorted_languages = sorted(self.languages)<EOL>return dict(zip(sorted_languages, probabilities))<EOL>
|
A score for each language corresponding to the probability that
the text is written in the given language.
The score is a `float` value between 0.0 and 1.0
:param text: source code.
:return: language to score dictionary
|
f11314:c0:m2
|
def probable_languages(<EOL>self,<EOL>text: str,<EOL>max_languages: int = <NUM_LIT:3>) -> Tuple[str, ...]:
|
scores = self.scores(text)<EOL>sorted_scores = sorted(scores.items(), key=itemgetter(<NUM_LIT:1>), reverse=True)<EOL>languages, probabilities = list(zip(*sorted_scores))<EOL>rescaled_probabilities = [log(proba) for proba in probabilities]<EOL>distances = [<EOL>rescaled_probabilities[pos] - rescaled_probabilities[pos+<NUM_LIT:1>]<EOL>for pos in range(len(rescaled_probabilities)-<NUM_LIT:1>)]<EOL>max_distance_pos = max(enumerate(distances, <NUM_LIT:1>), key=itemgetter(<NUM_LIT:1>))[<NUM_LIT:0>]<EOL>limit = min(max_distance_pos, max_languages)<EOL>return languages[:limit]<EOL>
|
List the most probable programming languages, ordered from the most probable to the least probable.
:param text: source code.
:param max_languages: maximum number of listed languages.
:return: languages list
|
f11314:c0:m3
|
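f11314:c0:m3 trims the ranked probability list at the largest log-scale gap, capped at `max_languages`. A standalone sketch of just that cutoff rule:

```python
from math import log
from operator import itemgetter

def probable_languages(scores, max_languages=3):
    ranked = sorted(scores.items(), key=itemgetter(1), reverse=True)
    languages, probabilities = zip(*ranked)
    rescaled = [log(p) for p in probabilities]
    # gap between consecutive log-probabilities; cut before the biggest one
    gaps = [rescaled[i] - rescaled[i + 1] for i in range(len(rescaled) - 1)]
    cut = max(enumerate(gaps, 1), key=itemgetter(1))[0]
    return languages[:min(cut, max_languages)]

print(probable_languages({'Python': 0.6, 'Ruby': 0.3, 'C': 1e-4}))
# ('Python', 'Ruby') -- the huge drop down to C ends the list early
```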
def learn(self, input_dir: str) -> float:
|
if self.is_default:<EOL><INDENT>LOGGER.error("<STR_LIT>")<EOL>raise GuesslangError('<STR_LIT>')<EOL><DEDENT>languages = self.languages<EOL>LOGGER.info("<STR_LIT>")<EOL>extensions = [ext for exts in languages.values() for ext in exts]<EOL>files = search_files(input_dir, extensions)<EOL>nb_files = len(files)<EOL>chunk_size = min(int(CHUNK_PROPORTION * nb_files), CHUNK_SIZE)<EOL>LOGGER.debug("<STR_LIT>", chunk_size)<EOL>LOGGER.debug("<STR_LIT>", nb_files - chunk_size)<EOL>batches = _pop_many(files, chunk_size)<EOL>LOGGER.debug("<STR_LIT>")<EOL>evaluation_data = extract_from_files(next(batches), languages)<EOL>LOGGER.debug("<STR_LIT>", len(evaluation_data[<NUM_LIT:0>]))<EOL>accuracy = <NUM_LIT:0><EOL>total = ceil(nb_files / chunk_size) - <NUM_LIT:1><EOL>LOGGER.info("<STR_LIT>")<EOL>for pos, training_files in enumerate(batches, <NUM_LIT:1>):<EOL><INDENT>LOGGER.info("<STR_LIT>", <NUM_LIT:100> * pos / total)<EOL>LOGGER.debug("<STR_LIT>")<EOL>training_data = extract_from_files(training_files, languages)<EOL>LOGGER.debug("<STR_LIT>", len(training_data[<NUM_LIT:0>]))<EOL>steps = int(FITTING_FACTOR * len(training_data[<NUM_LIT:0>]) / <NUM_LIT:100>)<EOL>LOGGER.debug("<STR_LIT>", steps)<EOL>self._classifier.fit(input_fn=_to_func(training_data), steps=steps)<EOL>LOGGER.debug("<STR_LIT>")<EOL>accuracy = self._classifier.evaluate(<EOL>input_fn=_to_func(evaluation_data), steps=<NUM_LIT:1>)['<STR_LIT>']<EOL>_comment(accuracy)<EOL><DEDENT>return accuracy<EOL>
|
Learn languages features from source files.
:raise GuesslangError: when the default model is used for learning
:param input_dir: source code files directory.
:return: learning accuracy
|
f11314:c0:m4
|
def main() -> None:
|
try:<EOL><INDENT>_real_main()<EOL><DEDENT>except GuesslangError as error:<EOL><INDENT>LOGGER.critical("<STR_LIT>", error)<EOL>sys.exit(-<NUM_LIT:1>)<EOL><DEDENT>except KeyboardInterrupt:<EOL><INDENT>LOGGER.critical("<STR_LIT>")<EOL>sys.exit(-<NUM_LIT:2>)<EOL><DEDENT>
|
Run command line
|
f11315:m0
|
def extract(text: str) -> List[float]:
|
return _normalize(_vectorize(split(text)))<EOL>
|
Transform the text into a vector of float values.
The vector is a representation of the text.
:param text: the text to represent
:return: representation
|
f11316:m0
|
def split(text: str) -> List[str]:
|
return [word for word in SEPARATOR.split(text) if word.strip('<STR_LIT>')]<EOL>
|
Split a text into a list of tokens.
:param text: the text to split
:return: tokens
|
f11316:m1
|
def search_files(source: str, extensions: List[str]) -> List[Path]:
|
files = [<EOL>path for path in Path(source).glob('<STR_LIT>')<EOL>if path.is_file() and path.suffix.lstrip('<STR_LIT:.>') in extensions]<EOL>nb_files = len(files)<EOL>LOGGER.debug("<STR_LIT>", nb_files)<EOL>if nb_files < NB_FILES_MIN:<EOL><INDENT>LOGGER.error("<STR_LIT>")<EOL>raise GuesslangError(<EOL>'<STR_LIT>'.format(<EOL>nb_files, source, NB_FILES_MIN))<EOL><DEDENT>random.shuffle(files)<EOL>return files<EOL>
|
Retrieve files located in the source directory and its subdirectories whose extension matches one of the listed extensions.
:raise GuesslangError: when there is not enough files in the directory
:param source: directory name
:param extensions: list of file extensions
:return: filenames
|
f11318:m0
|
def extract_from_files(<EOL>files: List[Path],<EOL>languages: Dict[str, List[str]]) -> DataSet:
|
enumerator = enumerate(sorted(languages.items()))<EOL>rank_map = {ext: rank for rank, (_, exts) in enumerator for ext in exts}<EOL>with multiprocessing.Pool(initializer=_process_init) as pool:<EOL><INDENT>file_iterator = ((path, rank_map) for path in files)<EOL>arrays = _to_arrays(pool.starmap(_extract_features, file_iterator))<EOL><DEDENT>LOGGER.debug("<STR_LIT>", len(arrays[<NUM_LIT:0>]))<EOL>return arrays<EOL>
|
Extract arrays of features from the given files.
:param files: list of paths
:param languages: language name =>
associated file extension list
:return: features
|
f11318:m1
|
def safe_read_file(file_path: Path) -> str:
|
for encoding in FILE_ENCODINGS:<EOL><INDENT>try:<EOL><INDENT>return file_path.read_text(encoding=encoding)<EOL><DEDENT>except UnicodeError:<EOL><INDENT>pass <EOL><DEDENT><DEDENT>raise GuesslangError('<STR_LIT>'.format(file_path))<EOL>
|
Read a text file. Several text encodings are tried until
the file content is correctly decoded.
:raise GuesslangError: when the file encoding is not supported
:param file_path: path to the input file
:return: text file content
|
f11318:m5
|
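f11318:m5 tries encodings in order until one decodes cleanly; the encoding list itself is masked, so `FILE_ENCODINGS` below is an assumption:

```python
from pathlib import Path

class GuesslangError(Exception):
    pass

FILE_ENCODINGS = ('utf-8', 'latin-1')  # assumed ordering

def safe_read_file(file_path: Path) -> str:
    for encoding in FILE_ENCODINGS:
        try:
            return file_path.read_text(encoding=encoding)
        except UnicodeError:
            pass  # try the next encoding
    raise GuesslangError('unsupported encoding: {}'.format(file_path))
```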
def setup_logging(<EOL>default_level=logging.INFO,<EOL>default_path="<STR_LIT>".format(<EOL>os.getenv(<EOL>"<STR_LIT>",<EOL>os.path.dirname(os.path.realpath(__file__)))),<EOL>env_key="<STR_LIT>",<EOL>config_name=None):
|
path = default_path<EOL>file_name = default_path.split("<STR_LIT:/>")[-<NUM_LIT:1>]<EOL>if config_name:<EOL><INDENT>file_name = config_name<EOL><DEDENT>path = "<STR_LIT>".format(<EOL>"<STR_LIT:/>".join(default_path.split("<STR_LIT:/>")[:-<NUM_LIT:1>]),<EOL>file_name)<EOL>value = os.getenv(env_key, None)<EOL>if value:<EOL><INDENT>path = value<EOL><DEDENT>if os.path.exists(path):<EOL><INDENT>with open(path, "<STR_LIT>") as f:<EOL><INDENT>config = json.load(f)<EOL><DEDENT>logging.config.dictConfig(config)<EOL>return<EOL><DEDENT>else:<EOL><INDENT>cwd_path = os.getcwd() + "<STR_LIT>".format(<EOL>file_name)<EOL>if os.path.exists(cwd_path):<EOL><INDENT>with open(cwd_path, "<STR_LIT>") as f:<EOL><INDENT>config = json.load(f)<EOL><DEDENT>logging.config.dictConfig(config)<EOL>return<EOL><DEDENT>rels_path = os.getcwd() + "<STR_LIT>".format(<EOL>file_name)<EOL>if os.path.exists(rels_path):<EOL><INDENT>with open(rels_path, "<STR_LIT>") as f:<EOL><INDENT>config = json.load(f)<EOL><DEDENT>logging.config.dictConfig(config)<EOL>return<EOL><DEDENT>else:<EOL><INDENT>logging.basicConfig(level=default_level)<EOL>return<EOL><DEDENT><DEDENT>
|
setup_logging
Set up logging configuration
:param default_level: level to log
:param default_path: path to config (optional)
:param env_key: path to config in this env var
:param config_name: filename for config
|
f11321:m0
|
def build_logger(<EOL>name=os.getenv(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>config="<STR_LIT>",<EOL>log_level=logging.INFO,<EOL>log_config_path="<STR_LIT>".format(<EOL>os.getenv(<EOL>"<STR_LIT>",<EOL>os.path.dirname(os.path.realpath(__file__))))):
|
use_config = ("<STR_LIT>").format(<EOL>"<STR_LIT:{}>".format(<EOL>config))<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = log_config_path<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = ("<STR_LIT>").format(<EOL>"<STR_LIT>")<EOL><DEDENT><DEDENT>setup_logging(<EOL>default_level=log_level,<EOL>default_path=use_config)<EOL>return logging.getLogger(name)<EOL>
|
build_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
|
f11321:m1
|
def build_colorized_logger(<EOL>name=os.getenv(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>config="<STR_LIT>",<EOL>log_level=logging.INFO,<EOL>log_config_path="<STR_LIT>".format(<EOL>os.getenv(<EOL>"<STR_LIT>",<EOL>os.path.dirname(os.path.realpath(__file__))))):
|
override_config = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>debug_log_config = bool(os.getenv(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:0>") == "<STR_LIT:1>")<EOL>if override_config:<EOL><INDENT>if debug_log_config:<EOL><INDENT>print((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>override_config)))<EOL><DEDENT>if os.path.exists(override_config):<EOL><INDENT>setup_logging(<EOL>default_level=log_level,<EOL>default_path=override_config)<EOL>return logging.getLogger(name)<EOL><DEDENT>if debug_log_config:<EOL><INDENT>print((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>override_config)))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if debug_log_config:<EOL><INDENT>print((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>override_config)))<EOL><DEDENT><DEDENT>use_config = ("<STR_LIT:{}>").format(<EOL>config)<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = ("<STR_LIT>").format(<EOL>config)<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = log_config_path<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = ("<STR_LIT>").format(<EOL>config)<EOL>if not os.path.exists(use_config):<EOL><INDENT>use_config = ("<STR_LIT>").format(<EOL>"<STR_LIT>")<EOL><DEDENT><DEDENT><DEDENT><DEDENT>setup_logging(<EOL>default_level=log_level,<EOL>default_path=use_config)<EOL>return logging.getLogger(name)<EOL>
|
build_colorized_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
|
f11321:m2
|
def build_ai_client_from_env(<EOL>verbose=ANTINEX_CLIENT_VERBOSE,<EOL>debug=ANTINEX_CLIENT_DEBUG,<EOL>ca_dir=None,<EOL>cert_file=None,<EOL>key_file=None):
|
if not ANTINEX_PUBLISH_ENABLED:<EOL><INDENT>log.info((<EOL>"<STR_LIT>").format(<EOL>ANTINEX_PUBLISH_ENABLED))<EOL>return None<EOL><DEDENT>use_ca_dir = ca_dir<EOL>use_cert_file = cert_file<EOL>use_key_file = key_file<EOL>if ANTINEX_CA_FILE or ANTINEX_KEY_FILE or ANTINEX_CERT_FILE:<EOL><INDENT>use_ca_dir = ANTINEX_CA_FILE<EOL>use_cert_file = ANTINEX_CERT_FILE<EOL>use_key_file = ANTINEX_KEY_FILE<EOL>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>ANTINEX_USER,<EOL>ANTINEX_URL,<EOL>ANTINEX_CA_FILE,<EOL>ANTINEX_CERT_FILE,<EOL>ANTINEX_KEY_FILE))<EOL><DEDENT>else:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>ANTINEX_USER,<EOL>ANTINEX_URL,<EOL>use_ca_dir,<EOL>use_cert_file,<EOL>use_key_file))<EOL><DEDENT>return AIClient(<EOL>user=ANTINEX_USER,<EOL>email=ANTINEX_EMAIL,<EOL>password=ANTINEX_PASSWORD,<EOL>url=ANTINEX_URL,<EOL>ca_dir=use_ca_dir,<EOL>cert_file=use_cert_file,<EOL>key_file=use_key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>
|
build_ai_client_from_env
Use environment variables to build a client
:param verbose: verbose logging
:param debug: debug internal client calls
:param ca_dir: optional path to CA bundle dir
:param cert_file: optional path to x509 ssl cert file
:param key_file: optional path to x509 ssl key file
|
f11323:m0
|
def ev(k, v):
|
return os.getenv(k, v).strip().lstrip()<EOL>
|
ev
:param k: environment variable key
:param v: environment variable value
|
f11324:m0
|
def rnow(f="<STR_LIT>"):
|
return datetime.datetime.now().strftime(f)<EOL>
|
rnow
:param f: format for the string
|
f11324:m1
|
def convert_to_date(<EOL>value=None,<EOL>format="<STR_LIT>"):
|
if value:<EOL><INDENT>return value.strftime(format)<EOL><DEDENT>return "<STR_LIT>"<EOL>
|
convert_to_date
:param value: datetime object
:param format: string format
|
f11324:m2
|
def ppj(json_data):
|
return str(json.dumps(<EOL>json_data,<EOL>sort_keys=True,<EOL>indent=<NUM_LIT:4>,<EOL>separators=('<STR_LIT:U+002C>', '<STR_LIT>')))<EOL>
|
ppj
:param json_data: dictionary to print
|
f11324:m3
|
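The f11324 helpers are short enough to reconstruct wholesale; the masked strftime format and JSON separators are filled with common defaults (a guess). Note the original `ev` chains `.strip().lstrip()`, where the `lstrip` is redundant:

```python
import datetime
import json
import os

def ev(k, v):
    return os.getenv(k, v).strip()  # strip() already covers lstrip()

def rnow(f="%Y-%m-%d %H:%M:%S"):  # assumed format string
    return datetime.datetime.now().strftime(f)

def ppj(json_data):
    return json.dumps(json_data, sort_keys=True, indent=4,
                      separators=(',', ': '))

print(ppj({"when": rnow(), "user": ev("USER", "anonymous")}))
```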
def __init__(<EOL>self,<EOL>user=ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>password=ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>url=ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>email=ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"),<EOL>verbose=True,<EOL>ca_dir=None,<EOL>cert_file=None,<EOL>key_file=None,<EOL>debug=False):
|
self.user = user<EOL>self.email = email<EOL>self.password = password<EOL>self.url = url<EOL>self.verbose = verbose<EOL>self.ca_dir = ca_dir<EOL>self.cert_file = cert_file<EOL>self.key_file = key_file<EOL>self.cert = None<EOL>self.use_verify = False<EOL>if self.ca_dir:<EOL><INDENT>self.use_verify = self.ca_dir<EOL><DEDENT>elif self.cert_file:<EOL><INDENT>self.use_verify = self.cert_file<EOL><DEDENT>if self.cert_file and self.key_file:<EOL><INDENT>self.cert = (<EOL>self.cert_file,<EOL>self.key_file)<EOL><DEDENT>self.debug = debug<EOL>if self.debug:<EOL><INDENT>self.verbose = True<EOL><DEDENT>self.api_urls = {<EOL>"<STR_LIT>": "<STR_LIT>".format(self.url),<EOL>"<STR_LIT>": "<STR_LIT>".format(self.url),<EOL>"<STR_LIT>": "<STR_LIT>".format(self.url),<EOL>"<STR_LIT>": "<STR_LIT>".format(self.url),<EOL>"<STR_LIT>": "<STR_LIT>".format(self.url)<EOL>}<EOL>self.token = "<STR_LIT>"<EOL>self.login_status = LOGIN_NOT_ATTEMPTED<EOL>self.user_id = None<EOL>self.max_retries = <NUM_LIT:10><EOL>self.login_retry_wait_time = <NUM_LIT:0.1> <EOL>self.all_prepares = {}<EOL>self.all_jobs = {}<EOL>self.all_results = {}<EOL>
|
__init__
:param user: username
:param email: email address
:param password: password for the user
:param url: url running the django rest framework
:param verbose: turn off setup_logging
:param ca_dir: optional path to CA bundle dir
:param cert_file: optional path to x509 ssl cert file
:param key_file: optional path to x509 ssl private key
:param debug: turn on debugging - this will print passwords to stdout
|
f11325:c0:m0
|
def login(<EOL>self):
|
auth_url = self.api_urls["<STR_LIT>"]<EOL>if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>auth_url,<EOL>self.ca_dir,<EOL>self.cert))<EOL><DEDENT>use_headers = {<EOL>"<STR_LIT>": "<STR_LIT:application/json>"<EOL>}<EOL>login_data = {<EOL>"<STR_LIT:username>": self.user,<EOL>"<STR_LIT:password>": self.password<EOL>}<EOL>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>login_data,<EOL>use_headers,<EOL>auth_url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.post(<EOL>auth_url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>data=json.dumps(login_data),<EOL>headers=use_headers)<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>user_token = "<STR_LIT>"<EOL>if response.status_code == <NUM_LIT:200>:<EOL><INDENT>user_token = json.loads(response.text)["<STR_LIT>"]<EOL><DEDENT>if user_token != "<STR_LIT>":<EOL><INDENT>self.token = user_token<EOL>self.login_status = LOGIN_SUCCESS<EOL>if self.verbose:<EOL><INDENT>log.debug("<STR_LIT>")<EOL><DEDENT><DEDENT>else:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>auth_url,<EOL>response.text))<EOL>self.login_status = LOGIN_FAILED<EOL><DEDENT>return self.login_status<EOL>
|
login
|
f11325:c0:m1
|
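f11325:c0:m1 posts JSON credentials and keeps the returned token. A trimmed, hedged sketch with `requests`; the endpoint path and the `"token"` response field are masked in the dataset and assumed here:

```python
import json
import requests

def login(auth_url, user, password):
    response = requests.post(
        auth_url,
        data=json.dumps({"username": user, "password": password}),
        headers={"Content-Type": "application/json"})
    if response.status_code == 200:
        return json.loads(response.text).get("token")  # assumed field name
    return None  # caller decides whether to retry

# token = login("https://example.com/api-token-auth/", "user", "pass")
```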
def is_logged_in(<EOL>self):
|
return self.login_status == LOGIN_SUCCESS<EOL>
|
is_logged_in
|
f11325:c0:m2
|
def get_token(<EOL>self):
|
return self.token<EOL>
|
get_token
|
f11325:c0:m3
|
def get_auth_header(<EOL>self):
|
headers = {<EOL>"<STR_LIT>": "<STR_LIT:application/json>",<EOL>"<STR_LIT>": "<STR_LIT>".format(self.get_token())<EOL>}<EOL>return headers<EOL>
|
get_auth_header
|
f11325:c0:m4
|
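f11325:c0:m4 builds the per-request headers; since the client talks to Django REST Framework, the masked scheme is most likely token auth, assumed below:

```python
def get_auth_header(token):
    return {
        "Content-Type": "application/json",
        "Authorization": "Token {}".format(token),  # assumed DRF token scheme
    }
```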
def build_response(<EOL>self,<EOL>status=NOT_SET,<EOL>error="<STR_LIT>",<EOL>data=None):
|
res_node = {<EOL>"<STR_LIT:status>": status,<EOL>"<STR_LIT:error>": error,<EOL>"<STR_LIT:data>": data<EOL>}<EOL>return res_node<EOL>
|
build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
|
f11325:c0:m5
|
def retry_login(<EOL>self):
|
if not self.user or not self.password:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>")<EOL><DEDENT>retry = <NUM_LIT:0><EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.is_logged_in():<EOL><INDENT>return self.build_response(<EOL>status=SUCCESS)<EOL><DEDENT>else:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.debug(("<STR_LIT>")<EOL>.format(<EOL>retry,<EOL>self.max_retries))<EOL><DEDENT>if self.login() == LOGIN_SUCCESS:<EOL><INDENT>return self.build_response(<EOL>status=SUCCESS)<EOL><DEDENT>else:<EOL><INDENT>time.sleep(<EOL>self.login_retry_wait_time)<EOL><DEDENT><DEDENT>retry += <NUM_LIT:1><EOL>if retry > self.max_retries:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>".format(<EOL>self.user,<EOL>self.max_retries))<EOL><DEDENT><DEDENT>return self.build_response(<EOL>status=FAILED,<EOL>error="<STR_LIT>".format(<EOL>self.user,<EOL>retry))<EOL>
|
retry_login
|
f11325:c0:m6
|
def get_prepare_by_id(<EOL>self,<EOL>prepare_id=None):
|
if not prepare_id:<EOL><INDENT>log.error("<STR_LIT>")<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>")<EOL><DEDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>prepare_id))<EOL><DEDENT>url = "<STR_LIT>".format(<EOL>self.api_urls["<STR_LIT>"],<EOL>prepare_id)<EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>prepare_id,<EOL>url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.get(<EOL>url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>headers=self.get_auth_header())<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>if response.status_code == <NUM_LIT>:<EOL><INDENT>login_res = self.retry_login()<EOL>if login_res["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.error(<EOL>"<STR_LIT>")<EOL><DEDENT>return self.build_response(<EOL>status=login_res["<STR_LIT:status>"],<EOL>error=login_res["<STR_LIT:error>"])<EOL><DEDENT><DEDENT>elif response.status_code == <NUM_LIT:200>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.debug("<STR_LIT>")<EOL><DEDENT>prepare_data = json.loads(<EOL>response.text)<EOL>prepare_id = prepare_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>if not prepare_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>self.all_prepares[str(prepare_id)] = prepare_data<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>len(self.all_prepares)))<EOL><DEDENT>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=prepare_data)<EOL><DEDENT>else:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason)<EOL>if self.verbose:<EOL><INDENT>log.error(err_msg)<EOL><DEDENT>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT><DEDENT>
|
get_prepare_by_id
:param prepare_id: MLJob.id in the database
|
f11325:c0:m7
|
def get_job_by_id(<EOL>self,<EOL>job_id=None):
|
if not job_id:<EOL><INDENT>log.error("<STR_LIT>")<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>")<EOL><DEDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>job_id))<EOL><DEDENT>url = "<STR_LIT>".format(<EOL>self.api_urls["<STR_LIT>"],<EOL>job_id)<EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>job_id,<EOL>url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.get(<EOL>url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>headers=self.get_auth_header())<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>if response.status_code == <NUM_LIT>:<EOL><INDENT>login_res = self.retry_login()<EOL>if login_res["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.error(<EOL>"<STR_LIT>")<EOL><DEDENT>return self.build_response(<EOL>status=login_res["<STR_LIT:status>"],<EOL>error=login_res["<STR_LIT:error>"])<EOL><DEDENT><DEDENT>elif response.status_code == <NUM_LIT:200>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.debug("<STR_LIT>")<EOL><DEDENT>job_data = json.loads(<EOL>response.text)<EOL>job_id = job_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>if not job_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>self.all_jobs[str(job_id)] = job_data<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>len(self.all_jobs)))<EOL><DEDENT>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=job_data)<EOL><DEDENT>else:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason)<EOL>if self.verbose:<EOL><INDENT>log.error(err_msg)<EOL><DEDENT>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT><DEDENT>
|
get_job_by_id
:param job_id: MLJob.id in the database
|
f11325:c0:m8
|
def get_result_by_id(<EOL>self,<EOL>result_id=None):
|
if not result_id:<EOL><INDENT>log.error("<STR_LIT>")<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>")<EOL><DEDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>result_id))<EOL><DEDENT>url = "<STR_LIT>".format(<EOL>self.api_urls["<STR_LIT>"],<EOL>result_id)<EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>result_id,<EOL>url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.get(<EOL>url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>headers=self.get_auth_header())<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>if response.status_code == <NUM_LIT>:<EOL><INDENT>login_res = self.retry_login()<EOL>if login_res["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.error(<EOL>"<STR_LIT>")<EOL><DEDENT>return self.build_response(<EOL>status=login_res["<STR_LIT:status>"],<EOL>error=login_res["<STR_LIT:error>"])<EOL><DEDENT><DEDENT>elif response.status_code == <NUM_LIT:200>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.debug("<STR_LIT>")<EOL><DEDENT>result_data = json.loads(<EOL>response.text)<EOL>result_id = result_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>if not result_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>self.all_results[str(result_id)] = result_data<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>len(self.all_results)))<EOL><DEDENT>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=result_data)<EOL><DEDENT>else:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason)<EOL>if self.verbose:<EOL><INDENT>log.error(err_msg)<EOL><DEDENT>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT><DEDENT>
|
get_result_by_id
:param result_id: MLJobResult.id in the database
|
f11325:c0:m9
|
def run_job(<EOL>self,<EOL>body):
|
if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>str(body)[<NUM_LIT:0>:<NUM_LIT:32>]))<EOL><DEDENT>url = "<STR_LIT:{}>".format(<EOL>self.api_urls["<STR_LIT>"])<EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>json.dumps(body),<EOL>url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.post(<EOL>url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>data=json.dumps(body),<EOL>headers=self.get_auth_header())<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>if response.status_code == <NUM_LIT>:<EOL><INDENT>login_res = self.retry_login()<EOL>if login_res["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.error(<EOL>"<STR_LIT>")<EOL><DEDENT>return self.build_response(<EOL>status=login_res["<STR_LIT:status>"],<EOL>error=login_res["<STR_LIT:error>"])<EOL><DEDENT><DEDENT>elif response.status_code == <NUM_LIT>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.debug("<STR_LIT>")<EOL><DEDENT>res_dict = json.loads(<EOL>response.text)<EOL>job_data = res_dict.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>result_data = res_dict.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>if not job_data:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>job_id = job_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>result_id = result_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>if not job_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>if not result_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>self.all_jobs[str(job_id)] = job_data<EOL>self.all_results[str(result_id)] = result_data<EOL>if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>result_id,<EOL>len(self.all_jobs),<EOL>len(self.all_results)))<EOL><DEDENT>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=res_dict)<EOL><DEDENT>else:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason)<EOL>if self.verbose:<EOL><INDENT>log.error(err_msg)<EOL><DEDENT>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT><DEDENT>
|
run_job
:param body: dictionary to launch job
|
f11325:c0:m10
|
def wait_for_job_to_finish(<EOL>self,<EOL>job_id,<EOL>sec_to_sleep=<NUM_LIT>,<EOL>max_retries=<NUM_LIT>):
|
not_done = True<EOL>retry_attempt = <NUM_LIT:1><EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id))<EOL><DEDENT>response = self.get_job_by_id(job_id)<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>response))<EOL><DEDENT>if response["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>response["<STR_LIT:error>"]))<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=response["<STR_LIT:error>"],<EOL>data=response["<STR_LIT:data>"])<EOL><DEDENT>job_data = response.get(<EOL>"<STR_LIT:data>",<EOL>None)<EOL>if not job_data:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data=response["<STR_LIT:data>"])<EOL><DEDENT>job_status = job_data["<STR_LIT:status>"]<EOL>if job_status == "<STR_LIT>"or job_status == "<STR_LIT>"or job_status == "<STR_LIT>":<EOL><INDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_status))<EOL><DEDENT>result_id = job_data["<STR_LIT>"]["<STR_LIT>"]<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id))<EOL><DEDENT>response = self.get_result_by_id(result_id)<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>response))<EOL><DEDENT>if response["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>response["<STR_LIT:error>"]))<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=response["<STR_LIT:error>"],<EOL>data=response["<STR_LIT:data>"])<EOL><DEDENT>result_data = response.get(<EOL>"<STR_LIT:data>",<EOL>None)<EOL>if result_data["<STR_LIT:status>"] == "<STR_LIT>":<EOL><INDENT>full_response = {<EOL>"<STR_LIT>": job_data,<EOL>"<STR_LIT:result>": result_data<EOL>}<EOL>not_done = False<EOL>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=full_response)<EOL><DEDENT>else:<EOL><INDENT>if retry_attempt % <NUM_LIT:100> == <NUM_LIT:0>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>retry_attempt))<EOL><DEDENT><DEDENT>retry_attempt += <NUM_LIT:1><EOL>if retry_attempt > max_retries:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>job_id,<EOL>result_id)<EOL>log.error(err_msg)<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT>else:<EOL><INDENT>time.sleep(sec_to_sleep)<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>retry_attempt += <NUM_LIT:1><EOL>if retry_attempt > max_retries:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>job_id)<EOL>log.error(err_msg)<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT>else:<EOL><INDENT>if self.verbose:<EOL><INDENT>if retry_attempt % <NUM_LIT:100> == <NUM_LIT:0>:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>retry_attempt))<EOL><DEDENT><DEDENT>time.sleep(sec_to_sleep)<EOL><DEDENT><DEDENT><DEDENT>
|
wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries before stopping
|
f11325:c0:m11
|
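Under all the logging, f11325:c0:m11 is a poll-sleep-retry loop. A skeleton with the masked terminal status assumed to be `"finished"`:

```python
import time

def wait_for(fetch, sec_to_sleep=0.5, max_retries=10):
    for attempt in range(1, max_retries + 1):
        job = fetch()
        if job["status"] == "finished":   # assumed terminal status value
            return {"status": "SUCCESS", "data": job}
        time.sleep(sec_to_sleep)          # back off, then poll again
    return {"status": "ERROR",
            "error": "gave up after {} attempts".format(max_retries)}

states = iter(["running", "running", "finished"])
print(wait_for(lambda: {"status": next(states)}, sec_to_sleep=0))
```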
def run_prepare(<EOL>self,<EOL>body):
|
if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>self.user,<EOL>str(body)[<NUM_LIT:0>:<NUM_LIT:32>]))<EOL><DEDENT>url = "<STR_LIT:{}>".format(<EOL>self.api_urls["<STR_LIT>"])<EOL>not_done = True<EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>json.dumps(body),<EOL>url,<EOL>self.use_verify,<EOL>self.cert))<EOL><DEDENT>response = requests.post(<EOL>url,<EOL>verify=self.use_verify,<EOL>cert=self.cert,<EOL>data=json.dumps(body),<EOL>headers=self.get_auth_header())<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason))<EOL><DEDENT>if response.status_code == <NUM_LIT>:<EOL><INDENT>login_res = self.retry_login()<EOL>if login_res["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.error(<EOL>"<STR_LIT>")<EOL><DEDENT>return self.build_response(<EOL>status=login_res["<STR_LIT:status>"],<EOL>error=login_res["<STR_LIT:error>"])<EOL><DEDENT><DEDENT>elif response.status_code == <NUM_LIT>:<EOL><INDENT>if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response.text))<EOL><DEDENT>prepare_data = json.loads(<EOL>response.text)<EOL>if not prepare_data:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>prepare_id = prepare_data.get(<EOL>"<STR_LIT:id>",<EOL>None)<EOL>if not prepare_id:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data="<STR_LIT>".format(<EOL>response.reason,<EOL>response.text))<EOL><DEDENT>self.all_prepares[str(prepare_id)] = prepare_data<EOL>if self.verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>len(self.all_prepares)))<EOL><DEDENT>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=prepare_data)<EOL><DEDENT>else:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>response.status_code,<EOL>response.text,<EOL>response.reason)<EOL>if self.verbose:<EOL><INDENT>log.error(err_msg)<EOL><DEDENT>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT><DEDENT>
|
run_prepare
:param body: dictionary to launch prepare
|
f11325:c0:m12
|
def wait_for_prepare_to_finish(<EOL>self,<EOL>prepare_id,<EOL>sec_to_sleep=<NUM_LIT>,<EOL>max_retries=<NUM_LIT>):
|
not_done = True<EOL>retry_attempt = <NUM_LIT:1><EOL>while not_done:<EOL><INDENT>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id))<EOL><DEDENT>response = self.get_prepare_by_id(prepare_id)<EOL>if self.debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>response))<EOL><DEDENT>if response["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>response["<STR_LIT:error>"]))<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=response["<STR_LIT:error>"],<EOL>data=response["<STR_LIT:data>"])<EOL><DEDENT>prepare_data = response.get(<EOL>"<STR_LIT:data>",<EOL>None)<EOL>if not prepare_data:<EOL><INDENT>return self.build_response(<EOL>status=ERROR,<EOL>error="<STR_LIT>",<EOL>data=response["<STR_LIT:data>"])<EOL><DEDENT>prepare_status = prepare_data["<STR_LIT:status>"]<EOL>if prepare_status == "<STR_LIT>"or prepare_status == "<STR_LIT>":<EOL><INDENT>not_done = False<EOL>return self.build_response(<EOL>status=SUCCESS,<EOL>error="<STR_LIT>",<EOL>data=prepare_data)<EOL><DEDENT>else:<EOL><INDENT>retry_attempt += <NUM_LIT:1><EOL>if retry_attempt > max_retries:<EOL><INDENT>err_msg = ("<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>prepare_id)<EOL>log.error(err_msg)<EOL>return self.build_response(<EOL>status=ERROR,<EOL>error=err_msg)<EOL><DEDENT>else:<EOL><INDENT>if self.verbose:<EOL><INDENT>if retry_attempt % <NUM_LIT:100> == <NUM_LIT:0>:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>retry_attempt))<EOL><DEDENT><DEDENT>time.sleep(sec_to_sleep)<EOL><DEDENT><DEDENT><DEDENT>
|
wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries before stopping
|
f11325:c0:m13
|
def get_prepared_dataset():
|
parser = argparse.ArgumentParser(<EOL>description=(<EOL>"<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT:username>",<EOL>required=False,<EOL>dest="<STR_LIT:user>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:password>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:email>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:url>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>user = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>password = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>email = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>url = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>prepare_id = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:true>")).lower() == "<STR_LIT:true>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:false>")).lower() == "<STR_LIT:true>")<EOL>if args.user:<EOL><INDENT>user = args.user<EOL><DEDENT>if args.password:<EOL><INDENT>password = args.password<EOL><DEDENT>if args.email:<EOL><INDENT>email = args.email<EOL><DEDENT>if args.url:<EOL><INDENT>url = args.url<EOL><DEDENT>if args.prepare_id:<EOL><INDENT>prepare_id = args.prepare_id<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>usage = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>valid = True<EOL>if not user or user == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not password or password == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not prepare_id or prepare_id == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>prepare_id = int(prepare_id)<EOL><DEDENT>except Exception as e:<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT><DEDENT>if not valid:<EOL><INDENT>log.error(usage)<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>user,<EOL>url,<EOL>prepare_id))<EOL><DEDENT>client = 
AIClient(<EOL>user=user,<EOL>email=email,<EOL>password=password,<EOL>url=url,<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id))<EOL><DEDENT>response = client.get_prepare_by_id(<EOL>prepare_id=prepare_id)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == ERROR:<EOL><INDENT>if "<STR_LIT>" in response["<STR_LIT:error>"]:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>user))<EOL><DEDENT>else:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>prepare_data = response["<STR_LIT:data>"]<EOL>if len(prepare_data) == <NUM_LIT:0>:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>user))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>prepare_id = prepare_data.get("<STR_LIT:id>", None)<EOL>prepare_status = prepare_data.get("<STR_LIT:status>", None)<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(prepare_data)))<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>prepare_status))<EOL>
|
get_prepared_dataset
Get an ``MLPrepare`` by database id.
|
f11326:m0
|
def get_ml_job():
|
parser = argparse.ArgumentParser(<EOL>description=("<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT:username>",<EOL>required=False,<EOL>dest="<STR_LIT:user>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:password>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:email>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:url>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>user = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>password = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>email = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>url = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>job_id = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:true>")).lower() == "<STR_LIT:true>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:false>")).lower() == "<STR_LIT:true>")<EOL>if args.user:<EOL><INDENT>user = args.user<EOL><DEDENT>if args.password:<EOL><INDENT>password = args.password<EOL><DEDENT>if args.email:<EOL><INDENT>email = args.email<EOL><DEDENT>if args.url:<EOL><INDENT>url = args.url<EOL><DEDENT>if args.job_id:<EOL><INDENT>job_id = args.job_id<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>usage = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>valid = True<EOL>if not user or user == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not password or password == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not job_id or job_id == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>job_id = int(job_id)<EOL><DEDENT>except Exception as e:<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT><DEDENT>if not valid:<EOL><INDENT>log.error(usage)<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if verbose:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>user,<EOL>url,<EOL>job_id,<EOL>ca_dir,<EOL>cert_file,<EOL>key_file))<EOL><DEDENT>client = 
AIClient(<EOL>user=user,<EOL>email=email,<EOL>password=password,<EOL>url=url,<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id))<EOL><DEDENT>response = client.get_job_by_id(<EOL>job_id=job_id)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == ERROR:<EOL><INDENT>if "<STR_LIT>" in response["<STR_LIT:error>"]:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>user))<EOL><DEDENT>else:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>job_data = response["<STR_LIT:data>"]<EOL>if len(job_data) == <NUM_LIT:0>:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>user))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>job_id = job_data.get("<STR_LIT:id>", None)<EOL>job_status = job_data.get("<STR_LIT:status>", None)<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(job_data)))<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_status))<EOL>
|
get_ml_job
Get an ``MLJob`` by database id.
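The underlying client call, sketched with placeholder values:
.. code:: python

    client = AIClient(
        user='user', email='user@example.com', password='pass',
        url='https://api.example.com')
    response = client.get_job_by_id(job_id=1)
    if response['status'] == SUCCESS:
        job_data = response['data']
        print(job_data.get('id'), job_data.get('status'))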
|
f11327:m0
|
def get_ml_job_results():
|
parser = argparse.ArgumentParser(<EOL>description=(<EOL>"<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT:username>",<EOL>required=False,<EOL>dest="<STR_LIT:user>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:password>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:email>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:url>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>user = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>password = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>email = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>url = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>result_id = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:true>")).lower() == "<STR_LIT:true>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:false>")).lower() == "<STR_LIT:true>")<EOL>if args.user:<EOL><INDENT>user = args.user<EOL><DEDENT>if args.password:<EOL><INDENT>password = args.password<EOL><DEDENT>if args.email:<EOL><INDENT>email = args.email<EOL><DEDENT>if args.url:<EOL><INDENT>url = args.url<EOL><DEDENT>if args.result_id:<EOL><INDENT>result_id = args.result_id<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>usage = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>valid = True<EOL>if not user or user == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not password or password == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not result_id or result_id == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>result_id = int(result_id)<EOL><DEDENT>except Exception as e:<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT><DEDENT>if not valid:<EOL><INDENT>log.error(usage)<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>user,<EOL>url,<EOL>result_id))<EOL><DEDENT>client = 
AIClient(<EOL>user=user,<EOL>email=email,<EOL>password=password,<EOL>url=url,<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id))<EOL><DEDENT>response = client.get_result_by_id(<EOL>result_id=result_id)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == ERROR:<EOL><INDENT>if "<STR_LIT>" in response["<STR_LIT:error>"]:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>user))<EOL><DEDENT>else:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>result_data = response["<STR_LIT:data>"]<EOL>if len(result_data) == <NUM_LIT:0>:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>user))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>result_id = result_data.get("<STR_LIT:id>", None)<EOL>result_status = result_data.get("<STR_LIT:status>", None)<EOL>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(result_data)))<EOL>for k in result_data:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>k))<EOL><DEDENT><DEDENT>if result_data["<STR_LIT:status>"] == "<STR_LIT>":<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_data["<STR_LIT>"]["<STR_LIT>"],<EOL>len(result_data["<STR_LIT>"]["<STR_LIT>"])))<EOL><DEDENT>else:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(result_data)))<EOL><DEDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>result_id,<EOL>result_status))<EOL>
|
get_ml_job_results
Get an ``MLJobResult`` by database id.
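Sketch of fetching a result record directly (placeholder id; client built
as in the examples above):
.. code:: python

    response = client.get_result_by_id(result_id=1)
    if response['status'] == SUCCESS:
        result_data = response['data']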
|
f11328:m0
|
def start_predictions():
|
parser = argparse.ArgumentParser(<EOL>description=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>datafile = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:1>")).lower() == "<STR_LIT:1>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:0>")).lower() == "<STR_LIT:1>")<EOL>use_fake_rows = False<EOL>if args.use_fake_rows:<EOL><INDENT>use_fake_rows = True<EOL><DEDENT>if args.datafile:<EOL><INDENT>datafile = args.datafile<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT>client = build_ai_client_from_env(<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>datafile))<EOL><DEDENT>fake_rows_for_predicting = [<EOL>{<EOL>"<STR_LIT>": <NUM_LIT:1><EOL>},<EOL>{<EOL>"<STR_LIT>": <NUM_LIT:2><EOL>},<EOL>{<EOL>"<STR_LIT>": <NUM_LIT:3><EOL>},<EOL>{<EOL>"<STR_LIT>": <NUM_LIT:4><EOL>}<EOL>]<EOL>res_gen = None<EOL>if use_fake_rows:<EOL><INDENT>res_gen = generate_ai_request(<EOL>predict_rows=fake_rows_for_predicting)<EOL><DEDENT>else:<EOL><INDENT>req_with_org_rows = None<EOL>with open(datafile, "<STR_LIT:r>") as f:<EOL><INDENT>req_with_org_rows = json.loads(f.read())<EOL><DEDENT>res_gen = generate_ai_request(<EOL>predict_rows=req_with_org_rows["<STR_LIT>"])<EOL><DEDENT>if res_gen["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>res_gen["<STR_LIT:error>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>req_body = res_gen["<STR_LIT:data>"]<EOL>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT>job_was_started = False<EOL>response = client.run_job(<EOL>body=req_body)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>job_was_started = True<EOL><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == 
ERROR:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>if not job_was_started:<EOL><INDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>else:<EOL><INDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT><DEDENT>res_data = response["<STR_LIT:data>"]<EOL>job_data = res_data.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>result_data = res_data.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>if not job_data:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if not result_data:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>job_id = job_data.get("<STR_LIT:id>", None)<EOL>job_status = job_data.get("<STR_LIT:status>", None)<EOL>result_id = result_data.get("<STR_LIT:id>", None)<EOL>result_status = result_data.get("<STR_LIT:status>", None)<EOL>log.info(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_status,<EOL>result_id,<EOL>result_status))<EOL>job_results = client.wait_for_job_to_finish(<EOL>job_id=job_id)<EOL>if job_results["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_results["<STR_LIT:error>"],<EOL>job_results["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>final_job = job_results["<STR_LIT:data>"]["<STR_LIT>"]<EOL>final_result = job_results["<STR_LIT:data>"]["<STR_LIT:result>"]<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(final_job)))<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(final_result)))<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id))<EOL>predictions = final_result["<STR_LIT>"].get(<EOL>"<STR_LIT>",<EOL>[])<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>len(predictions)))<EOL>df = pd.DataFrame(predictions)<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>df))<EOL>
|
start_predictions
Create an AntiNex AI Client from environment variables; command-line
arguments may override them. This can train a new deep neural network
if one does not exist, or use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
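A condensed sketch of the prediction flow this script performs; the row
contents are illustrative:
.. code:: python

    client = build_ai_client_from_env()
    res_gen = generate_ai_request(predict_rows=[{'feature': 1}])
    if res_gen['status'] == SUCCESS:
        response = client.run_job(body=res_gen['data'])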
|
f11329:m0
|
def prepare_new_dataset():
|
parser = argparse.ArgumentParser(<EOL>description=(<EOL>"<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT:username>",<EOL>required=False,<EOL>dest="<STR_LIT:user>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:password>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:email>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:url>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>user = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>password = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>email = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>url = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>prepare_file = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:true>")).lower() == "<STR_LIT:true>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:false>")).lower() == "<STR_LIT:true>")<EOL>if args.user:<EOL><INDENT>user = args.user<EOL><DEDENT>if args.password:<EOL><INDENT>password = args.password<EOL><DEDENT>if args.email:<EOL><INDENT>email = args.email<EOL><DEDENT>if args.url:<EOL><INDENT>url = args.url<EOL><DEDENT>if args.prepare_file:<EOL><INDENT>prepare_file = args.prepare_file<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>usage = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>valid = True<EOL>if not user or user == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not password or password == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not prepare_file or prepare_file == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>else:<EOL><INDENT>if not os.path.exists(prepare_file):<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>prepare_file))<EOL>valid = False<EOL><DEDENT><DEDENT>if not valid:<EOL><INDENT>log.error(usage)<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>user,<EOL>url,<EOL>prepare_file))<EOL><DEDENT>client = 
AIClient(<EOL>user=user,<EOL>email=email,<EOL>password=password,<EOL>url=url,<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_file))<EOL><DEDENT>req_body = None<EOL>with open(prepare_file, "<STR_LIT:r>") as f:<EOL><INDENT>req_body = json.loads(f.read())<EOL><DEDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT>prepare_was_started = False<EOL>response = client.run_prepare(<EOL>body=req_body)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>prepare_was_started = True<EOL><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == ERROR:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>if not prepare_was_started:<EOL><INDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>else:<EOL><INDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT><DEDENT>prepare_data = response["<STR_LIT:data>"]<EOL>if not prepare_data:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>prepare_id = prepare_data.get("<STR_LIT:id>", None)<EOL>prepare_status = prepare_data.get("<STR_LIT:status>", None)<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>prepare_status))<EOL>prepare_results = client.wait_for_prepare_to_finish(<EOL>prepare_id=prepare_id)<EOL>if prepare_results["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>prepare_id,<EOL>prepare_results["<STR_LIT:error>"],<EOL>prepare_results["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>final_prepare = prepare_results["<STR_LIT:data>"]<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(final_prepare)))<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>prepare_id))<EOL>
|
prepare_new_dataset
Prepare a new ``MLPrepare`` record and dataset files on disk.
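Sketch of the underlying calls (client built as in the earlier examples;
the prepare file path is hypothetical):
.. code:: python

    import json

    with open('./prepare-request.json', 'r') as f:
        req_body = json.loads(f.read())
    response = client.run_prepare(body=req_body)
    prepare_id = response['data'].get('id')
    final_prepare = client.wait_for_prepare_to_finish(
        prepare_id=prepare_id)['data']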
|
f11330:m0
|
def train_new_deep_neural_network():
|
parser = argparse.ArgumentParser(<EOL>description=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"))<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT:username>",<EOL>required=False,<EOL>dest="<STR_LIT:user>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:password>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:email>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT:url>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT:-c>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help=(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"),<EOL>required=False,<EOL>dest="<STR_LIT>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>required=False,<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store_true>")<EOL>args = parser.parse_args()<EOL>user = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>password = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>email = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>url = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>datafile = ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>")<EOL>ca_dir = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>cert_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>key_file = os.getenv(<EOL>"<STR_LIT>",<EOL>None)<EOL>verbose = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:true>")).lower() == "<STR_LIT:true>")<EOL>debug = bool(str(ev(<EOL>"<STR_LIT>",<EOL>"<STR_LIT:false>")).lower() == "<STR_LIT:true>")<EOL>if args.user:<EOL><INDENT>user = args.user<EOL><DEDENT>if args.password:<EOL><INDENT>password = args.password<EOL><DEDENT>if args.email:<EOL><INDENT>email = args.email<EOL><DEDENT>if args.url:<EOL><INDENT>url = args.url<EOL><DEDENT>if args.datafile:<EOL><INDENT>datafile = args.datafile<EOL><DEDENT>if args.ca_dir:<EOL><INDENT>ca_dir = args.ca_dir<EOL><DEDENT>if args.cert_file:<EOL><INDENT>cert_file = args.cert_file<EOL><DEDENT>if args.key_file:<EOL><INDENT>key_file = args.key_file<EOL><DEDENT>if args.silent:<EOL><INDENT>verbose = False<EOL><DEDENT>if args.debug:<EOL><INDENT>debug = True<EOL><DEDENT>usage = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>valid = True<EOL>if not user or user == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not password or password == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>if not datafile or datafile == "<STR_LIT>":<EOL><INDENT>log.error("<STR_LIT>")<EOL>valid = False<EOL><DEDENT>else:<EOL><INDENT>if not os.path.exists(datafile):<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>datafile))<EOL>valid = False<EOL><DEDENT><DEDENT>if not valid:<EOL><INDENT>log.error(usage)<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if verbose:<EOL><INDENT>log.info((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>").format(<EOL>user,<EOL>url,<EOL>datafile,<EOL>ca_dir,<EOL>cert_file,<EOL>key_file))<EOL><DEDENT>client = 
AIClient(<EOL>user=user,<EOL>email=email,<EOL>password=password,<EOL>url=url,<EOL>ca_dir=ca_dir,<EOL>cert_file=cert_file,<EOL>key_file=key_file,<EOL>verbose=verbose,<EOL>debug=debug)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>datafile))<EOL><DEDENT>req_body = None<EOL>with open(datafile, "<STR_LIT:r>") as f:<EOL><INDENT>req_body = json.loads(f.read())<EOL><DEDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT>job_was_started = False<EOL>response = client.run_job(<EOL>body=req_body)<EOL>if response["<STR_LIT:status>"] == SUCCESS:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>job_was_started = True<EOL><DEDENT>elif response["<STR_LIT:status>"] == FAILED:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == ERROR:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>elif response["<STR_LIT:status>"] == LOGIN_FAILED:<EOL><INDENT>log.error(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:error>"],<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>if not job_was_started:<EOL><INDENT>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if debug:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL><DEDENT>else:<EOL><INDENT>if verbose:<EOL><INDENT>log.info("<STR_LIT>")<EOL><DEDENT><DEDENT>res_data = response["<STR_LIT:data>"]<EOL>job_data = res_data.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>result_data = res_data.get(<EOL>"<STR_LIT>",<EOL>None)<EOL>if not job_data:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>if not result_data:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>response["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>job_id = job_data.get("<STR_LIT:id>", None)<EOL>job_status = job_data.get("<STR_LIT:status>", None)<EOL>result_id = result_data.get("<STR_LIT:id>", None)<EOL>result_status = result_data.get("<STR_LIT:status>", None)<EOL>log.info(("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_status,<EOL>result_id,<EOL>result_status))<EOL>job_results = client.wait_for_job_to_finish(<EOL>job_id=job_id)<EOL>if job_results["<STR_LIT:status>"] != SUCCESS:<EOL><INDENT>log.error(("<STR_LIT>")<EOL>.format(<EOL>job_id,<EOL>job_results["<STR_LIT:error>"],<EOL>job_results["<STR_LIT:data>"]))<EOL>sys.exit(<NUM_LIT:1>)<EOL><DEDENT>final_job = job_results["<STR_LIT:data>"]["<STR_LIT>"]<EOL>final_result = job_results["<STR_LIT:data>"]["<STR_LIT:result>"]<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(final_job)))<EOL><DEDENT>else:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>str(final_job)[<NUM_LIT:0>:<NUM_LIT:10>]))<EOL><DEDENT>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>ppj(final_result)))<EOL><DEDENT>else:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>str(final_result)[<NUM_LIT:0>:<NUM_LIT:10>]))<EOL><DEDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>job_id))<EOL>predictions = final_result["<STR_LIT>"].get(<EOL>"<STR_LIT>",<EOL>[])<EOL>log.info(("<STR_LIT>")<EOL>.format(<EOL>len(predictions)))<EOL>df = pd.DataFrame(predictions)<EOL>if verbose:<EOL><INDENT>log.info(("<STR_LIT>")<EOL>.format(<EOL>df))<EOL><DEDENT>
|
train_new_deep_neural_network
Train a new deep neural network and store the results as new
``MLJob`` and ``MLJobResult`` database records.
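Sketch of the training flow (client built as in the earlier examples; the
datafile path and the job id are placeholders, the latter coming from the
run_job response in practice):
.. code:: python

    import json

    with open('./train-request.json', 'r') as f:
        req_body = json.loads(f.read())
    response = client.run_job(body=req_body)
    job_results = client.wait_for_job_to_finish(job_id=1)  # placeholder id
    final_result = job_results['data']['result']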
|
f11331:m0
|
def init_publisher(app):
|
@app.context_processor<EOL>def inject_links():<EOL><INDENT>return {<EOL>'<STR_LIT>': stack.top.websub_self_url,<EOL>'<STR_LIT>': stack.top.websub_hub_url,<EOL>'<STR_LIT>': stack.top.websub_self_link,<EOL>'<STR_LIT>': stack.top.websub_hub_link,<EOL>}<EOL><DEDENT>
|
Calling this with your Flask app as the argument is required for the
publisher decorator to work.
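For example:
.. code:: python

    from flask import Flask

    app = Flask(__name__)
    init_publisher(app)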
|
f11342:m0
|
def publisher(self_url=None, hub_url=None):
|
def decorator(topic_view):<EOL><INDENT>@functools.wraps(topic_view)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>nonlocal hub_url, self_url<EOL>if not self_url:<EOL><INDENT>self_url = request.url<EOL><DEDENT>if not hub_url:<EOL><INDENT>try:<EOL><INDENT>hub_url = url_for('<STR_LIT>', _external=True)<EOL><DEDENT>except BuildError:<EOL><INDENT>hub_url = current_app.config['<STR_LIT>']<EOL><DEDENT><DEDENT>stack.top.websub_self_url = self_url<EOL>stack.top.websub_hub_url = hub_url<EOL>stack.top.websub_self_link = Markup(SELF_LINK % self_url)<EOL>stack.top.websub_hub_link = Markup(HUB_LINK % hub_url)<EOL>resp = make_response(topic_view(*args, **kwargs))<EOL>resp.headers.add('<STR_LIT>', HEADER_VALUE % (self_url, hub_url))<EOL>return resp<EOL><DEDENT>return wrapper<EOL><DEDENT>return decorator<EOL>
|
This decorator makes it easier to implement a WebSub publisher. You use
it on an endpoint, and Link headers will automatically be added. To also
include these links in your HTML/Atom/RSS templates (and you should!), you
can use the following to get the raw links:
- {{ websub_self_url }}
- {{ websub_hub_url }}
And the following to get them wrapped in ``<link>`` tags:
- {{ websub_self_link }}
- {{ websub_hub_link }}
If hub_url is not given, the hub needs to be a flask_websub one and the
hub and publisher need to share their application for the url to be
auto-discovered. If that is not the case, you need to set
config['HUB_URL'].
If self_url is not given, the url of the current request will be used. Note
that this includes url query arguments. If this is not what you want,
override it.
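A minimal sketch of decorating an endpoint (route and template names are
hypothetical):
.. code:: python

    @app.route('/feed')
    @publisher()
    def feed():
        return render_template('feed.xml')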
|
f11342:m1
|
def discover(url, timeout=None):
|
resp = get_content({'<STR_LIT>': timeout}, url)<EOL>parser = LinkParser()<EOL>parser.hub_url = (resp.links.get('<STR_LIT>') or {}).get('<STR_LIT:url>')<EOL>parser.topic_url = (resp.links.get('<STR_LIT>') or {}).get('<STR_LIT:url>')<EOL>try:<EOL><INDENT>parser.updated()<EOL>for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):<EOL><INDENT>parser.feed(chunk)<EOL><DEDENT>parser.close()<EOL><DEDENT>except Finished:<EOL><INDENT>return {'<STR_LIT>': parser.hub_url, '<STR_LIT>': parser.topic_url}<EOL><DEDENT>raise DiscoveryError("<STR_LIT>")<EOL>
|
Discover the hub url and topic url of a given url: first by inspecting
the page's headers, then by inspecting the content for link tags.
timeout determines how many seconds to wait for the url to load; it
defaults to 3.
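For example (the url is a placeholder):
.. code:: python

    links = discover('https://example.com/blog', timeout=3)
    # -> {'topic_url': ..., 'hub_url': ...}, ready for subscriber.subscribe(**links)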
|
f11343:m0
|
@abc.abstractmethod<EOL><INDENT>def __setitem__(self, callback_id, subscription_request):<DEDENT>
|
Store a new subscription request under the key callback_id. A
subscription request is a dict-like object with the following keys:
- mode
- topic_url
- hub_url
- secret
- lease_seconds
- timeout: after this many seconds, the request itself no longer
has to be stored.
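For example, a stored request might look like this (the storage instance
and all values are placeholders):
.. code:: python

    temp_storage['some-callback-id'] = {
        'mode': 'subscribe',
        'topic_url': 'https://example.com/feed',
        'hub_url': 'https://example.com/hub',
        'secret': 'random-secret',
        'lease_seconds': 86400,
        'timeout': 60,
    }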
|
f11344:c1:m0
|
|
@abc.abstractmethod<EOL><INDENT>def pop(self, callback_id):<DEDENT>
|
Get a subscription request as stored by __setitem__, return it, and
remove the request from the store. Make sure the request has not
expired!
If there is no value for callback_id, raise a KeyError.
|
f11344:c1:m1
|
|
def cleanup(self):
|
Remove any expired subscription requests from the store. If your
backend handles this automatically, there is no need to override this
method.
|
f11344:c1:m2
|
|
def __init__(self, cache):
|
self.cache = cache<EOL>
|
The cache should share the API of werkzeug.contrib.cache.BaseCache.
|
f11344:c2:m0
|
@abc.abstractmethod<EOL><INDENT>def __getitem__(self, callback_id):<DEDENT>
|
Get a subscription by its callback_id. A subscription is a dict-like
object with the following keys:
- mode
- topic_url
- hub_url
- secret
- lease_seconds
|
f11344:c4:m0
|
|
@abc.abstractmethod<EOL><INDENT>def __delitem__(self, callback_id):<DEDENT>
|
Delete a subscription by its callback_id.
|
f11344:c4:m1
|
|
@abc.abstractmethod<EOL><INDENT>def __setitem__(self, callback_id, subscription):<DEDENT>
|
Store a new subscription under the key callback_id. Note that a
subscription should disappear from all queries once lease_seconds have
passed since the moment of storage, with the exception of
close_to_expiration.
|
f11344:c4:m2
|
|
@abc.abstractmethod<EOL><INDENT>def close_to_expiration(self, margin_in_seconds):<DEDENT>
|
Return an iterator of subscriptions that are near (or already past)
their expiration time. margin_in_seconds specifies what 'near' is.
Note that the key 'callback_id' needs to be included in the resulting
object as well!
|
f11344:c4:m3
|
|
@abc.abstractmethod<EOL><INDENT>def pop(self, callback_id):<DEDENT>
|
Atomic combination of __getitem__ and __delitem__.
|
f11344:c4:m4
|
|
def build_blueprint(self, url_prefix='<STR_LIT>'):
|
self.blueprint_name, self.blueprint = build_blueprint(self, url_prefix)<EOL>return self.blueprint<EOL>
|
Build a blueprint that contains the endpoints for callback URLs of
the current subscriber. Only call this once per instance. Arguments:
- url_prefix; this allows you to prefix the callback URLs in your app.
|
f11345:c0:m1
|
def subscribe(self, **subscription_request):
|
return self.subscribe_impl(mode='<STR_LIT>', **subscription_request)<EOL>
|
Subscribe to a certain topic. All arguments are keyword arguments.
They are:
- topic_url: the url of the topic to subscribe to.
- hub_url: the url of the hub that the topic url links to.
- secret (optional): a secret to use in the communication. If
AUTO_SET_SECRET is enabled (and it is by default), the library
creates a random secret for you, unless you override it.
- lease_seconds (optional): the lease length you request from the
hub. Note that the hub may override it. If it's not given, the hub
gets to decide by itself.
- requests_opts (optional): allows you to pass in extra options for the
initial subscribe requests. Handy when a hub e.g. demands
authentication. It's against the spec, but these things happen.
Note that, while possible, it is not always necessary to find the
topic_url and hub_url yourself. If you have a WebSub-supporting URL,
you can find them using the discover function. That makes calling this
function as simple as:
.. code:: python
subscriber.subscribe(**discover('http://some_websub_supporting.url'))
This function returns a callback_id. This value is an implementation
detail, so you should not ascribe any meaning to it other than it being
a unique identifier of the subscription.
|
f11345:c0:m2
|
def unsubscribe(self, callback_id):
|
request = self.get_active_subscription(callback_id)<EOL>request['<STR_LIT>'] = '<STR_LIT>'<EOL>self.subscribe_impl(callback_id, **request)<EOL>
|
Ask the hub to cancel the subscription for callback_id, then delete
it from the local database if successful.
|
f11345:c0:m5
|
def renew(self, callback_id):
|
return self.subscribe_impl(callback_id,<EOL>**self.get_active_subscription(callback_id))<EOL>
|
Renew the subscription given by callback_id with the hub. Note that
this should work even when the subscription has expired.
|
f11345:c0:m7
|
def renew_close_to_expiration(self, margin_in_seconds=A_DAY):
|
subscriptions = self.storage.close_to_expiration(margin_in_seconds)<EOL>for subscription in subscriptions:<EOL><INDENT>try:<EOL><INDENT>self.subscribe_impl(**subscription)<EOL><DEDENT>except SubscriberError as e:<EOL><INDENT>warn(RENEW_FAILURE % (subscription['<STR_LIT>'],<EOL>subscription['<STR_LIT>']), e)<EOL><DEDENT><DEDENT>
|
Automatically renew subscriptions that are close to expiring, or have
already expired. margin_in_seconds determines whether a subscription
counts as close to expiring; by default, the margin is a single day
(24 hours).
This is a long-running method for any non-trivial usage of the
subscriber module, as renewal requires several http requests, and
subscriptions are processed serially. Because of that, it is
recommended to run this method in a celery task.
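A sketch of wrapping it in a periodic celery task (task name and interval
are illustrative):
.. code:: python

    @celery.task
    def renew_all():
        subscriber.renew_close_to_expiration()

    celery.add_periodic_task(60 * 60, renew_all.s())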
|
f11345:c0:m8
|
@abc.abstractmethod<EOL><INDENT>def __delitem__(self, key):<DEDENT>
|
A key consists of two components: (topic_url, callback_url).
If the operation cannot be performed (e.g. because there is no item
matching the key in the database), you may log an error, but an
exception should not be raised.
|
f11349:c0:m0
|
|
@abc.abstractmethod<EOL><INDENT>def __setitem__(self, key, value):<DEDENT>
|
For key info, see __delitem__. value is a dict with the following
properties:
- expiration_time
- secret
|
f11349:c0:m1
|
|
@abc.abstractmethod<EOL><INDENT>def get_callbacks(self, topic_url):<DEDENT>
|
A generator function that should yield tuples with the following
values for each item in storage that has a matching topic_url:
- callback_url
- secret
Note that expired objects should not be yielded.
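A minimal in-memory sketch, assuming entries live in a dict keyed by
(topic_url, callback_url) as described for __delitem__, with unix
timestamps for expiration_time:
.. code:: python

    import time

    def get_callbacks(self, topic_url):
        now = time.time()
        # self._items is a hypothetical dict backing this sketch
        for (topic, callback_url), value in self._items.items():
            if topic == topic_url and value['expiration_time'] > now:
                yield callback_url, value['secret']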
|
f11349:c0:m2
|
|
def cleanup_expired_subscriptions(self):
|
If your storage backend enforces the expiration times, there's
nothing more to do. If it does not do so by default, you should
override this method, and remove all expired entries.
|
f11349:c0:m3
|
|
def endpoint_hook(self):
|
Override this method to hook into the endpoint handling. Anything
this method returns will be forwarded to validation functions when
subscribing.
|
f11350:c0:m1
|
|
def build_blueprint(hub, url_prefix='<STR_LIT>'):
|
return build_blueprint(hub, url_prefix)<EOL>
|
Build a blueprint containing a Flask route that is the hub endpoint.
|
f11350:c0:m2
|
def init_celery(self, celery):
|
count = next(self.counter)<EOL>def task_with_hub(f, **opts):<EOL><INDENT>@functools.wraps(f)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>return f(self, *args, **kwargs)<EOL><DEDENT>wrapper.__name__ = wrapper.__name__ + '<STR_LIT:_>' + str(count)<EOL>return celery.task(**opts)(wrapper)<EOL><DEDENT>self.subscribe = task_with_hub(subscribe)<EOL>self.unsubscribe = task_with_hub(unsubscribe)<EOL>max_attempts = self.config.get('<STR_LIT>', <NUM_LIT:10>)<EOL>make_req = task_with_hub(make_request_retrying, bind=True,<EOL>max_retries=max_attempts)<EOL>self.make_request_retrying = make_req<EOL>self.send_change = task_with_hub(send_change_notification)<EOL>@task_with_hub<EOL>def cleanup(hub):<EOL><INDENT>self.storage.cleanup_expired_subscriptions()<EOL><DEDENT>self.cleanup = cleanup<EOL>def schedule(every_x_seconds=A_DAY):<EOL><INDENT>celery.add_periodic_task(every_x_seconds,<EOL>self.cleanup_expired_subscriptions.s())<EOL><DEDENT>self.schedule = schedule<EOL>
|
Registers the celery tasks on the hub object.
|
f11350:c0:m3
|
@property<EOL><INDENT>def send_change_notification(self):<DEDENT>
|
return self.send_change<EOL>
|
Allows you to notify subscribers of a change to a `topic_url`. This
is a celery task, so you probably will actually want to call
hub.send_change_notification.delay(topic_url, updated_content). The
last argument is optional. If passed in, it should be an object with
two properties: `headers` (dict-like), and `content` (a base64-encoded
string). If left out, the updated content will be fetched from the
topic url directly.
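For example, pushing pre-fetched content (the container class is
illustrative; the topic url is a placeholder):
.. code:: python

    import base64

    class Update:
        headers = {'Content-Type': 'application/atom+xml'}
        content = base64.b64encode(b'<feed>...</feed>').decode()

    hub.send_change_notification.delay('https://example.com/feed', Update())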
|
f11350:c0:m4
|
@property<EOL><INDENT>def cleanup_expired_subscriptions(self):<DEDENT>
|
return self.cleanup<EOL>
|
Removes any expired subscriptions from the backing data store.
It takes no arguments, and is a celery task.
|
f11350:c0:m5
|
@property<EOL><INDENT>def schedule_cleanup(self):<DEDENT>
|
return self.schedule<EOL>
|
schedule_cleanup(every_x_seconds=A_DAY): schedules the celery
task `cleanup_expired_subscriptions` as a recurring event, the
frequency of which is determined by its parameter. This is not a
celery task itself (as the cleanup is only scheduled), and is a
convenience function.
|
f11350:c0:m6
|
def register_validator(self, f):
|
self.validators.append(f)<EOL>
|
Register `f` as a validation function for subscription requests. It
gets a callback_url and topic_url as its arguments, and should return
None if the validation succeeded, or a string describing the problem
otherwise.
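For example, a validator that rejects plain-http callbacks (illustrative):
.. code:: python

    def https_only(callback_url, topic_url):
        if not callback_url.startswith('https://'):
            return 'callback_url must use https'
        # returning None means the validation succeeded

    hub.register_validator(https_only)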
|
f11350:c0:m7
|