partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
train
Language.use_params
Replace weights of models in the pipeline with those provided in the params dictionary. Can be used as a contextmanager, in which case, models go back to their original weights after the block. params (dict): A dictionary of parameters keyed by model ID. **cfg: Config parameters. EXAMPLE: >>> with nlp.use_params(optimizer.averages): >>> nlp.to_disk('/tmp/checkpoint')
spacy/language.py
def use_params(self, params, **cfg): """Replace weights of models in the pipeline with those provided in the params dictionary. Can be used as a contextmanager, in which case, models go back to their original weights after the block. params (dict): A dictionary of parameters keyed by model ID. **cfg: Config parameters. EXAMPLE: >>> with nlp.use_params(optimizer.averages): >>> nlp.to_disk('/tmp/checkpoint') """ contexts = [ pipe.use_params(params) for name, pipe in self.pipeline if hasattr(pipe, "use_params") ] # TODO: Having trouble with contextlib # Workaround: these aren't actually context managers atm. for context in contexts: try: next(context) except StopIteration: pass yield for context in contexts: try: next(context) except StopIteration: pass
def use_params(self, params, **cfg): """Replace weights of models in the pipeline with those provided in the params dictionary. Can be used as a contextmanager, in which case, models go back to their original weights after the block. params (dict): A dictionary of parameters keyed by model ID. **cfg: Config parameters. EXAMPLE: >>> with nlp.use_params(optimizer.averages): >>> nlp.to_disk('/tmp/checkpoint') """ contexts = [ pipe.use_params(params) for name, pipe in self.pipeline if hasattr(pipe, "use_params") ] # TODO: Having trouble with contextlib # Workaround: these aren't actually context managers atm. for context in contexts: try: next(context) except StopIteration: pass yield for context in contexts: try: next(context) except StopIteration: pass
[ "Replace", "weights", "of", "models", "in", "the", "pipeline", "with", "those", "provided", "in", "the", "params", "dictionary", ".", "Can", "be", "used", "as", "a", "contextmanager", "in", "which", "case", "models", "go", "back", "to", "their", "original", "weights", "after", "the", "block", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L619-L648
[ "def", "use_params", "(", "self", ",", "params", ",", "*", "*", "cfg", ")", ":", "contexts", "=", "[", "pipe", ".", "use_params", "(", "params", ")", "for", "name", ",", "pipe", "in", "self", ".", "pipeline", "if", "hasattr", "(", "pipe", ",", "\"use_params\"", ")", "]", "# TODO: Having trouble with contextlib", "# Workaround: these aren't actually context managers atm.", "for", "context", "in", "contexts", ":", "try", ":", "next", "(", "context", ")", "except", "StopIteration", ":", "pass", "yield", "for", "context", "in", "contexts", ":", "try", ":", "next", "(", "context", ")", "except", "StopIteration", ":", "pass" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
Language.pipe
Process texts as a stream, and yield `Doc` objects in order. texts (iterator): A sequence of texts to process. as_tuples (bool): If set to True, inputs should be a sequence of (text, context) tuples. Output will then be a sequence of (doc, context) tuples. Defaults to False. batch_size (int): The number of texts to buffer. disable (list): Names of the pipeline components to disable. cleanup (bool): If True, unneeded strings are freed to control memory use. Experimental. component_cfg (dict): An optional dictionary with extra keyword arguments for specific components. YIELDS (Doc): Documents in the order of the original text. DOCS: https://spacy.io/api/language#pipe
spacy/language.py
def pipe( self, texts, as_tuples=False, n_threads=-1, batch_size=1000, disable=[], cleanup=False, component_cfg=None, ): """Process texts as a stream, and yield `Doc` objects in order. texts (iterator): A sequence of texts to process. as_tuples (bool): If set to True, inputs should be a sequence of (text, context) tuples. Output will then be a sequence of (doc, context) tuples. Defaults to False. batch_size (int): The number of texts to buffer. disable (list): Names of the pipeline components to disable. cleanup (bool): If True, unneeded strings are freed to control memory use. Experimental. component_cfg (dict): An optional dictionary with extra keyword arguments for specific components. YIELDS (Doc): Documents in the order of the original text. DOCS: https://spacy.io/api/language#pipe """ if n_threads != -1: deprecation_warning(Warnings.W016) if as_tuples: text_context1, text_context2 = itertools.tee(texts) texts = (tc[0] for tc in text_context1) contexts = (tc[1] for tc in text_context2) docs = self.pipe( texts, batch_size=batch_size, disable=disable, component_cfg=component_cfg, ) for doc, context in izip(docs, contexts): yield (doc, context) return docs = (self.make_doc(text) for text in texts) if component_cfg is None: component_cfg = {} for name, proc in self.pipeline: if name in disable: continue kwargs = component_cfg.get(name, {}) # Allow component_cfg to overwrite the top-level kwargs. kwargs.setdefault("batch_size", batch_size) if hasattr(proc, "pipe"): docs = proc.pipe(docs, **kwargs) else: # Apply the function, but yield the doc docs = _pipe(proc, docs, kwargs) # Track weakrefs of "recent" documents, so that we can see when they # expire from memory. When they do, we know we don't need old strings. # This way, we avoid maintaining an unbounded growth in string entries # in the string store. 
recent_refs = weakref.WeakSet() old_refs = weakref.WeakSet() # Keep track of the original string data, so that if we flush old strings, # we can recover the original ones. However, we only want to do this if we're # really adding strings, to save up-front costs. original_strings_data = None nr_seen = 0 for doc in docs: yield doc if cleanup: recent_refs.add(doc) if nr_seen < 10000: old_refs.add(doc) nr_seen += 1 elif len(old_refs) == 0: old_refs, recent_refs = recent_refs, old_refs if original_strings_data is None: original_strings_data = list(self.vocab.strings) else: keys, strings = self.vocab.strings._cleanup_stale_strings( original_strings_data ) self.vocab._reset_cache(keys, strings) self.tokenizer._reset_cache(keys) nr_seen = 0
def pipe( self, texts, as_tuples=False, n_threads=-1, batch_size=1000, disable=[], cleanup=False, component_cfg=None, ): """Process texts as a stream, and yield `Doc` objects in order. texts (iterator): A sequence of texts to process. as_tuples (bool): If set to True, inputs should be a sequence of (text, context) tuples. Output will then be a sequence of (doc, context) tuples. Defaults to False. batch_size (int): The number of texts to buffer. disable (list): Names of the pipeline components to disable. cleanup (bool): If True, unneeded strings are freed to control memory use. Experimental. component_cfg (dict): An optional dictionary with extra keyword arguments for specific components. YIELDS (Doc): Documents in the order of the original text. DOCS: https://spacy.io/api/language#pipe """ if n_threads != -1: deprecation_warning(Warnings.W016) if as_tuples: text_context1, text_context2 = itertools.tee(texts) texts = (tc[0] for tc in text_context1) contexts = (tc[1] for tc in text_context2) docs = self.pipe( texts, batch_size=batch_size, disable=disable, component_cfg=component_cfg, ) for doc, context in izip(docs, contexts): yield (doc, context) return docs = (self.make_doc(text) for text in texts) if component_cfg is None: component_cfg = {} for name, proc in self.pipeline: if name in disable: continue kwargs = component_cfg.get(name, {}) # Allow component_cfg to overwrite the top-level kwargs. kwargs.setdefault("batch_size", batch_size) if hasattr(proc, "pipe"): docs = proc.pipe(docs, **kwargs) else: # Apply the function, but yield the doc docs = _pipe(proc, docs, kwargs) # Track weakrefs of "recent" documents, so that we can see when they # expire from memory. When they do, we know we don't need old strings. # This way, we avoid maintaining an unbounded growth in string entries # in the string store. 
recent_refs = weakref.WeakSet() old_refs = weakref.WeakSet() # Keep track of the original string data, so that if we flush old strings, # we can recover the original ones. However, we only want to do this if we're # really adding strings, to save up-front costs. original_strings_data = None nr_seen = 0 for doc in docs: yield doc if cleanup: recent_refs.add(doc) if nr_seen < 10000: old_refs.add(doc) nr_seen += 1 elif len(old_refs) == 0: old_refs, recent_refs = recent_refs, old_refs if original_strings_data is None: original_strings_data = list(self.vocab.strings) else: keys, strings = self.vocab.strings._cleanup_stale_strings( original_strings_data ) self.vocab._reset_cache(keys, strings) self.tokenizer._reset_cache(keys) nr_seen = 0
[ "Process", "texts", "as", "a", "stream", "and", "yield", "Doc", "objects", "in", "order", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L650-L733
[ "def", "pipe", "(", "self", ",", "texts", ",", "as_tuples", "=", "False", ",", "n_threads", "=", "-", "1", ",", "batch_size", "=", "1000", ",", "disable", "=", "[", "]", ",", "cleanup", "=", "False", ",", "component_cfg", "=", "None", ",", ")", ":", "if", "n_threads", "!=", "-", "1", ":", "deprecation_warning", "(", "Warnings", ".", "W016", ")", "if", "as_tuples", ":", "text_context1", ",", "text_context2", "=", "itertools", ".", "tee", "(", "texts", ")", "texts", "=", "(", "tc", "[", "0", "]", "for", "tc", "in", "text_context1", ")", "contexts", "=", "(", "tc", "[", "1", "]", "for", "tc", "in", "text_context2", ")", "docs", "=", "self", ".", "pipe", "(", "texts", ",", "batch_size", "=", "batch_size", ",", "disable", "=", "disable", ",", "component_cfg", "=", "component_cfg", ",", ")", "for", "doc", ",", "context", "in", "izip", "(", "docs", ",", "contexts", ")", ":", "yield", "(", "doc", ",", "context", ")", "return", "docs", "=", "(", "self", ".", "make_doc", "(", "text", ")", "for", "text", "in", "texts", ")", "if", "component_cfg", "is", "None", ":", "component_cfg", "=", "{", "}", "for", "name", ",", "proc", "in", "self", ".", "pipeline", ":", "if", "name", "in", "disable", ":", "continue", "kwargs", "=", "component_cfg", ".", "get", "(", "name", ",", "{", "}", ")", "# Allow component_cfg to overwrite the top-level kwargs.", "kwargs", ".", "setdefault", "(", "\"batch_size\"", ",", "batch_size", ")", "if", "hasattr", "(", "proc", ",", "\"pipe\"", ")", ":", "docs", "=", "proc", ".", "pipe", "(", "docs", ",", "*", "*", "kwargs", ")", "else", ":", "# Apply the function, but yield the doc", "docs", "=", "_pipe", "(", "proc", ",", "docs", ",", "kwargs", ")", "# Track weakrefs of \"recent\" documents, so that we can see when they", "# expire from memory. 
When they do, we know we don't need old strings.", "# This way, we avoid maintaining an unbounded growth in string entries", "# in the string store.", "recent_refs", "=", "weakref", ".", "WeakSet", "(", ")", "old_refs", "=", "weakref", ".", "WeakSet", "(", ")", "# Keep track of the original string data, so that if we flush old strings,", "# we can recover the original ones. However, we only want to do this if we're", "# really adding strings, to save up-front costs.", "original_strings_data", "=", "None", "nr_seen", "=", "0", "for", "doc", "in", "docs", ":", "yield", "doc", "if", "cleanup", ":", "recent_refs", ".", "add", "(", "doc", ")", "if", "nr_seen", "<", "10000", ":", "old_refs", ".", "add", "(", "doc", ")", "nr_seen", "+=", "1", "elif", "len", "(", "old_refs", ")", "==", "0", ":", "old_refs", ",", "recent_refs", "=", "recent_refs", ",", "old_refs", "if", "original_strings_data", "is", "None", ":", "original_strings_data", "=", "list", "(", "self", ".", "vocab", ".", "strings", ")", "else", ":", "keys", ",", "strings", "=", "self", ".", "vocab", ".", "strings", ".", "_cleanup_stale_strings", "(", "original_strings_data", ")", "self", ".", "vocab", ".", "_reset_cache", "(", "keys", ",", "strings", ")", "self", ".", "tokenizer", ".", "_reset_cache", "(", "keys", ")", "nr_seen", "=", "0" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
Language.to_disk
Save the current state to a directory. If a model is loaded, this will include the model. path (unicode or Path): Path to a directory, which will be created if it doesn't exist. exclude (list): Names of components or serialization fields to exclude. DOCS: https://spacy.io/api/language#to_disk
spacy/language.py
def to_disk(self, path, exclude=tuple(), disable=None): """Save the current state to a directory. If a model is loaded, this will include the model. path (unicode or Path): Path to a directory, which will be created if it doesn't exist. exclude (list): Names of components or serialization fields to exclude. DOCS: https://spacy.io/api/language#to_disk """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable path = util.ensure_path(path) serializers = OrderedDict() serializers["tokenizer"] = lambda p: self.tokenizer.to_disk(p, exclude=["vocab"]) serializers["meta.json"] = lambda p: p.open("w").write(srsly.json_dumps(self.meta)) for name, proc in self.pipeline: if not hasattr(proc, "name"): continue if name in exclude: continue if not hasattr(proc, "to_disk"): continue serializers[name] = lambda p, proc=proc: proc.to_disk(p, exclude=["vocab"]) serializers["vocab"] = lambda p: self.vocab.to_disk(p) util.to_disk(path, serializers, exclude)
def to_disk(self, path, exclude=tuple(), disable=None): """Save the current state to a directory. If a model is loaded, this will include the model. path (unicode or Path): Path to a directory, which will be created if it doesn't exist. exclude (list): Names of components or serialization fields to exclude. DOCS: https://spacy.io/api/language#to_disk """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable path = util.ensure_path(path) serializers = OrderedDict() serializers["tokenizer"] = lambda p: self.tokenizer.to_disk(p, exclude=["vocab"]) serializers["meta.json"] = lambda p: p.open("w").write(srsly.json_dumps(self.meta)) for name, proc in self.pipeline: if not hasattr(proc, "name"): continue if name in exclude: continue if not hasattr(proc, "to_disk"): continue serializers[name] = lambda p, proc=proc: proc.to_disk(p, exclude=["vocab"]) serializers["vocab"] = lambda p: self.vocab.to_disk(p) util.to_disk(path, serializers, exclude)
[ "Save", "the", "current", "state", "to", "a", "directory", ".", "If", "a", "model", "is", "loaded", "this", "will", "include", "the", "model", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L735-L761
[ "def", "to_disk", "(", "self", ",", "path", ",", "exclude", "=", "tuple", "(", ")", ",", "disable", "=", "None", ")", ":", "if", "disable", "is", "not", "None", ":", "deprecation_warning", "(", "Warnings", ".", "W014", ")", "exclude", "=", "disable", "path", "=", "util", ".", "ensure_path", "(", "path", ")", "serializers", "=", "OrderedDict", "(", ")", "serializers", "[", "\"tokenizer\"", "]", "=", "lambda", "p", ":", "self", ".", "tokenizer", ".", "to_disk", "(", "p", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "serializers", "[", "\"meta.json\"", "]", "=", "lambda", "p", ":", "p", ".", "open", "(", "\"w\"", ")", ".", "write", "(", "srsly", ".", "json_dumps", "(", "self", ".", "meta", ")", ")", "for", "name", ",", "proc", "in", "self", ".", "pipeline", ":", "if", "not", "hasattr", "(", "proc", ",", "\"name\"", ")", ":", "continue", "if", "name", "in", "exclude", ":", "continue", "if", "not", "hasattr", "(", "proc", ",", "\"to_disk\"", ")", ":", "continue", "serializers", "[", "name", "]", "=", "lambda", "p", ",", "proc", "=", "proc", ":", "proc", ".", "to_disk", "(", "p", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "serializers", "[", "\"vocab\"", "]", "=", "lambda", "p", ":", "self", ".", "vocab", ".", "to_disk", "(", "p", ")", "util", ".", "to_disk", "(", "path", ",", "serializers", ",", "exclude", ")" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
Language.from_disk
Loads state from a directory. Modifies the object in place and returns it. If the saved `Language` object contains a model, the model will be loaded. path (unicode or Path): A path to a directory. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The modified `Language` object. DOCS: https://spacy.io/api/language#from_disk
spacy/language.py
def from_disk(self, path, exclude=tuple(), disable=None): """Loads state from a directory. Modifies the object in place and returns it. If the saved `Language` object contains a model, the model will be loaded. path (unicode or Path): A path to a directory. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The modified `Language` object. DOCS: https://spacy.io/api/language#from_disk """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable path = util.ensure_path(path) deserializers = OrderedDict() deserializers["meta.json"] = lambda p: self.meta.update(srsly.read_json(p)) deserializers["vocab"] = lambda p: self.vocab.from_disk(p) and _fix_pretrained_vectors_name(self) deserializers["tokenizer"] = lambda p: self.tokenizer.from_disk(p, exclude=["vocab"]) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "from_disk"): continue deserializers[name] = lambda p, proc=proc: proc.from_disk(p, exclude=["vocab"]) if not (path / "vocab").exists() and "vocab" not in exclude: # Convert to list here in case exclude is (default) tuple exclude = list(exclude) + ["vocab"] util.from_disk(path, deserializers, exclude) self._path = path return self
def from_disk(self, path, exclude=tuple(), disable=None): """Loads state from a directory. Modifies the object in place and returns it. If the saved `Language` object contains a model, the model will be loaded. path (unicode or Path): A path to a directory. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The modified `Language` object. DOCS: https://spacy.io/api/language#from_disk """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable path = util.ensure_path(path) deserializers = OrderedDict() deserializers["meta.json"] = lambda p: self.meta.update(srsly.read_json(p)) deserializers["vocab"] = lambda p: self.vocab.from_disk(p) and _fix_pretrained_vectors_name(self) deserializers["tokenizer"] = lambda p: self.tokenizer.from_disk(p, exclude=["vocab"]) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "from_disk"): continue deserializers[name] = lambda p, proc=proc: proc.from_disk(p, exclude=["vocab"]) if not (path / "vocab").exists() and "vocab" not in exclude: # Convert to list here in case exclude is (default) tuple exclude = list(exclude) + ["vocab"] util.from_disk(path, deserializers, exclude) self._path = path return self
[ "Loads", "state", "from", "a", "directory", ".", "Modifies", "the", "object", "in", "place", "and", "returns", "it", ".", "If", "the", "saved", "Language", "object", "contains", "a", "model", "the", "model", "will", "be", "loaded", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L763-L793
[ "def", "from_disk", "(", "self", ",", "path", ",", "exclude", "=", "tuple", "(", ")", ",", "disable", "=", "None", ")", ":", "if", "disable", "is", "not", "None", ":", "deprecation_warning", "(", "Warnings", ".", "W014", ")", "exclude", "=", "disable", "path", "=", "util", ".", "ensure_path", "(", "path", ")", "deserializers", "=", "OrderedDict", "(", ")", "deserializers", "[", "\"meta.json\"", "]", "=", "lambda", "p", ":", "self", ".", "meta", ".", "update", "(", "srsly", ".", "read_json", "(", "p", ")", ")", "deserializers", "[", "\"vocab\"", "]", "=", "lambda", "p", ":", "self", ".", "vocab", ".", "from_disk", "(", "p", ")", "and", "_fix_pretrained_vectors_name", "(", "self", ")", "deserializers", "[", "\"tokenizer\"", "]", "=", "lambda", "p", ":", "self", ".", "tokenizer", ".", "from_disk", "(", "p", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "for", "name", ",", "proc", "in", "self", ".", "pipeline", ":", "if", "name", "in", "exclude", ":", "continue", "if", "not", "hasattr", "(", "proc", ",", "\"from_disk\"", ")", ":", "continue", "deserializers", "[", "name", "]", "=", "lambda", "p", ",", "proc", "=", "proc", ":", "proc", ".", "from_disk", "(", "p", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "if", "not", "(", "path", "/", "\"vocab\"", ")", ".", "exists", "(", ")", "and", "\"vocab\"", "not", "in", "exclude", ":", "# Convert to list here in case exclude is (default) tuple", "exclude", "=", "list", "(", "exclude", ")", "+", "[", "\"vocab\"", "]", "util", ".", "from_disk", "(", "path", ",", "deserializers", ",", "exclude", ")", "self", ".", "_path", "=", "path", "return", "self" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
Language.to_bytes
Serialize the current state to a binary string. exclude (list): Names of components or serialization fields to exclude. RETURNS (bytes): The serialized form of the `Language` object. DOCS: https://spacy.io/api/language#to_bytes
spacy/language.py
def to_bytes(self, exclude=tuple(), disable=None, **kwargs): """Serialize the current state to a binary string. exclude (list): Names of components or serialization fields to exclude. RETURNS (bytes): The serialized form of the `Language` object. DOCS: https://spacy.io/api/language#to_bytes """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable serializers = OrderedDict() serializers["vocab"] = lambda: self.vocab.to_bytes() serializers["tokenizer"] = lambda: self.tokenizer.to_bytes(exclude=["vocab"]) serializers["meta.json"] = lambda: srsly.json_dumps(self.meta) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "to_bytes"): continue serializers[name] = lambda proc=proc: proc.to_bytes(exclude=["vocab"]) exclude = util.get_serialization_exclude(serializers, exclude, kwargs) return util.to_bytes(serializers, exclude)
def to_bytes(self, exclude=tuple(), disable=None, **kwargs): """Serialize the current state to a binary string. exclude (list): Names of components or serialization fields to exclude. RETURNS (bytes): The serialized form of the `Language` object. DOCS: https://spacy.io/api/language#to_bytes """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable serializers = OrderedDict() serializers["vocab"] = lambda: self.vocab.to_bytes() serializers["tokenizer"] = lambda: self.tokenizer.to_bytes(exclude=["vocab"]) serializers["meta.json"] = lambda: srsly.json_dumps(self.meta) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "to_bytes"): continue serializers[name] = lambda proc=proc: proc.to_bytes(exclude=["vocab"]) exclude = util.get_serialization_exclude(serializers, exclude, kwargs) return util.to_bytes(serializers, exclude)
[ "Serialize", "the", "current", "state", "to", "a", "binary", "string", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L795-L817
[ "def", "to_bytes", "(", "self", ",", "exclude", "=", "tuple", "(", ")", ",", "disable", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "disable", "is", "not", "None", ":", "deprecation_warning", "(", "Warnings", ".", "W014", ")", "exclude", "=", "disable", "serializers", "=", "OrderedDict", "(", ")", "serializers", "[", "\"vocab\"", "]", "=", "lambda", ":", "self", ".", "vocab", ".", "to_bytes", "(", ")", "serializers", "[", "\"tokenizer\"", "]", "=", "lambda", ":", "self", ".", "tokenizer", ".", "to_bytes", "(", "exclude", "=", "[", "\"vocab\"", "]", ")", "serializers", "[", "\"meta.json\"", "]", "=", "lambda", ":", "srsly", ".", "json_dumps", "(", "self", ".", "meta", ")", "for", "name", ",", "proc", "in", "self", ".", "pipeline", ":", "if", "name", "in", "exclude", ":", "continue", "if", "not", "hasattr", "(", "proc", ",", "\"to_bytes\"", ")", ":", "continue", "serializers", "[", "name", "]", "=", "lambda", "proc", "=", "proc", ":", "proc", ".", "to_bytes", "(", "exclude", "=", "[", "\"vocab\"", "]", ")", "exclude", "=", "util", ".", "get_serialization_exclude", "(", "serializers", ",", "exclude", ",", "kwargs", ")", "return", "util", ".", "to_bytes", "(", "serializers", ",", "exclude", ")" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
Language.from_bytes
Load state from a binary string. bytes_data (bytes): The data to load from. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The `Language` object. DOCS: https://spacy.io/api/language#from_bytes
spacy/language.py
def from_bytes(self, bytes_data, exclude=tuple(), disable=None, **kwargs): """Load state from a binary string. bytes_data (bytes): The data to load from. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The `Language` object. DOCS: https://spacy.io/api/language#from_bytes """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable deserializers = OrderedDict() deserializers["meta.json"] = lambda b: self.meta.update(srsly.json_loads(b)) deserializers["vocab"] = lambda b: self.vocab.from_bytes(b) and _fix_pretrained_vectors_name(self) deserializers["tokenizer"] = lambda b: self.tokenizer.from_bytes(b, exclude=["vocab"]) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "from_bytes"): continue deserializers[name] = lambda b, proc=proc: proc.from_bytes(b, exclude=["vocab"]) exclude = util.get_serialization_exclude(deserializers, exclude, kwargs) util.from_bytes(bytes_data, deserializers, exclude) return self
def from_bytes(self, bytes_data, exclude=tuple(), disable=None, **kwargs): """Load state from a binary string. bytes_data (bytes): The data to load from. exclude (list): Names of components or serialization fields to exclude. RETURNS (Language): The `Language` object. DOCS: https://spacy.io/api/language#from_bytes """ if disable is not None: deprecation_warning(Warnings.W014) exclude = disable deserializers = OrderedDict() deserializers["meta.json"] = lambda b: self.meta.update(srsly.json_loads(b)) deserializers["vocab"] = lambda b: self.vocab.from_bytes(b) and _fix_pretrained_vectors_name(self) deserializers["tokenizer"] = lambda b: self.tokenizer.from_bytes(b, exclude=["vocab"]) for name, proc in self.pipeline: if name in exclude: continue if not hasattr(proc, "from_bytes"): continue deserializers[name] = lambda b, proc=proc: proc.from_bytes(b, exclude=["vocab"]) exclude = util.get_serialization_exclude(deserializers, exclude, kwargs) util.from_bytes(bytes_data, deserializers, exclude) return self
[ "Load", "state", "from", "a", "binary", "string", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L819-L843
[ "def", "from_bytes", "(", "self", ",", "bytes_data", ",", "exclude", "=", "tuple", "(", ")", ",", "disable", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "disable", "is", "not", "None", ":", "deprecation_warning", "(", "Warnings", ".", "W014", ")", "exclude", "=", "disable", "deserializers", "=", "OrderedDict", "(", ")", "deserializers", "[", "\"meta.json\"", "]", "=", "lambda", "b", ":", "self", ".", "meta", ".", "update", "(", "srsly", ".", "json_loads", "(", "b", ")", ")", "deserializers", "[", "\"vocab\"", "]", "=", "lambda", "b", ":", "self", ".", "vocab", ".", "from_bytes", "(", "b", ")", "and", "_fix_pretrained_vectors_name", "(", "self", ")", "deserializers", "[", "\"tokenizer\"", "]", "=", "lambda", "b", ":", "self", ".", "tokenizer", ".", "from_bytes", "(", "b", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "for", "name", ",", "proc", "in", "self", ".", "pipeline", ":", "if", "name", "in", "exclude", ":", "continue", "if", "not", "hasattr", "(", "proc", ",", "\"from_bytes\"", ")", ":", "continue", "deserializers", "[", "name", "]", "=", "lambda", "b", ",", "proc", "=", "proc", ":", "proc", ".", "from_bytes", "(", "b", ",", "exclude", "=", "[", "\"vocab\"", "]", ")", "exclude", "=", "util", ".", "get_serialization_exclude", "(", "deserializers", ",", "exclude", ",", "kwargs", ")", "util", ".", "from_bytes", "(", "bytes_data", ",", "deserializers", ",", "exclude", ")", "return", "self" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
DisabledPipes.restore
Restore the pipeline to its state when DisabledPipes was created.
spacy/language.py
def restore(self): """Restore the pipeline to its state when DisabledPipes was created.""" current, self.nlp.pipeline = self.nlp.pipeline, self.original_pipeline unexpected = [name for name, pipe in current if not self.nlp.has_pipe(name)] if unexpected: # Don't change the pipeline if we're raising an error. self.nlp.pipeline = current raise ValueError(Errors.E008.format(names=unexpected)) self[:] = []
def restore(self): """Restore the pipeline to its state when DisabledPipes was created.""" current, self.nlp.pipeline = self.nlp.pipeline, self.original_pipeline unexpected = [name for name, pipe in current if not self.nlp.has_pipe(name)] if unexpected: # Don't change the pipeline if we're raising an error. self.nlp.pipeline = current raise ValueError(Errors.E008.format(names=unexpected)) self[:] = []
[ "Restore", "the", "pipeline", "to", "its", "state", "when", "DisabledPipes", "was", "created", "." ]
explosion/spaCy
python
https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L886-L894
[ "def", "restore", "(", "self", ")", ":", "current", ",", "self", ".", "nlp", ".", "pipeline", "=", "self", ".", "nlp", ".", "pipeline", ",", "self", ".", "original_pipeline", "unexpected", "=", "[", "name", "for", "name", ",", "pipe", "in", "current", "if", "not", "self", ".", "nlp", ".", "has_pipe", "(", "name", ")", "]", "if", "unexpected", ":", "# Don't change the pipeline if we're raising an error.", "self", ".", "nlp", ".", "pipeline", "=", "current", "raise", "ValueError", "(", "Errors", ".", "E008", ".", "format", "(", "names", "=", "unexpected", ")", ")", "self", "[", ":", "]", "=", "[", "]" ]
8ee4100f8ffb336886208a1ea827bf4c745e2709
train
get_loaded_rules
Yields all available rules. :type rules_paths: [Path] :rtype: Iterable[Rule]
thefuck/corrector.py
def get_loaded_rules(rules_paths): """Yields all available rules. :type rules_paths: [Path] :rtype: Iterable[Rule] """ for path in rules_paths: if path.name != '__init__.py': rule = Rule.from_path(path) if rule.is_enabled: yield rule
def get_loaded_rules(rules_paths): """Yields all available rules. :type rules_paths: [Path] :rtype: Iterable[Rule] """ for path in rules_paths: if path.name != '__init__.py': rule = Rule.from_path(path) if rule.is_enabled: yield rule
[ "Yields", "all", "available", "rules", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/corrector.py#L8-L19
[ "def", "get_loaded_rules", "(", "rules_paths", ")", ":", "for", "path", "in", "rules_paths", ":", "if", "path", ".", "name", "!=", "'__init__.py'", ":", "rule", "=", "Rule", ".", "from_path", "(", "path", ")", "if", "rule", ".", "is_enabled", ":", "yield", "rule" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_rules_import_paths
Yields all rules import paths. :rtype: Iterable[Path]
thefuck/corrector.py
def get_rules_import_paths(): """Yields all rules import paths. :rtype: Iterable[Path] """ # Bundled rules: yield Path(__file__).parent.joinpath('rules') # Rules defined by user: yield settings.user_dir.joinpath('rules') # Packages with third-party rules: for path in sys.path: for contrib_module in Path(path).glob('thefuck_contrib_*'): contrib_rules = contrib_module.joinpath('rules') if contrib_rules.is_dir(): yield contrib_rules
def get_rules_import_paths(): """Yields all rules import paths. :rtype: Iterable[Path] """ # Bundled rules: yield Path(__file__).parent.joinpath('rules') # Rules defined by user: yield settings.user_dir.joinpath('rules') # Packages with third-party rules: for path in sys.path: for contrib_module in Path(path).glob('thefuck_contrib_*'): contrib_rules = contrib_module.joinpath('rules') if contrib_rules.is_dir(): yield contrib_rules
[ "Yields", "all", "rules", "import", "paths", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/corrector.py#L22-L37
[ "def", "get_rules_import_paths", "(", ")", ":", "# Bundled rules:", "yield", "Path", "(", "__file__", ")", ".", "parent", ".", "joinpath", "(", "'rules'", ")", "# Rules defined by user:", "yield", "settings", ".", "user_dir", ".", "joinpath", "(", "'rules'", ")", "# Packages with third-party rules:", "for", "path", "in", "sys", ".", "path", ":", "for", "contrib_module", "in", "Path", "(", "path", ")", ".", "glob", "(", "'thefuck_contrib_*'", ")", ":", "contrib_rules", "=", "contrib_module", ".", "joinpath", "(", "'rules'", ")", "if", "contrib_rules", ".", "is_dir", "(", ")", ":", "yield", "contrib_rules" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_rules
Returns all enabled rules. :rtype: [Rule]
thefuck/corrector.py
def get_rules(): """Returns all enabled rules. :rtype: [Rule] """ paths = [rule_path for path in get_rules_import_paths() for rule_path in sorted(path.glob('*.py'))] return sorted(get_loaded_rules(paths), key=lambda rule: rule.priority)
def get_rules(): """Returns all enabled rules. :rtype: [Rule] """ paths = [rule_path for path in get_rules_import_paths() for rule_path in sorted(path.glob('*.py'))] return sorted(get_loaded_rules(paths), key=lambda rule: rule.priority)
[ "Returns", "all", "enabled", "rules", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/corrector.py#L40-L49
[ "def", "get_rules", "(", ")", ":", "paths", "=", "[", "rule_path", "for", "path", "in", "get_rules_import_paths", "(", ")", "for", "rule_path", "in", "sorted", "(", "path", ".", "glob", "(", "'*.py'", ")", ")", "]", "return", "sorted", "(", "get_loaded_rules", "(", "paths", ")", ",", "key", "=", "lambda", "rule", ":", "rule", ".", "priority", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
organize_commands
Yields sorted commands without duplicates. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: Iterable[thefuck.types.CorrectedCommand]
thefuck/corrector.py
def organize_commands(corrected_commands): """Yields sorted commands without duplicates. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: Iterable[thefuck.types.CorrectedCommand] """ try: first_command = next(corrected_commands) yield first_command except StopIteration: return without_duplicates = { command for command in sorted( corrected_commands, key=lambda command: command.priority) if command != first_command} sorted_commands = sorted( without_duplicates, key=lambda corrected_command: corrected_command.priority) logs.debug('Corrected commands: '.format( ', '.join(u'{}'.format(cmd) for cmd in [first_command] + sorted_commands))) for command in sorted_commands: yield command
def organize_commands(corrected_commands): """Yields sorted commands without duplicates. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: Iterable[thefuck.types.CorrectedCommand] """ try: first_command = next(corrected_commands) yield first_command except StopIteration: return without_duplicates = { command for command in sorted( corrected_commands, key=lambda command: command.priority) if command != first_command} sorted_commands = sorted( without_duplicates, key=lambda corrected_command: corrected_command.priority) logs.debug('Corrected commands: '.format( ', '.join(u'{}'.format(cmd) for cmd in [first_command] + sorted_commands))) for command in sorted_commands: yield command
[ "Yields", "sorted", "commands", "without", "duplicates", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/corrector.py#L52-L78
[ "def", "organize_commands", "(", "corrected_commands", ")", ":", "try", ":", "first_command", "=", "next", "(", "corrected_commands", ")", "yield", "first_command", "except", "StopIteration", ":", "return", "without_duplicates", "=", "{", "command", "for", "command", "in", "sorted", "(", "corrected_commands", ",", "key", "=", "lambda", "command", ":", "command", ".", "priority", ")", "if", "command", "!=", "first_command", "}", "sorted_commands", "=", "sorted", "(", "without_duplicates", ",", "key", "=", "lambda", "corrected_command", ":", "corrected_command", ".", "priority", ")", "logs", ".", "debug", "(", "'Corrected commands: '", ".", "format", "(", "', '", ".", "join", "(", "u'{}'", ".", "format", "(", "cmd", ")", "for", "cmd", "in", "[", "first_command", "]", "+", "sorted_commands", ")", ")", ")", "for", "command", "in", "sorted_commands", ":", "yield", "command" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_corrected_commands
Returns generator with sorted and unique corrected commands. :type command: thefuck.types.Command :rtype: Iterable[thefuck.types.CorrectedCommand]
thefuck/corrector.py
def get_corrected_commands(command): """Returns generator with sorted and unique corrected commands. :type command: thefuck.types.Command :rtype: Iterable[thefuck.types.CorrectedCommand] """ corrected_commands = ( corrected for rule in get_rules() if rule.is_match(command) for corrected in rule.get_corrected_commands(command)) return organize_commands(corrected_commands)
def get_corrected_commands(command): """Returns generator with sorted and unique corrected commands. :type command: thefuck.types.Command :rtype: Iterable[thefuck.types.CorrectedCommand] """ corrected_commands = ( corrected for rule in get_rules() if rule.is_match(command) for corrected in rule.get_corrected_commands(command)) return organize_commands(corrected_commands)
[ "Returns", "generator", "with", "sorted", "and", "unique", "corrected", "commands", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/corrector.py#L81-L92
[ "def", "get_corrected_commands", "(", "command", ")", ":", "corrected_commands", "=", "(", "corrected", "for", "rule", "in", "get_rules", "(", ")", "if", "rule", ".", "is_match", "(", "command", ")", "for", "corrected", "in", "rule", ".", "get_corrected_commands", "(", "command", ")", ")", "return", "organize_commands", "(", "corrected_commands", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
fix_command
Fixes previous command. Used when `thefuck` called without arguments.
thefuck/entrypoints/fix_command.py
def fix_command(known_args): """Fixes previous command. Used when `thefuck` called without arguments.""" settings.init(known_args) with logs.debug_time('Total'): logs.debug(u'Run with settings: {}'.format(pformat(settings))) raw_command = _get_raw_command(known_args) try: command = types.Command.from_raw_script(raw_command) except EmptyCommand: logs.debug('Empty command, nothing to do') return corrected_commands = get_corrected_commands(command) selected_command = select_command(corrected_commands) if selected_command: selected_command.run(command) else: sys.exit(1)
def fix_command(known_args): """Fixes previous command. Used when `thefuck` called without arguments.""" settings.init(known_args) with logs.debug_time('Total'): logs.debug(u'Run with settings: {}'.format(pformat(settings))) raw_command = _get_raw_command(known_args) try: command = types.Command.from_raw_script(raw_command) except EmptyCommand: logs.debug('Empty command, nothing to do') return corrected_commands = get_corrected_commands(command) selected_command = select_command(corrected_commands) if selected_command: selected_command.run(command) else: sys.exit(1)
[ "Fixes", "previous", "command", ".", "Used", "when", "thefuck", "called", "without", "arguments", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/fix_command.py#L28-L47
[ "def", "fix_command", "(", "known_args", ")", ":", "settings", ".", "init", "(", "known_args", ")", "with", "logs", ".", "debug_time", "(", "'Total'", ")", ":", "logs", ".", "debug", "(", "u'Run with settings: {}'", ".", "format", "(", "pformat", "(", "settings", ")", ")", ")", "raw_command", "=", "_get_raw_command", "(", "known_args", ")", "try", ":", "command", "=", "types", ".", "Command", ".", "from_raw_script", "(", "raw_command", ")", "except", "EmptyCommand", ":", "logs", ".", "debug", "(", "'Empty command, nothing to do'", ")", "return", "corrected_commands", "=", "get_corrected_commands", "(", "command", ")", "selected_command", "=", "select_command", "(", "corrected_commands", ")", "if", "selected_command", ":", "selected_command", ".", "run", "(", "command", ")", "else", ":", "sys", ".", "exit", "(", "1", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_output
Gets command output from shell logger.
thefuck/output_readers/shell_logger.py
def get_output(script): """Gets command output from shell logger.""" with logs.debug_time(u'Read output from external shell logger'): commands = _get_last_n(const.SHELL_LOGGER_LIMIT) for command in commands: if command['command'] == script: lines = _get_output_lines(command['output']) output = '\n'.join(lines).strip() return output else: logs.warn("Output isn't available in shell logger") return None
def get_output(script): """Gets command output from shell logger.""" with logs.debug_time(u'Read output from external shell logger'): commands = _get_last_n(const.SHELL_LOGGER_LIMIT) for command in commands: if command['command'] == script: lines = _get_output_lines(command['output']) output = '\n'.join(lines).strip() return output else: logs.warn("Output isn't available in shell logger") return None
[ "Gets", "command", "output", "from", "shell", "logger", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/shell_logger.py#L49-L60
[ "def", "get_output", "(", "script", ")", ":", "with", "logs", ".", "debug_time", "(", "u'Read output from external shell logger'", ")", ":", "commands", "=", "_get_last_n", "(", "const", ".", "SHELL_LOGGER_LIMIT", ")", "for", "command", "in", "commands", ":", "if", "command", "[", "'command'", "]", "==", "script", ":", "lines", "=", "_get_output_lines", "(", "command", "[", "'output'", "]", ")", "output", "=", "'\\n'", ".", "join", "(", "lines", ")", ".", "strip", "(", ")", "return", "output", "else", ":", "logs", ".", "warn", "(", "\"Output isn't available in shell logger\"", ")", "return", "None" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Generic._get_history_lines
Returns list of history entries.
thefuck/shells/generic.py
def _get_history_lines(self): """Returns list of history entries.""" history_file_name = self._get_history_file_name() if os.path.isfile(history_file_name): with io.open(history_file_name, 'r', encoding='utf-8', errors='ignore') as history_file: lines = history_file.readlines() if settings.history_limit: lines = lines[-settings.history_limit:] for line in lines: prepared = self._script_from_history(line) \ .strip() if prepared: yield prepared
def _get_history_lines(self): """Returns list of history entries.""" history_file_name = self._get_history_file_name() if os.path.isfile(history_file_name): with io.open(history_file_name, 'r', encoding='utf-8', errors='ignore') as history_file: lines = history_file.readlines() if settings.history_limit: lines = lines[-settings.history_limit:] for line in lines: prepared = self._script_from_history(line) \ .strip() if prepared: yield prepared
[ "Returns", "list", "of", "history", "entries", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/generic.py#L54-L69
[ "def", "_get_history_lines", "(", "self", ")", ":", "history_file_name", "=", "self", ".", "_get_history_file_name", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "history_file_name", ")", ":", "with", "io", ".", "open", "(", "history_file_name", ",", "'r'", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'ignore'", ")", "as", "history_file", ":", "lines", "=", "history_file", ".", "readlines", "(", ")", "if", "settings", ".", "history_limit", ":", "lines", "=", "lines", "[", "-", "settings", ".", "history_limit", ":", "]", "for", "line", "in", "lines", ":", "prepared", "=", "self", ".", "_script_from_history", "(", "line", ")", ".", "strip", "(", ")", "if", "prepared", ":", "yield", "prepared" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Generic.split_command
Split the command using shell-like syntax.
thefuck/shells/generic.py
def split_command(self, command): """Split the command using shell-like syntax.""" encoded = self.encode_utf8(command) try: splitted = [s.replace("??", "\\ ") for s in shlex.split(encoded.replace('\\ ', '??'))] except ValueError: splitted = encoded.split(' ') return self.decode_utf8(splitted)
def split_command(self, command): """Split the command using shell-like syntax.""" encoded = self.encode_utf8(command) try: splitted = [s.replace("??", "\\ ") for s in shlex.split(encoded.replace('\\ ', '??'))] except ValueError: splitted = encoded.split(' ') return self.decode_utf8(splitted)
[ "Split", "the", "command", "using", "shell", "-", "like", "syntax", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/generic.py#L80-L89
[ "def", "split_command", "(", "self", ",", "command", ")", ":", "encoded", "=", "self", ".", "encode_utf8", "(", "command", ")", "try", ":", "splitted", "=", "[", "s", ".", "replace", "(", "\"??\"", ",", "\"\\\\ \"", ")", "for", "s", "in", "shlex", ".", "split", "(", "encoded", ".", "replace", "(", "'\\\\ '", ",", "'??'", ")", ")", "]", "except", "ValueError", ":", "splitted", "=", "encoded", ".", "split", "(", "' '", ")", "return", "self", ".", "decode_utf8", "(", "splitted", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Generic.quote
Return a shell-escaped version of the string s.
thefuck/shells/generic.py
def quote(self, s): """Return a shell-escaped version of the string s.""" if six.PY2: from pipes import quote else: from shlex import quote return quote(s)
def quote(self, s): """Return a shell-escaped version of the string s.""" if six.PY2: from pipes import quote else: from shlex import quote return quote(s)
[ "Return", "a", "shell", "-", "escaped", "version", "of", "the", "string", "s", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/generic.py#L101-L109
[ "def", "quote", "(", "self", ",", "s", ")", ":", "if", "six", ".", "PY2", ":", "from", "pipes", "import", "quote", "else", ":", "from", "shlex", "import", "quote", "return", "quote", "(", "s", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Fish.info
Returns the name and version of the current shell
thefuck/shells/fish.py
def info(self): """Returns the name and version of the current shell""" proc = Popen(['fish', '--version'], stdout=PIPE, stderr=DEVNULL) version = proc.stdout.read().decode('utf-8').split()[-1] return u'Fish Shell {}'.format(version)
def info(self): """Returns the name and version of the current shell""" proc = Popen(['fish', '--version'], stdout=PIPE, stderr=DEVNULL) version = proc.stdout.read().decode('utf-8').split()[-1] return u'Fish Shell {}'.format(version)
[ "Returns", "the", "name", "and", "version", "of", "the", "current", "shell" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/fish.py#L107-L112
[ "def", "info", "(", "self", ")", ":", "proc", "=", "Popen", "(", "[", "'fish'", ",", "'--version'", "]", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "DEVNULL", ")", "version", "=", "proc", ".", "stdout", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ".", "split", "(", ")", "[", "-", "1", "]", "return", "u'Fish Shell {}'", ".", "format", "(", "version", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Fish._put_to_history
Puts command script to shell history.
thefuck/shells/fish.py
def _put_to_history(self, command_script): """Puts command script to shell history.""" history_file_name = self._get_history_file_name() if os.path.isfile(history_file_name): with open(history_file_name, 'a') as history: entry = self._get_history_line(command_script) if six.PY2: history.write(entry.encode('utf-8')) else: history.write(entry)
def _put_to_history(self, command_script): """Puts command script to shell history.""" history_file_name = self._get_history_file_name() if os.path.isfile(history_file_name): with open(history_file_name, 'a') as history: entry = self._get_history_line(command_script) if six.PY2: history.write(entry.encode('utf-8')) else: history.write(entry)
[ "Puts", "command", "script", "to", "shell", "history", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/fish.py#L120-L129
[ "def", "_put_to_history", "(", "self", ",", "command_script", ")", ":", "history_file_name", "=", "self", ".", "_get_history_file_name", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "history_file_name", ")", ":", "with", "open", "(", "history_file_name", ",", "'a'", ")", "as", "history", ":", "entry", "=", "self", ".", "_get_history_line", "(", "command_script", ")", "if", "six", ".", "PY2", ":", "history", ".", "write", "(", "entry", ".", "encode", "(", "'utf-8'", ")", ")", "else", ":", "history", ".", "write", "(", "entry", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_get_brew_commands
To get brew default commands on local environment
thefuck/rules/brew_unknown_command.py
def _get_brew_commands(brew_path_prefix): """To get brew default commands on local environment""" brew_cmd_path = brew_path_prefix + BREW_CMD_PATH return [name[:-3] for name in os.listdir(brew_cmd_path) if name.endswith(('.rb', '.sh'))]
def _get_brew_commands(brew_path_prefix): """To get brew default commands on local environment""" brew_cmd_path = brew_path_prefix + BREW_CMD_PATH return [name[:-3] for name in os.listdir(brew_cmd_path) if name.endswith(('.rb', '.sh'))]
[ "To", "get", "brew", "default", "commands", "on", "local", "environment" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/brew_unknown_command.py#L13-L18
[ "def", "_get_brew_commands", "(", "brew_path_prefix", ")", ":", "brew_cmd_path", "=", "brew_path_prefix", "+", "BREW_CMD_PATH", "return", "[", "name", "[", ":", "-", "3", "]", "for", "name", "in", "os", ".", "listdir", "(", "brew_cmd_path", ")", "if", "name", ".", "endswith", "(", "(", "'.rb'", ",", "'.sh'", ")", ")", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_get_brew_tap_specific_commands
To get tap's specific commands https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115
thefuck/rules/brew_unknown_command.py
def _get_brew_tap_specific_commands(brew_path_prefix): """To get tap's specific commands https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115""" commands = [] brew_taps_path = brew_path_prefix + TAP_PATH for user in _get_directory_names_only(brew_taps_path): taps = _get_directory_names_only(brew_taps_path + '/%s' % user) # Brew Taps's naming rule # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations taps = (tap for tap in taps if tap.startswith('homebrew-')) for tap in taps: tap_cmd_path = brew_taps_path + TAP_CMD_PATH % (user, tap) if os.path.isdir(tap_cmd_path): commands += (name.replace('brew-', '').replace('.rb', '') for name in os.listdir(tap_cmd_path) if _is_brew_tap_cmd_naming(name)) return commands
def _get_brew_tap_specific_commands(brew_path_prefix): """To get tap's specific commands https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115""" commands = [] brew_taps_path = brew_path_prefix + TAP_PATH for user in _get_directory_names_only(brew_taps_path): taps = _get_directory_names_only(brew_taps_path + '/%s' % user) # Brew Taps's naming rule # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations taps = (tap for tap in taps if tap.startswith('homebrew-')) for tap in taps: tap_cmd_path = brew_taps_path + TAP_CMD_PATH % (user, tap) if os.path.isdir(tap_cmd_path): commands += (name.replace('brew-', '').replace('.rb', '') for name in os.listdir(tap_cmd_path) if _is_brew_tap_cmd_naming(name)) return commands
[ "To", "get", "tap", "s", "specific", "commands", "https", ":", "//", "github", ".", "com", "/", "Homebrew", "/", "homebrew", "/", "blob", "/", "master", "/", "Library", "/", "brew", ".", "rb#L115" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/brew_unknown_command.py#L21-L41
[ "def", "_get_brew_tap_specific_commands", "(", "brew_path_prefix", ")", ":", "commands", "=", "[", "]", "brew_taps_path", "=", "brew_path_prefix", "+", "TAP_PATH", "for", "user", "in", "_get_directory_names_only", "(", "brew_taps_path", ")", ":", "taps", "=", "_get_directory_names_only", "(", "brew_taps_path", "+", "'/%s'", "%", "user", ")", "# Brew Taps's naming rule", "# https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations", "taps", "=", "(", "tap", "for", "tap", "in", "taps", "if", "tap", ".", "startswith", "(", "'homebrew-'", ")", ")", "for", "tap", "in", "taps", ":", "tap_cmd_path", "=", "brew_taps_path", "+", "TAP_CMD_PATH", "%", "(", "user", ",", "tap", ")", "if", "os", ".", "path", ".", "isdir", "(", "tap_cmd_path", ")", ":", "commands", "+=", "(", "name", ".", "replace", "(", "'brew-'", ",", "''", ")", ".", "replace", "(", "'.rb'", ",", "''", ")", "for", "name", "in", "os", ".", "listdir", "(", "tap_cmd_path", ")", "if", "_is_brew_tap_cmd_naming", "(", "name", ")", ")", "return", "commands" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Zsh.info
Returns the name and version of the current shell
thefuck/shells/zsh.py
def info(self): """Returns the name and version of the current shell""" proc = Popen(['zsh', '-c', 'echo $ZSH_VERSION'], stdout=PIPE, stderr=DEVNULL) version = proc.stdout.read().decode('utf-8').strip() return u'ZSH {}'.format(version)
def info(self): """Returns the name and version of the current shell""" proc = Popen(['zsh', '-c', 'echo $ZSH_VERSION'], stdout=PIPE, stderr=DEVNULL) version = proc.stdout.read().decode('utf-8').strip() return u'ZSH {}'.format(version)
[ "Returns", "the", "name", "and", "version", "of", "the", "current", "shell" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/shells/zsh.py#L90-L95
[ "def", "info", "(", "self", ")", ":", "proc", "=", "Popen", "(", "[", "'zsh'", ",", "'-c'", ",", "'echo $ZSH_VERSION'", "]", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "DEVNULL", ")", "version", "=", "proc", ".", "stdout", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ".", "strip", "(", ")", "return", "u'ZSH {}'", ".", "format", "(", "version", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
git_support
Resolves git aliases and supports testing for both git and hub.
thefuck/specific/git.py
def git_support(fn, command): """Resolves git aliases and supports testing for both git and hub.""" # supports GitHub's `hub` command # which is recommended to be used with `alias git=hub` # but at this point, shell aliases have already been resolved if not is_app(command, 'git', 'hub'): return False # perform git aliases expansion if 'trace: alias expansion:' in command.output: search = re.search("trace: alias expansion: ([^ ]*) => ([^\n]*)", command.output) alias = search.group(1) # by default git quotes everything, for example: # 'commit' '--amend' # which is surprising and does not allow to easily test for # eg. 'git commit' expansion = ' '.join(shell.quote(part) for part in shell.split_command(search.group(2))) new_script = command.script.replace(alias, expansion) command = command.update(script=new_script) return fn(command)
def git_support(fn, command): """Resolves git aliases and supports testing for both git and hub.""" # supports GitHub's `hub` command # which is recommended to be used with `alias git=hub` # but at this point, shell aliases have already been resolved if not is_app(command, 'git', 'hub'): return False # perform git aliases expansion if 'trace: alias expansion:' in command.output: search = re.search("trace: alias expansion: ([^ ]*) => ([^\n]*)", command.output) alias = search.group(1) # by default git quotes everything, for example: # 'commit' '--amend' # which is surprising and does not allow to easily test for # eg. 'git commit' expansion = ' '.join(shell.quote(part) for part in shell.split_command(search.group(2))) new_script = command.script.replace(alias, expansion) command = command.update(script=new_script) return fn(command)
[ "Resolves", "git", "aliases", "and", "supports", "testing", "for", "both", "git", "and", "hub", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/git.py#L8-L32
[ "def", "git_support", "(", "fn", ",", "command", ")", ":", "# supports GitHub's `hub` command", "# which is recommended to be used with `alias git=hub`", "# but at this point, shell aliases have already been resolved", "if", "not", "is_app", "(", "command", ",", "'git'", ",", "'hub'", ")", ":", "return", "False", "# perform git aliases expansion", "if", "'trace: alias expansion:'", "in", "command", ".", "output", ":", "search", "=", "re", ".", "search", "(", "\"trace: alias expansion: ([^ ]*) => ([^\\n]*)\"", ",", "command", ".", "output", ")", "alias", "=", "search", ".", "group", "(", "1", ")", "# by default git quotes everything, for example:", "# 'commit' '--amend'", "# which is surprising and does not allow to easily test for", "# eg. 'git commit'", "expansion", "=", "' '", ".", "join", "(", "shell", ".", "quote", "(", "part", ")", "for", "part", "in", "shell", ".", "split_command", "(", "search", ".", "group", "(", "2", ")", ")", ")", "new_script", "=", "command", ".", "script", ".", "replace", "(", "alias", ",", "expansion", ")", "command", "=", "command", ".", "update", "(", "script", "=", "new_script", ")", "return", "fn", "(", "command", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
read_actions
Yields actions for pressed keys.
thefuck/ui.py
def read_actions(): """Yields actions for pressed keys.""" while True: key = get_key() # Handle arrows, j/k (qwerty), and n/e (colemak) if key in (const.KEY_UP, const.KEY_CTRL_N, 'k', 'e'): yield const.ACTION_PREVIOUS elif key in (const.KEY_DOWN, const.KEY_CTRL_P, 'j', 'n'): yield const.ACTION_NEXT elif key in (const.KEY_CTRL_C, 'q'): yield const.ACTION_ABORT elif key in ('\n', '\r'): yield const.ACTION_SELECT
def read_actions(): """Yields actions for pressed keys.""" while True: key = get_key() # Handle arrows, j/k (qwerty), and n/e (colemak) if key in (const.KEY_UP, const.KEY_CTRL_N, 'k', 'e'): yield const.ACTION_PREVIOUS elif key in (const.KEY_DOWN, const.KEY_CTRL_P, 'j', 'n'): yield const.ACTION_NEXT elif key in (const.KEY_CTRL_C, 'q'): yield const.ACTION_ABORT elif key in ('\n', '\r'): yield const.ACTION_SELECT
[ "Yields", "actions", "for", "pressed", "keys", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/ui.py#L11-L24
[ "def", "read_actions", "(", ")", ":", "while", "True", ":", "key", "=", "get_key", "(", ")", "# Handle arrows, j/k (qwerty), and n/e (colemak)", "if", "key", "in", "(", "const", ".", "KEY_UP", ",", "const", ".", "KEY_CTRL_N", ",", "'k'", ",", "'e'", ")", ":", "yield", "const", ".", "ACTION_PREVIOUS", "elif", "key", "in", "(", "const", ".", "KEY_DOWN", ",", "const", ".", "KEY_CTRL_P", ",", "'j'", ",", "'n'", ")", ":", "yield", "const", ".", "ACTION_NEXT", "elif", "key", "in", "(", "const", ".", "KEY_CTRL_C", ",", "'q'", ")", ":", "yield", "const", ".", "ACTION_ABORT", "elif", "key", "in", "(", "'\\n'", ",", "'\\r'", ")", ":", "yield", "const", ".", "ACTION_SELECT" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
select_command
Returns: - the first command when confirmation disabled; - None when ctrl+c pressed; - selected command. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: thefuck.types.CorrectedCommand | None
thefuck/ui.py
def select_command(corrected_commands): """Returns: - the first command when confirmation disabled; - None when ctrl+c pressed; - selected command. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: thefuck.types.CorrectedCommand | None """ try: selector = CommandSelector(corrected_commands) except NoRuleMatched: logs.failed('No fucks given' if get_alias() == 'fuck' else 'Nothing found') return if not settings.require_confirmation: logs.show_corrected_command(selector.value) return selector.value logs.confirm_text(selector.value) for action in read_actions(): if action == const.ACTION_SELECT: sys.stderr.write('\n') return selector.value elif action == const.ACTION_ABORT: logs.failed('\nAborted') return elif action == const.ACTION_PREVIOUS: selector.previous() logs.confirm_text(selector.value) elif action == const.ACTION_NEXT: selector.next() logs.confirm_text(selector.value)
def select_command(corrected_commands): """Returns: - the first command when confirmation disabled; - None when ctrl+c pressed; - selected command. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: thefuck.types.CorrectedCommand | None """ try: selector = CommandSelector(corrected_commands) except NoRuleMatched: logs.failed('No fucks given' if get_alias() == 'fuck' else 'Nothing found') return if not settings.require_confirmation: logs.show_corrected_command(selector.value) return selector.value logs.confirm_text(selector.value) for action in read_actions(): if action == const.ACTION_SELECT: sys.stderr.write('\n') return selector.value elif action == const.ACTION_ABORT: logs.failed('\nAborted') return elif action == const.ACTION_PREVIOUS: selector.previous() logs.confirm_text(selector.value) elif action == const.ACTION_NEXT: selector.next() logs.confirm_text(selector.value)
[ "Returns", ":" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/ui.py#L59-L95
[ "def", "select_command", "(", "corrected_commands", ")", ":", "try", ":", "selector", "=", "CommandSelector", "(", "corrected_commands", ")", "except", "NoRuleMatched", ":", "logs", ".", "failed", "(", "'No fucks given'", "if", "get_alias", "(", ")", "==", "'fuck'", "else", "'Nothing found'", ")", "return", "if", "not", "settings", ".", "require_confirmation", ":", "logs", ".", "show_corrected_command", "(", "selector", ".", "value", ")", "return", "selector", ".", "value", "logs", ".", "confirm_text", "(", "selector", ".", "value", ")", "for", "action", "in", "read_actions", "(", ")", ":", "if", "action", "==", "const", ".", "ACTION_SELECT", ":", "sys", ".", "stderr", ".", "write", "(", "'\\n'", ")", "return", "selector", ".", "value", "elif", "action", "==", "const", ".", "ACTION_ABORT", ":", "logs", ".", "failed", "(", "'\\nAborted'", ")", "return", "elif", "action", "==", "const", ".", "ACTION_PREVIOUS", ":", "selector", ".", "previous", "(", ")", "logs", ".", "confirm_text", "(", "selector", ".", "value", ")", "elif", "action", "==", "const", ".", "ACTION_NEXT", ":", "selector", ".", "next", "(", ")", "logs", ".", "confirm_text", "(", "selector", ".", "value", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_spawn
Create a spawned process. Modified version of pty.spawn with terminal size support.
thefuck/entrypoints/shell_logger.py
def _spawn(shell, master_read):
    """Run *shell* in a child attached to a fresh pseudo-terminal.

    A variant of :func:`pty.spawn` that additionally keeps the pty's
    window size in sync with the controlling terminal via ``SIGWINCH``.

    """
    pid, master_fd = pty.fork()
    if pid == pty.CHILD:
        os.execlp(shell, shell)

    # Remember the terminal mode (``None`` when it can't be queried) so it
    # can be restored once the copy loop ends.
    mode = None
    try:
        mode = tty.tcgetattr(pty.STDIN_FILENO)
        tty.setraw(pty.STDIN_FILENO)
    except tty.error:  # same exception class as termios.error
        mode = None

    _set_pty_size(master_fd)
    signal.signal(signal.SIGWINCH, lambda *_: _set_pty_size(master_fd))

    try:
        pty._copy(master_fd, master_read, pty._read)
    except OSError:
        if mode is not None:
            tty.tcsetattr(pty.STDIN_FILENO, tty.TCSAFLUSH, mode)

    os.close(master_fd)
    return os.waitpid(pid, 0)[1]
[ "Create", "a", "spawned", "process", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/shell_logger.py#L33-L61
[ "def", "_spawn", "(", "shell", ",", "master_read", ")", ":", "pid", ",", "master_fd", "=", "pty", ".", "fork", "(", ")", "if", "pid", "==", "pty", ".", "CHILD", ":", "os", ".", "execlp", "(", "shell", ",", "shell", ")", "try", ":", "mode", "=", "tty", ".", "tcgetattr", "(", "pty", ".", "STDIN_FILENO", ")", "tty", ".", "setraw", "(", "pty", ".", "STDIN_FILENO", ")", "restore", "=", "True", "except", "tty", ".", "error", ":", "# This is the same as termios.error", "restore", "=", "False", "_set_pty_size", "(", "master_fd", ")", "signal", ".", "signal", "(", "signal", ".", "SIGWINCH", ",", "lambda", "*", "_", ":", "_set_pty_size", "(", "master_fd", ")", ")", "try", ":", "pty", ".", "_copy", "(", "master_fd", ",", "master_read", ",", "pty", ".", "_read", ")", "except", "OSError", ":", "if", "restore", ":", "tty", ".", "tcsetattr", "(", "pty", ".", "STDIN_FILENO", ",", "tty", ".", "TCSAFLUSH", ",", "mode", ")", "os", ".", "close", "(", "master_fd", ")", "return", "os", ".", "waitpid", "(", "pid", ",", "0", ")", "[", "1", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
shell_logger
Logs shell output to the `output`. Works like unix script command with `-f` flag.
thefuck/entrypoints/shell_logger.py
def shell_logger(output):
    """Record shell output into the memory-mapped file at *output*.

    Behaves like the unix ``script`` command invoked with the ``-f`` flag.
    Exits the process with the spawned shell's return code, or 1 when no
    ``$SHELL`` is available.

    """
    shell = os.environ.get('SHELL')
    if not shell:
        logs.warn("Shell logger doesn't support your platform.")
        sys.exit(1)

    # Pre-size the log file and map it so `_read` can append in place.
    fd = os.open(output, os.O_CREAT | os.O_TRUNC | os.O_RDWR)
    os.write(fd, b'\x00' * const.LOG_SIZE_IN_BYTES)
    buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED,
                       mmap.PROT_WRITE)

    sys.exit(_spawn(shell, partial(_read, buffer)))
[ "Logs", "shell", "output", "to", "the", "output", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/shell_logger.py#L64-L79
[ "def", "shell_logger", "(", "output", ")", ":", "if", "not", "os", ".", "environ", ".", "get", "(", "'SHELL'", ")", ":", "logs", ".", "warn", "(", "\"Shell logger doesn't support your platform.\"", ")", "sys", ".", "exit", "(", "1", ")", "fd", "=", "os", ".", "open", "(", "output", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_TRUNC", "|", "os", ".", "O_RDWR", ")", "os", ".", "write", "(", "fd", ",", "b'\\x00'", "*", "const", ".", "LOG_SIZE_IN_BYTES", ")", "buffer", "=", "mmap", ".", "mmap", "(", "fd", ",", "const", ".", "LOG_SIZE_IN_BYTES", ",", "mmap", ".", "MAP_SHARED", ",", "mmap", ".", "PROT_WRITE", ")", "return_code", "=", "_spawn", "(", "os", ".", "environ", "[", "'SHELL'", "]", ",", "partial", "(", "_read", ",", "buffer", ")", ")", "sys", ".", "exit", "(", "return_code", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_output
Get output of the script. :param script: Console script. :type script: str :param expanded: Console script with expanded aliases. :type expanded: str :rtype: str
thefuck/output_readers/__init__.py
def get_output(script, expanded):
    """Return the console output produced by the script.

    Picks the first available reader: the shell logger, the instant-mode
    log, or a plain re-run of the command.

    :param script: Console script.
    :type script: str
    :param expanded: Console script with expanded aliases.
    :type expanded: str
    :rtype: str

    """
    if shell_logger.is_available():
        return shell_logger.get_output(script)
    if settings.instant_mode:
        return read_log.get_output(script)
    return rerun.get_output(script, expanded)
[ "Get", "output", "of", "the", "script", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/__init__.py#L5-L20
[ "def", "get_output", "(", "script", ",", "expanded", ")", ":", "if", "shell_logger", ".", "is_available", "(", ")", ":", "return", "shell_logger", ".", "get_output", "(", "script", ")", "if", "settings", ".", "instant_mode", ":", "return", "read_log", ".", "get_output", "(", "script", ")", "else", ":", "return", "rerun", ".", "get_output", "(", "script", ",", "expanded", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Parser._add_arguments
Adds arguments to parser.
thefuck/argument_parser.py
def _add_arguments(self):
    """Registers every supported option on the underlying parser."""
    add = self._parser.add_argument
    add('-v', '--version',
        action='store_true',
        help="show program's version number and exit")
    add('-a', '--alias',
        nargs='?',
        const=get_alias(),
        help='[custom-alias-name] prints alias for current shell')
    add('-l', '--shell-logger',
        action='store',
        help='log shell output to the file')
    add('--enable-experimental-instant-mode',
        action='store_true',
        help='enable experimental instant mode, use on your own risk')
    add('-h', '--help',
        action='store_true',
        help='show this help message and exit')
    # `-y`/`-r` must not be combined, so they live in their own group.
    self._add_conflicting_arguments()
    add('-d', '--debug',
        action='store_true',
        help='enable debug output')
    add('--force-command',
        action='store',
        help=SUPPRESS)
    add('command',
        nargs='*',
        help='command that should be fixed')
[ "Adds", "arguments", "to", "parser", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/argument_parser.py#L17-L52
[ "def", "_add_arguments", "(", "self", ")", ":", "self", ".", "_parser", ".", "add_argument", "(", "'-v'", ",", "'--version'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"show program's version number and exit\"", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-a'", ",", "'--alias'", ",", "nargs", "=", "'?'", ",", "const", "=", "get_alias", "(", ")", ",", "help", "=", "'[custom-alias-name] prints alias for current shell'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-l'", ",", "'--shell-logger'", ",", "action", "=", "'store'", ",", "help", "=", "'log shell output to the file'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'--enable-experimental-instant-mode'", ",", "action", "=", "'store_true'", ",", "help", "=", "'enable experimental instant mode, use on your own risk'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-h'", ",", "'--help'", ",", "action", "=", "'store_true'", ",", "help", "=", "'show this help message and exit'", ")", "self", ".", "_add_conflicting_arguments", "(", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-d'", ",", "'--debug'", ",", "action", "=", "'store_true'", ",", "help", "=", "'enable debug output'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'--force-command'", ",", "action", "=", "'store'", ",", "help", "=", "SUPPRESS", ")", "self", ".", "_parser", ".", "add_argument", "(", "'command'", ",", "nargs", "=", "'*'", ",", "help", "=", "'command that should be fixed'", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Parser._add_conflicting_arguments
It's too dangerous to use `-y` and `-r` together.
thefuck/argument_parser.py
def _add_conflicting_arguments(self): """It's too dangerous to use `-y` and `-r` together.""" group = self._parser.add_mutually_exclusive_group() group.add_argument( '-y', '--yes', '--yeah', action='store_true', help='execute fixed command without confirmation') group.add_argument( '-r', '--repeat', action='store_true', help='repeat on failure')
def _add_conflicting_arguments(self): """It's too dangerous to use `-y` and `-r` together.""" group = self._parser.add_mutually_exclusive_group() group.add_argument( '-y', '--yes', '--yeah', action='store_true', help='execute fixed command without confirmation') group.add_argument( '-r', '--repeat', action='store_true', help='repeat on failure')
[ "It", "s", "too", "dangerous", "to", "use", "-", "y", "and", "-", "r", "together", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/argument_parser.py#L54-L64
[ "def", "_add_conflicting_arguments", "(", "self", ")", ":", "group", "=", "self", ".", "_parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "'-y'", ",", "'--yes'", ",", "'--yeah'", ",", "action", "=", "'store_true'", ",", "help", "=", "'execute fixed command without confirmation'", ")", "group", ".", "add_argument", "(", "'-r'", ",", "'--repeat'", ",", "action", "=", "'store_true'", ",", "help", "=", "'repeat on failure'", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Parser._prepare_arguments
Prepares arguments by: - removing placeholder and moving arguments after it to beginning, we need this to distinguish arguments from `command` with ours; - adding `--` before `command`, so our parse would ignore arguments of `command`.
thefuck/argument_parser.py
def _prepare_arguments(self, argv): """Prepares arguments by: - removing placeholder and moving arguments after it to beginning, we need this to distinguish arguments from `command` with ours; - adding `--` before `command`, so our parse would ignore arguments of `command`. """ if ARGUMENT_PLACEHOLDER in argv: index = argv.index(ARGUMENT_PLACEHOLDER) return argv[index + 1:] + ['--'] + argv[:index] elif argv and not argv[0].startswith('-') and argv[0] != '--': return ['--'] + argv else: return argv
def _prepare_arguments(self, argv): """Prepares arguments by: - removing placeholder and moving arguments after it to beginning, we need this to distinguish arguments from `command` with ours; - adding `--` before `command`, so our parse would ignore arguments of `command`. """ if ARGUMENT_PLACEHOLDER in argv: index = argv.index(ARGUMENT_PLACEHOLDER) return argv[index + 1:] + ['--'] + argv[:index] elif argv and not argv[0].startswith('-') and argv[0] != '--': return ['--'] + argv else: return argv
[ "Prepares", "arguments", "by", ":" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/argument_parser.py#L66-L82
[ "def", "_prepare_arguments", "(", "self", ",", "argv", ")", ":", "if", "ARGUMENT_PLACEHOLDER", "in", "argv", ":", "index", "=", "argv", ".", "index", "(", "ARGUMENT_PLACEHOLDER", ")", "return", "argv", "[", "index", "+", "1", ":", "]", "+", "[", "'--'", "]", "+", "argv", "[", ":", "index", "]", "elif", "argv", "and", "not", "argv", "[", "0", "]", ".", "startswith", "(", "'-'", ")", "and", "argv", "[", "0", "]", "!=", "'--'", ":", "return", "[", "'--'", "]", "+", "argv", "else", ":", "return", "argv" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_scripts
Get custom npm scripts.
thefuck/specific/npm.py
def get_scripts():
    """Yield the names of custom npm scripts.

    Runs ``npm run-script`` and parses its output: script names appear
    indented on the lines following the "available via `npm run-script`:"
    header.

    :rtype: Iterator[str]

    """
    proc = Popen(['npm', 'run-script'], stdout=PIPE)
    # Fixed typo: was `should_yeild`.  Flips to True once the header
    # announcing custom scripts has been seen.
    should_yield = False
    for line in proc.stdout.readlines():
        line = line.decode()
        if 'available via `npm run-script`:' in line:
            should_yield = True
            continue

        if should_yield and re.match(r'^ [^ ]+', line):
            yield line.strip().split(' ')[0]
[ "Get", "custom", "npm", "scripts", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/npm.py#L10-L21
[ "def", "get_scripts", "(", ")", ":", "proc", "=", "Popen", "(", "[", "'npm'", ",", "'run-script'", "]", ",", "stdout", "=", "PIPE", ")", "should_yeild", "=", "False", "for", "line", "in", "proc", ".", "stdout", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "decode", "(", ")", "if", "'available via `npm run-script`:'", "in", "line", ":", "should_yeild", "=", "True", "continue", "if", "should_yeild", "and", "re", ".", "match", "(", "r'^ [^ ]+'", ",", "line", ")", ":", "yield", "line", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", "[", "0", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings.init
Fills `settings` with values from `settings.py` and env.
thefuck/conf.py
def init(self, args=None):
    """Populates `settings` from `settings.py`, the environment and args.

    Failures while reading a source are logged and skipped; later sources
    override earlier ones (file < env < args).

    """
    from .logs import exception

    self._setup_user_dir()
    self._init_settings_file()

    sources = ((self._settings_from_file, "Can't load settings from file"),
               (self._settings_from_env, "Can't load settings from env"))
    for source, error_message in sources:
        try:
            self.update(source())
        except Exception:
            exception(error_message, sys.exc_info())

    self.update(self._settings_from_args(args))
[ "Fills", "settings", "with", "values", "from", "settings", ".", "py", "and", "env", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L17-L34
[ "def", "init", "(", "self", ",", "args", "=", "None", ")", ":", "from", ".", "logs", "import", "exception", "self", ".", "_setup_user_dir", "(", ")", "self", ".", "_init_settings_file", "(", ")", "try", ":", "self", ".", "update", "(", "self", ".", "_settings_from_file", "(", ")", ")", "except", "Exception", ":", "exception", "(", "\"Can't load settings from file\"", ",", "sys", ".", "exc_info", "(", ")", ")", "try", ":", "self", ".", "update", "(", "self", ".", "_settings_from_env", "(", ")", ")", "except", "Exception", ":", "exception", "(", "\"Can't load settings from env\"", ",", "sys", ".", "exc_info", "(", ")", ")", "self", ".", "update", "(", "self", ".", "_settings_from_args", "(", "args", ")", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._get_user_dir_path
Returns Path object representing the user config resource
thefuck/conf.py
def _get_user_dir_path(self): """Returns Path object representing the user config resource""" xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '~/.config') user_dir = Path(xdg_config_home, 'thefuck').expanduser() legacy_user_dir = Path('~', '.thefuck').expanduser() # For backward compatibility use legacy '~/.thefuck' if it exists: if legacy_user_dir.is_dir(): warn(u'Config path {} is deprecated. Please move to {}'.format( legacy_user_dir, user_dir)) return legacy_user_dir else: return user_dir
def _get_user_dir_path(self): """Returns Path object representing the user config resource""" xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '~/.config') user_dir = Path(xdg_config_home, 'thefuck').expanduser() legacy_user_dir = Path('~', '.thefuck').expanduser() # For backward compatibility use legacy '~/.thefuck' if it exists: if legacy_user_dir.is_dir(): warn(u'Config path {} is deprecated. Please move to {}'.format( legacy_user_dir, user_dir)) return legacy_user_dir else: return user_dir
[ "Returns", "Path", "object", "representing", "the", "user", "config", "resource" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L44-L56
[ "def", "_get_user_dir_path", "(", "self", ")", ":", "xdg_config_home", "=", "os", ".", "environ", ".", "get", "(", "'XDG_CONFIG_HOME'", ",", "'~/.config'", ")", "user_dir", "=", "Path", "(", "xdg_config_home", ",", "'thefuck'", ")", ".", "expanduser", "(", ")", "legacy_user_dir", "=", "Path", "(", "'~'", ",", "'.thefuck'", ")", ".", "expanduser", "(", ")", "# For backward compatibility use legacy '~/.thefuck' if it exists:", "if", "legacy_user_dir", ".", "is_dir", "(", ")", ":", "warn", "(", "u'Config path {} is deprecated. Please move to {}'", ".", "format", "(", "legacy_user_dir", ",", "user_dir", ")", ")", "return", "legacy_user_dir", "else", ":", "return", "user_dir" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._setup_user_dir
Returns user config dir, create it when it doesn't exist.
thefuck/conf.py
def _setup_user_dir(self): """Returns user config dir, create it when it doesn't exist.""" user_dir = self._get_user_dir_path() rules_dir = user_dir.joinpath('rules') if not rules_dir.is_dir(): rules_dir.mkdir(parents=True) self.user_dir = user_dir
def _setup_user_dir(self): """Returns user config dir, create it when it doesn't exist.""" user_dir = self._get_user_dir_path() rules_dir = user_dir.joinpath('rules') if not rules_dir.is_dir(): rules_dir.mkdir(parents=True) self.user_dir = user_dir
[ "Returns", "user", "config", "dir", "create", "it", "when", "it", "doesn", "t", "exist", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L58-L65
[ "def", "_setup_user_dir", "(", "self", ")", ":", "user_dir", "=", "self", ".", "_get_user_dir_path", "(", ")", "rules_dir", "=", "user_dir", ".", "joinpath", "(", "'rules'", ")", "if", "not", "rules_dir", ".", "is_dir", "(", ")", ":", "rules_dir", ".", "mkdir", "(", "parents", "=", "True", ")", "self", ".", "user_dir", "=", "user_dir" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._settings_from_file
Loads settings from file.
thefuck/conf.py
def _settings_from_file(self):
    """Loads overrides declared in the user's `settings.py` module.

    Only names that correspond to known settings are picked up.

    """
    module = load_source(
        'settings', text_type(self.user_dir.joinpath('settings.py')))
    return {name: getattr(module, name)
            for name in const.DEFAULT_SETTINGS
            if hasattr(module, name)}
[ "Loads", "settings", "from", "file", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L67-L73
[ "def", "_settings_from_file", "(", "self", ")", ":", "settings", "=", "load_source", "(", "'settings'", ",", "text_type", "(", "self", ".", "user_dir", ".", "joinpath", "(", "'settings.py'", ")", ")", ")", "return", "{", "key", ":", "getattr", "(", "settings", ",", "key", ")", "for", "key", "in", "const", ".", "DEFAULT_SETTINGS", ".", "keys", "(", ")", "if", "hasattr", "(", "settings", ",", "key", ")", "}" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._rules_from_env
Transforms rules list from env-string to python.
thefuck/conf.py
def _rules_from_env(self, val): """Transforms rules list from env-string to python.""" val = val.split(':') if 'DEFAULT_RULES' in val: val = const.DEFAULT_RULES + [rule for rule in val if rule != 'DEFAULT_RULES'] return val
def _rules_from_env(self, val): """Transforms rules list from env-string to python.""" val = val.split(':') if 'DEFAULT_RULES' in val: val = const.DEFAULT_RULES + [rule for rule in val if rule != 'DEFAULT_RULES'] return val
[ "Transforms", "rules", "list", "from", "env", "-", "string", "to", "python", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L75-L80
[ "def", "_rules_from_env", "(", "self", ",", "val", ")", ":", "val", "=", "val", ".", "split", "(", "':'", ")", "if", "'DEFAULT_RULES'", "in", "val", ":", "val", "=", "const", ".", "DEFAULT_RULES", "+", "[", "rule", "for", "rule", "in", "val", "if", "rule", "!=", "'DEFAULT_RULES'", "]", "return", "val" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._priority_from_env
Gets priority pairs from env.
thefuck/conf.py
def _priority_from_env(self, val): """Gets priority pairs from env.""" for part in val.split(':'): try: rule, priority = part.split('=') yield rule, int(priority) except ValueError: continue
def _priority_from_env(self, val): """Gets priority pairs from env.""" for part in val.split(':'): try: rule, priority = part.split('=') yield rule, int(priority) except ValueError: continue
[ "Gets", "priority", "pairs", "from", "env", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L82-L89
[ "def", "_priority_from_env", "(", "self", ",", "val", ")", ":", "for", "part", "in", "val", ".", "split", "(", "':'", ")", ":", "try", ":", "rule", ",", "priority", "=", "part", ".", "split", "(", "'='", ")", "yield", "rule", ",", "int", "(", "priority", ")", "except", "ValueError", ":", "continue" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._val_from_env
Transforms env-strings to python.
thefuck/conf.py
def _val_from_env(self, env, attr): """Transforms env-strings to python.""" val = os.environ[env] if attr in ('rules', 'exclude_rules'): return self._rules_from_env(val) elif attr == 'priority': return dict(self._priority_from_env(val)) elif attr in ('wait_command', 'history_limit', 'wait_slow_command', 'num_close_matches'): return int(val) elif attr in ('require_confirmation', 'no_colors', 'debug', 'alter_history', 'instant_mode'): return val.lower() == 'true' elif attr == 'slow_commands': return val.split(':') else: return val
def _val_from_env(self, env, attr): """Transforms env-strings to python.""" val = os.environ[env] if attr in ('rules', 'exclude_rules'): return self._rules_from_env(val) elif attr == 'priority': return dict(self._priority_from_env(val)) elif attr in ('wait_command', 'history_limit', 'wait_slow_command', 'num_close_matches'): return int(val) elif attr in ('require_confirmation', 'no_colors', 'debug', 'alter_history', 'instant_mode'): return val.lower() == 'true' elif attr == 'slow_commands': return val.split(':') else: return val
[ "Transforms", "env", "-", "strings", "to", "python", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L91-L107
[ "def", "_val_from_env", "(", "self", ",", "env", ",", "attr", ")", ":", "val", "=", "os", ".", "environ", "[", "env", "]", "if", "attr", "in", "(", "'rules'", ",", "'exclude_rules'", ")", ":", "return", "self", ".", "_rules_from_env", "(", "val", ")", "elif", "attr", "==", "'priority'", ":", "return", "dict", "(", "self", ".", "_priority_from_env", "(", "val", ")", ")", "elif", "attr", "in", "(", "'wait_command'", ",", "'history_limit'", ",", "'wait_slow_command'", ",", "'num_close_matches'", ")", ":", "return", "int", "(", "val", ")", "elif", "attr", "in", "(", "'require_confirmation'", ",", "'no_colors'", ",", "'debug'", ",", "'alter_history'", ",", "'instant_mode'", ")", ":", "return", "val", ".", "lower", "(", ")", "==", "'true'", "elif", "attr", "==", "'slow_commands'", ":", "return", "val", ".", "split", "(", "':'", ")", "else", ":", "return", "val" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._settings_from_env
Loads settings from env.
thefuck/conf.py
def _settings_from_env(self):
    """Collects a value for every recognized env variable that is set."""
    found = {}
    for env, attr in const.ENV_TO_ATTR.items():
        if env in os.environ:
            found[attr] = self._val_from_env(env, attr)
    return found
[ "Loads", "settings", "from", "env", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L109-L113
[ "def", "_settings_from_env", "(", "self", ")", ":", "return", "{", "attr", ":", "self", ".", "_val_from_env", "(", "env", ",", "attr", ")", "for", "env", ",", "attr", "in", "const", ".", "ENV_TO_ATTR", ".", "items", "(", ")", "if", "env", "in", "os", ".", "environ", "}" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Settings._settings_from_args
Loads settings from args.
thefuck/conf.py
def _settings_from_args(self, args): """Loads settings from args.""" if not args: return {} from_args = {} if args.yes: from_args['require_confirmation'] = not args.yes if args.debug: from_args['debug'] = args.debug if args.repeat: from_args['repeat'] = args.repeat return from_args
def _settings_from_args(self, args): """Loads settings from args.""" if not args: return {} from_args = {} if args.yes: from_args['require_confirmation'] = not args.yes if args.debug: from_args['debug'] = args.debug if args.repeat: from_args['repeat'] = args.repeat return from_args
[ "Loads", "settings", "from", "args", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L115-L127
[ "def", "_settings_from_args", "(", "self", ",", "args", ")", ":", "if", "not", "args", ":", "return", "{", "}", "from_args", "=", "{", "}", "if", "args", ".", "yes", ":", "from_args", "[", "'require_confirmation'", "]", "=", "not", "args", ".", "yes", "if", "args", ".", "debug", ":", "from_args", "[", "'debug'", "]", "=", "args", ".", "debug", "if", "args", ".", "repeat", ":", "from_args", "[", "'repeat'", "]", "=", "args", ".", "repeat", "return", "from_args" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_get_destination
When arguments order is wrong first argument will be destination.
thefuck/rules/ln_s_order.py
def _get_destination(script_parts): """When arguments order is wrong first argument will be destination.""" for part in script_parts: if part not in {'ln', '-s', '--symbolic'} and os.path.exists(part): return part
def _get_destination(script_parts): """When arguments order is wrong first argument will be destination.""" for part in script_parts: if part not in {'ln', '-s', '--symbolic'} and os.path.exists(part): return part
[ "When", "arguments", "order", "is", "wrong", "first", "argument", "will", "be", "destination", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/ln_s_order.py#L5-L9
[ "def", "_get_destination", "(", "script_parts", ")", ":", "for", "part", "in", "script_parts", ":", "if", "part", "not", "in", "{", "'ln'", ",", "'-s'", ",", "'--symbolic'", "}", "and", "os", ".", "path", ".", "exists", "(", "part", ")", ":", "return", "part" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
sudo_support
Removes sudo before calling fn and adds it after.
thefuck/specific/sudo.py
def sudo_support(fn, command):
    """Strips a leading `sudo ` before calling `fn` and prepends it back
    onto whatever `fn` returns (a string or each item of a list)."""
    if not command.script.startswith('sudo '):
        return fn(command)

    result = fn(command.update(script=command.script[5:]))

    if isinstance(result, list):
        return [u'sudo {}'.format(x) for x in result]
    if result and isinstance(result, six.string_types):
        return u'sudo {}'.format(result)
    return result
[ "Removes", "sudo", "before", "calling", "fn", "and", "adds", "it", "after", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/sudo.py#L6-L18
[ "def", "sudo_support", "(", "fn", ",", "command", ")", ":", "if", "not", "command", ".", "script", ".", "startswith", "(", "'sudo '", ")", ":", "return", "fn", "(", "command", ")", "result", "=", "fn", "(", "command", ".", "update", "(", "script", "=", "command", ".", "script", "[", "5", ":", "]", ")", ")", "if", "result", "and", "isinstance", "(", "result", ",", "six", ".", "string_types", ")", ":", "return", "u'sudo {}'", ".", "format", "(", "result", ")", "elif", "isinstance", "(", "result", ",", "list", ")", ":", "return", "[", "u'sudo {}'", ".", "format", "(", "x", ")", "for", "x", "in", "result", "]", "else", ":", "return", "result" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_kill_process
Tries to kill the process otherwise just logs a debug message, the process will be killed when thefuck terminates. :type proc: Process
thefuck/output_readers/rerun.py
def _kill_process(proc): """Tries to kill the process otherwise just logs a debug message, the process will be killed when thefuck terminates. :type proc: Process """ try: proc.kill() except AccessDenied: logs.debug(u'Rerun: process PID {} ({}) could not be terminated'.format( proc.pid, proc.exe()))
def _kill_process(proc): """Tries to kill the process otherwise just logs a debug message, the process will be killed when thefuck terminates. :type proc: Process """ try: proc.kill() except AccessDenied: logs.debug(u'Rerun: process PID {} ({}) could not be terminated'.format( proc.pid, proc.exe()))
[ "Tries", "to", "kill", "the", "process", "otherwise", "just", "logs", "a", "debug", "message", "the", "process", "will", "be", "killed", "when", "thefuck", "terminates", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/rerun.py#L9-L20
[ "def", "_kill_process", "(", "proc", ")", ":", "try", ":", "proc", ".", "kill", "(", ")", "except", "AccessDenied", ":", "logs", ".", "debug", "(", "u'Rerun: process PID {} ({}) could not be terminated'", ".", "format", "(", "proc", ".", "pid", ",", "proc", ".", "exe", "(", ")", ")", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_wait_output
Returns `True` if we can get output of the command in the `settings.wait_command` time. Command will be killed if it wasn't finished in the time. :type popen: Popen :rtype: bool
thefuck/output_readers/rerun.py
def _wait_output(popen, is_slow): """Returns `True` if we can get output of the command in the `settings.wait_command` time. Command will be killed if it wasn't finished in the time. :type popen: Popen :rtype: bool """ proc = Process(popen.pid) try: proc.wait(settings.wait_slow_command if is_slow else settings.wait_command) return True except TimeoutExpired: for child in proc.children(recursive=True): _kill_process(child) _kill_process(proc) return False
def _wait_output(popen, is_slow): """Returns `True` if we can get output of the command in the `settings.wait_command` time. Command will be killed if it wasn't finished in the time. :type popen: Popen :rtype: bool """ proc = Process(popen.pid) try: proc.wait(settings.wait_slow_command if is_slow else settings.wait_command) return True except TimeoutExpired: for child in proc.children(recursive=True): _kill_process(child) _kill_process(proc) return False
[ "Returns", "True", "if", "we", "can", "get", "output", "of", "the", "command", "in", "the", "settings", ".", "wait_command", "time", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/rerun.py#L23-L42
[ "def", "_wait_output", "(", "popen", ",", "is_slow", ")", ":", "proc", "=", "Process", "(", "popen", ".", "pid", ")", "try", ":", "proc", ".", "wait", "(", "settings", ".", "wait_slow_command", "if", "is_slow", "else", "settings", ".", "wait_command", ")", "return", "True", "except", "TimeoutExpired", ":", "for", "child", "in", "proc", ".", "children", "(", "recursive", "=", "True", ")", ":", "_kill_process", "(", "child", ")", "_kill_process", "(", "proc", ")", "return", "False" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_output
Runs the script and obtains stdin/stderr. :type script: str :type expanded: str :rtype: str | None
thefuck/output_readers/rerun.py
def get_output(script, expanded): """Runs the script and obtains stdin/stderr. :type script: str :type expanded: str :rtype: str | None """ env = dict(os.environ) env.update(settings.env) is_slow = shlex.split(expanded) in settings.slow_commands with logs.debug_time(u'Call: {}; with env: {}; is slow: '.format( script, env, is_slow)): result = Popen(expanded, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, env=env) if _wait_output(result, is_slow): output = result.stdout.read().decode('utf-8') logs.debug(u'Received output: {}'.format(output)) return output else: logs.debug(u'Execution timed out!') return None
def get_output(script, expanded): """Runs the script and obtains stdin/stderr. :type script: str :type expanded: str :rtype: str | None """ env = dict(os.environ) env.update(settings.env) is_slow = shlex.split(expanded) in settings.slow_commands with logs.debug_time(u'Call: {}; with env: {}; is slow: '.format( script, env, is_slow)): result = Popen(expanded, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, env=env) if _wait_output(result, is_slow): output = result.stdout.read().decode('utf-8') logs.debug(u'Received output: {}'.format(output)) return output else: logs.debug(u'Execution timed out!') return None
[ "Runs", "the", "script", "and", "obtains", "stdin", "/", "stderr", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/rerun.py#L45-L67
[ "def", "get_output", "(", "script", ",", "expanded", ")", ":", "env", "=", "dict", "(", "os", ".", "environ", ")", "env", ".", "update", "(", "settings", ".", "env", ")", "is_slow", "=", "shlex", ".", "split", "(", "expanded", ")", "in", "settings", ".", "slow_commands", "with", "logs", ".", "debug_time", "(", "u'Call: {}; with env: {}; is slow: '", ".", "format", "(", "script", ",", "env", ",", "is_slow", ")", ")", ":", "result", "=", "Popen", "(", "expanded", ",", "shell", "=", "True", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "STDOUT", ",", "env", "=", "env", ")", "if", "_wait_output", "(", "result", ",", "is_slow", ")", ":", "output", "=", "result", ".", "stdout", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "logs", ".", "debug", "(", "u'Received output: {}'", ".", "format", "(", "output", ")", ")", "return", "output", "else", ":", "logs", ".", "debug", "(", "u'Execution timed out!'", ")", "return", "None" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_output
Reads script output from log. :type script: str :rtype: str | None
thefuck/output_readers/read_log.py
def get_output(script): """Reads script output from log. :type script: str :rtype: str | None """ if six.PY2: logs.warn('Experimental instant mode is Python 3+ only') return None if 'THEFUCK_OUTPUT_LOG' not in os.environ: logs.warn("Output log isn't specified") return None if const.USER_COMMAND_MARK not in os.environ.get('PS1', ''): logs.warn( "PS1 doesn't contain user command mark, please ensure " "that PS1 is not changed after The Fuck alias initialization") return None try: with logs.debug_time(u'Read output from log'): fd = os.open(os.environ['THEFUCK_OUTPUT_LOG'], os.O_RDONLY) buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_READ) _skip_old_lines(buffer) lines = _get_output_lines(script, buffer) output = '\n'.join(lines).strip() logs.debug(u'Received output: {}'.format(output)) return output except OSError: logs.warn("Can't read output log") return None except ScriptNotInLog: logs.warn("Script not found in output log") return None
def get_output(script): """Reads script output from log. :type script: str :rtype: str | None """ if six.PY2: logs.warn('Experimental instant mode is Python 3+ only') return None if 'THEFUCK_OUTPUT_LOG' not in os.environ: logs.warn("Output log isn't specified") return None if const.USER_COMMAND_MARK not in os.environ.get('PS1', ''): logs.warn( "PS1 doesn't contain user command mark, please ensure " "that PS1 is not changed after The Fuck alias initialization") return None try: with logs.debug_time(u'Read output from log'): fd = os.open(os.environ['THEFUCK_OUTPUT_LOG'], os.O_RDONLY) buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_READ) _skip_old_lines(buffer) lines = _get_output_lines(script, buffer) output = '\n'.join(lines).strip() logs.debug(u'Received output: {}'.format(output)) return output except OSError: logs.warn("Can't read output log") return None except ScriptNotInLog: logs.warn("Script not found in output log") return None
[ "Reads", "script", "output", "from", "log", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/read_log.py#L70-L105
[ "def", "get_output", "(", "script", ")", ":", "if", "six", ".", "PY2", ":", "logs", ".", "warn", "(", "'Experimental instant mode is Python 3+ only'", ")", "return", "None", "if", "'THEFUCK_OUTPUT_LOG'", "not", "in", "os", ".", "environ", ":", "logs", ".", "warn", "(", "\"Output log isn't specified\"", ")", "return", "None", "if", "const", ".", "USER_COMMAND_MARK", "not", "in", "os", ".", "environ", ".", "get", "(", "'PS1'", ",", "''", ")", ":", "logs", ".", "warn", "(", "\"PS1 doesn't contain user command mark, please ensure \"", "\"that PS1 is not changed after The Fuck alias initialization\"", ")", "return", "None", "try", ":", "with", "logs", ".", "debug_time", "(", "u'Read output from log'", ")", ":", "fd", "=", "os", ".", "open", "(", "os", ".", "environ", "[", "'THEFUCK_OUTPUT_LOG'", "]", ",", "os", ".", "O_RDONLY", ")", "buffer", "=", "mmap", ".", "mmap", "(", "fd", ",", "const", ".", "LOG_SIZE_IN_BYTES", ",", "mmap", ".", "MAP_SHARED", ",", "mmap", ".", "PROT_READ", ")", "_skip_old_lines", "(", "buffer", ")", "lines", "=", "_get_output_lines", "(", "script", ",", "buffer", ")", "output", "=", "'\\n'", ".", "join", "(", "lines", ")", ".", "strip", "(", ")", "logs", ".", "debug", "(", "u'Received output: {}'", ".", "format", "(", "output", ")", ")", "return", "output", "except", "OSError", ":", "logs", ".", "warn", "(", "\"Can't read output log\"", ")", "return", "None", "except", "ScriptNotInLog", ":", "logs", ".", "warn", "(", "\"Script not found in output log\"", ")", "return", "None" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_pkgfile
Gets the packages that provide the given command using `pkgfile`. If the command is of the form `sudo foo`, searches for the `foo` command instead.
thefuck/specific/archlinux.py
def get_pkgfile(command): """ Gets the packages that provide the given command using `pkgfile`. If the command is of the form `sudo foo`, searches for the `foo` command instead. """ try: command = command.strip() if command.startswith('sudo '): command = command[5:] command = command.split(" ")[0] packages = subprocess.check_output( ['pkgfile', '-b', '-v', command], universal_newlines=True, stderr=utils.DEVNULL ).splitlines() return [package.split()[0] for package in packages] except subprocess.CalledProcessError as err: if err.returncode == 1 and err.output == "": return [] else: raise err
def get_pkgfile(command): """ Gets the packages that provide the given command using `pkgfile`. If the command is of the form `sudo foo`, searches for the `foo` command instead. """ try: command = command.strip() if command.startswith('sudo '): command = command[5:] command = command.split(" ")[0] packages = subprocess.check_output( ['pkgfile', '-b', '-v', command], universal_newlines=True, stderr=utils.DEVNULL ).splitlines() return [package.split()[0] for package in packages] except subprocess.CalledProcessError as err: if err.returncode == 1 and err.output == "": return [] else: raise err
[ "Gets", "the", "packages", "that", "provide", "the", "given", "command", "using", "pkgfile", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/archlinux.py#L7-L31
[ "def", "get_pkgfile", "(", "command", ")", ":", "try", ":", "command", "=", "command", ".", "strip", "(", ")", "if", "command", ".", "startswith", "(", "'sudo '", ")", ":", "command", "=", "command", "[", "5", ":", "]", "command", "=", "command", ".", "split", "(", "\" \"", ")", "[", "0", "]", "packages", "=", "subprocess", ".", "check_output", "(", "[", "'pkgfile'", ",", "'-b'", ",", "'-v'", ",", "command", "]", ",", "universal_newlines", "=", "True", ",", "stderr", "=", "utils", ".", "DEVNULL", ")", ".", "splitlines", "(", ")", "return", "[", "package", ".", "split", "(", ")", "[", "0", "]", "for", "package", "in", "packages", "]", "except", "subprocess", ".", "CalledProcessError", "as", "err", ":", "if", "err", ".", "returncode", "==", "1", "and", "err", ".", "output", "==", "\"\"", ":", "return", "[", "]", "else", ":", "raise", "err" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_get_sub_dirs
Returns a list of the child directories of the given parent directory
thefuck/rules/cd_correction.py
def _get_sub_dirs(parent): """Returns a list of the child directories of the given parent directory""" return [child for child in os.listdir(parent) if os.path.isdir(os.path.join(parent, child))]
def _get_sub_dirs(parent): """Returns a list of the child directories of the given parent directory""" return [child for child in os.listdir(parent) if os.path.isdir(os.path.join(parent, child))]
[ "Returns", "a", "list", "of", "the", "child", "directories", "of", "the", "given", "parent", "directory" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/cd_correction.py#L14-L16
[ "def", "_get_sub_dirs", "(", "parent", ")", ":", "return", "[", "child", "for", "child", "in", "os", ".", "listdir", "(", "parent", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "child", ")", ")", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_new_command
Attempt to rebuild the path string by spellchecking the directories. If it fails (i.e. no directories are a close enough match), then it defaults to the rules of cd_mkdir. Change sensitivity by changing MAX_ALLOWED_DIFF. Default value is 0.6
thefuck/rules/cd_correction.py
def get_new_command(command): """ Attempt to rebuild the path string by spellchecking the directories. If it fails (i.e. no directories are a close enough match), then it defaults to the rules of cd_mkdir. Change sensitivity by changing MAX_ALLOWED_DIFF. Default value is 0.6 """ dest = command.script_parts[1].split(os.sep) if dest[-1] == '': dest = dest[:-1] if dest[0] == '': cwd = os.sep dest = dest[1:] elif six.PY2: cwd = os.getcwdu() else: cwd = os.getcwd() for directory in dest: if directory == ".": continue elif directory == "..": cwd = os.path.split(cwd)[0] continue best_matches = get_close_matches(directory, _get_sub_dirs(cwd), cutoff=MAX_ALLOWED_DIFF) if best_matches: cwd = os.path.join(cwd, best_matches[0]) else: return cd_mkdir.get_new_command(command) return u'cd "{0}"'.format(cwd)
def get_new_command(command): """ Attempt to rebuild the path string by spellchecking the directories. If it fails (i.e. no directories are a close enough match), then it defaults to the rules of cd_mkdir. Change sensitivity by changing MAX_ALLOWED_DIFF. Default value is 0.6 """ dest = command.script_parts[1].split(os.sep) if dest[-1] == '': dest = dest[:-1] if dest[0] == '': cwd = os.sep dest = dest[1:] elif six.PY2: cwd = os.getcwdu() else: cwd = os.getcwd() for directory in dest: if directory == ".": continue elif directory == "..": cwd = os.path.split(cwd)[0] continue best_matches = get_close_matches(directory, _get_sub_dirs(cwd), cutoff=MAX_ALLOWED_DIFF) if best_matches: cwd = os.path.join(cwd, best_matches[0]) else: return cd_mkdir.get_new_command(command) return u'cd "{0}"'.format(cwd)
[ "Attempt", "to", "rebuild", "the", "path", "string", "by", "spellchecking", "the", "directories", ".", "If", "it", "fails", "(", "i", ".", "e", ".", "no", "directories", "are", "a", "close", "enough", "match", ")", "then", "it", "defaults", "to", "the", "rules", "of", "cd_mkdir", ".", "Change", "sensitivity", "by", "changing", "MAX_ALLOWED_DIFF", ".", "Default", "value", "is", "0", ".", "6" ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/cd_correction.py#L32-L61
[ "def", "get_new_command", "(", "command", ")", ":", "dest", "=", "command", ".", "script_parts", "[", "1", "]", ".", "split", "(", "os", ".", "sep", ")", "if", "dest", "[", "-", "1", "]", "==", "''", ":", "dest", "=", "dest", "[", ":", "-", "1", "]", "if", "dest", "[", "0", "]", "==", "''", ":", "cwd", "=", "os", ".", "sep", "dest", "=", "dest", "[", "1", ":", "]", "elif", "six", ".", "PY2", ":", "cwd", "=", "os", ".", "getcwdu", "(", ")", "else", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "for", "directory", "in", "dest", ":", "if", "directory", "==", "\".\"", ":", "continue", "elif", "directory", "==", "\"..\"", ":", "cwd", "=", "os", ".", "path", ".", "split", "(", "cwd", ")", "[", "0", "]", "continue", "best_matches", "=", "get_close_matches", "(", "directory", ",", "_get_sub_dirs", "(", "cwd", ")", ",", "cutoff", "=", "MAX_ALLOWED_DIFF", ")", "if", "best_matches", ":", "cwd", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "best_matches", "[", "0", "]", ")", "else", ":", "return", "cd_mkdir", ".", "get_new_command", "(", "command", ")", "return", "u'cd \"{0}\"'", ".", "format", "(", "cwd", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Command.update
Returns new command with replaced fields. :rtype: Command
thefuck/types.py
def update(self, **kwargs): """Returns new command with replaced fields. :rtype: Command """ kwargs.setdefault('script', self.script) kwargs.setdefault('output', self.output) return Command(**kwargs)
def update(self, **kwargs): """Returns new command with replaced fields. :rtype: Command """ kwargs.setdefault('script', self.script) kwargs.setdefault('output', self.output) return Command(**kwargs)
[ "Returns", "new", "command", "with", "replaced", "fields", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L58-L66
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'script'", ",", "self", ".", "script", ")", "kwargs", ".", "setdefault", "(", "'output'", ",", "self", ".", "output", ")", "return", "Command", "(", "*", "*", "kwargs", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Command.from_raw_script
Creates instance of `Command` from a list of script parts. :type raw_script: [basestring] :rtype: Command :raises: EmptyCommand
thefuck/types.py
def from_raw_script(cls, raw_script): """Creates instance of `Command` from a list of script parts. :type raw_script: [basestring] :rtype: Command :raises: EmptyCommand """ script = format_raw_script(raw_script) if not script: raise EmptyCommand expanded = shell.from_shell(script) output = get_output(script, expanded) return cls(expanded, output)
def from_raw_script(cls, raw_script): """Creates instance of `Command` from a list of script parts. :type raw_script: [basestring] :rtype: Command :raises: EmptyCommand """ script = format_raw_script(raw_script) if not script: raise EmptyCommand expanded = shell.from_shell(script) output = get_output(script, expanded) return cls(expanded, output)
[ "Creates", "instance", "of", "Command", "from", "a", "list", "of", "script", "parts", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L69-L83
[ "def", "from_raw_script", "(", "cls", ",", "raw_script", ")", ":", "script", "=", "format_raw_script", "(", "raw_script", ")", "if", "not", "script", ":", "raise", "EmptyCommand", "expanded", "=", "shell", ".", "from_shell", "(", "script", ")", "output", "=", "get_output", "(", "script", ",", "expanded", ")", "return", "cls", "(", "expanded", ",", "output", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Rule.from_path
Creates rule instance from path. :type path: pathlib.Path :rtype: Rule
thefuck/types.py
def from_path(cls, path): """Creates rule instance from path. :type path: pathlib.Path :rtype: Rule """ name = path.name[:-3] with logs.debug_time(u'Importing rule: {};'.format(name)): rule_module = load_source(name, str(path)) priority = getattr(rule_module, 'priority', DEFAULT_PRIORITY) return cls(name, rule_module.match, rule_module.get_new_command, getattr(rule_module, 'enabled_by_default', True), getattr(rule_module, 'side_effect', None), settings.priority.get(name, priority), getattr(rule_module, 'requires_output', True))
def from_path(cls, path): """Creates rule instance from path. :type path: pathlib.Path :rtype: Rule """ name = path.name[:-3] with logs.debug_time(u'Importing rule: {};'.format(name)): rule_module = load_source(name, str(path)) priority = getattr(rule_module, 'priority', DEFAULT_PRIORITY) return cls(name, rule_module.match, rule_module.get_new_command, getattr(rule_module, 'enabled_by_default', True), getattr(rule_module, 'side_effect', None), settings.priority.get(name, priority), getattr(rule_module, 'requires_output', True))
[ "Creates", "rule", "instance", "from", "path", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L131-L147
[ "def", "from_path", "(", "cls", ",", "path", ")", ":", "name", "=", "path", ".", "name", "[", ":", "-", "3", "]", "with", "logs", ".", "debug_time", "(", "u'Importing rule: {};'", ".", "format", "(", "name", ")", ")", ":", "rule_module", "=", "load_source", "(", "name", ",", "str", "(", "path", ")", ")", "priority", "=", "getattr", "(", "rule_module", ",", "'priority'", ",", "DEFAULT_PRIORITY", ")", "return", "cls", "(", "name", ",", "rule_module", ".", "match", ",", "rule_module", ".", "get_new_command", ",", "getattr", "(", "rule_module", ",", "'enabled_by_default'", ",", "True", ")", ",", "getattr", "(", "rule_module", ",", "'side_effect'", ",", "None", ")", ",", "settings", ".", "priority", ".", "get", "(", "name", ",", "priority", ")", ",", "getattr", "(", "rule_module", ",", "'requires_output'", ",", "True", ")", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Rule.is_enabled
Returns `True` when rule enabled. :rtype: bool
thefuck/types.py
def is_enabled(self): """Returns `True` when rule enabled. :rtype: bool """ if self.name in settings.exclude_rules: return False elif self.name in settings.rules: return True elif self.enabled_by_default and ALL_ENABLED in settings.rules: return True else: return False
def is_enabled(self): """Returns `True` when rule enabled. :rtype: bool """ if self.name in settings.exclude_rules: return False elif self.name in settings.rules: return True elif self.enabled_by_default and ALL_ENABLED in settings.rules: return True else: return False
[ "Returns", "True", "when", "rule", "enabled", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L150-L163
[ "def", "is_enabled", "(", "self", ")", ":", "if", "self", ".", "name", "in", "settings", ".", "exclude_rules", ":", "return", "False", "elif", "self", ".", "name", "in", "settings", ".", "rules", ":", "return", "True", "elif", "self", ".", "enabled_by_default", "and", "ALL_ENABLED", "in", "settings", ".", "rules", ":", "return", "True", "else", ":", "return", "False" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Rule.is_match
Returns `True` if rule matches the command. :type command: Command :rtype: bool
thefuck/types.py
def is_match(self, command): """Returns `True` if rule matches the command. :type command: Command :rtype: bool """ if command.output is None and self.requires_output: return False try: with logs.debug_time(u'Trying rule: {};'.format(self.name)): if self.match(command): return True except Exception: logs.rule_failed(self, sys.exc_info())
def is_match(self, command): """Returns `True` if rule matches the command. :type command: Command :rtype: bool """ if command.output is None and self.requires_output: return False try: with logs.debug_time(u'Trying rule: {};'.format(self.name)): if self.match(command): return True except Exception: logs.rule_failed(self, sys.exc_info())
[ "Returns", "True", "if", "rule", "matches", "the", "command", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L165-L180
[ "def", "is_match", "(", "self", ",", "command", ")", ":", "if", "command", ".", "output", "is", "None", "and", "self", ".", "requires_output", ":", "return", "False", "try", ":", "with", "logs", ".", "debug_time", "(", "u'Trying rule: {};'", ".", "format", "(", "self", ".", "name", ")", ")", ":", "if", "self", ".", "match", "(", "command", ")", ":", "return", "True", "except", "Exception", ":", "logs", ".", "rule_failed", "(", "self", ",", "sys", ".", "exc_info", "(", ")", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Rule.get_corrected_commands
Returns generator with corrected commands. :type command: Command :rtype: Iterable[CorrectedCommand]
thefuck/types.py
def get_corrected_commands(self, command): """Returns generator with corrected commands. :type command: Command :rtype: Iterable[CorrectedCommand] """ new_commands = self.get_new_command(command) if not isinstance(new_commands, list): new_commands = (new_commands,) for n, new_command in enumerate(new_commands): yield CorrectedCommand(script=new_command, side_effect=self.side_effect, priority=(n + 1) * self.priority)
def get_corrected_commands(self, command): """Returns generator with corrected commands. :type command: Command :rtype: Iterable[CorrectedCommand] """ new_commands = self.get_new_command(command) if not isinstance(new_commands, list): new_commands = (new_commands,) for n, new_command in enumerate(new_commands): yield CorrectedCommand(script=new_command, side_effect=self.side_effect, priority=(n + 1) * self.priority)
[ "Returns", "generator", "with", "corrected", "commands", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L182-L195
[ "def", "get_corrected_commands", "(", "self", ",", "command", ")", ":", "new_commands", "=", "self", ".", "get_new_command", "(", "command", ")", "if", "not", "isinstance", "(", "new_commands", ",", "list", ")", ":", "new_commands", "=", "(", "new_commands", ",", ")", "for", "n", ",", "new_command", "in", "enumerate", "(", "new_commands", ")", ":", "yield", "CorrectedCommand", "(", "script", "=", "new_command", ",", "side_effect", "=", "self", ".", "side_effect", ",", "priority", "=", "(", "n", "+", "1", ")", "*", "self", ".", "priority", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
CorrectedCommand._get_script
Returns fixed commands script. If `settings.repeat` is `True`, appends command with second attempt of running fuck in case fixed command fails again.
thefuck/types.py
def _get_script(self): """Returns fixed commands script. If `settings.repeat` is `True`, appends command with second attempt of running fuck in case fixed command fails again. """ if settings.repeat: repeat_fuck = '{} --repeat {}--force-command {}'.format( get_alias(), '--debug ' if settings.debug else '', shell.quote(self.script)) return shell.or_(self.script, repeat_fuck) else: return self.script
def _get_script(self): """Returns fixed commands script. If `settings.repeat` is `True`, appends command with second attempt of running fuck in case fixed command fails again. """ if settings.repeat: repeat_fuck = '{} --repeat {}--force-command {}'.format( get_alias(), '--debug ' if settings.debug else '', shell.quote(self.script)) return shell.or_(self.script, repeat_fuck) else: return self.script
[ "Returns", "fixed", "commands", "script", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L228-L242
[ "def", "_get_script", "(", "self", ")", ":", "if", "settings", ".", "repeat", ":", "repeat_fuck", "=", "'{} --repeat {}--force-command {}'", ".", "format", "(", "get_alias", "(", ")", ",", "'--debug '", "if", "settings", ".", "debug", "else", "''", ",", "shell", ".", "quote", "(", "self", ".", "script", ")", ")", "return", "shell", ".", "or_", "(", "self", ".", "script", ",", "repeat_fuck", ")", "else", ":", "return", "self", ".", "script" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
CorrectedCommand.run
Runs command from rule for passed command. :type old_cmd: Command
thefuck/types.py
def run(self, old_cmd): """Runs command from rule for passed command. :type old_cmd: Command """ if self.side_effect: self.side_effect(old_cmd, self.script) if settings.alter_history: shell.put_to_history(self.script) # This depends on correct setting of PYTHONIOENCODING by the alias: logs.debug(u'PYTHONIOENCODING: {}'.format( os.environ.get('PYTHONIOENCODING', '!!not-set!!'))) print(self._get_script())
def run(self, old_cmd): """Runs command from rule for passed command. :type old_cmd: Command """ if self.side_effect: self.side_effect(old_cmd, self.script) if settings.alter_history: shell.put_to_history(self.script) # This depends on correct setting of PYTHONIOENCODING by the alias: logs.debug(u'PYTHONIOENCODING: {}'.format( os.environ.get('PYTHONIOENCODING', '!!not-set!!'))) print(self._get_script())
[ "Runs", "command", "from", "rule", "for", "passed", "command", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L244-L258
[ "def", "run", "(", "self", ",", "old_cmd", ")", ":", "if", "self", ".", "side_effect", ":", "self", ".", "side_effect", "(", "old_cmd", ",", "self", ".", "script", ")", "if", "settings", ".", "alter_history", ":", "shell", ".", "put_to_history", "(", "self", ".", "script", ")", "# This depends on correct setting of PYTHONIOENCODING by the alias:", "logs", ".", "debug", "(", "u'PYTHONIOENCODING: {}'", ".", "format", "(", "os", ".", "environ", ".", "get", "(", "'PYTHONIOENCODING'", ",", "'!!not-set!!'", ")", ")", ")", "print", "(", "self", ".", "_get_script", "(", ")", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_get_shell_pid
Returns parent process pid.
thefuck/entrypoints/not_configured.py
def _get_shell_pid(): """Returns parent process pid.""" proc = Process(os.getpid()) try: return proc.parent().pid except TypeError: return proc.parent.pid
def _get_shell_pid(): """Returns parent process pid.""" proc = Process(os.getpid()) try: return proc.parent().pid except TypeError: return proc.parent.pid
[ "Returns", "parent", "process", "pid", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L19-L26
[ "def", "_get_shell_pid", "(", ")", ":", "proc", "=", "Process", "(", "os", ".", "getpid", "(", ")", ")", "try", ":", "return", "proc", ".", "parent", "(", ")", ".", "pid", "except", "TypeError", ":", "return", "proc", ".", "parent", ".", "pid" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_record_first_run
Records shell pid to tracker file.
thefuck/entrypoints/not_configured.py
def _record_first_run(): """Records shell pid to tracker file.""" info = {'pid': _get_shell_pid(), 'time': time.time()} mode = 'wb' if six.PY2 else 'w' with _get_not_configured_usage_tracker_path().open(mode) as tracker: json.dump(info, tracker)
def _record_first_run(): """Records shell pid to tracker file.""" info = {'pid': _get_shell_pid(), 'time': time.time()} mode = 'wb' if six.PY2 else 'w' with _get_not_configured_usage_tracker_path().open(mode) as tracker: json.dump(info, tracker)
[ "Records", "shell", "pid", "to", "tracker", "file", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L36-L43
[ "def", "_record_first_run", "(", ")", ":", "info", "=", "{", "'pid'", ":", "_get_shell_pid", "(", ")", ",", "'time'", ":", "time", ".", "time", "(", ")", "}", "mode", "=", "'wb'", "if", "six", ".", "PY2", "else", "'w'", "with", "_get_not_configured_usage_tracker_path", "(", ")", ".", "open", "(", "mode", ")", "as", "tracker", ":", "json", ".", "dump", "(", "info", ",", "tracker", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_is_second_run
Returns `True` when we know that `fuck` called second time.
thefuck/entrypoints/not_configured.py
def _is_second_run(): """Returns `True` when we know that `fuck` called second time.""" tracker_path = _get_not_configured_usage_tracker_path() if not tracker_path.exists(): return False current_pid = _get_shell_pid() with tracker_path.open('r') as tracker: try: info = json.load(tracker) except ValueError: return False if not (isinstance(info, dict) and info.get('pid') == current_pid): return False return (_get_previous_command() == 'fuck' or time.time() - info.get('time', 0) < const.CONFIGURATION_TIMEOUT)
def _is_second_run(): """Returns `True` when we know that `fuck` called second time.""" tracker_path = _get_not_configured_usage_tracker_path() if not tracker_path.exists(): return False current_pid = _get_shell_pid() with tracker_path.open('r') as tracker: try: info = json.load(tracker) except ValueError: return False if not (isinstance(info, dict) and info.get('pid') == current_pid): return False return (_get_previous_command() == 'fuck' or time.time() - info.get('time', 0) < const.CONFIGURATION_TIMEOUT)
[ "Returns", "True", "when", "we", "know", "that", "fuck", "called", "second", "time", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L55-L72
[ "def", "_is_second_run", "(", ")", ":", "tracker_path", "=", "_get_not_configured_usage_tracker_path", "(", ")", "if", "not", "tracker_path", ".", "exists", "(", ")", ":", "return", "False", "current_pid", "=", "_get_shell_pid", "(", ")", "with", "tracker_path", ".", "open", "(", "'r'", ")", "as", "tracker", ":", "try", ":", "info", "=", "json", ".", "load", "(", "tracker", ")", "except", "ValueError", ":", "return", "False", "if", "not", "(", "isinstance", "(", "info", ",", "dict", ")", "and", "info", ".", "get", "(", "'pid'", ")", "==", "current_pid", ")", ":", "return", "False", "return", "(", "_get_previous_command", "(", ")", "==", "'fuck'", "or", "time", ".", "time", "(", ")", "-", "info", ".", "get", "(", "'time'", ",", "0", ")", "<", "const", ".", "CONFIGURATION_TIMEOUT", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_is_already_configured
Returns `True` when alias already in shell config.
thefuck/entrypoints/not_configured.py
def _is_already_configured(configuration_details): """Returns `True` when alias already in shell config.""" path = Path(configuration_details.path).expanduser() with path.open('r') as shell_config: return configuration_details.content in shell_config.read()
def _is_already_configured(configuration_details): """Returns `True` when alias already in shell config.""" path = Path(configuration_details.path).expanduser() with path.open('r') as shell_config: return configuration_details.content in shell_config.read()
[ "Returns", "True", "when", "alias", "already", "in", "shell", "config", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L75-L79
[ "def", "_is_already_configured", "(", "configuration_details", ")", ":", "path", "=", "Path", "(", "configuration_details", ".", "path", ")", ".", "expanduser", "(", ")", "with", "path", ".", "open", "(", "'r'", ")", "as", "shell_config", ":", "return", "configuration_details", ".", "content", "in", "shell_config", ".", "read", "(", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
_configure
Adds alias to shell config.
thefuck/entrypoints/not_configured.py
def _configure(configuration_details): """Adds alias to shell config.""" path = Path(configuration_details.path).expanduser() with path.open('a') as shell_config: shell_config.write(u'\n') shell_config.write(configuration_details.content) shell_config.write(u'\n')
def _configure(configuration_details): """Adds alias to shell config.""" path = Path(configuration_details.path).expanduser() with path.open('a') as shell_config: shell_config.write(u'\n') shell_config.write(configuration_details.content) shell_config.write(u'\n')
[ "Adds", "alias", "to", "shell", "config", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L82-L88
[ "def", "_configure", "(", "configuration_details", ")", ":", "path", "=", "Path", "(", "configuration_details", ".", "path", ")", ".", "expanduser", "(", ")", "with", "path", ".", "open", "(", "'a'", ")", "as", "shell_config", ":", "shell_config", ".", "write", "(", "u'\\n'", ")", "shell_config", ".", "write", "(", "configuration_details", ".", "content", ")", "shell_config", ".", "write", "(", "u'\\n'", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
main
Shows useful information about how-to configure alias on a first run and configure automatically on a second. It'll be only visible when user type fuck and when alias isn't configured.
thefuck/entrypoints/not_configured.py
def main(): """Shows useful information about how-to configure alias on a first run and configure automatically on a second. It'll be only visible when user type fuck and when alias isn't configured. """ settings.init() configuration_details = shell.how_to_configure() if ( configuration_details and configuration_details.can_configure_automatically ): if _is_already_configured(configuration_details): logs.already_configured(configuration_details) return elif _is_second_run(): _configure(configuration_details) logs.configured_successfully(configuration_details) return else: _record_first_run() logs.how_to_configure_alias(configuration_details)
def main(): """Shows useful information about how-to configure alias on a first run and configure automatically on a second. It'll be only visible when user type fuck and when alias isn't configured. """ settings.init() configuration_details = shell.how_to_configure() if ( configuration_details and configuration_details.can_configure_automatically ): if _is_already_configured(configuration_details): logs.already_configured(configuration_details) return elif _is_second_run(): _configure(configuration_details) logs.configured_successfully(configuration_details) return else: _record_first_run() logs.how_to_configure_alias(configuration_details)
[ "Shows", "useful", "information", "about", "how", "-", "to", "configure", "alias", "on", "a", "first", "run", "and", "configure", "automatically", "on", "a", "second", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L91-L114
[ "def", "main", "(", ")", ":", "settings", ".", "init", "(", ")", "configuration_details", "=", "shell", ".", "how_to_configure", "(", ")", "if", "(", "configuration_details", "and", "configuration_details", ".", "can_configure_automatically", ")", ":", "if", "_is_already_configured", "(", "configuration_details", ")", ":", "logs", ".", "already_configured", "(", "configuration_details", ")", "return", "elif", "_is_second_run", "(", ")", ":", "_configure", "(", "configuration_details", ")", "logs", ".", "configured_successfully", "(", "configuration_details", ")", "return", "else", ":", "_record_first_run", "(", ")", "logs", ".", "how_to_configure_alias", "(", "configuration_details", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
memoize
Caches previous calls to the function.
thefuck/utils.py
def memoize(fn): """Caches previous calls to the function.""" memo = {} @wraps(fn) def wrapper(*args, **kwargs): if not memoize.disabled: key = pickle.dumps((args, kwargs)) if key not in memo: memo[key] = fn(*args, **kwargs) value = memo[key] else: # Memoize is disabled, call the function value = fn(*args, **kwargs) return value return wrapper
def memoize(fn): """Caches previous calls to the function.""" memo = {} @wraps(fn) def wrapper(*args, **kwargs): if not memoize.disabled: key = pickle.dumps((args, kwargs)) if key not in memo: memo[key] = fn(*args, **kwargs) value = memo[key] else: # Memoize is disabled, call the function value = fn(*args, **kwargs) return value return wrapper
[ "Caches", "previous", "calls", "to", "the", "function", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L25-L42
[ "def", "memoize", "(", "fn", ")", ":", "memo", "=", "{", "}", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "memoize", ".", "disabled", ":", "key", "=", "pickle", ".", "dumps", "(", "(", "args", ",", "kwargs", ")", ")", "if", "key", "not", "in", "memo", ":", "memo", "[", "key", "]", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", "=", "memo", "[", "key", "]", "else", ":", "# Memoize is disabled, call the function", "value", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "value", "return", "wrapper" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
default_settings
Adds default values to settings if it not presented. Usage: @default_settings({'apt': '/usr/bin/apt'}) def match(command): print(settings.apt)
thefuck/utils.py
def default_settings(params): """Adds default values to settings if it not presented. Usage: @default_settings({'apt': '/usr/bin/apt'}) def match(command): print(settings.apt) """ def _default_settings(fn, command): for k, w in params.items(): settings.setdefault(k, w) return fn(command) return decorator(_default_settings)
def default_settings(params): """Adds default values to settings if it not presented. Usage: @default_settings({'apt': '/usr/bin/apt'}) def match(command): print(settings.apt) """ def _default_settings(fn, command): for k, w in params.items(): settings.setdefault(k, w) return fn(command) return decorator(_default_settings)
[ "Adds", "default", "values", "to", "settings", "if", "it", "not", "presented", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L73-L87
[ "def", "default_settings", "(", "params", ")", ":", "def", "_default_settings", "(", "fn", ",", "command", ")", ":", "for", "k", ",", "w", "in", "params", ".", "items", "(", ")", ":", "settings", ".", "setdefault", "(", "k", ",", "w", ")", "return", "fn", "(", "command", ")", "return", "decorator", "(", "_default_settings", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_closest
Returns closest match or just first from possibilities.
thefuck/utils.py
def get_closest(word, possibilities, cutoff=0.6, fallback_to_first=True): """Returns closest match or just first from possibilities.""" possibilities = list(possibilities) try: return difflib_get_close_matches(word, possibilities, 1, cutoff)[0] except IndexError: if fallback_to_first: return possibilities[0]
def get_closest(word, possibilities, cutoff=0.6, fallback_to_first=True): """Returns closest match or just first from possibilities.""" possibilities = list(possibilities) try: return difflib_get_close_matches(word, possibilities, 1, cutoff)[0] except IndexError: if fallback_to_first: return possibilities[0]
[ "Returns", "closest", "match", "or", "just", "first", "from", "possibilities", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L90-L97
[ "def", "get_closest", "(", "word", ",", "possibilities", ",", "cutoff", "=", "0.6", ",", "fallback_to_first", "=", "True", ")", ":", "possibilities", "=", "list", "(", "possibilities", ")", "try", ":", "return", "difflib_get_close_matches", "(", "word", ",", "possibilities", ",", "1", ",", "cutoff", ")", "[", "0", "]", "except", "IndexError", ":", "if", "fallback_to_first", ":", "return", "possibilities", "[", "0", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
get_close_matches
Overrides `difflib.get_close_match` to controle argument `n`.
thefuck/utils.py
def get_close_matches(word, possibilities, n=None, cutoff=0.6): """Overrides `difflib.get_close_match` to controle argument `n`.""" if n is None: n = settings.num_close_matches return difflib_get_close_matches(word, possibilities, n, cutoff)
def get_close_matches(word, possibilities, n=None, cutoff=0.6): """Overrides `difflib.get_close_match` to controle argument `n`.""" if n is None: n = settings.num_close_matches return difflib_get_close_matches(word, possibilities, n, cutoff)
[ "Overrides", "difflib", ".", "get_close_match", "to", "controle", "argument", "n", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L100-L104
[ "def", "get_close_matches", "(", "word", ",", "possibilities", ",", "n", "=", "None", ",", "cutoff", "=", "0.6", ")", ":", "if", "n", "is", "None", ":", "n", "=", "settings", ".", "num_close_matches", "return", "difflib_get_close_matches", "(", "word", ",", "possibilities", ",", "n", ",", "cutoff", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
replace_argument
Replaces command line argument.
thefuck/utils.py
def replace_argument(script, from_, to): """Replaces command line argument.""" replaced_in_the_end = re.sub(u' {}$'.format(re.escape(from_)), u' {}'.format(to), script, count=1) if replaced_in_the_end != script: return replaced_in_the_end else: return script.replace( u' {} '.format(from_), u' {} '.format(to), 1)
def replace_argument(script, from_, to): """Replaces command line argument.""" replaced_in_the_end = re.sub(u' {}$'.format(re.escape(from_)), u' {}'.format(to), script, count=1) if replaced_in_the_end != script: return replaced_in_the_end else: return script.replace( u' {} '.format(from_), u' {} '.format(to), 1)
[ "Replaces", "command", "line", "argument", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L131-L139
[ "def", "replace_argument", "(", "script", ",", "from_", ",", "to", ")", ":", "replaced_in_the_end", "=", "re", ".", "sub", "(", "u' {}$'", ".", "format", "(", "re", ".", "escape", "(", "from_", ")", ")", ",", "u' {}'", ".", "format", "(", "to", ")", ",", "script", ",", "count", "=", "1", ")", "if", "replaced_in_the_end", "!=", "script", ":", "return", "replaced_in_the_end", "else", ":", "return", "script", ".", "replace", "(", "u' {} '", ".", "format", "(", "from_", ")", ",", "u' {} '", ".", "format", "(", "to", ")", ",", "1", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
replace_command
Helper for *_no_command rules.
thefuck/utils.py
def replace_command(command, broken, matched): """Helper for *_no_command rules.""" new_cmds = get_close_matches(broken, matched, cutoff=0.1) return [replace_argument(command.script, broken, new_cmd.strip()) for new_cmd in new_cmds]
def replace_command(command, broken, matched): """Helper for *_no_command rules.""" new_cmds = get_close_matches(broken, matched, cutoff=0.1) return [replace_argument(command.script, broken, new_cmd.strip()) for new_cmd in new_cmds]
[ "Helper", "for", "*", "_no_command", "rules", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L162-L166
[ "def", "replace_command", "(", "command", ",", "broken", ",", "matched", ")", ":", "new_cmds", "=", "get_close_matches", "(", "broken", ",", "matched", ",", "cutoff", "=", "0.1", ")", "return", "[", "replace_argument", "(", "command", ".", "script", ",", "broken", ",", "new_cmd", ".", "strip", "(", ")", ")", "for", "new_cmd", "in", "new_cmds", "]" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
is_app
Returns `True` if command is call to one of passed app names.
thefuck/utils.py
def is_app(command, *app_names, **kwargs): """Returns `True` if command is call to one of passed app names.""" at_least = kwargs.pop('at_least', 0) if kwargs: raise TypeError("got an unexpected keyword argument '{}'".format(kwargs.keys())) if len(command.script_parts) > at_least: return command.script_parts[0] in app_names return False
def is_app(command, *app_names, **kwargs): """Returns `True` if command is call to one of passed app names.""" at_least = kwargs.pop('at_least', 0) if kwargs: raise TypeError("got an unexpected keyword argument '{}'".format(kwargs.keys())) if len(command.script_parts) > at_least: return command.script_parts[0] in app_names return False
[ "Returns", "True", "if", "command", "is", "call", "to", "one", "of", "passed", "app", "names", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L170-L180
[ "def", "is_app", "(", "command", ",", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "at_least", "=", "kwargs", ".", "pop", "(", "'at_least'", ",", "0", ")", "if", "kwargs", ":", "raise", "TypeError", "(", "\"got an unexpected keyword argument '{}'\"", ".", "format", "(", "kwargs", ".", "keys", "(", ")", ")", ")", "if", "len", "(", "command", ".", "script_parts", ")", ">", "at_least", ":", "return", "command", ".", "script_parts", "[", "0", "]", "in", "app_names", "return", "False" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
for_app
Specifies that matching script is for on of app names.
thefuck/utils.py
def for_app(*app_names, **kwargs): """Specifies that matching script is for on of app names.""" def _for_app(fn, command): if is_app(command, *app_names, **kwargs): return fn(command) else: return False return decorator(_for_app)
def for_app(*app_names, **kwargs): """Specifies that matching script is for on of app names.""" def _for_app(fn, command): if is_app(command, *app_names, **kwargs): return fn(command) else: return False return decorator(_for_app)
[ "Specifies", "that", "matching", "script", "is", "for", "on", "of", "app", "names", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L183-L191
[ "def", "for_app", "(", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "def", "_for_app", "(", "fn", ",", "command", ")", ":", "if", "is_app", "(", "command", ",", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "return", "fn", "(", "command", ")", "else", ":", "return", "False", "return", "decorator", "(", "_for_app", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
cache
Caches function result in temporary file. Cache will be expired when modification date of files from `depends_on` will be changed. Only functions should be wrapped in `cache`, not methods.
thefuck/utils.py
def cache(*depends_on): """Caches function result in temporary file. Cache will be expired when modification date of files from `depends_on` will be changed. Only functions should be wrapped in `cache`, not methods. """ def cache_decorator(fn): @memoize @wraps(fn) def wrapper(*args, **kwargs): if cache.disabled: return fn(*args, **kwargs) else: return _cache.get_value(fn, depends_on, args, kwargs) return wrapper return cache_decorator
def cache(*depends_on): """Caches function result in temporary file. Cache will be expired when modification date of files from `depends_on` will be changed. Only functions should be wrapped in `cache`, not methods. """ def cache_decorator(fn): @memoize @wraps(fn) def wrapper(*args, **kwargs): if cache.disabled: return fn(*args, **kwargs) else: return _cache.get_value(fn, depends_on, args, kwargs) return wrapper return cache_decorator
[ "Caches", "function", "result", "in", "temporary", "file", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L266-L286
[ "def", "cache", "(", "*", "depends_on", ")", ":", "def", "cache_decorator", "(", "fn", ")", ":", "@", "memoize", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "cache", ".", "disabled", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "_cache", ".", "get_value", "(", "fn", ",", "depends_on", ",", "args", ",", "kwargs", ")", "return", "wrapper", "return", "cache_decorator" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
format_raw_script
Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring
thefuck/utils.py
def format_raw_script(raw_script): """Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring """ if six.PY2: script = ' '.join(arg.decode('utf-8') for arg in raw_script) else: script = ' '.join(raw_script) return script.strip()
def format_raw_script(raw_script): """Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring """ if six.PY2: script = ' '.join(arg.decode('utf-8') for arg in raw_script) else: script = ' '.join(raw_script) return script.strip()
[ "Creates", "single", "script", "from", "a", "list", "of", "script", "parts", "." ]
nvbn/thefuck
python
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L325-L337
[ "def", "format_raw_script", "(", "raw_script", ")", ":", "if", "six", ".", "PY2", ":", "script", "=", "' '", ".", "join", "(", "arg", ".", "decode", "(", "'utf-8'", ")", "for", "arg", "in", "raw_script", ")", "else", ":", "script", "=", "' '", ".", "join", "(", "raw_script", ")", "return", "script", ".", "strip", "(", ")" ]
40ab4eb62db57627bff10cf029d29c94704086a2
train
Policy.get_action
Decides actions given observations information, and takes them in environment. :param brain_info: A dictionary of brain names and BrainInfo from environment. :return: an ActionInfo containing action, memories, values and an object to be passed to add experiences
ml-agents/mlagents/trainers/policy.py
def get_action(self, brain_info: BrainInfo) -> ActionInfo: """ Decides actions given observations information, and takes them in environment. :param brain_info: A dictionary of brain names and BrainInfo from environment. :return: an ActionInfo containing action, memories, values and an object to be passed to add experiences """ if len(brain_info.agents) == 0: return ActionInfo([], [], [], None, None) run_out = self.evaluate(brain_info) return ActionInfo( action=run_out.get('action'), memory=run_out.get('memory_out'), text=None, value=run_out.get('value'), outputs=run_out )
def get_action(self, brain_info: BrainInfo) -> ActionInfo: """ Decides actions given observations information, and takes them in environment. :param brain_info: A dictionary of brain names and BrainInfo from environment. :return: an ActionInfo containing action, memories, values and an object to be passed to add experiences """ if len(brain_info.agents) == 0: return ActionInfo([], [], [], None, None) run_out = self.evaluate(brain_info) return ActionInfo( action=run_out.get('action'), memory=run_out.get('memory_out'), text=None, value=run_out.get('value'), outputs=run_out )
[ "Decides", "actions", "given", "observations", "information", "and", "takes", "them", "in", "environment", ".", ":", "param", "brain_info", ":", "A", "dictionary", "of", "brain", "names", "and", "BrainInfo", "from", "environment", ".", ":", "return", ":", "an", "ActionInfo", "containing", "action", "memories", "values", "and", "an", "object", "to", "be", "passed", "to", "add", "experiences" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L91-L108
[ "def", "get_action", "(", "self", ",", "brain_info", ":", "BrainInfo", ")", "->", "ActionInfo", ":", "if", "len", "(", "brain_info", ".", "agents", ")", "==", "0", ":", "return", "ActionInfo", "(", "[", "]", ",", "[", "]", ",", "[", "]", ",", "None", ",", "None", ")", "run_out", "=", "self", ".", "evaluate", "(", "brain_info", ")", "return", "ActionInfo", "(", "action", "=", "run_out", ".", "get", "(", "'action'", ")", ",", "memory", "=", "run_out", ".", "get", "(", "'memory_out'", ")", ",", "text", "=", "None", ",", "value", "=", "run_out", ".", "get", "(", "'value'", ")", ",", "outputs", "=", "run_out", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Policy._execute_model
Executes model. :param feed_dict: Input dictionary mapping nodes to input data. :param out_dict: Output dictionary mapping names to nodes. :return: Dictionary mapping names to input data.
ml-agents/mlagents/trainers/policy.py
def _execute_model(self, feed_dict, out_dict): """ Executes model. :param feed_dict: Input dictionary mapping nodes to input data. :param out_dict: Output dictionary mapping names to nodes. :return: Dictionary mapping names to input data. """ network_out = self.sess.run(list(out_dict.values()), feed_dict=feed_dict) run_out = dict(zip(list(out_dict.keys()), network_out)) return run_out
def _execute_model(self, feed_dict, out_dict): """ Executes model. :param feed_dict: Input dictionary mapping nodes to input data. :param out_dict: Output dictionary mapping names to nodes. :return: Dictionary mapping names to input data. """ network_out = self.sess.run(list(out_dict.values()), feed_dict=feed_dict) run_out = dict(zip(list(out_dict.keys()), network_out)) return run_out
[ "Executes", "model", ".", ":", "param", "feed_dict", ":", "Input", "dictionary", "mapping", "nodes", "to", "input", "data", ".", ":", "param", "out_dict", ":", "Output", "dictionary", "mapping", "names", "to", "nodes", ".", ":", "return", ":", "Dictionary", "mapping", "names", "to", "input", "data", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L119-L128
[ "def", "_execute_model", "(", "self", ",", "feed_dict", ",", "out_dict", ")", ":", "network_out", "=", "self", ".", "sess", ".", "run", "(", "list", "(", "out_dict", ".", "values", "(", ")", ")", ",", "feed_dict", "=", "feed_dict", ")", "run_out", "=", "dict", "(", "zip", "(", "list", "(", "out_dict", ".", "keys", "(", ")", ")", ",", "network_out", ")", ")", "return", "run_out" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Policy.get_current_step
Gets current model step. :return: current model step.
ml-agents/mlagents/trainers/policy.py
def get_current_step(self): """ Gets current model step. :return: current model step. """ step = self.sess.run(self.model.global_step) return step
def get_current_step(self): """ Gets current model step. :return: current model step. """ step = self.sess.run(self.model.global_step) return step
[ "Gets", "current", "model", "step", ".", ":", "return", ":", "current", "model", "step", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L147-L153
[ "def", "get_current_step", "(", "self", ")", ":", "step", "=", "self", ".", "sess", ".", "run", "(", "self", ".", "model", ".", "global_step", ")", "return", "step" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Policy.save_model
Saves the model :param steps: The number of steps the model was trained for :return:
ml-agents/mlagents/trainers/policy.py
def save_model(self, steps): """ Saves the model :param steps: The number of steps the model was trained for :return: """ with self.graph.as_default(): last_checkpoint = self.model_path + '/model-' + str(steps) + '.cptk' self.saver.save(self.sess, last_checkpoint) tf.train.write_graph(self.graph, self.model_path, 'raw_graph_def.pb', as_text=False)
def save_model(self, steps): """ Saves the model :param steps: The number of steps the model was trained for :return: """ with self.graph.as_default(): last_checkpoint = self.model_path + '/model-' + str(steps) + '.cptk' self.saver.save(self.sess, last_checkpoint) tf.train.write_graph(self.graph, self.model_path, 'raw_graph_def.pb', as_text=False)
[ "Saves", "the", "model", ":", "param", "steps", ":", "The", "number", "of", "steps", "the", "model", "was", "trained", "for", ":", "return", ":" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L173-L183
[ "def", "save_model", "(", "self", ",", "steps", ")", ":", "with", "self", ".", "graph", ".", "as_default", "(", ")", ":", "last_checkpoint", "=", "self", ".", "model_path", "+", "'/model-'", "+", "str", "(", "steps", ")", "+", "'.cptk'", "self", ".", "saver", ".", "save", "(", "self", ".", "sess", ",", "last_checkpoint", ")", "tf", ".", "train", ".", "write_graph", "(", "self", ".", "graph", ",", "self", ".", "model_path", ",", "'raw_graph_def.pb'", ",", "as_text", "=", "False", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Policy.export_model
Exports latest saved model to .nn format for Unity embedding.
ml-agents/mlagents/trainers/policy.py
def export_model(self): """ Exports latest saved model to .nn format for Unity embedding. """ with self.graph.as_default(): target_nodes = ','.join(self._process_graph()) ckpt = tf.train.get_checkpoint_state(self.model_path) freeze_graph.freeze_graph( input_graph=self.model_path + '/raw_graph_def.pb', input_binary=True, input_checkpoint=ckpt.model_checkpoint_path, output_node_names=target_nodes, output_graph=(self.model_path + '/frozen_graph_def.pb'), clear_devices=True, initializer_nodes='', input_saver='', restore_op_name='save/restore_all', filename_tensor_name='save/Const:0') tf2bc.convert(self.model_path + '/frozen_graph_def.pb', self.model_path + '.nn') logger.info('Exported ' + self.model_path + '.nn file')
def export_model(self): """ Exports latest saved model to .nn format for Unity embedding. """ with self.graph.as_default(): target_nodes = ','.join(self._process_graph()) ckpt = tf.train.get_checkpoint_state(self.model_path) freeze_graph.freeze_graph( input_graph=self.model_path + '/raw_graph_def.pb', input_binary=True, input_checkpoint=ckpt.model_checkpoint_path, output_node_names=target_nodes, output_graph=(self.model_path + '/frozen_graph_def.pb'), clear_devices=True, initializer_nodes='', input_saver='', restore_op_name='save/restore_all', filename_tensor_name='save/Const:0') tf2bc.convert(self.model_path + '/frozen_graph_def.pb', self.model_path + '.nn') logger.info('Exported ' + self.model_path + '.nn file')
[ "Exports", "latest", "saved", "model", "to", ".", "nn", "format", "for", "Unity", "embedding", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L185-L204
[ "def", "export_model", "(", "self", ")", ":", "with", "self", ".", "graph", ".", "as_default", "(", ")", ":", "target_nodes", "=", "','", ".", "join", "(", "self", ".", "_process_graph", "(", ")", ")", "ckpt", "=", "tf", ".", "train", ".", "get_checkpoint_state", "(", "self", ".", "model_path", ")", "freeze_graph", ".", "freeze_graph", "(", "input_graph", "=", "self", ".", "model_path", "+", "'/raw_graph_def.pb'", ",", "input_binary", "=", "True", ",", "input_checkpoint", "=", "ckpt", ".", "model_checkpoint_path", ",", "output_node_names", "=", "target_nodes", ",", "output_graph", "=", "(", "self", ".", "model_path", "+", "'/frozen_graph_def.pb'", ")", ",", "clear_devices", "=", "True", ",", "initializer_nodes", "=", "''", ",", "input_saver", "=", "''", ",", "restore_op_name", "=", "'save/restore_all'", ",", "filename_tensor_name", "=", "'save/Const:0'", ")", "tf2bc", ".", "convert", "(", "self", ".", "model_path", "+", "'/frozen_graph_def.pb'", ",", "self", ".", "model_path", "+", "'.nn'", ")", "logger", ".", "info", "(", "'Exported '", "+", "self", ".", "model_path", "+", "'.nn file'", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Policy._process_graph
Gets the list of the output nodes present in the graph for inference :return: list of node names
ml-agents/mlagents/trainers/policy.py
def _process_graph(self): """ Gets the list of the output nodes present in the graph for inference :return: list of node names """ all_nodes = [x.name for x in self.graph.as_graph_def().node] nodes = [x for x in all_nodes if x in self.possible_output_nodes] logger.info('List of nodes to export for brain :' + self.brain.brain_name) for n in nodes: logger.info('\t' + n) return nodes
def _process_graph(self): """ Gets the list of the output nodes present in the graph for inference :return: list of node names """ all_nodes = [x.name for x in self.graph.as_graph_def().node] nodes = [x for x in all_nodes if x in self.possible_output_nodes] logger.info('List of nodes to export for brain :' + self.brain.brain_name) for n in nodes: logger.info('\t' + n) return nodes
[ "Gets", "the", "list", "of", "the", "output", "nodes", "present", "in", "the", "graph", "for", "inference", ":", "return", ":", "list", "of", "node", "names" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L206-L216
[ "def", "_process_graph", "(", "self", ")", ":", "all_nodes", "=", "[", "x", ".", "name", "for", "x", "in", "self", ".", "graph", ".", "as_graph_def", "(", ")", ".", "node", "]", "nodes", "=", "[", "x", "for", "x", "in", "all_nodes", "if", "x", "in", "self", ".", "possible_output_nodes", "]", "logger", ".", "info", "(", "'List of nodes to export for brain :'", "+", "self", ".", "brain", ".", "brain_name", ")", "for", "n", "in", "nodes", ":", "logger", ".", "info", "(", "'\\t'", "+", "n", ")", "return", "nodes" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Buffer.reset_local_buffers
Resets all the local local_buffers
ml-agents/mlagents/trainers/buffer.py
def reset_local_buffers(self): """ Resets all the local local_buffers """ agent_ids = list(self.keys()) for k in agent_ids: self[k].reset_agent()
def reset_local_buffers(self): """ Resets all the local local_buffers """ agent_ids = list(self.keys()) for k in agent_ids: self[k].reset_agent()
[ "Resets", "all", "the", "local", "local_buffers" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/buffer.py#L221-L227
[ "def", "reset_local_buffers", "(", "self", ")", ":", "agent_ids", "=", "list", "(", "self", ".", "keys", "(", ")", ")", "for", "k", "in", "agent_ids", ":", "self", "[", "k", "]", ".", "reset_agent", "(", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Buffer.append_update_buffer
Appends the buffer of an agent to the update buffer. :param agent_id: The id of the agent which data will be appended :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element.
ml-agents/mlagents/trainers/buffer.py
def append_update_buffer(self, agent_id, key_list=None, batch_size=None, training_length=None): """ Appends the buffer of an agent to the update buffer. :param agent_id: The id of the agent which data will be appended :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element. """ if key_list is None: key_list = self[agent_id].keys() if not self[agent_id].check_length(key_list): raise BufferException("The length of the fields {0} for agent {1} where not of same length" .format(key_list, agent_id)) for field_key in key_list: self.update_buffer[field_key].extend( self[agent_id][field_key].get_batch(batch_size=batch_size, training_length=training_length) )
def append_update_buffer(self, agent_id, key_list=None, batch_size=None, training_length=None): """ Appends the buffer of an agent to the update buffer. :param agent_id: The id of the agent which data will be appended :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element. """ if key_list is None: key_list = self[agent_id].keys() if not self[agent_id].check_length(key_list): raise BufferException("The length of the fields {0} for agent {1} where not of same length" .format(key_list, agent_id)) for field_key in key_list: self.update_buffer[field_key].extend( self[agent_id][field_key].get_batch(batch_size=batch_size, training_length=training_length) )
[ "Appends", "the", "buffer", "of", "an", "agent", "to", "the", "update", "buffer", ".", ":", "param", "agent_id", ":", "The", "id", "of", "the", "agent", "which", "data", "will", "be", "appended", ":", "param", "key_list", ":", "The", "fields", "that", "must", "be", "added", ".", "If", "None", ":", "all", "fields", "will", "be", "appended", ".", ":", "param", "batch_size", ":", "The", "number", "of", "elements", "that", "must", "be", "appended", ".", "If", "None", ":", "All", "of", "them", "will", "be", ".", ":", "param", "training_length", ":", "The", "length", "of", "the", "samples", "that", "must", "be", "appended", ".", "If", "None", ":", "only", "takes", "one", "element", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/buffer.py#L229-L245
[ "def", "append_update_buffer", "(", "self", ",", "agent_id", ",", "key_list", "=", "None", ",", "batch_size", "=", "None", ",", "training_length", "=", "None", ")", ":", "if", "key_list", "is", "None", ":", "key_list", "=", "self", "[", "agent_id", "]", ".", "keys", "(", ")", "if", "not", "self", "[", "agent_id", "]", ".", "check_length", "(", "key_list", ")", ":", "raise", "BufferException", "(", "\"The length of the fields {0} for agent {1} where not of same length\"", ".", "format", "(", "key_list", ",", "agent_id", ")", ")", "for", "field_key", "in", "key_list", ":", "self", ".", "update_buffer", "[", "field_key", "]", ".", "extend", "(", "self", "[", "agent_id", "]", "[", "field_key", "]", ".", "get_batch", "(", "batch_size", "=", "batch_size", ",", "training_length", "=", "training_length", ")", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Buffer.append_all_agent_batch_to_update_buffer
Appends the buffer of all agents to the update buffer. :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element.
ml-agents/mlagents/trainers/buffer.py
def append_all_agent_batch_to_update_buffer(self, key_list=None, batch_size=None, training_length=None): """ Appends the buffer of all agents to the update buffer. :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element. """ for agent_id in self.keys(): self.append_update_buffer(agent_id, key_list, batch_size, training_length)
def append_all_agent_batch_to_update_buffer(self, key_list=None, batch_size=None, training_length=None): """ Appends the buffer of all agents to the update buffer. :param key_list: The fields that must be added. If None: all fields will be appended. :param batch_size: The number of elements that must be appended. If None: All of them will be. :param training_length: The length of the samples that must be appended. If None: only takes one element. """ for agent_id in self.keys(): self.append_update_buffer(agent_id, key_list, batch_size, training_length)
[ "Appends", "the", "buffer", "of", "all", "agents", "to", "the", "update", "buffer", ".", ":", "param", "key_list", ":", "The", "fields", "that", "must", "be", "added", ".", "If", "None", ":", "all", "fields", "will", "be", "appended", ".", ":", "param", "batch_size", ":", "The", "number", "of", "elements", "that", "must", "be", "appended", ".", "If", "None", ":", "All", "of", "them", "will", "be", ".", ":", "param", "training_length", ":", "The", "length", "of", "the", "samples", "that", "must", "be", "appended", ".", "If", "None", ":", "only", "takes", "one", "element", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/buffer.py#L247-L255
[ "def", "append_all_agent_batch_to_update_buffer", "(", "self", ",", "key_list", "=", "None", ",", "batch_size", "=", "None", ",", "training_length", "=", "None", ")", ":", "for", "agent_id", "in", "self", ".", "keys", "(", ")", ":", "self", ".", "append_update_buffer", "(", "agent_id", ",", "key_list", ",", "batch_size", ",", "training_length", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
run_training
Launches training session. :param process_queue: Queue used to send signal back to main. :param sub_id: Unique id for training session. :param run_seed: Random seed used for training. :param run_options: Command line arguments for training.
ml-agents/mlagents/trainers/learn.py
def run_training(sub_id: int, run_seed: int, run_options, process_queue): """ Launches training session. :param process_queue: Queue used to send signal back to main. :param sub_id: Unique id for training session. :param run_seed: Random seed used for training. :param run_options: Command line arguments for training. """ # Docker Parameters docker_target_name = (run_options['--docker-target-name'] if run_options['--docker-target-name'] != 'None' else None) # General parameters env_path = (run_options['--env'] if run_options['--env'] != 'None' else None) run_id = run_options['--run-id'] load_model = run_options['--load'] train_model = run_options['--train'] save_freq = int(run_options['--save-freq']) keep_checkpoints = int(run_options['--keep-checkpoints']) base_port = int(run_options['--base-port']) num_envs = int(run_options['--num-envs']) curriculum_folder = (run_options['--curriculum'] if run_options['--curriculum'] != 'None' else None) lesson = int(run_options['--lesson']) fast_simulation = not bool(run_options['--slow']) no_graphics = run_options['--no-graphics'] trainer_config_path = run_options['<trainer-config-path>'] # Recognize and use docker volume if one is passed as an argument if not docker_target_name: model_path = './models/{run_id}-{sub_id}'.format(run_id=run_id, sub_id=sub_id) summaries_dir = './summaries' else: trainer_config_path = \ '/{docker_target_name}/{trainer_config_path}'.format( docker_target_name=docker_target_name, trainer_config_path=trainer_config_path) if curriculum_folder is not None: curriculum_folder = \ '/{docker_target_name}/{curriculum_folder}'.format( docker_target_name=docker_target_name, curriculum_folder=curriculum_folder) model_path = '/{docker_target_name}/models/{run_id}-{sub_id}'.format( docker_target_name=docker_target_name, run_id=run_id, sub_id=sub_id) summaries_dir = '/{docker_target_name}/summaries'.format( docker_target_name=docker_target_name) trainer_config = load_config(trainer_config_path) env_factory = 
create_environment_factory( env_path, docker_target_name, no_graphics, run_seed, base_port + (sub_id * num_envs) ) env = SubprocessUnityEnvironment(env_factory, num_envs) maybe_meta_curriculum = try_create_meta_curriculum(curriculum_folder, env) # Create controller and begin training. tc = TrainerController(model_path, summaries_dir, run_id + '-' + str(sub_id), save_freq, maybe_meta_curriculum, load_model, train_model, keep_checkpoints, lesson, env.external_brains, run_seed, fast_simulation) # Signal that environment has been launched. process_queue.put(True) # Begin training tc.start_learning(env, trainer_config)
def run_training(sub_id: int, run_seed: int, run_options, process_queue): """ Launches training session. :param process_queue: Queue used to send signal back to main. :param sub_id: Unique id for training session. :param run_seed: Random seed used for training. :param run_options: Command line arguments for training. """ # Docker Parameters docker_target_name = (run_options['--docker-target-name'] if run_options['--docker-target-name'] != 'None' else None) # General parameters env_path = (run_options['--env'] if run_options['--env'] != 'None' else None) run_id = run_options['--run-id'] load_model = run_options['--load'] train_model = run_options['--train'] save_freq = int(run_options['--save-freq']) keep_checkpoints = int(run_options['--keep-checkpoints']) base_port = int(run_options['--base-port']) num_envs = int(run_options['--num-envs']) curriculum_folder = (run_options['--curriculum'] if run_options['--curriculum'] != 'None' else None) lesson = int(run_options['--lesson']) fast_simulation = not bool(run_options['--slow']) no_graphics = run_options['--no-graphics'] trainer_config_path = run_options['<trainer-config-path>'] # Recognize and use docker volume if one is passed as an argument if not docker_target_name: model_path = './models/{run_id}-{sub_id}'.format(run_id=run_id, sub_id=sub_id) summaries_dir = './summaries' else: trainer_config_path = \ '/{docker_target_name}/{trainer_config_path}'.format( docker_target_name=docker_target_name, trainer_config_path=trainer_config_path) if curriculum_folder is not None: curriculum_folder = \ '/{docker_target_name}/{curriculum_folder}'.format( docker_target_name=docker_target_name, curriculum_folder=curriculum_folder) model_path = '/{docker_target_name}/models/{run_id}-{sub_id}'.format( docker_target_name=docker_target_name, run_id=run_id, sub_id=sub_id) summaries_dir = '/{docker_target_name}/summaries'.format( docker_target_name=docker_target_name) trainer_config = load_config(trainer_config_path) env_factory = 
create_environment_factory( env_path, docker_target_name, no_graphics, run_seed, base_port + (sub_id * num_envs) ) env = SubprocessUnityEnvironment(env_factory, num_envs) maybe_meta_curriculum = try_create_meta_curriculum(curriculum_folder, env) # Create controller and begin training. tc = TrainerController(model_path, summaries_dir, run_id + '-' + str(sub_id), save_freq, maybe_meta_curriculum, load_model, train_model, keep_checkpoints, lesson, env.external_brains, run_seed, fast_simulation) # Signal that environment has been launched. process_queue.put(True) # Begin training tc.start_learning(env, trainer_config)
[ "Launches", "training", "session", ".", ":", "param", "process_queue", ":", "Queue", "used", "to", "send", "signal", "back", "to", "main", ".", ":", "param", "sub_id", ":", "Unique", "id", "for", "training", "session", ".", ":", "param", "run_seed", ":", "Random", "seed", "used", "for", "training", ".", ":", "param", "run_options", ":", "Command", "line", "arguments", "for", "training", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/learn.py#L24-L95
[ "def", "run_training", "(", "sub_id", ":", "int", ",", "run_seed", ":", "int", ",", "run_options", ",", "process_queue", ")", ":", "# Docker Parameters", "docker_target_name", "=", "(", "run_options", "[", "'--docker-target-name'", "]", "if", "run_options", "[", "'--docker-target-name'", "]", "!=", "'None'", "else", "None", ")", "# General parameters", "env_path", "=", "(", "run_options", "[", "'--env'", "]", "if", "run_options", "[", "'--env'", "]", "!=", "'None'", "else", "None", ")", "run_id", "=", "run_options", "[", "'--run-id'", "]", "load_model", "=", "run_options", "[", "'--load'", "]", "train_model", "=", "run_options", "[", "'--train'", "]", "save_freq", "=", "int", "(", "run_options", "[", "'--save-freq'", "]", ")", "keep_checkpoints", "=", "int", "(", "run_options", "[", "'--keep-checkpoints'", "]", ")", "base_port", "=", "int", "(", "run_options", "[", "'--base-port'", "]", ")", "num_envs", "=", "int", "(", "run_options", "[", "'--num-envs'", "]", ")", "curriculum_folder", "=", "(", "run_options", "[", "'--curriculum'", "]", "if", "run_options", "[", "'--curriculum'", "]", "!=", "'None'", "else", "None", ")", "lesson", "=", "int", "(", "run_options", "[", "'--lesson'", "]", ")", "fast_simulation", "=", "not", "bool", "(", "run_options", "[", "'--slow'", "]", ")", "no_graphics", "=", "run_options", "[", "'--no-graphics'", "]", "trainer_config_path", "=", "run_options", "[", "'<trainer-config-path>'", "]", "# Recognize and use docker volume if one is passed as an argument", "if", "not", "docker_target_name", ":", "model_path", "=", "'./models/{run_id}-{sub_id}'", ".", "format", "(", "run_id", "=", "run_id", ",", "sub_id", "=", "sub_id", ")", "summaries_dir", "=", "'./summaries'", "else", ":", "trainer_config_path", "=", "'/{docker_target_name}/{trainer_config_path}'", ".", "format", "(", "docker_target_name", "=", "docker_target_name", ",", "trainer_config_path", "=", "trainer_config_path", ")", "if", "curriculum_folder", "is", "not", "None", ":", 
"curriculum_folder", "=", "'/{docker_target_name}/{curriculum_folder}'", ".", "format", "(", "docker_target_name", "=", "docker_target_name", ",", "curriculum_folder", "=", "curriculum_folder", ")", "model_path", "=", "'/{docker_target_name}/models/{run_id}-{sub_id}'", ".", "format", "(", "docker_target_name", "=", "docker_target_name", ",", "run_id", "=", "run_id", ",", "sub_id", "=", "sub_id", ")", "summaries_dir", "=", "'/{docker_target_name}/summaries'", ".", "format", "(", "docker_target_name", "=", "docker_target_name", ")", "trainer_config", "=", "load_config", "(", "trainer_config_path", ")", "env_factory", "=", "create_environment_factory", "(", "env_path", ",", "docker_target_name", ",", "no_graphics", ",", "run_seed", ",", "base_port", "+", "(", "sub_id", "*", "num_envs", ")", ")", "env", "=", "SubprocessUnityEnvironment", "(", "env_factory", ",", "num_envs", ")", "maybe_meta_curriculum", "=", "try_create_meta_curriculum", "(", "curriculum_folder", ",", "env", ")", "# Create controller and begin training.", "tc", "=", "TrainerController", "(", "model_path", ",", "summaries_dir", ",", "run_id", "+", "'-'", "+", "str", "(", "sub_id", ")", ",", "save_freq", ",", "maybe_meta_curriculum", ",", "load_model", ",", "train_model", ",", "keep_checkpoints", ",", "lesson", ",", "env", ".", "external_brains", ",", "run_seed", ",", "fast_simulation", ")", "# Signal that environment has been launched.", "process_queue", ".", "put", "(", "True", ")", "# Begin training", "tc", ".", "start_learning", "(", "env", ",", "trainer_config", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Trainer.get_action
Get an action using this trainer's current policy. :param curr_info: Current BrainInfo. :return: The ActionInfo given by the policy given the BrainInfo.
ml-agents/mlagents/trainers/trainer.py
def get_action(self, curr_info: BrainInfo) -> ActionInfo: """ Get an action using this trainer's current policy. :param curr_info: Current BrainInfo. :return: The ActionInfo given by the policy given the BrainInfo. """ self.trainer_metrics.start_experience_collection_timer() action = self.policy.get_action(curr_info) self.trainer_metrics.end_experience_collection_timer() return action
def get_action(self, curr_info: BrainInfo) -> ActionInfo: """ Get an action using this trainer's current policy. :param curr_info: Current BrainInfo. :return: The ActionInfo given by the policy given the BrainInfo. """ self.trainer_metrics.start_experience_collection_timer() action = self.policy.get_action(curr_info) self.trainer_metrics.end_experience_collection_timer() return action
[ "Get", "an", "action", "using", "this", "trainer", "s", "current", "policy", ".", ":", "param", "curr_info", ":", "Current", "BrainInfo", ".", ":", "return", ":", "The", "ActionInfo", "given", "by", "the", "policy", "given", "the", "BrainInfo", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer.py#L106-L115
[ "def", "get_action", "(", "self", ",", "curr_info", ":", "BrainInfo", ")", "->", "ActionInfo", ":", "self", ".", "trainer_metrics", ".", "start_experience_collection_timer", "(", ")", "action", "=", "self", ".", "policy", ".", "get_action", "(", "curr_info", ")", "self", ".", "trainer_metrics", ".", "end_experience_collection_timer", "(", ")", "return", "action" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Trainer.write_summary
Saves training statistics to Tensorboard. :param delta_train_start: Time elapsed since training started. :param lesson_num: Current lesson number in curriculum. :param global_step: The number of steps the simulation has been going for
ml-agents/mlagents/trainers/trainer.py
def write_summary(self, global_step, delta_train_start, lesson_num=0): """ Saves training statistics to Tensorboard. :param delta_train_start: Time elapsed since training started. :param lesson_num: Current lesson number in curriculum. :param global_step: The number of steps the simulation has been going for """ if global_step % self.trainer_parameters['summary_freq'] == 0 and global_step != 0: is_training = "Training." if self.is_training and self.get_step <= self.get_max_steps else "Not Training." if len(self.stats['Environment/Cumulative Reward']) > 0: mean_reward = np.mean( self.stats['Environment/Cumulative Reward']) LOGGER.info(" {}: {}: Step: {}. " "Time Elapsed: {:0.3f} s " "Mean " "Reward: {" ":0.3f}. Std of Reward: {:0.3f}. {}" .format(self.run_id, self.brain_name, min(self.get_step, self.get_max_steps), delta_train_start, mean_reward, np.std( self.stats['Environment/Cumulative Reward']), is_training)) else: LOGGER.info(" {}: {}: Step: {}. No episode was completed since last summary. {}" .format(self.run_id, self.brain_name, self.get_step, is_training)) summary = tf.Summary() for key in self.stats: if len(self.stats[key]) > 0: stat_mean = float(np.mean(self.stats[key])) summary.value.add(tag='{}'.format( key), simple_value=stat_mean) self.stats[key] = [] summary.value.add(tag='Environment/Lesson', simple_value=lesson_num) self.summary_writer.add_summary(summary, self.get_step) self.summary_writer.flush()
def write_summary(self, global_step, delta_train_start, lesson_num=0): """ Saves training statistics to Tensorboard. :param delta_train_start: Time elapsed since training started. :param lesson_num: Current lesson number in curriculum. :param global_step: The number of steps the simulation has been going for """ if global_step % self.trainer_parameters['summary_freq'] == 0 and global_step != 0: is_training = "Training." if self.is_training and self.get_step <= self.get_max_steps else "Not Training." if len(self.stats['Environment/Cumulative Reward']) > 0: mean_reward = np.mean( self.stats['Environment/Cumulative Reward']) LOGGER.info(" {}: {}: Step: {}. " "Time Elapsed: {:0.3f} s " "Mean " "Reward: {" ":0.3f}. Std of Reward: {:0.3f}. {}" .format(self.run_id, self.brain_name, min(self.get_step, self.get_max_steps), delta_train_start, mean_reward, np.std( self.stats['Environment/Cumulative Reward']), is_training)) else: LOGGER.info(" {}: {}: Step: {}. No episode was completed since last summary. {}" .format(self.run_id, self.brain_name, self.get_step, is_training)) summary = tf.Summary() for key in self.stats: if len(self.stats[key]) > 0: stat_mean = float(np.mean(self.stats[key])) summary.value.add(tag='{}'.format( key), simple_value=stat_mean) self.stats[key] = [] summary.value.add(tag='Environment/Lesson', simple_value=lesson_num) self.summary_writer.add_summary(summary, self.get_step) self.summary_writer.flush()
[ "Saves", "training", "statistics", "to", "Tensorboard", ".", ":", "param", "delta_train_start", ":", "Time", "elapsed", "since", "training", "started", ".", ":", "param", "lesson_num", ":", "Current", "lesson", "number", "in", "curriculum", ".", ":", "param", "global_step", ":", "The", "number", "of", "steps", "the", "simulation", "has", "been", "going", "for" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer.py#L180-L215
[ "def", "write_summary", "(", "self", ",", "global_step", ",", "delta_train_start", ",", "lesson_num", "=", "0", ")", ":", "if", "global_step", "%", "self", ".", "trainer_parameters", "[", "'summary_freq'", "]", "==", "0", "and", "global_step", "!=", "0", ":", "is_training", "=", "\"Training.\"", "if", "self", ".", "is_training", "and", "self", ".", "get_step", "<=", "self", ".", "get_max_steps", "else", "\"Not Training.\"", "if", "len", "(", "self", ".", "stats", "[", "'Environment/Cumulative Reward'", "]", ")", ">", "0", ":", "mean_reward", "=", "np", ".", "mean", "(", "self", ".", "stats", "[", "'Environment/Cumulative Reward'", "]", ")", "LOGGER", ".", "info", "(", "\" {}: {}: Step: {}. \"", "\"Time Elapsed: {:0.3f} s \"", "\"Mean \"", "\"Reward: {\"", "\":0.3f}. Std of Reward: {:0.3f}. {}\"", ".", "format", "(", "self", ".", "run_id", ",", "self", ".", "brain_name", ",", "min", "(", "self", ".", "get_step", ",", "self", ".", "get_max_steps", ")", ",", "delta_train_start", ",", "mean_reward", ",", "np", ".", "std", "(", "self", ".", "stats", "[", "'Environment/Cumulative Reward'", "]", ")", ",", "is_training", ")", ")", "else", ":", "LOGGER", ".", "info", "(", "\" {}: {}: Step: {}. No episode was completed since last summary. 
{}\"", ".", "format", "(", "self", ".", "run_id", ",", "self", ".", "brain_name", ",", "self", ".", "get_step", ",", "is_training", ")", ")", "summary", "=", "tf", ".", "Summary", "(", ")", "for", "key", "in", "self", ".", "stats", ":", "if", "len", "(", "self", ".", "stats", "[", "key", "]", ")", ">", "0", ":", "stat_mean", "=", "float", "(", "np", ".", "mean", "(", "self", ".", "stats", "[", "key", "]", ")", ")", "summary", ".", "value", ".", "add", "(", "tag", "=", "'{}'", ".", "format", "(", "key", ")", ",", "simple_value", "=", "stat_mean", ")", "self", ".", "stats", "[", "key", "]", "=", "[", "]", "summary", ".", "value", ".", "add", "(", "tag", "=", "'Environment/Lesson'", ",", "simple_value", "=", "lesson_num", ")", "self", ".", "summary_writer", ".", "add_summary", "(", "summary", ",", "self", ".", "get_step", ")", "self", ".", "summary_writer", ".", "flush", "(", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
Trainer.write_tensorboard_text
Saves text to Tensorboard. Note: Only works on tensorflow r1.2 or above. :param key: The name of the text. :param input_dict: A dictionary that will be displayed in a table on Tensorboard.
ml-agents/mlagents/trainers/trainer.py
def write_tensorboard_text(self, key, input_dict): """ Saves text to Tensorboard. Note: Only works on tensorflow r1.2 or above. :param key: The name of the text. :param input_dict: A dictionary that will be displayed in a table on Tensorboard. """ try: with tf.Session() as sess: s_op = tf.summary.text(key, tf.convert_to_tensor( ([[str(x), str(input_dict[x])] for x in input_dict]))) s = sess.run(s_op) self.summary_writer.add_summary(s, self.get_step) except: LOGGER.info( "Cannot write text summary for Tensorboard. Tensorflow version must be r1.2 or above.") pass
def write_tensorboard_text(self, key, input_dict): """ Saves text to Tensorboard. Note: Only works on tensorflow r1.2 or above. :param key: The name of the text. :param input_dict: A dictionary that will be displayed in a table on Tensorboard. """ try: with tf.Session() as sess: s_op = tf.summary.text(key, tf.convert_to_tensor( ([[str(x), str(input_dict[x])] for x in input_dict]))) s = sess.run(s_op) self.summary_writer.add_summary(s, self.get_step) except: LOGGER.info( "Cannot write text summary for Tensorboard. Tensorflow version must be r1.2 or above.") pass
[ "Saves", "text", "to", "Tensorboard", ".", "Note", ":", "Only", "works", "on", "tensorflow", "r1", ".", "2", "or", "above", ".", ":", "param", "key", ":", "The", "name", "of", "the", "text", ".", ":", "param", "input_dict", ":", "A", "dictionary", "that", "will", "be", "displayed", "in", "a", "table", "on", "Tensorboard", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer.py#L217-L233
[ "def", "write_tensorboard_text", "(", "self", ",", "key", ",", "input_dict", ")", ":", "try", ":", "with", "tf", ".", "Session", "(", ")", "as", "sess", ":", "s_op", "=", "tf", ".", "summary", ".", "text", "(", "key", ",", "tf", ".", "convert_to_tensor", "(", "(", "[", "[", "str", "(", "x", ")", ",", "str", "(", "input_dict", "[", "x", "]", ")", "]", "for", "x", "in", "input_dict", "]", ")", ")", ")", "s", "=", "sess", ".", "run", "(", "s_op", ")", "self", ".", "summary_writer", ".", "add_summary", "(", "s", ",", "self", ".", "get_step", ")", "except", ":", "LOGGER", ".", "info", "(", "\"Cannot write text summary for Tensorboard. Tensorflow version must be r1.2 or above.\"", ")", "pass" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
MetaCurriculum.lesson_nums
A dict from brain name to the brain's curriculum's lesson number.
ml-agents/mlagents/trainers/meta_curriculum.py
def lesson_nums(self): """A dict from brain name to the brain's curriculum's lesson number.""" lesson_nums = {} for brain_name, curriculum in self.brains_to_curriculums.items(): lesson_nums[brain_name] = curriculum.lesson_num return lesson_nums
def lesson_nums(self): """A dict from brain name to the brain's curriculum's lesson number.""" lesson_nums = {} for brain_name, curriculum in self.brains_to_curriculums.items(): lesson_nums[brain_name] = curriculum.lesson_num return lesson_nums
[ "A", "dict", "from", "brain", "name", "to", "the", "brain", "s", "curriculum", "s", "lesson", "number", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L61-L67
[ "def", "lesson_nums", "(", "self", ")", ":", "lesson_nums", "=", "{", "}", "for", "brain_name", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "lesson_nums", "[", "brain_name", "]", "=", "curriculum", ".", "lesson_num", "return", "lesson_nums" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
MetaCurriculum.increment_lessons
Attempts to increments all the lessons of all the curriculums in this MetaCurriculum. Note that calling this method does not guarantee the lesson of a curriculum will increment. The lesson of a curriculum will only increment if the specified measure threshold defined in the curriculum has been reached and the minimum number of episodes in the lesson have been completed. Args: measure_vals (dict): A dict of brain name to measure value. reward_buff_sizes (dict): A dict of brain names to the size of their corresponding reward buffers. Returns: A dict from brain name to whether that brain's lesson number was incremented.
ml-agents/mlagents/trainers/meta_curriculum.py
def increment_lessons(self, measure_vals, reward_buff_sizes=None): """Attempts to increments all the lessons of all the curriculums in this MetaCurriculum. Note that calling this method does not guarantee the lesson of a curriculum will increment. The lesson of a curriculum will only increment if the specified measure threshold defined in the curriculum has been reached and the minimum number of episodes in the lesson have been completed. Args: measure_vals (dict): A dict of brain name to measure value. reward_buff_sizes (dict): A dict of brain names to the size of their corresponding reward buffers. Returns: A dict from brain name to whether that brain's lesson number was incremented. """ ret = {} if reward_buff_sizes: for brain_name, buff_size in reward_buff_sizes.items(): if self._lesson_ready_to_increment(brain_name, buff_size): measure_val = measure_vals[brain_name] ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) else: for brain_name, measure_val in measure_vals.items(): ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) return ret
def increment_lessons(self, measure_vals, reward_buff_sizes=None): """Attempts to increments all the lessons of all the curriculums in this MetaCurriculum. Note that calling this method does not guarantee the lesson of a curriculum will increment. The lesson of a curriculum will only increment if the specified measure threshold defined in the curriculum has been reached and the minimum number of episodes in the lesson have been completed. Args: measure_vals (dict): A dict of brain name to measure value. reward_buff_sizes (dict): A dict of brain names to the size of their corresponding reward buffers. Returns: A dict from brain name to whether that brain's lesson number was incremented. """ ret = {} if reward_buff_sizes: for brain_name, buff_size in reward_buff_sizes.items(): if self._lesson_ready_to_increment(brain_name, buff_size): measure_val = measure_vals[brain_name] ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) else: for brain_name, measure_val in measure_vals.items(): ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) return ret
[ "Attempts", "to", "increments", "all", "the", "lessons", "of", "all", "the", "curriculums", "in", "this", "MetaCurriculum", ".", "Note", "that", "calling", "this", "method", "does", "not", "guarantee", "the", "lesson", "of", "a", "curriculum", "will", "increment", ".", "The", "lesson", "of", "a", "curriculum", "will", "only", "increment", "if", "the", "specified", "measure", "threshold", "defined", "in", "the", "curriculum", "has", "been", "reached", "and", "the", "minimum", "number", "of", "episodes", "in", "the", "lesson", "have", "been", "completed", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L91-L119
[ "def", "increment_lessons", "(", "self", ",", "measure_vals", ",", "reward_buff_sizes", "=", "None", ")", ":", "ret", "=", "{", "}", "if", "reward_buff_sizes", ":", "for", "brain_name", ",", "buff_size", "in", "reward_buff_sizes", ".", "items", "(", ")", ":", "if", "self", ".", "_lesson_ready_to_increment", "(", "brain_name", ",", "buff_size", ")", ":", "measure_val", "=", "measure_vals", "[", "brain_name", "]", "ret", "[", "brain_name", "]", "=", "(", "self", ".", "brains_to_curriculums", "[", "brain_name", "]", ".", "increment_lesson", "(", "measure_val", ")", ")", "else", ":", "for", "brain_name", ",", "measure_val", "in", "measure_vals", ".", "items", "(", ")", ":", "ret", "[", "brain_name", "]", "=", "(", "self", ".", "brains_to_curriculums", "[", "brain_name", "]", ".", "increment_lesson", "(", "measure_val", ")", ")", "return", "ret" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
MetaCurriculum.set_all_curriculums_to_lesson_num
Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to.
ml-agents/mlagents/trainers/meta_curriculum.py
def set_all_curriculums_to_lesson_num(self, lesson_num): """Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to. """ for _, curriculum in self.brains_to_curriculums.items(): curriculum.lesson_num = lesson_num
def set_all_curriculums_to_lesson_num(self, lesson_num): """Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to. """ for _, curriculum in self.brains_to_curriculums.items(): curriculum.lesson_num = lesson_num
[ "Sets", "all", "the", "curriculums", "in", "this", "meta", "curriculum", "to", "a", "specified", "lesson", "number", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L122-L131
[ "def", "set_all_curriculums_to_lesson_num", "(", "self", ",", "lesson_num", ")", ":", "for", "_", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "curriculum", ".", "lesson_num", "=", "lesson_num" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
MetaCurriculum.get_config
Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value.
ml-agents/mlagents/trainers/meta_curriculum.py
def get_config(self): """Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value. """ config = {} for _, curriculum in self.brains_to_curriculums.items(): curr_config = curriculum.get_config() config.update(curr_config) return config
def get_config(self): """Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value. """ config = {} for _, curriculum in self.brains_to_curriculums.items(): curr_config = curriculum.get_config() config.update(curr_config) return config
[ "Get", "the", "combined", "configuration", "of", "all", "curriculums", "in", "this", "MetaCurriculum", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L134-L147
[ "def", "get_config", "(", "self", ")", ":", "config", "=", "{", "}", "for", "_", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "curr_config", "=", "curriculum", ".", "get_config", "(", ")", "config", ".", "update", "(", "curr_config", ")", "return", "config" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
UnityEnvironment.reset
Sends a signal to reset the unity environment. :return: AllBrainInfo : A data structure corresponding to the initial reset state of the environment.
ml-agents-envs/mlagents/envs/environment.py
def reset(self, config=None, train_mode=True, custom_reset_parameters=None) -> AllBrainInfo: """ Sends a signal to reset the unity environment. :return: AllBrainInfo : A data structure corresponding to the initial reset state of the environment. """ if config is None: config = self._resetParameters elif config: logger.info("Academy reset with parameters: {0}" .format(', '.join([str(x) + ' -> ' + str(config[x]) for x in config]))) for k in config: if (k in self._resetParameters) and (isinstance(config[k], (int, float))): self._resetParameters[k] = config[k] elif not isinstance(config[k], (int, float)): raise UnityEnvironmentException( "The value for parameter '{0}'' must be an Integer or a Float.".format(k)) else: raise UnityEnvironmentException( "The parameter '{0}' is not a valid parameter.".format(k)) if self._loaded: outputs = self.communicator.exchange( self._generate_reset_input(train_mode, config, custom_reset_parameters) ) if outputs is None: raise KeyboardInterrupt rl_output = outputs.rl_output s = self._get_state(rl_output) self._global_done = s[1] for _b in self._external_brain_names: self._n_agents[_b] = len(s[0][_b].agents) return s[0] else: raise UnityEnvironmentException("No Unity environment is loaded.")
def reset(self, config=None, train_mode=True, custom_reset_parameters=None) -> AllBrainInfo: """ Sends a signal to reset the unity environment. :return: AllBrainInfo : A data structure corresponding to the initial reset state of the environment. """ if config is None: config = self._resetParameters elif config: logger.info("Academy reset with parameters: {0}" .format(', '.join([str(x) + ' -> ' + str(config[x]) for x in config]))) for k in config: if (k in self._resetParameters) and (isinstance(config[k], (int, float))): self._resetParameters[k] = config[k] elif not isinstance(config[k], (int, float)): raise UnityEnvironmentException( "The value for parameter '{0}'' must be an Integer or a Float.".format(k)) else: raise UnityEnvironmentException( "The parameter '{0}' is not a valid parameter.".format(k)) if self._loaded: outputs = self.communicator.exchange( self._generate_reset_input(train_mode, config, custom_reset_parameters) ) if outputs is None: raise KeyboardInterrupt rl_output = outputs.rl_output s = self._get_state(rl_output) self._global_done = s[1] for _b in self._external_brain_names: self._n_agents[_b] = len(s[0][_b].agents) return s[0] else: raise UnityEnvironmentException("No Unity environment is loaded.")
[ "Sends", "a", "signal", "to", "reset", "the", "unity", "environment", ".", ":", "return", ":", "AllBrainInfo", ":", "A", "data", "structure", "corresponding", "to", "the", "initial", "reset", "state", "of", "the", "environment", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/environment.py#L246-L279
[ "def", "reset", "(", "self", ",", "config", "=", "None", ",", "train_mode", "=", "True", ",", "custom_reset_parameters", "=", "None", ")", "->", "AllBrainInfo", ":", "if", "config", "is", "None", ":", "config", "=", "self", ".", "_resetParameters", "elif", "config", ":", "logger", ".", "info", "(", "\"Academy reset with parameters: {0}\"", ".", "format", "(", "', '", ".", "join", "(", "[", "str", "(", "x", ")", "+", "' -> '", "+", "str", "(", "config", "[", "x", "]", ")", "for", "x", "in", "config", "]", ")", ")", ")", "for", "k", "in", "config", ":", "if", "(", "k", "in", "self", ".", "_resetParameters", ")", "and", "(", "isinstance", "(", "config", "[", "k", "]", ",", "(", "int", ",", "float", ")", ")", ")", ":", "self", ".", "_resetParameters", "[", "k", "]", "=", "config", "[", "k", "]", "elif", "not", "isinstance", "(", "config", "[", "k", "]", ",", "(", "int", ",", "float", ")", ")", ":", "raise", "UnityEnvironmentException", "(", "\"The value for parameter '{0}'' must be an Integer or a Float.\"", ".", "format", "(", "k", ")", ")", "else", ":", "raise", "UnityEnvironmentException", "(", "\"The parameter '{0}' is not a valid parameter.\"", ".", "format", "(", "k", ")", ")", "if", "self", ".", "_loaded", ":", "outputs", "=", "self", ".", "communicator", ".", "exchange", "(", "self", ".", "_generate_reset_input", "(", "train_mode", ",", "config", ",", "custom_reset_parameters", ")", ")", "if", "outputs", "is", "None", ":", "raise", "KeyboardInterrupt", "rl_output", "=", "outputs", ".", "rl_output", "s", "=", "self", ".", "_get_state", "(", "rl_output", ")", "self", ".", "_global_done", "=", "s", "[", "1", "]", "for", "_b", "in", "self", ".", "_external_brain_names", ":", "self", ".", "_n_agents", "[", "_b", "]", "=", "len", "(", "s", "[", "0", "]", "[", "_b", "]", ".", "agents", ")", "return", "s", "[", "0", "]", "else", ":", "raise", "UnityEnvironmentException", "(", "\"No Unity environment is loaded.\"", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
UnityEnvironment.step
Provides the environment with an action, moves the environment dynamics forward accordingly, and returns observation, state, and reward information to the agent. :param value: Value estimates provided by agents. :param vector_action: Agent's vector action. Can be a scalar or vector of int/floats. :param memory: Vector corresponding to memory used for recurrent policies. :param text_action: Text action to send to environment for. :param custom_action: Optional instance of a CustomAction protobuf message. :return: AllBrainInfo : A Data structure corresponding to the new state of the environment.
ml-agents-envs/mlagents/envs/environment.py
def step(self, vector_action=None, memory=None, text_action=None, value=None, custom_action=None) -> AllBrainInfo: """ Provides the environment with an action, moves the environment dynamics forward accordingly, and returns observation, state, and reward information to the agent. :param value: Value estimates provided by agents. :param vector_action: Agent's vector action. Can be a scalar or vector of int/floats. :param memory: Vector corresponding to memory used for recurrent policies. :param text_action: Text action to send to environment for. :param custom_action: Optional instance of a CustomAction protobuf message. :return: AllBrainInfo : A Data structure corresponding to the new state of the environment. """ vector_action = {} if vector_action is None else vector_action memory = {} if memory is None else memory text_action = {} if text_action is None else text_action value = {} if value is None else value custom_action = {} if custom_action is None else custom_action # Check that environment is loaded, and episode is currently running. 
if self._loaded and not self._global_done and self._global_done is not None: if isinstance(vector_action, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: vector_action = {self._external_brain_names[0]: vector_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names a keys, " "and vector_actions as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a vector_action input") if isinstance(memory, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: memory = {self._external_brain_names[0]: memory} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and memories as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a memory input") if isinstance(text_action, self.SINGLE_BRAIN_TEXT_TYPES): if self._num_external_brains == 1: text_action = {self._external_brain_names[0]: text_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and text_actions as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a value input") if isinstance(value, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: value = {self._external_brain_names[0]: value} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and state/action value estimates as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a value input") if isinstance(custom_action, CustomAction): if 
self._num_external_brains == 1: custom_action = {self._external_brain_names[0]: custom_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and CustomAction instances as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a custom_action input") for brain_name in list(vector_action.keys()) + list(memory.keys()) + list( text_action.keys()): if brain_name not in self._external_brain_names: raise UnityActionException( "The name {0} does not correspond to an external brain " "in the environment".format(brain_name)) for brain_name in self._external_brain_names: n_agent = self._n_agents[brain_name] if brain_name not in vector_action: if self._brains[brain_name].vector_action_space_type == "discrete": vector_action[brain_name] = [0.0] * n_agent * len( self._brains[brain_name].vector_action_space_size) else: vector_action[brain_name] = [0.0] * n_agent * \ self._brains[ brain_name].vector_action_space_size[0] else: vector_action[brain_name] = self._flatten(vector_action[brain_name]) if brain_name not in memory: memory[brain_name] = [] else: if memory[brain_name] is None: memory[brain_name] = [] else: memory[brain_name] = self._flatten(memory[brain_name]) if brain_name not in text_action: text_action[brain_name] = [""] * n_agent else: if text_action[brain_name] is None: text_action[brain_name] = [""] * n_agent if isinstance(text_action[brain_name], str): text_action[brain_name] = [text_action[brain_name]] * n_agent if brain_name not in custom_action: custom_action[brain_name] = [None] * n_agent else: if custom_action[brain_name] is None: custom_action[brain_name] = [None] * n_agent if isinstance(custom_action[brain_name], CustomAction): custom_action[brain_name] = [custom_action[brain_name]] * n_agent number_text_actions = len(text_action[brain_name]) if not ((number_text_actions == n_agent) or 
number_text_actions == 0): raise UnityActionException( "There was a mismatch between the provided text_action and " "the environment's expectation: " "The brain {0} expected {1} text_action but was given {2}".format( brain_name, n_agent, number_text_actions)) discrete_check = self._brains[brain_name].vector_action_space_type == "discrete" expected_discrete_size = n_agent * len( self._brains[brain_name].vector_action_space_size) continuous_check = self._brains[brain_name].vector_action_space_type == "continuous" expected_continuous_size = self._brains[brain_name].vector_action_space_size[ 0] * n_agent if not ((discrete_check and len( vector_action[brain_name]) == expected_discrete_size) or (continuous_check and len( vector_action[brain_name]) == expected_continuous_size)): raise UnityActionException( "There was a mismatch between the provided action and " "the environment's expectation: " "The brain {0} expected {1} {2} action(s), but was provided: {3}" .format(brain_name, str(expected_discrete_size) if discrete_check else str(expected_continuous_size), self._brains[brain_name].vector_action_space_type, str(vector_action[brain_name]))) outputs = self.communicator.exchange( self._generate_step_input(vector_action, memory, text_action, value, custom_action)) if outputs is None: raise KeyboardInterrupt rl_output = outputs.rl_output state = self._get_state(rl_output) self._global_done = state[1] for _b in self._external_brain_names: self._n_agents[_b] = len(state[0][_b].agents) return state[0] elif not self._loaded: raise UnityEnvironmentException("No Unity environment is loaded.") elif self._global_done: raise UnityActionException( "The episode is completed. Reset the environment with 'reset()'") elif self.global_done is None: raise UnityActionException( "You cannot conduct step without first calling reset. " "Reset the environment with 'reset()'")
def step(self, vector_action=None, memory=None, text_action=None, value=None, custom_action=None) -> AllBrainInfo: """ Provides the environment with an action, moves the environment dynamics forward accordingly, and returns observation, state, and reward information to the agent. :param value: Value estimates provided by agents. :param vector_action: Agent's vector action. Can be a scalar or vector of int/floats. :param memory: Vector corresponding to memory used for recurrent policies. :param text_action: Text action to send to environment for. :param custom_action: Optional instance of a CustomAction protobuf message. :return: AllBrainInfo : A Data structure corresponding to the new state of the environment. """ vector_action = {} if vector_action is None else vector_action memory = {} if memory is None else memory text_action = {} if text_action is None else text_action value = {} if value is None else value custom_action = {} if custom_action is None else custom_action # Check that environment is loaded, and episode is currently running. 
if self._loaded and not self._global_done and self._global_done is not None: if isinstance(vector_action, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: vector_action = {self._external_brain_names[0]: vector_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names a keys, " "and vector_actions as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a vector_action input") if isinstance(memory, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: memory = {self._external_brain_names[0]: memory} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and memories as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a memory input") if isinstance(text_action, self.SINGLE_BRAIN_TEXT_TYPES): if self._num_external_brains == 1: text_action = {self._external_brain_names[0]: text_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and text_actions as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a value input") if isinstance(value, self.SINGLE_BRAIN_ACTION_TYPES): if self._num_external_brains == 1: value = {self._external_brain_names[0]: value} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and state/action value estimates as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a value input") if isinstance(custom_action, CustomAction): if 
self._num_external_brains == 1: custom_action = {self._external_brain_names[0]: custom_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and CustomAction instances as values".format(self._num_brains)) else: raise UnityActionException( "There are no external brains in the environment, " "step cannot take a custom_action input") for brain_name in list(vector_action.keys()) + list(memory.keys()) + list( text_action.keys()): if brain_name not in self._external_brain_names: raise UnityActionException( "The name {0} does not correspond to an external brain " "in the environment".format(brain_name)) for brain_name in self._external_brain_names: n_agent = self._n_agents[brain_name] if brain_name not in vector_action: if self._brains[brain_name].vector_action_space_type == "discrete": vector_action[brain_name] = [0.0] * n_agent * len( self._brains[brain_name].vector_action_space_size) else: vector_action[brain_name] = [0.0] * n_agent * \ self._brains[ brain_name].vector_action_space_size[0] else: vector_action[brain_name] = self._flatten(vector_action[brain_name]) if brain_name not in memory: memory[brain_name] = [] else: if memory[brain_name] is None: memory[brain_name] = [] else: memory[brain_name] = self._flatten(memory[brain_name]) if brain_name not in text_action: text_action[brain_name] = [""] * n_agent else: if text_action[brain_name] is None: text_action[brain_name] = [""] * n_agent if isinstance(text_action[brain_name], str): text_action[brain_name] = [text_action[brain_name]] * n_agent if brain_name not in custom_action: custom_action[brain_name] = [None] * n_agent else: if custom_action[brain_name] is None: custom_action[brain_name] = [None] * n_agent if isinstance(custom_action[brain_name], CustomAction): custom_action[brain_name] = [custom_action[brain_name]] * n_agent number_text_actions = len(text_action[brain_name]) if not ((number_text_actions == n_agent) or 
number_text_actions == 0): raise UnityActionException( "There was a mismatch between the provided text_action and " "the environment's expectation: " "The brain {0} expected {1} text_action but was given {2}".format( brain_name, n_agent, number_text_actions)) discrete_check = self._brains[brain_name].vector_action_space_type == "discrete" expected_discrete_size = n_agent * len( self._brains[brain_name].vector_action_space_size) continuous_check = self._brains[brain_name].vector_action_space_type == "continuous" expected_continuous_size = self._brains[brain_name].vector_action_space_size[ 0] * n_agent if not ((discrete_check and len( vector_action[brain_name]) == expected_discrete_size) or (continuous_check and len( vector_action[brain_name]) == expected_continuous_size)): raise UnityActionException( "There was a mismatch between the provided action and " "the environment's expectation: " "The brain {0} expected {1} {2} action(s), but was provided: {3}" .format(brain_name, str(expected_discrete_size) if discrete_check else str(expected_continuous_size), self._brains[brain_name].vector_action_space_type, str(vector_action[brain_name]))) outputs = self.communicator.exchange( self._generate_step_input(vector_action, memory, text_action, value, custom_action)) if outputs is None: raise KeyboardInterrupt rl_output = outputs.rl_output state = self._get_state(rl_output) self._global_done = state[1] for _b in self._external_brain_names: self._n_agents[_b] = len(state[0][_b].agents) return state[0] elif not self._loaded: raise UnityEnvironmentException("No Unity environment is loaded.") elif self._global_done: raise UnityActionException( "The episode is completed. Reset the environment with 'reset()'") elif self.global_done is None: raise UnityActionException( "You cannot conduct step without first calling reset. " "Reset the environment with 'reset()'")
[ "Provides", "the", "environment", "with", "an", "action", "moves", "the", "environment", "dynamics", "forward", "accordingly", "and", "returns", "observation", "state", "and", "reward", "information", "to", "the", "agent", ".", ":", "param", "value", ":", "Value", "estimates", "provided", "by", "agents", ".", ":", "param", "vector_action", ":", "Agent", "s", "vector", "action", ".", "Can", "be", "a", "scalar", "or", "vector", "of", "int", "/", "floats", ".", ":", "param", "memory", ":", "Vector", "corresponding", "to", "memory", "used", "for", "recurrent", "policies", ".", ":", "param", "text_action", ":", "Text", "action", "to", "send", "to", "environment", "for", ".", ":", "param", "custom_action", ":", "Optional", "instance", "of", "a", "CustomAction", "protobuf", "message", ".", ":", "return", ":", "AllBrainInfo", ":", "A", "Data", "structure", "corresponding", "to", "the", "new", "state", "of", "the", "environment", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/environment.py#L281-L451
[ "def", "step", "(", "self", ",", "vector_action", "=", "None", ",", "memory", "=", "None", ",", "text_action", "=", "None", ",", "value", "=", "None", ",", "custom_action", "=", "None", ")", "->", "AllBrainInfo", ":", "vector_action", "=", "{", "}", "if", "vector_action", "is", "None", "else", "vector_action", "memory", "=", "{", "}", "if", "memory", "is", "None", "else", "memory", "text_action", "=", "{", "}", "if", "text_action", "is", "None", "else", "text_action", "value", "=", "{", "}", "if", "value", "is", "None", "else", "value", "custom_action", "=", "{", "}", "if", "custom_action", "is", "None", "else", "custom_action", "# Check that environment is loaded, and episode is currently running.", "if", "self", ".", "_loaded", "and", "not", "self", ".", "_global_done", "and", "self", ".", "_global_done", "is", "not", "None", ":", "if", "isinstance", "(", "vector_action", ",", "self", ".", "SINGLE_BRAIN_ACTION_TYPES", ")", ":", "if", "self", ".", "_num_external_brains", "==", "1", ":", "vector_action", "=", "{", "self", ".", "_external_brain_names", "[", "0", "]", ":", "vector_action", "}", "elif", "self", ".", "_num_external_brains", ">", "1", ":", "raise", "UnityActionException", "(", "\"You have {0} brains, you need to feed a dictionary of brain names a keys, \"", "\"and vector_actions as values\"", ".", "format", "(", "self", ".", "_num_brains", ")", ")", "else", ":", "raise", "UnityActionException", "(", "\"There are no external brains in the environment, \"", "\"step cannot take a vector_action input\"", ")", "if", "isinstance", "(", "memory", ",", "self", ".", "SINGLE_BRAIN_ACTION_TYPES", ")", ":", "if", "self", ".", "_num_external_brains", "==", "1", ":", "memory", "=", "{", "self", ".", "_external_brain_names", "[", "0", "]", ":", "memory", "}", "elif", "self", ".", "_num_external_brains", ">", "1", ":", "raise", "UnityActionException", "(", "\"You have {0} brains, you need to feed a dictionary of brain names as keys \"", "\"and memories as values\"", 
".", "format", "(", "self", ".", "_num_brains", ")", ")", "else", ":", "raise", "UnityActionException", "(", "\"There are no external brains in the environment, \"", "\"step cannot take a memory input\"", ")", "if", "isinstance", "(", "text_action", ",", "self", ".", "SINGLE_BRAIN_TEXT_TYPES", ")", ":", "if", "self", ".", "_num_external_brains", "==", "1", ":", "text_action", "=", "{", "self", ".", "_external_brain_names", "[", "0", "]", ":", "text_action", "}", "elif", "self", ".", "_num_external_brains", ">", "1", ":", "raise", "UnityActionException", "(", "\"You have {0} brains, you need to feed a dictionary of brain names as keys \"", "\"and text_actions as values\"", ".", "format", "(", "self", ".", "_num_brains", ")", ")", "else", ":", "raise", "UnityActionException", "(", "\"There are no external brains in the environment, \"", "\"step cannot take a value input\"", ")", "if", "isinstance", "(", "value", ",", "self", ".", "SINGLE_BRAIN_ACTION_TYPES", ")", ":", "if", "self", ".", "_num_external_brains", "==", "1", ":", "value", "=", "{", "self", ".", "_external_brain_names", "[", "0", "]", ":", "value", "}", "elif", "self", ".", "_num_external_brains", ">", "1", ":", "raise", "UnityActionException", "(", "\"You have {0} brains, you need to feed a dictionary of brain names as keys \"", "\"and state/action value estimates as values\"", ".", "format", "(", "self", ".", "_num_brains", ")", ")", "else", ":", "raise", "UnityActionException", "(", "\"There are no external brains in the environment, \"", "\"step cannot take a value input\"", ")", "if", "isinstance", "(", "custom_action", ",", "CustomAction", ")", ":", "if", "self", ".", "_num_external_brains", "==", "1", ":", "custom_action", "=", "{", "self", ".", "_external_brain_names", "[", "0", "]", ":", "custom_action", "}", "elif", "self", ".", "_num_external_brains", ">", "1", ":", "raise", "UnityActionException", "(", "\"You have {0} brains, you need to feed a dictionary of brain names as keys \"", "\"and 
CustomAction instances as values\"", ".", "format", "(", "self", ".", "_num_brains", ")", ")", "else", ":", "raise", "UnityActionException", "(", "\"There are no external brains in the environment, \"", "\"step cannot take a custom_action input\"", ")", "for", "brain_name", "in", "list", "(", "vector_action", ".", "keys", "(", ")", ")", "+", "list", "(", "memory", ".", "keys", "(", ")", ")", "+", "list", "(", "text_action", ".", "keys", "(", ")", ")", ":", "if", "brain_name", "not", "in", "self", ".", "_external_brain_names", ":", "raise", "UnityActionException", "(", "\"The name {0} does not correspond to an external brain \"", "\"in the environment\"", ".", "format", "(", "brain_name", ")", ")", "for", "brain_name", "in", "self", ".", "_external_brain_names", ":", "n_agent", "=", "self", ".", "_n_agents", "[", "brain_name", "]", "if", "brain_name", "not", "in", "vector_action", ":", "if", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_type", "==", "\"discrete\"", ":", "vector_action", "[", "brain_name", "]", "=", "[", "0.0", "]", "*", "n_agent", "*", "len", "(", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_size", ")", "else", ":", "vector_action", "[", "brain_name", "]", "=", "[", "0.0", "]", "*", "n_agent", "*", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_size", "[", "0", "]", "else", ":", "vector_action", "[", "brain_name", "]", "=", "self", ".", "_flatten", "(", "vector_action", "[", "brain_name", "]", ")", "if", "brain_name", "not", "in", "memory", ":", "memory", "[", "brain_name", "]", "=", "[", "]", "else", ":", "if", "memory", "[", "brain_name", "]", "is", "None", ":", "memory", "[", "brain_name", "]", "=", "[", "]", "else", ":", "memory", "[", "brain_name", "]", "=", "self", ".", "_flatten", "(", "memory", "[", "brain_name", "]", ")", "if", "brain_name", "not", "in", "text_action", ":", "text_action", "[", "brain_name", "]", "=", "[", "\"\"", "]", "*", "n_agent", "else", 
":", "if", "text_action", "[", "brain_name", "]", "is", "None", ":", "text_action", "[", "brain_name", "]", "=", "[", "\"\"", "]", "*", "n_agent", "if", "isinstance", "(", "text_action", "[", "brain_name", "]", ",", "str", ")", ":", "text_action", "[", "brain_name", "]", "=", "[", "text_action", "[", "brain_name", "]", "]", "*", "n_agent", "if", "brain_name", "not", "in", "custom_action", ":", "custom_action", "[", "brain_name", "]", "=", "[", "None", "]", "*", "n_agent", "else", ":", "if", "custom_action", "[", "brain_name", "]", "is", "None", ":", "custom_action", "[", "brain_name", "]", "=", "[", "None", "]", "*", "n_agent", "if", "isinstance", "(", "custom_action", "[", "brain_name", "]", ",", "CustomAction", ")", ":", "custom_action", "[", "brain_name", "]", "=", "[", "custom_action", "[", "brain_name", "]", "]", "*", "n_agent", "number_text_actions", "=", "len", "(", "text_action", "[", "brain_name", "]", ")", "if", "not", "(", "(", "number_text_actions", "==", "n_agent", ")", "or", "number_text_actions", "==", "0", ")", ":", "raise", "UnityActionException", "(", "\"There was a mismatch between the provided text_action and \"", "\"the environment's expectation: \"", "\"The brain {0} expected {1} text_action but was given {2}\"", ".", "format", "(", "brain_name", ",", "n_agent", ",", "number_text_actions", ")", ")", "discrete_check", "=", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_type", "==", "\"discrete\"", "expected_discrete_size", "=", "n_agent", "*", "len", "(", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_size", ")", "continuous_check", "=", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_type", "==", "\"continuous\"", "expected_continuous_size", "=", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_size", "[", "0", "]", "*", "n_agent", "if", "not", "(", "(", "discrete_check", "and", "len", "(", "vector_action", "[", "brain_name", "]", ")", 
"==", "expected_discrete_size", ")", "or", "(", "continuous_check", "and", "len", "(", "vector_action", "[", "brain_name", "]", ")", "==", "expected_continuous_size", ")", ")", ":", "raise", "UnityActionException", "(", "\"There was a mismatch between the provided action and \"", "\"the environment's expectation: \"", "\"The brain {0} expected {1} {2} action(s), but was provided: {3}\"", ".", "format", "(", "brain_name", ",", "str", "(", "expected_discrete_size", ")", "if", "discrete_check", "else", "str", "(", "expected_continuous_size", ")", ",", "self", ".", "_brains", "[", "brain_name", "]", ".", "vector_action_space_type", ",", "str", "(", "vector_action", "[", "brain_name", "]", ")", ")", ")", "outputs", "=", "self", ".", "communicator", ".", "exchange", "(", "self", ".", "_generate_step_input", "(", "vector_action", ",", "memory", ",", "text_action", ",", "value", ",", "custom_action", ")", ")", "if", "outputs", "is", "None", ":", "raise", "KeyboardInterrupt", "rl_output", "=", "outputs", ".", "rl_output", "state", "=", "self", ".", "_get_state", "(", "rl_output", ")", "self", ".", "_global_done", "=", "state", "[", "1", "]", "for", "_b", "in", "self", ".", "_external_brain_names", ":", "self", ".", "_n_agents", "[", "_b", "]", "=", "len", "(", "state", "[", "0", "]", "[", "_b", "]", ".", "agents", ")", "return", "state", "[", "0", "]", "elif", "not", "self", ".", "_loaded", ":", "raise", "UnityEnvironmentException", "(", "\"No Unity environment is loaded.\"", ")", "elif", "self", ".", "_global_done", ":", "raise", "UnityActionException", "(", "\"The episode is completed. Reset the environment with 'reset()'\"", ")", "elif", "self", ".", "global_done", "is", "None", ":", "raise", "UnityActionException", "(", "\"You cannot conduct step without first calling reset. \"", "\"Reset the environment with 'reset()'\"", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
UnityEnvironment._flatten
Converts arrays to list. :param arr: numpy vector. :return: flattened list.
ml-agents-envs/mlagents/envs/environment.py
def _flatten(cls, arr) -> List[float]: """ Converts arrays to list. :param arr: numpy vector. :return: flattened list. """ if isinstance(arr, cls.SCALAR_ACTION_TYPES): arr = [float(arr)] if isinstance(arr, np.ndarray): arr = arr.tolist() if len(arr) == 0: return arr if isinstance(arr[0], np.ndarray): arr = [item for sublist in arr for item in sublist.tolist()] if isinstance(arr[0], list): arr = [item for sublist in arr for item in sublist] arr = [float(x) for x in arr] return arr
def _flatten(cls, arr) -> List[float]: """ Converts arrays to list. :param arr: numpy vector. :return: flattened list. """ if isinstance(arr, cls.SCALAR_ACTION_TYPES): arr = [float(arr)] if isinstance(arr, np.ndarray): arr = arr.tolist() if len(arr) == 0: return arr if isinstance(arr[0], np.ndarray): arr = [item for sublist in arr for item in sublist.tolist()] if isinstance(arr[0], list): arr = [item for sublist in arr for item in sublist] arr = [float(x) for x in arr] return arr
[ "Converts", "arrays", "to", "list", ".", ":", "param", "arr", ":", "numpy", "vector", ".", ":", "return", ":", "flattened", "list", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/environment.py#L469-L486
[ "def", "_flatten", "(", "cls", ",", "arr", ")", "->", "List", "[", "float", "]", ":", "if", "isinstance", "(", "arr", ",", "cls", ".", "SCALAR_ACTION_TYPES", ")", ":", "arr", "=", "[", "float", "(", "arr", ")", "]", "if", "isinstance", "(", "arr", ",", "np", ".", "ndarray", ")", ":", "arr", "=", "arr", ".", "tolist", "(", ")", "if", "len", "(", "arr", ")", "==", "0", ":", "return", "arr", "if", "isinstance", "(", "arr", "[", "0", "]", ",", "np", ".", "ndarray", ")", ":", "arr", "=", "[", "item", "for", "sublist", "in", "arr", "for", "item", "in", "sublist", ".", "tolist", "(", ")", "]", "if", "isinstance", "(", "arr", "[", "0", "]", ",", "list", ")", ":", "arr", "=", "[", "item", "for", "sublist", "in", "arr", "for", "item", "in", "sublist", "]", "arr", "=", "[", "float", "(", "x", ")", "for", "x", "in", "arr", "]", "return", "arr" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
UnityEnvironment._get_state
Collects experience information from all external brains in environment at current step. :return: a dictionary of BrainInfo objects.
ml-agents-envs/mlagents/envs/environment.py
def _get_state(self, output: UnityRLOutput) -> (AllBrainInfo, bool): """ Collects experience information from all external brains in environment at current step. :return: a dictionary of BrainInfo objects. """ _data = {} global_done = output.global_done for brain_name in output.agentInfos: agent_info_list = output.agentInfos[brain_name].value _data[brain_name] = BrainInfo.from_agent_proto(agent_info_list, self.brains[brain_name]) return _data, global_done
def _get_state(self, output: UnityRLOutput) -> (AllBrainInfo, bool): """ Collects experience information from all external brains in environment at current step. :return: a dictionary of BrainInfo objects. """ _data = {} global_done = output.global_done for brain_name in output.agentInfos: agent_info_list = output.agentInfos[brain_name].value _data[brain_name] = BrainInfo.from_agent_proto(agent_info_list, self.brains[brain_name]) return _data, global_done
[ "Collects", "experience", "information", "from", "all", "external", "brains", "in", "environment", "at", "current", "step", ".", ":", "return", ":", "a", "dictionary", "of", "BrainInfo", "objects", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/environment.py#L488-L499
[ "def", "_get_state", "(", "self", ",", "output", ":", "UnityRLOutput", ")", "->", "(", "AllBrainInfo", ",", "bool", ")", ":", "_data", "=", "{", "}", "global_done", "=", "output", ".", "global_done", "for", "brain_name", "in", "output", ".", "agentInfos", ":", "agent_info_list", "=", "output", ".", "agentInfos", "[", "brain_name", "]", ".", "value", "_data", "[", "brain_name", "]", "=", "BrainInfo", ".", "from_agent_proto", "(", "agent_info_list", ",", "self", ".", "brains", "[", "brain_name", "]", ")", "return", "_data", ",", "global_done" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
TrainerMetrics.end_experience_collection_timer
Inform Metrics class that experience collection is done.
ml-agents/mlagents/trainers/trainer_metrics.py
def end_experience_collection_timer(self): """ Inform Metrics class that experience collection is done. """ if self.time_start_experience_collection: curr_delta = time() - self.time_start_experience_collection if self.delta_last_experience_collection is None: self.delta_last_experience_collection = curr_delta else: self.delta_last_experience_collection += curr_delta self.time_start_experience_collection = None
def end_experience_collection_timer(self): """ Inform Metrics class that experience collection is done. """ if self.time_start_experience_collection: curr_delta = time() - self.time_start_experience_collection if self.delta_last_experience_collection is None: self.delta_last_experience_collection = curr_delta else: self.delta_last_experience_collection += curr_delta self.time_start_experience_collection = None
[ "Inform", "Metrics", "class", "that", "experience", "collection", "is", "done", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L39-L49
[ "def", "end_experience_collection_timer", "(", "self", ")", ":", "if", "self", ".", "time_start_experience_collection", ":", "curr_delta", "=", "time", "(", ")", "-", "self", ".", "time_start_experience_collection", "if", "self", ".", "delta_last_experience_collection", "is", "None", ":", "self", ".", "delta_last_experience_collection", "=", "curr_delta", "else", ":", "self", ".", "delta_last_experience_collection", "+=", "curr_delta", "self", ".", "time_start_experience_collection", "=", "None" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
TrainerMetrics.add_delta_step
Inform Metrics class about time to step in environment.
ml-agents/mlagents/trainers/trainer_metrics.py
def add_delta_step(self, delta: float): """ Inform Metrics class about time to step in environment. """ if self.delta_last_experience_collection: self.delta_last_experience_collection += delta else: self.delta_last_experience_collection = delta
def add_delta_step(self, delta: float): """ Inform Metrics class about time to step in environment. """ if self.delta_last_experience_collection: self.delta_last_experience_collection += delta else: self.delta_last_experience_collection = delta
[ "Inform", "Metrics", "class", "about", "time", "to", "step", "in", "environment", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L51-L58
[ "def", "add_delta_step", "(", "self", ",", "delta", ":", "float", ")", ":", "if", "self", ".", "delta_last_experience_collection", ":", "self", ".", "delta_last_experience_collection", "+=", "delta", "else", ":", "self", ".", "delta_last_experience_collection", "=", "delta" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
TrainerMetrics.start_policy_update_timer
Inform Metrics class that policy update has started. :int number_experiences: Number of experiences in Buffer at this point. :float mean_return: Return averaged across all cumulative returns since last policy update
ml-agents/mlagents/trainers/trainer_metrics.py
def start_policy_update_timer(self, number_experiences: int, mean_return: float): """ Inform Metrics class that policy update has started. :int number_experiences: Number of experiences in Buffer at this point. :float mean_return: Return averaged across all cumulative returns since last policy update """ self.last_buffer_length = number_experiences self.last_mean_return = mean_return self.time_policy_update_start = time()
def start_policy_update_timer(self, number_experiences: int, mean_return: float): """ Inform Metrics class that policy update has started. :int number_experiences: Number of experiences in Buffer at this point. :float mean_return: Return averaged across all cumulative returns since last policy update """ self.last_buffer_length = number_experiences self.last_mean_return = mean_return self.time_policy_update_start = time()
[ "Inform", "Metrics", "class", "that", "policy", "update", "has", "started", ".", ":", "int", "number_experiences", ":", "Number", "of", "experiences", "in", "Buffer", "at", "this", "point", ".", ":", "float", "mean_return", ":", "Return", "averaged", "across", "all", "cumulative", "returns", "since", "last", "policy", "update" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L60-L68
[ "def", "start_policy_update_timer", "(", "self", ",", "number_experiences", ":", "int", ",", "mean_return", ":", "float", ")", ":", "self", ".", "last_buffer_length", "=", "number_experiences", "self", ".", "last_mean_return", "=", "mean_return", "self", ".", "time_policy_update_start", "=", "time", "(", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
TrainerMetrics.end_policy_update
Inform Metrics class that policy update has started.
ml-agents/mlagents/trainers/trainer_metrics.py
def end_policy_update(self): """ Inform Metrics class that policy update has started. """ if self.time_policy_update_start: self.delta_policy_update = time() - self.time_policy_update_start else: self.delta_policy_update = 0 delta_train_start = time() - self.time_training_start LOGGER.debug(" Policy Update Training Metrics for {}: " "\n\t\tTime to update Policy: {:0.3f} s \n" "\t\tTime elapsed since training: {:0.3f} s \n" "\t\tTime for experience collection: {:0.3f} s \n" "\t\tBuffer Length: {} \n" "\t\tReturns : {:0.3f}\n" .format(self.brain_name, self.delta_policy_update, delta_train_start, self.delta_last_experience_collection, self.last_buffer_length, self.last_mean_return)) self._add_row(delta_train_start)
def end_policy_update(self): """ Inform Metrics class that policy update has started. """ if self.time_policy_update_start: self.delta_policy_update = time() - self.time_policy_update_start else: self.delta_policy_update = 0 delta_train_start = time() - self.time_training_start LOGGER.debug(" Policy Update Training Metrics for {}: " "\n\t\tTime to update Policy: {:0.3f} s \n" "\t\tTime elapsed since training: {:0.3f} s \n" "\t\tTime for experience collection: {:0.3f} s \n" "\t\tBuffer Length: {} \n" "\t\tReturns : {:0.3f}\n" .format(self.brain_name, self.delta_policy_update, delta_train_start, self.delta_last_experience_collection, self.last_buffer_length, self.last_mean_return)) self._add_row(delta_train_start)
[ "Inform", "Metrics", "class", "that", "policy", "update", "has", "started", "." ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L79-L97
[ "def", "end_policy_update", "(", "self", ")", ":", "if", "self", ".", "time_policy_update_start", ":", "self", ".", "delta_policy_update", "=", "time", "(", ")", "-", "self", ".", "time_policy_update_start", "else", ":", "self", ".", "delta_policy_update", "=", "0", "delta_train_start", "=", "time", "(", ")", "-", "self", ".", "time_training_start", "LOGGER", ".", "debug", "(", "\" Policy Update Training Metrics for {}: \"", "\"\\n\\t\\tTime to update Policy: {:0.3f} s \\n\"", "\"\\t\\tTime elapsed since training: {:0.3f} s \\n\"", "\"\\t\\tTime for experience collection: {:0.3f} s \\n\"", "\"\\t\\tBuffer Length: {} \\n\"", "\"\\t\\tReturns : {:0.3f}\\n\"", ".", "format", "(", "self", ".", "brain_name", ",", "self", ".", "delta_policy_update", ",", "delta_train_start", ",", "self", ".", "delta_last_experience_collection", ",", "self", ".", "last_buffer_length", ",", "self", ".", "last_mean_return", ")", ")", "self", ".", "_add_row", "(", "delta_train_start", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
train
TrainerMetrics.write_training_metrics
Write Training Metrics to CSV
ml-agents/mlagents/trainers/trainer_metrics.py
def write_training_metrics(self): """ Write Training Metrics to CSV """ with open(self.path, 'w') as file: writer = csv.writer(file) writer.writerow(FIELD_NAMES) for row in self.rows: writer.writerow(row)
def write_training_metrics(self): """ Write Training Metrics to CSV """ with open(self.path, 'w') as file: writer = csv.writer(file) writer.writerow(FIELD_NAMES) for row in self.rows: writer.writerow(row)
[ "Write", "Training", "Metrics", "to", "CSV" ]
Unity-Technologies/ml-agents
python
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L99-L107
[ "def", "write_training_metrics", "(", "self", ")", ":", "with", "open", "(", "self", ".", "path", ",", "'w'", ")", "as", "file", ":", "writer", "=", "csv", ".", "writer", "(", "file", ")", "writer", ".", "writerow", "(", "FIELD_NAMES", ")", "for", "row", "in", "self", ".", "rows", ":", "writer", ".", "writerow", "(", "row", ")" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f