Dataset columns (field name, type, and observed size range):

repository_name: string, length 7 to 55
func_path_in_repository: string, length 4 to 223
func_name: string, length 1 to 134
whole_func_string: string, length 75 to 104k
language: string, 1 distinct value
func_code_string: string, length 75 to 104k
func_code_tokens: list, length 19 to 28.4k
func_documentation_string: string, length 1 to 46.9k
func_documentation_tokens: list, length 1 to 1.97k
split_name: string, 1 distinct value
func_code_url: string, length 87 to 315
mozilla-services/axe-selenium-python
axe_selenium_python/axe.py
Axe.write_results
def write_results(self, data, name=None):
    """
    Write JSON to file with the specified name.

    :param name: Path to the file to be written to. If no path is passed
        a new JSON file "results.json" will be created in the current
        working directory.
    :param data: JSON object.
    """
    if name:
        filepath = os.path.abspath(name)
    else:
        # Default to the current working directory
        filepath = os.path.join(os.getcwd(), "results.json")

    with open(filepath, "w", encoding="utf8") as f:
        try:
            # Python 2: write the JSON string as unicode
            f.write(unicode(json.dumps(data, indent=4)))
        except NameError:
            # Python 3: unicode() does not exist; str is already unicode
            f.write(json.dumps(data, indent=4))
python
Write JSON to file with the specified name. :param name: Path to the file to be written to. If no path is passed a new JSON file "results.json" will be created in the current working directory. :param data: JSON object.
[ "Write", "JSON", "to", "file", "with", "the", "specified", "name", "." ]
train
https://github.com/mozilla-services/axe-selenium-python/blob/475c9f4eb771587aea73897bee356284d0361d77/axe_selenium_python/axe.py#L96-L115
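For orientation, a minimal usage sketch of this method; it assumes the Axe wrapper exposed by axe_selenium_python and an already-running Selenium driver, and the page URL is only a placeholder:

from selenium import webdriver
from axe_selenium_python import Axe

driver = webdriver.Firefox()
driver.get("https://example.com")  # placeholder page to audit

axe = Axe(driver)
axe.inject()            # load the axe-core script into the page
results = axe.run()     # run the accessibility audit
axe.write_results(results, "results.json")  # serialize the audit to JSON
driver.quit()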
grigi/talkey
talkey/tts.py
create_engine
def create_engine(engine, options=None, defaults=None):
    '''
    Creates an instance of an engine. There is a two-stage instantiation process with engines.

    1. ``options``: The keyword options to instantiate the engine class
    2. ``defaults``: The default configuration for the engine (options often depends on instantiated TTS engine)
    '''
    if engine not in _ENGINE_MAP.keys():
        raise TTSError('Unknown engine %s' % engine)

    options = options or {}
    defaults = defaults or {}
    einst = _ENGINE_MAP[engine](**options)
    einst.configure_default(**defaults)
    return einst
python
Creates an instance of an engine. There is a two-stage instantiation process with engines. 1. ``options``: The keyword options to instantiate the engine class 2. ``defaults``: The default configuration for the engine (options often depends on instantiated TTS engine)
[ "Creates", "an", "instance", "of", "an", "engine", ".", "There", "is", "a", "two", "-", "stage", "instantiation", "process", "with", "engines", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/tts.py#L14-L31
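A hedged sketch of calling create_engine; the 'espeak' slug and the 'language' default are illustrative values, since the available engines and their options depend on what is installed:

from talkey.tts import create_engine

# Stage 1: keyword options for the engine class; stage 2: defaults passed
# to configure_default(). Both dicts here are illustrative.
engine = create_engine(
    'espeak',
    options={},
    defaults={'language': 'en'},
)
engine.say('Hello world')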
grigi/talkey
talkey/tts.py
Talkey.classify
def classify(self, txt):
    '''
    Classifies text by language. Uses preferred_languages weighting.
    '''
    ranks = []
    for lang, score in langid.rank(txt):
        if lang in self.preferred_languages:
            score += self.preferred_factor
        ranks.append((lang, score))
    ranks.sort(key=lambda x: x[1], reverse=True)
    return ranks[0][0]
python
Classifies text by language. Uses preferred_languages weighting.
[ "Classifies", "text", "by", "language", ".", "Uses", "preferred_languages", "weighting", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/tts.py#L105-L115
grigi/talkey
talkey/tts.py
Talkey.get_engine_for_lang
def get_engine_for_lang(self, lang):
    '''
    Determines the preferred engine/voice for a language.
    '''
    for eng in self.engines:
        if lang in eng.languages.keys():
            return eng
    raise TTSError('Could not match language')
python
Determines the preferred engine/voice for a language.
[ "Determines", "the", "preferred", "engine", "/", "voice", "for", "a", "language", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/tts.py#L117-L124
grigi/talkey
talkey/tts.py
Talkey.say
def say(self, txt, lang=None):
    '''
    Says the text.

    if ``lang`` is ``None``, then uses ``classify()`` to detect language.
    '''
    lang = lang or self.classify(txt)
    self.get_engine_for_lang(lang).say(txt, language=lang)
python
Says the text. if ``lang`` is ``None``, then uses ``classify()`` to detect language.
[ "Says", "the", "text", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/tts.py#L126-L133
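classify, get_engine_for_lang, and say together form the high-level flow; a minimal sketch of driving it through the Talkey front end (the constructor argument is an assumption about the public API, while the say signature matches the record above):

import talkey

# preferred_languages biases classify() toward these codes when langid
# scores are close; everything else falls back to library defaults.
tts = talkey.Talkey(preferred_languages=['en'])
tts.say('Hello, world')        # language auto-detected via classify()
tts.say('Bonjour', lang='fr')  # or forced explicitly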
grigi/talkey
talkey/base.py
AbstractTTSEngine.configure_default
def configure_default(self, **_options):
    '''
    Sets default configuration.

    Raises TTSError on error.
    '''
    language, voice, voiceinfo, options = self._configure(**_options)
    self.languages_options[language] = (voice, options)
    self.default_language = language
    self.default_options = options
python
Sets default configuration. Raises TTSError on error.
[ "Sets", "default", "configuration", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/base.py#L206-L215
grigi/talkey
talkey/base.py
AbstractTTSEngine.configure
def configure(self, **_options):
    '''
    Sets language-specific configuration.

    Raises TTSError on error.
    '''
    language, voice, voiceinfo, options = self._configure(**_options)
    self.languages_options[language] = (voice, options)
python
Sets language-specific configuration. Raises TTSError on error.
[ "Sets", "language", "-", "specific", "configuration", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/base.py#L217-L224
grigi/talkey
talkey/base.py
AbstractTTSEngine.say
def say(self, phrase, **_options):
    '''
    Says the phrase, optionally allows to select/override any voice options.
    '''
    language, voice, voiceinfo, options = self._configure(**_options)
    self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
    self._say(phrase, language, voice, voiceinfo, options)
python
Says the phrase, optionally allows to select/override any voice options.
[ "Says", "the", "phrase", "optionally", "allows", "to", "select", "/", "override", "any", "voice", "options", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/base.py#L226-L232
grigi/talkey
talkey/base.py
AbstractTTSEngine.play
def play(self, filename, translate=False):  # pragma: no cover
    '''
    Plays the sounds.

    :filename: The input file name
    :translate: If True, it runs it through audioread which will translate from common compression formats to raw WAV.
    '''
    # FIXME: Use platform-independent and async audio-output here
    # PyAudio looks most promising, too bad about:
    # --allow-external PyAudio --allow-unverified PyAudio
    if translate:
        with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
            fname = f.name
        with audioread.audio_open(filename) as f:
            with contextlib.closing(wave.open(fname, 'w')) as of:
                of.setnchannels(f.channels)
                of.setframerate(f.samplerate)
                of.setsampwidth(2)
                for buf in f:
                    of.writeframes(buf)
        filename = fname

    if winsound:
        winsound.PlaySound(str(filename), winsound.SND_FILENAME)
    else:
        cmd = ['aplay', str(filename)]
        self._logger.debug('Executing %s', ' '.join([pipes.quote(arg) for arg in cmd]))
        subprocess.call(cmd)

    if translate:
        os.remove(fname)
python
Plays the sounds. :filename: The input file name :translate: If True, it runs it through audioread which will translate from common compression formats to raw WAV.
[ "Plays", "the", "sounds", "." ]
train
https://github.com/grigi/talkey/blob/5d2d4a1f7001744c4fd9a79a883a3f2001522329/talkey/base.py#L234-L264
trbs/bucky
bucky/collectd.py
getCollectDServer
def getCollectDServer(queue, cfg):
    """Get the appropriate collectd server (multi processed or not)"""
    server = CollectDServerMP if cfg.collectd_workers > 1 else CollectDServer
    return server(queue, cfg)
python
Get the appropriate collectd server (multi processed or not)
[ "Get", "the", "appropriate", "collectd", "server", "(", "multi", "processed", "or", "not", ")" ]
train
https://github.com/trbs/bucky/blob/ae4c696be46cda977cb5f27c31420985ef1cc0ba/bucky/collectd.py#L652-L655
trbs/bucky
bucky/collectd.py
CollectDCrypto._hashes_match
def _hashes_match(self, a, b):
    """Constant time comparison of bytes for py3, strings for py2"""
    if len(a) != len(b):
        return False
    diff = 0
    if six.PY2:
        a = bytearray(a)
        b = bytearray(b)
    for x, y in zip(a, b):
        diff |= x ^ y
    return not diff
python
Constant time comparison of bytes for py3, strings for py2
[ "Constant", "time", "comparison", "of", "bytes", "for", "py3", "strings", "for", "py2" ]
train
https://github.com/trbs/bucky/blob/ae4c696be46cda977cb5f27c31420985ef1cc0ba/bucky/collectd.py#L369-L379
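On Python 3 the standard library already ships a constant-time comparison, so the same idea can be sketched with hmac.compare_digest (this is not what bucky does internally, just an equivalent-intent illustration):

import hmac

def hashes_match(a: bytes, b: bytes) -> bool:
    # hmac.compare_digest performs a timing-attack-resistant comparison,
    # matching the intent of the manual XOR/OR loop above.
    return hmac.compare_digest(a, b)

assert hashes_match(b"abc", b"abc")
assert not hashes_match(b"abc", b"abd")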
heigeo/climata
climata/bin/acis_sites.py
load_sites
def load_sites(*basin_ids):
    """
    Load metadata for all sites in given basin codes.
    """
    # Resolve basin ids to HUC8s if needed
    basins = []
    for basin in basin_ids:
        if basin.isdigit() and len(basin) == 8:
            basins.append(basin)
        else:
            from climata.huc8 import get_huc8
            basins.extend(get_huc8(basin))

    # Load sites with data since 1900
    sites = StationMetaIO(
        basin=basins,
        parameter=list(elems.keys()),
        start_date='1900-01-01',
        end_date=date.today(),
        meta=ALL_META_FIELDS,
    )

    # Load all sites (to get sites without data)
    seen_sites = [site.uid for site in sites]
    nodata_sites = [
        site for site in StationMetaIO(basin=basins)
        if site.uid not in seen_sites
    ]

    # Determine the following from the site lists:
    seen_auths = set()  # Which authority codes are actually used by any site
    seen_elems = set()  # Which elems actually have data in any site
    ranges = {}         # The overall period of record for each site

    for site in sites:
        for auth in site.sids.keys():
            seen_auths.add(auth)
        start, end = None, None
        for elem in site.valid_daterange:
            s, e = site.valid_daterange[elem]
            seen_elems.add(elem)
            if s is None or e is None:
                continue
            if start is None or s < start:
                start = s
            if end is None or e > end:
                end = e
        ranges[site.uid] = [start, end]

    # Check for authority codes that might not be in sites with data
    for site in nodata_sites:
        for auth in site.sids.keys():
            seen_auths.add(auth)

    # Print CSV headers (FIXME: use CsvFileIO for this?)
    seen_auths = sorted(seen_auths)
    seen_elems = sorted(seen_elems)
    print(",".join(
        ['ACIS uid', 'name']
        + seen_auths
        + ['latitude', 'longitude', 'start', 'end', 'years']
        + [elems[elem]['desc'] for elem in seen_elems]
    ))

    # Print sites with data
    for site in sites:
        # Determine if elems are available for entire period or shorter range
        start, end = ranges[site.uid]
        if start and end:
            years = end.year - start.year + 1
            elem_ranges = []
            for elem in seen_elems:
                estart, eend = site.valid_daterange[elem]
                if estart is None:
                    erange = ""
                elif estart == start and eend == end:
                    erange = "period"
                else:
                    erange = "%s to %s" % (estart.date(), eend.date())
                elem_ranges.append(erange)

            # Output CSV row
            print(",".join(map(
                str,
                [site.uid, site.name]
                + [site.sids.get(auth, "") for auth in seen_auths]
                + [site.latitude, site.longitude]
                + [start.date(), end.date(), years]
                + elem_ranges
            )))

    # Print CSV rows for sites without data
    for site in nodata_sites:
        print(",".join(map(
            str,
            [site.uid, site.name]
            + [site.sids.get(auth, "") for auth in seen_auths]
            + [site.latitude, site.longitude]
            + ["NO DATA"]
        )))
python
Load metadata for all sites in given basin codes.
[ "Load", "metadata", "for", "all", "sites", "in", "given", "basin", "codes", "." ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/bin/acis_sites.py#L16-L118
heigeo/climata
climata/huc8/__init__.py
get_huc8
def get_huc8(prefix):
    """
    Return all HUC8s matching the given prefix (e.g. 1801)
    or basin name (e.g. Klamath)
    """
    if not prefix.isdigit():
        # Look up hucs by name
        name = prefix
        prefix = None
        for row in hucs:
            if row.basin.lower() == name.lower():
                # Use most general huc if two have the same name
                if prefix is None or len(row.huc) < len(prefix):
                    prefix = row.huc
        if prefix is None:
            return []

    huc8s = []
    for row in hucs:
        # Return all 8-digit hucs with given prefix
        if len(row.huc) == 8 and row.huc.startswith(prefix):
            huc8s.append(row.huc)
    return huc8s
python
Return all HUC8s matching the given prefix (e.g. 1801) or basin name (e.g. Klamath)
[ "Return", "all", "HUC8s", "matching", "the", "given", "prefix", "(", "e", ".", "g", ".", "1801", ")", "or", "basin", "name", "(", "e", ".", "g", ".", "Klamath", ")" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/huc8/__init__.py#L23-L46
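A short usage sketch of get_huc8; the prefix and basin name come from the docstring's examples, and the actual return values depend on the bundled HUC table:

from climata.huc8 import get_huc8

# Numeric prefix: every 8-digit HUC that starts with "1801"
print(get_huc8("1801"))

# Basin name: resolved to the most general matching HUC first
print(get_huc8("Klamath"))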
heigeo/climata
climata/acis/__init__.py
StationMetaIO.parse
def parse(self):
    """
    Convert ACIS 'll' value into separate latitude and longitude.
    """
    super(AcisIO, self).parse()

    # This is more of a "mapping" step than a "parsing" step, but mappers
    # only allow one-to-one mapping from input fields to output fields.
    for row in self.data:
        if 'meta' in row:
            row = row['meta']
        if 'll' in row:
            row['longitude'], row['latitude'] = row['ll']
            del row['ll']
python
Convert ACIS 'll' value into separate latitude and longitude.
[ "Convert", "ACIS", "ll", "value", "into", "separate", "latitude", "and", "longitude", "." ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L80-L93
heigeo/climata
climata/acis/__init__.py
StationMetaIO.map_value
def map_value(self, field, value):
    """
    Clean up some values returned from the web service.
    (overrides wq.io.mappers.BaseMapper)
    """
    if field == 'sids':
        # Site identifiers are returned as "[id] [auth_id]";
        # Map to auth name for easier usability
        ids = {}
        for idinfo in value:
            id, auth = idinfo.split(' ')
            auth = AUTHORITY_BY_ID[auth]
            ids[auth['name']] = id
        return ids

    if field == 'valid_daterange':
        # Date ranges for each element are returned in an array
        # (sorted by the order the elements were requested);
        # Convert to dictionary with element id as key
        elems, complex = self.getlist('parameter')
        ranges = {}
        for elem, val in zip(elems, value):
            if val:
                start, end = val
                ranges[elem] = (parse_date(start), parse_date(end))
            else:
                ranges[elem] = None, None
        return ranges

    return value
python
Clean up some values returned from the web service. (overrides wq.io.mappers.BaseMapper)
[ "Clean", "up", "some", "values", "returned", "from", "the", "web", "service", ".", "(", "overrides", "wq", ".", "io", ".", "mappers", ".", "BaseMapper", ")" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L95-L124
heigeo/climata
climata/acis/__init__.py
StationDataIO.get_field_names
def get_field_names(self):
    """
    ACIS web service returns "meta" and "data" for each station;
    Use meta attributes as field names
    """
    field_names = super(StationDataIO, self).get_field_names()
    if set(field_names) == set(['meta', 'data']):
        meta_fields = list(self.data[0]['meta'].keys())
        if set(meta_fields) < set(self.getvalue('meta')):
            meta_fields = self.getvalue('meta')
        field_names = list(meta_fields) + ['data']
    return field_names
python
ACIS web service returns "meta" and "data" for each station; Use meta attributes as field names
[ "ACIS", "web", "service", "returns", "meta", "and", "data", "for", "each", "station", ";", "Use", "meta", "attributes", "as", "field", "names" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L147-L158
heigeo/climata
climata/acis/__init__.py
StationDataIO.usable_item
def usable_item(self, data):
    """
    ACIS web service returns "meta" and "data" for each station;
    use meta attributes as item values, and add an IO for iterating
    over "data"
    """
    # Use metadata as item
    item = data['meta']

    # Add nested IO for data
    elems, elems_is_complex = self.getlist('parameter')
    if elems_is_complex:
        elems = [elem['name'] for elem in elems]
    add, add_is_complex = self.getlist('add')
    item['data'] = DataIO(
        data=data['data'],
        parameter=elems,
        add=add,
        start_date=self.getvalue('start_date'),
        end_date=self.getvalue('end_date'),
    )

    # TupleMapper will convert item to namedtuple
    return super(StationDataIO, self).usable_item(item)
python
ACIS web service returns "meta" and "data" for each station; use meta attributes as item values, and add an IO for iterating over "data"
[ "ACIS", "web", "service", "returns", "meta", "and", "data", "for", "each", "station", ";", "use", "meta", "attributes", "as", "item", "values", "and", "add", "an", "IO", "for", "iterating", "over", "data" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L175-L199
heigeo/climata
climata/acis/__init__.py
DataIO.load_data
def load_data(self, data):
    """
    MultiStnData data results are arrays without explicit dates;
    Infer time series based on start date.
    """
    dates = fill_date_range(self.start_date, self.end_date)
    for row, date in zip(data, dates):
        data = {'date': date}
        if self.add:
            # If self.add is set, results will contain additional
            # attributes (e.g. flags). In that case, create one row per
            # result, with attributes "date", "elem", "value", and one for
            # each item in self.add.
            for elem, vals in zip(self.parameter, row):
                data['elem'] = elem
                for add, val in zip(['value'] + self.add, vals):
                    data[add] = val
                yield data
        else:
            # Otherwise, return one row per date, with "date" and each
            # element's value as attributes.
            for elem, val in zip(self.parameter, row):
                # namedtuple doesn't like numeric field names
                if elem.isdigit():
                    elem = "e%s" % elem
                data[elem] = val
            yield data
python
MultiStnData data results are arrays without explicit dates; Infer time series based on start date.
[ "MultiStnData", "data", "results", "are", "arrays", "without", "explicit", "dates", ";", "Infer", "time", "series", "based", "on", "start", "date", "." ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L216-L243
heigeo/climata
climata/acis/__init__.py
DataIO.get_field_names
def get_field_names(self):
    """
    Different field names depending on self.add setting (see load_data)
    For BaseIO
    """
    if self.add:
        return ['date', 'elem', 'value'] + [flag for flag in self.add]
    else:
        field_names = ['date']
        for elem in self.parameter:
            # namedtuple doesn't like numeric field names
            if elem.isdigit():
                elem = "e%s" % elem
            field_names.append(elem)
        return field_names
python
Different field names depending on self.add setting (see load_data) For BaseIO
[ "Different", "field", "names", "depending", "on", "self", ".", "add", "setting", "(", "see", "load_data", ")", "For", "BaseIO" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/acis/__init__.py#L250-L264
heigeo/climata
climata/base.py
fill_date_range
def fill_date_range(start_date, end_date, date_format=None):
    """
    Function accepts start date, end date, and format (if dates are
    strings) and returns a list of Python dates.
    """
    if date_format:
        start_date = datetime.strptime(start_date, date_format).date()
        end_date = datetime.strptime(end_date, date_format).date()
    date_list = []
    while start_date <= end_date:
        date_list.append(start_date)
        start_date = start_date + timedelta(days=1)
    return date_list
python
Function accepts start date, end date, and format (if dates are strings) and returns a list of Python dates.
[ "Function", "accepts", "start", "date", "end", "date", "and", "format", "(", "if", "dates", "are", "strings", ")", "and", "returns", "a", "list", "of", "Python", "dates", "." ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L252-L265
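A quick sketch of calling fill_date_range with string inputs; the dates and format string are arbitrary examples:

from climata.base import fill_date_range

# Three consecutive dates, parsed from strings using the supplied format
dates = fill_date_range("2020-01-01", "2020-01-03", date_format="%Y-%m-%d")
print(dates)
# [datetime.date(2020, 1, 1), datetime.date(2020, 1, 2), datetime.date(2020, 1, 3)]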
heigeo/climata
climata/base.py
FilterOpt.parse
def parse(self, value):
    """
    Enforce rules and return parsed value
    """
    if self.required and value is None:
        raise ValueError("%s is required!" % self.name)
    elif self.ignored and value is not None:
        warn("%s is ignored for this class!" % self.name)
    elif not self.multi and isinstance(value, (list, tuple)):
        if len(value) > 1:
            raise ValueError(
                "%s does not accept multiple values!" % self.name
            )
        return value[0]
    elif self.multi and value is not None:
        if not isinstance(value, (list, tuple)):
            return [value]
    return value
python
Enforce rules and return parsed value
[ "Enforce", "rules", "and", "return", "parsed", "value" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L32-L49
heigeo/climata
climata/base.py
DateOpt.parse
def parse(self, value):
    """
    Parse date
    """
    value = super(DateOpt, self).parse(value)
    if value is None:
        return None
    if isinstance(value, str):
        value = self.parse_date(value)
    if isinstance(value, datetime) and self.date_only:
        value = value.date()
    return value
python
Parse date
[ "Parse", "date" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L58-L69
heigeo/climata
climata/base.py
WebserviceLoader.get_filter_options
def get_filter_options(cls):
    """
    List all filter options defined on class (and superclasses)
    """
    attr = '_filter_options_%s' % id(cls)
    options = getattr(cls, attr, {})
    if options:
        return options

    for key in dir(cls):
        val = getattr(cls, key)
        if isinstance(val, FilterOpt):
            options[key] = val
    setattr(cls, attr, options)
    return options
python
List all filter options defined on class (and superclasses)
[ "List", "all", "filter", "options", "defined", "on", "class", "(", "and", "superclasses", ")" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L135-L151
heigeo/climata
climata/base.py
WebserviceLoader.getlist
def getlist(self, name):
    """
    Retrieve given property from class/instance, ensuring it is a list.
    Also determine whether the list contains simple text/numeric values
    or nested dictionaries (a "complex" list)
    """
    value = self.getvalue(name)
    complex = {}

    def str_value(val):
        # TODO: nonlocal complex
        if isinstance(val, dict):
            complex['complex'] = True
            return val
        else:
            return str(val)

    if value is None:
        pass
    else:
        value = [str_value(val) for val in as_list(value)]

    return value, bool(complex)
python
Retrieve given property from class/instance, ensuring it is a list. Also determine whether the list contains simple text/numeric values or nested dictionaries (a "complex" list)
[ "Retrieve", "given", "property", "from", "class", "/", "instance", "ensuring", "it", "is", "a", "list", ".", "Also", "determine", "whether", "the", "list", "contains", "simple", "text", "/", "numeric", "values", "or", "nested", "dictionaries", "(", "a", "complex", "list", ")" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L163-L185
heigeo/climata
climata/base.py
WebserviceLoader.set_param
def set_param(self, into, name):
    """
    Set parameter key, noting whether list value is "complex"
    """
    value, complex = self.getlist(name)
    if value is not None:
        into[name] = value
    return complex
python
Set parameter key, noting whether list value is "complex"
[ "Set", "parameter", "key", "noting", "whether", "list", "value", "is", "complex" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L187-L194
heigeo/climata
climata/base.py
WebserviceLoader.get_params
def get_params(self):
    """
    Get parameters for web service, noting whether any are "complex"
    """
    params = {}
    complex = False
    for name, opt in self.filter_options.items():
        if opt.ignored:
            continue
        if self.set_param(params, name):
            complex = True
    return params, complex
python
Get parameters for web service, noting whether any are "complex"
[ "Get", "parameters", "for", "web", "service", "noting", "whether", "any", "are", "complex" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L196-L208
heigeo/climata
climata/base.py
WebserviceLoader.params
def params(self):
    """
    URL parameters for wq.io.loaders.NetLoader
    """
    params, complex = self.get_params()
    url_params = self.default_params.copy()
    url_params.update(self.serialize_params(params, complex))
    return url_params
python
URL parameters for wq.io.loaders.NetLoader
[ "URL", "parameters", "for", "wq", ".", "io", ".", "loaders", ".", "NetLoader" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L211-L218
heigeo/climata
climata/base.py
WebserviceLoader.serialize_params
def serialize_params(self, params, complex=False):
    """
    Serialize parameter names and values to a dict ready for urlencode()
    """
    if complex:
        # See climata.acis for an example implementation
        raise NotImplementedError("Cannot serialize %s!" % params)
    else:
        # Simpler queries can use traditional URL parameters
        return {
            self.get_url_param(key): ','.join(val)
            for key, val in params.items()
        }
python
def serialize_params(self, params, complex=False): """ Serialize parameter names and values to a dict ready for urlencode() """ if complex: # See climata.acis for an example implementation raise NotImplementedError("Cannot serialize %s!" % params) else: # Simpler queries can use traditional URL parameters return { self.get_url_param(key): ','.join(val) for key, val in params.items() }
[ "def", "serialize_params", "(", "self", ",", "params", ",", "complex", "=", "False", ")", ":", "if", "complex", ":", "# See climata.acis for an example implementation", "raise", "NotImplementedError", "(", "\"Cannot serialize %s!\"", "%", "params", ")", "else", ":", "# Simpler queries can use traditional URL parameters", "return", "{", "self", ".", "get_url_param", "(", "key", ")", ":", "','", ".", "join", "(", "val", ")", "for", "key", ",", "val", "in", "params", ".", "items", "(", ")", "}" ]
Serialize parameter names and values to a dict ready for urlencode()
[ "Serialize", "parameter", "names", "and", "values", "to", "a", "dict", "ready", "for", "urlencode", "()" ]
train
https://github.com/heigeo/climata/blob/2028bdbd40e1c8985b0b62f7cb969ce7dfa8f1bd/climata/base.py#L220-L232
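For illustration, a minimal standalone sketch of the simple (non-complex) branch of serialize_params above: option values are joined with commas and mapped to URL parameter names. The get_url_param stand-in and the example parameter values are assumptions, not taken from climata.

    # Illustrative sketch only (not climata's API): mimic the non-complex branch
    # of serialize_params with a stand-in get_url_param that keeps keys unchanged.
    from urllib.parse import urlencode

    def get_url_param(key):
        return key  # hypothetical stand-in for the loader's option-name mapping

    params = {"station": ["ABC123"], "parameter": ["PRCP", "SNOW"]}
    url_params = {get_url_param(key): ",".join(val) for key, val in params.items()}
    print(urlencode(url_params))  # station=ABC123&parameter=PRCP%2CSNOW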
haydenth/ish_parser
ish_parser/Distance.py
Distance.get_inches
def get_inches(self): ''' convert the measurement to inches ''' if self._obs_value in self.MISSING: return 'MISSING' if self._obs_units == self.MILLIMETERS: return round(self.INCH_CONVERSION_FACTOR * self._obs_value, 4)
python
def get_inches(self): ''' convert the measurement to inches ''' if self._obs_value in self.MISSING: return 'MISSING' if self._obs_units == self.MILLIMETERS: return round(self.INCH_CONVERSION_FACTOR * self._obs_value, 4)
[ "def", "get_inches", "(", "self", ")", ":", "if", "self", ".", "_obs_value", "in", "self", ".", "MISSING", ":", "return", "'MISSING'", "if", "self", ".", "_obs_units", "==", "self", ".", "MILLIMETERS", ":", "return", "round", "(", "self", ".", "INCH_CONVERSION_FACTOR", "*", "self", ".", "_obs_value", ",", "4", ")" ]
convert the measurement to inches
[ "convert", "the", "measurement", "to", "inches" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/Distance.py#L13-L18
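The INCH_CONVERSION_FACTOR constant is not shown in this record; assuming the usual 1 inch = 25.4 mm, a standalone sketch of the millimetre-to-inch conversion would be:

    # Standalone sketch; the factor value is an assumption (1 inch = 25.4 mm).
    INCH_CONVERSION_FACTOR = 1 / 25.4  # ~0.03937 inches per millimetre

    def mm_to_inches(millimetres):
        return round(INCH_CONVERSION_FACTOR * millimetres, 4)

    print(mm_to_inches(25.4))   # 1.0
    print(mm_to_inches(100.0))  # 3.937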
haydenth/ish_parser
ish_parser/ish_report.py
ish_report.formatted
def formatted(self): ''' print a nicely formatted output of this report ''' return """ Weather Station: %s (%s, %s) Elevation: %s m Time: %s UTC Air Temperature: %s C (%s F) Wind Speed: %s m/s (%s mph) Wind Direction: %s Present Weather Obs: %s Precipitation: %s Cloud Coverage: %s oktas Cloud Summation: %s Solar Irradiance: %s """ % (self.weather_station, self.latitude, self.longitude, self.elevation, self.datetime, self.air_temperature, self.air_temperature.get_fahrenheit(), self.wind_speed, self.wind_speed.get_miles(), self.wind_direction, str(self.present_weather), str(self.precipitation), str(self.sky_cover), str(self.sky_cover_summation), str(self.solar_irradiance))
python
def formatted(self): ''' print a nicely formatted output of this report ''' return """ Weather Station: %s (%s, %s) Elevation: %s m Time: %s UTC Air Temperature: %s C (%s F) Wind Speed: %s m/s (%s mph) Wind Direction: %s Present Weather Obs: %s Precipitation: %s Cloud Coverage: %s oktas Cloud Summation: %s Solar Irradiance: %s """ % (self.weather_station, self.latitude, self.longitude, self.elevation, self.datetime, self.air_temperature, self.air_temperature.get_fahrenheit(), self.wind_speed, self.wind_speed.get_miles(), self.wind_direction, str(self.present_weather), str(self.precipitation), str(self.sky_cover), str(self.sky_cover_summation), str(self.solar_irradiance))
[ "def", "formatted", "(", "self", ")", ":", "return", "\"\"\"\nWeather Station: %s (%s, %s)\nElevation: %s m\nTime: %s UTC\nAir Temperature: %s C (%s F)\nWind Speed: %s m/s (%s mph)\nWind Direction: %s\nPresent Weather Obs: %s\nPrecipitation: %s\nCloud Coverage: %s oktas\nCloud Summation: %s\nSolar Irradiance: %s \n \"\"\"", "%", "(", "self", ".", "weather_station", ",", "self", ".", "latitude", ",", "self", ".", "longitude", ",", "self", ".", "elevation", ",", "self", ".", "datetime", ",", "self", ".", "air_temperature", ",", "self", ".", "air_temperature", ".", "get_fahrenheit", "(", ")", ",", "self", ".", "wind_speed", ",", "self", ".", "wind_speed", ".", "get_miles", "(", ")", ",", "self", ".", "wind_direction", ",", "str", "(", "self", ".", "present_weather", ")", ",", "str", "(", "self", ".", "precipitation", ")", ",", "str", "(", "self", ".", "sky_cover", ")", ",", "str", "(", "self", ".", "sky_cover_summation", ")", ",", "str", "(", "self", ".", "solar_irradiance", ")", ")" ]
print a nicely formatted output of this report
[ "print", "a", "nicely", "formatted", "output", "of", "this", "report" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_report.py#L296-L317
haydenth/ish_parser
ish_parser/ish_report.py
ish_report.loads
def loads(self, noaa_string): ''' load in a report (or set) from a string ''' self.raw = noaa_string self.weather_station = noaa_string[4:10] self.wban = noaa_string[10:15] expected_length = int(noaa_string[0:4]) + self.PREAMBLE_LENGTH actual_length = len(noaa_string) if actual_length != expected_length: msg = "Non matching lengths. Expected %d, got %d" % (expected_length, actual_length) raise ish_reportException(msg) try: self.datetime = datetime.strptime(noaa_string[15:27], '%Y%m%d%H%M') except ValueError: ''' some cases, we get 2400 hours, which is really the next day, so this is a workaround for those cases ''' time = noaa_string[15:27] time = time.replace("2400", "2300") self.datetime = datetime.strptime(time, '%Y%m%d%H%M') self.datetime += timedelta(hours=1) self.datetime = self.datetime.replace(tzinfo=pytz.UTC) self.report_type = ReportType(noaa_string[41:46].strip()) self.latitude = float(noaa_string[28:34]) / self.GEO_SCALE self.longitude = float(noaa_string[34:41]) / self.GEO_SCALE self.elevation = int(noaa_string[46:51]) ''' other mandatory fields ''' self.wind_direction = Direction(noaa_string[60:63], Direction.RADIANS, noaa_string[63:64]) self.wind_observation_direction_type = noaa_string[64:64] self.wind_speed = Speed(int(noaa_string[65:69]) / float(self.SPEED_SCALE), Speed.METERSPERSECOND, noaa_string[69:70]) self.sky_ceiling = Distance(int(noaa_string[70:75]), Distance.METERS, noaa_string[75:76]) self.sky_ceiling_determination = noaa_string[76:77] self.visibility_distance = Distance(int(noaa_string[78:84]), Distance.METERS, noaa_string[84:85]) self.visibility_variability = noaa_string[85:86] self.visibility_variability_quality = noaa_string[86:87] self.air_temperature = Temperature(int(noaa_string[87:92]) / self.TEMPERATURE_SCALE, Units.CELSIUS, noaa_string[92:93]) self.dew_point = Temperature(int(noaa_string[93:98]) / self.TEMPERATURE_SCALE, Units.CELSIUS, noaa_string[98:99]) self.humidity = Humidity(str(self.air_temperature), str(self.dew_point)) self.sea_level_pressure = Pressure(int(noaa_string[99:104])/self.PRESSURE_SCALE, Pressure.HECTOPASCALS, noaa_string[104:104]) ''' handle the additional fields ''' additional = noaa_string[105:108] if additional == 'ADD': position = 108 while position < expected_length: try: (position, (addl_code, addl_string)) = self._get_component(noaa_string, position) self._additional[addl_code] = addl_string except ish_reportException as err: ''' this catches when we move to remarks section ''' break ''' handle the remarks section if it exists ''' try: position = noaa_string.index('REM', 108) self._get_remarks_component(noaa_string, position) except (ish_reportException, ValueError) as err: ''' this catches when we move to EQD section ''' return self
python
def loads(self, noaa_string): ''' load in a report (or set) from a string ''' self.raw = noaa_string self.weather_station = noaa_string[4:10] self.wban = noaa_string[10:15] expected_length = int(noaa_string[0:4]) + self.PREAMBLE_LENGTH actual_length = len(noaa_string) if actual_length != expected_length: msg = "Non matching lengths. Expected %d, got %d" % (expected_length, actual_length) raise ish_reportException(msg) try: self.datetime = datetime.strptime(noaa_string[15:27], '%Y%m%d%H%M') except ValueError: ''' some cases, we get 2400 hours, which is really the next day, so this is a workaround for those cases ''' time = noaa_string[15:27] time = time.replace("2400", "2300") self.datetime = datetime.strptime(time, '%Y%m%d%H%M') self.datetime += timedelta(hours=1) self.datetime = self.datetime.replace(tzinfo=pytz.UTC) self.report_type = ReportType(noaa_string[41:46].strip()) self.latitude = float(noaa_string[28:34]) / self.GEO_SCALE self.longitude = float(noaa_string[34:41]) / self.GEO_SCALE self.elevation = int(noaa_string[46:51]) ''' other mandatory fields ''' self.wind_direction = Direction(noaa_string[60:63], Direction.RADIANS, noaa_string[63:64]) self.wind_observation_direction_type = noaa_string[64:64] self.wind_speed = Speed(int(noaa_string[65:69]) / float(self.SPEED_SCALE), Speed.METERSPERSECOND, noaa_string[69:70]) self.sky_ceiling = Distance(int(noaa_string[70:75]), Distance.METERS, noaa_string[75:76]) self.sky_ceiling_determination = noaa_string[76:77] self.visibility_distance = Distance(int(noaa_string[78:84]), Distance.METERS, noaa_string[84:85]) self.visibility_variability = noaa_string[85:86] self.visibility_variability_quality = noaa_string[86:87] self.air_temperature = Temperature(int(noaa_string[87:92]) / self.TEMPERATURE_SCALE, Units.CELSIUS, noaa_string[92:93]) self.dew_point = Temperature(int(noaa_string[93:98]) / self.TEMPERATURE_SCALE, Units.CELSIUS, noaa_string[98:99]) self.humidity = Humidity(str(self.air_temperature), str(self.dew_point)) self.sea_level_pressure = Pressure(int(noaa_string[99:104])/self.PRESSURE_SCALE, Pressure.HECTOPASCALS, noaa_string[104:104]) ''' handle the additional fields ''' additional = noaa_string[105:108] if additional == 'ADD': position = 108 while position < expected_length: try: (position, (addl_code, addl_string)) = self._get_component(noaa_string, position) self._additional[addl_code] = addl_string except ish_reportException as err: ''' this catches when we move to remarks section ''' break ''' handle the remarks section if it exists ''' try: position = noaa_string.index('REM', 108) self._get_remarks_component(noaa_string, position) except (ish_reportException, ValueError) as err: ''' this catches when we move to EQD section ''' return self
[ "def", "loads", "(", "self", ",", "noaa_string", ")", ":", "self", ".", "raw", "=", "noaa_string", "self", ".", "weather_station", "=", "noaa_string", "[", "4", ":", "10", "]", "self", ".", "wban", "=", "noaa_string", "[", "10", ":", "15", "]", "expected_length", "=", "int", "(", "noaa_string", "[", "0", ":", "4", "]", ")", "+", "self", ".", "PREAMBLE_LENGTH", "actual_length", "=", "len", "(", "noaa_string", ")", "if", "actual_length", "!=", "expected_length", ":", "msg", "=", "\"Non matching lengths. Expected %d, got %d\"", "%", "(", "expected_length", ",", "actual_length", ")", "raise", "ish_reportException", "(", "msg", ")", "try", ":", "self", ".", "datetime", "=", "datetime", ".", "strptime", "(", "noaa_string", "[", "15", ":", "27", "]", ",", "'%Y%m%d%H%M'", ")", "except", "ValueError", ":", "''' some cases, we get 2400 hours, which is really the next day, so \n this is a workaround for those cases '''", "time", "=", "noaa_string", "[", "15", ":", "27", "]", "time", "=", "time", ".", "replace", "(", "\"2400\"", ",", "\"2300\"", ")", "self", ".", "datetime", "=", "datetime", ".", "strptime", "(", "time", ",", "'%Y%m%d%H%M'", ")", "self", ".", "datetime", "+=", "timedelta", "(", "hours", "=", "1", ")", "self", ".", "datetime", "=", "self", ".", "datetime", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "UTC", ")", "self", ".", "report_type", "=", "ReportType", "(", "noaa_string", "[", "41", ":", "46", "]", ".", "strip", "(", ")", ")", "self", ".", "latitude", "=", "float", "(", "noaa_string", "[", "28", ":", "34", "]", ")", "/", "self", ".", "GEO_SCALE", "self", ".", "longitude", "=", "float", "(", "noaa_string", "[", "34", ":", "41", "]", ")", "/", "self", ".", "GEO_SCALE", "self", ".", "elevation", "=", "int", "(", "noaa_string", "[", "46", ":", "51", "]", ")", "''' other mandatory fields '''", "self", ".", "wind_direction", "=", "Direction", "(", "noaa_string", "[", "60", ":", "63", "]", ",", "Direction", ".", "RADIANS", ",", "noaa_string", "[", "63", ":", "64", "]", ")", "self", ".", "wind_observation_direction_type", "=", "noaa_string", "[", "64", ":", "64", "]", "self", ".", "wind_speed", "=", "Speed", "(", "int", "(", "noaa_string", "[", "65", ":", "69", "]", ")", "/", "float", "(", "self", ".", "SPEED_SCALE", ")", ",", "Speed", ".", "METERSPERSECOND", ",", "noaa_string", "[", "69", ":", "70", "]", ")", "self", ".", "sky_ceiling", "=", "Distance", "(", "int", "(", "noaa_string", "[", "70", ":", "75", "]", ")", ",", "Distance", ".", "METERS", ",", "noaa_string", "[", "75", ":", "76", "]", ")", "self", ".", "sky_ceiling_determination", "=", "noaa_string", "[", "76", ":", "77", "]", "self", ".", "visibility_distance", "=", "Distance", "(", "int", "(", "noaa_string", "[", "78", ":", "84", "]", ")", ",", "Distance", ".", "METERS", ",", "noaa_string", "[", "84", ":", "85", "]", ")", "self", ".", "visibility_variability", "=", "noaa_string", "[", "85", ":", "86", "]", "self", ".", "visibility_variability_quality", "=", "noaa_string", "[", "86", ":", "87", "]", "self", ".", "air_temperature", "=", "Temperature", "(", "int", "(", "noaa_string", "[", "87", ":", "92", "]", ")", "/", "self", ".", "TEMPERATURE_SCALE", ",", "Units", ".", "CELSIUS", ",", "noaa_string", "[", "92", ":", "93", "]", ")", "self", ".", "dew_point", "=", "Temperature", "(", "int", "(", "noaa_string", "[", "93", ":", "98", "]", ")", "/", "self", ".", "TEMPERATURE_SCALE", ",", "Units", ".", "CELSIUS", ",", "noaa_string", "[", "98", ":", "99", "]", ")", "self", ".", "humidity", "=", "Humidity", "(", "str", "(", 
"self", ".", "air_temperature", ")", ",", "str", "(", "self", ".", "dew_point", ")", ")", "self", ".", "sea_level_pressure", "=", "Pressure", "(", "int", "(", "noaa_string", "[", "99", ":", "104", "]", ")", "/", "self", ".", "PRESSURE_SCALE", ",", "Pressure", ".", "HECTOPASCALS", ",", "noaa_string", "[", "104", ":", "104", "]", ")", "''' handle the additional fields '''", "additional", "=", "noaa_string", "[", "105", ":", "108", "]", "if", "additional", "==", "'ADD'", ":", "position", "=", "108", "while", "position", "<", "expected_length", ":", "try", ":", "(", "position", ",", "(", "addl_code", ",", "addl_string", ")", ")", "=", "self", ".", "_get_component", "(", "noaa_string", ",", "position", ")", "self", ".", "_additional", "[", "addl_code", "]", "=", "addl_string", "except", "ish_reportException", "as", "err", ":", "''' this catches when we move to remarks section '''", "break", "''' handle the remarks section if it exists '''", "try", ":", "position", "=", "noaa_string", ".", "index", "(", "'REM'", ",", "108", ")", "self", ".", "_get_remarks_component", "(", "noaa_string", ",", "position", ")", "except", "(", "ish_reportException", ",", "ValueError", ")", "as", "err", ":", "''' this catches when we move to EQD section '''", "return", "self" ]
load in a report (or set) from a string
[ "load", "in", "a", "report", "(", "or", "set", ")", "from", "a", "string" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_report.py#L319-L399
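A hedged usage sketch: assuming the ish_parser package is installed and exposes ish_report at the path shown above, a single raw ISD/ISH line can be parsed like this (the file name is hypothetical).

    # Usage sketch; assumes the ish_parser package is installed and that
    # "isd-sample.txt" (a made-up name) holds raw NOAA ISD/ISH record lines.
    from ish_parser.ish_report import ish_report

    with open("isd-sample.txt") as f:
        line = f.readline().rstrip("\n")

    report = ish_report().loads(line)  # loads() returns the populated report
    print(report.datetime, report.air_temperature, report.wind_speed)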
haydenth/ish_parser
ish_parser/ish_report.py
ish_report._get_remarks_component
def _get_remarks_component(self, string, initial_pos): ''' Parse the remarks into the _remarks dict ''' remarks_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH] if remarks_code != 'REM': raise ish_reportException("Parsing remarks. Expected REM but got %s." % (remarks_code,)) expected_length = int(string[0:4]) + self.PREAMBLE_LENGTH position = initial_pos + self.ADDR_CODE_LENGTH while position < expected_length: key = string[position:position + self.ADDR_CODE_LENGTH] if key == 'EQD': break chars_to_read = string[position + self.ADDR_CODE_LENGTH:position + \ (self.ADDR_CODE_LENGTH * 2)] chars_to_read = int(chars_to_read) position += (self.ADDR_CODE_LENGTH * 2) string_value = string[position:position + chars_to_read] self._remarks[key] = string_value position += chars_to_read
python
def _get_remarks_component(self, string, initial_pos): ''' Parse the remarks into the _remarks dict ''' remarks_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH] if remarks_code != 'REM': raise ish_reportException("Parsing remarks. Expected REM but got %s." % (remarks_code,)) expected_length = int(string[0:4]) + self.PREAMBLE_LENGTH position = initial_pos + self.ADDR_CODE_LENGTH while position < expected_length: key = string[position:position + self.ADDR_CODE_LENGTH] if key == 'EQD': break chars_to_read = string[position + self.ADDR_CODE_LENGTH:position + \ (self.ADDR_CODE_LENGTH * 2)] chars_to_read = int(chars_to_read) position += (self.ADDR_CODE_LENGTH * 2) string_value = string[position:position + chars_to_read] self._remarks[key] = string_value position += chars_to_read
[ "def", "_get_remarks_component", "(", "self", ",", "string", ",", "initial_pos", ")", ":", "remarks_code", "=", "string", "[", "initial_pos", ":", "initial_pos", "+", "self", ".", "ADDR_CODE_LENGTH", "]", "if", "remarks_code", "!=", "'REM'", ":", "raise", "ish_reportException", "(", "\"Parsing remarks. Expected REM but got %s.\"", "%", "(", "remarks_code", ",", ")", ")", "expected_length", "=", "int", "(", "string", "[", "0", ":", "4", "]", ")", "+", "self", ".", "PREAMBLE_LENGTH", "position", "=", "initial_pos", "+", "self", ".", "ADDR_CODE_LENGTH", "while", "position", "<", "expected_length", ":", "key", "=", "string", "[", "position", ":", "position", "+", "self", ".", "ADDR_CODE_LENGTH", "]", "if", "key", "==", "'EQD'", ":", "break", "chars_to_read", "=", "string", "[", "position", "+", "self", ".", "ADDR_CODE_LENGTH", ":", "position", "+", "(", "self", ".", "ADDR_CODE_LENGTH", "*", "2", ")", "]", "chars_to_read", "=", "int", "(", "chars_to_read", ")", "position", "+=", "(", "self", ".", "ADDR_CODE_LENGTH", "*", "2", ")", "string_value", "=", "string", "[", "position", ":", "position", "+", "chars_to_read", "]", "self", ".", "_remarks", "[", "key", "]", "=", "string_value", "position", "+=", "chars_to_read" ]
Parse the remarks into the _remarks dict
[ "Parse", "the", "remarks", "into", "the", "_remarks", "dict" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_report.py#L401-L419
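The remarks block is walked as repeated "3-char key + 3-char length + payload" groups. A standalone sketch of that layout (the sample block and helper name are made up):

    # Standalone sketch of the "3-char key + 3-char length + payload" layout
    # that _get_remarks_component walks; the sample block below is made up.
    CODE_LEN = 3

    def parse_remarks(block):
        remarks, pos = {}, 0
        while pos < len(block):
            key = block[pos:pos + CODE_LEN]
            length = int(block[pos + CODE_LEN:pos + 2 * CODE_LEN])
            pos += 2 * CODE_LEN
            remarks[key] = block[pos:pos + length]
            pos += length
        return remarks

    print(parse_remarks("SYN005HELLOMET003FOG"))  # {'SYN': 'HELLO', 'MET': 'FOG'}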
haydenth/ish_parser
ish_parser/ish_report.py
ish_report._get_component
def _get_component(self, string, initial_pos): ''' given a string and a position, return both an updated position and either a Component Object or a String back to the caller ''' add_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH] if add_code == 'REM': raise ish_reportException("This is a remarks record") if add_code == 'EQD': raise ish_reportException("This is EQD record") initial_pos += self.ADDR_CODE_LENGTH try: useable_map = self.MAP[add_code] except: raise BaseException("Cannot find code %s in string %s (%d)." % (add_code, string, initial_pos)) # if there is no defined length, then read next three chars to get it # this only applies to REM types, which have 3 chars for the type, then variable if useable_map[1] is False: chars_to_read = string[initial_pos + self.ADDR_CODE_LENGTH:initial_pos + \ (self.ADDR_CODE_LENGTH * 2)] chars_to_read = int(chars_to_read) initial_pos += (self.ADDR_CODE_LENGTH * 2) else: chars_to_read = useable_map[1] new_position = initial_pos + chars_to_read string_value = string[initial_pos:new_position] try: object_value = useable_map[2]() object_value.loads(string_value) except IndexError as err: object_value = string_value return (new_position, [add_code, object_value])
python
def _get_component(self, string, initial_pos): ''' given a string and a position, return both an updated position and either a Component Object or a String back to the caller ''' add_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH] if add_code == 'REM': raise ish_reportException("This is a remarks record") if add_code == 'EQD': raise ish_reportException("This is EQD record") initial_pos += self.ADDR_CODE_LENGTH try: useable_map = self.MAP[add_code] except: raise BaseException("Cannot find code %s in string %s (%d)." % (add_code, string, initial_pos)) # if there is no defined length, then read next three chars to get it # this only applies to REM types, which have 3 chars for the type, then variable if useable_map[1] is False: chars_to_read = string[initial_pos + self.ADDR_CODE_LENGTH:initial_pos + \ (self.ADDR_CODE_LENGTH * 2)] chars_to_read = int(chars_to_read) initial_pos += (self.ADDR_CODE_LENGTH * 2) else: chars_to_read = useable_map[1] new_position = initial_pos + chars_to_read string_value = string[initial_pos:new_position] try: object_value = useable_map[2]() object_value.loads(string_value) except IndexError as err: object_value = string_value return (new_position, [add_code, object_value])
[ "def", "_get_component", "(", "self", ",", "string", ",", "initial_pos", ")", ":", "add_code", "=", "string", "[", "initial_pos", ":", "initial_pos", "+", "self", ".", "ADDR_CODE_LENGTH", "]", "if", "add_code", "==", "'REM'", ":", "raise", "ish_reportException", "(", "\"This is a remarks record\"", ")", "if", "add_code", "==", "'EQD'", ":", "raise", "ish_reportException", "(", "\"This is EQD record\"", ")", "initial_pos", "+=", "self", ".", "ADDR_CODE_LENGTH", "try", ":", "useable_map", "=", "self", ".", "MAP", "[", "add_code", "]", "except", ":", "raise", "BaseException", "(", "\"Cannot find code %s in string %s (%d).\"", "%", "(", "add_code", ",", "string", ",", "initial_pos", ")", ")", "# if there is no defined length, then read next three chars to get it", "# this only applies to REM types, which have 3 chars for the type, then variable", "if", "useable_map", "[", "1", "]", "is", "False", ":", "chars_to_read", "=", "string", "[", "initial_pos", "+", "self", ".", "ADDR_CODE_LENGTH", ":", "initial_pos", "+", "(", "self", ".", "ADDR_CODE_LENGTH", "*", "2", ")", "]", "chars_to_read", "=", "int", "(", "chars_to_read", ")", "initial_pos", "+=", "(", "self", ".", "ADDR_CODE_LENGTH", "*", "2", ")", "else", ":", "chars_to_read", "=", "useable_map", "[", "1", "]", "new_position", "=", "initial_pos", "+", "chars_to_read", "string_value", "=", "string", "[", "initial_pos", ":", "new_position", "]", "try", ":", "object_value", "=", "useable_map", "[", "2", "]", "(", ")", "object_value", ".", "loads", "(", "string_value", ")", "except", "IndexError", "as", "err", ":", "object_value", "=", "string_value", "return", "(", "new_position", ",", "[", "add_code", ",", "object_value", "]", ")" ]
given a string and a position, return both an updated position and either a Component Object or a String back to the caller
[ "given", "a", "string", "and", "a", "position", "return", "both", "an", "updated", "position", "and", "either", "a", "Component", "Object", "or", "a", "String", "back", "to", "the", "caller" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_report.py#L421-L456
haydenth/ish_parser
ish_parser/ish_parser.py
ish_parser.loads
def loads(self, string): ''' load from a string ''' for line in string.split("\n"): if len(line) < 10: continue try: report = ish_report() report.loads(line) self._reports.append(report) except BaseException as exp: ''' don't complain TOO much ''' logging.warning('unable to load report, error: %s' % exp)
python
def loads(self, string): ''' load from a string ''' for line in string.split("\n"): if len(line) < 10: continue try: report = ish_report() report.loads(line) self._reports.append(report) except BaseException as exp: ''' don't complain TOO much ''' logging.warning('unable to load report, error: %s' % exp)
[ "def", "loads", "(", "self", ",", "string", ")", ":", "for", "line", "in", "string", ".", "split", "(", "\"\\n\"", ")", ":", "if", "len", "(", "line", ")", "<", "10", ":", "continue", "try", ":", "report", "=", "ish_report", "(", ")", "report", ".", "loads", "(", "line", ")", "self", ".", "_reports", ".", "append", "(", "report", ")", "except", "BaseException", "as", "exp", ":", "''' don't complain TOO much '''", "logging", ".", "warning", "(", "'unable to load report, error: %s'", "%", "exp", ")" ]
load from a string
[ "load", "from", "a", "string" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_parser.py#L15-L27
haydenth/ish_parser
ish_parser/ish_parser.py
ish_parser.get_observations
def get_observations(self): ''' return only specific weather observations (FM types) and ignore the summary of day reports ''' return [rpt for rpt in self._reports if rpt.report_type in self.OBS_TYPES]
python
def get_observations(self): ''' return only specific weather observations (FM types) and ignore the summary of day reports ''' return [rpt for rpt in self._reports if rpt.report_type in self.OBS_TYPES]
[ "def", "get_observations", "(", "self", ")", ":", "return", "[", "rpt", "for", "rpt", "in", "self", ".", "_reports", "if", "rpt", ".", "report_type", "in", "self", ".", "OBS_TYPES", "]" ]
return only specific weather observations (FM types) and ignore the summary of day reports
[ "return", "only", "specific", "weather", "observations", "(", "FM", "types", ")", "and", "ignore", "the", "summary", "of", "day", "reports" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/ish_parser.py#L33-L36
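A hedged end-to-end sketch: parse a whole ISD file with ish_parser and keep only the FM-type observations (package availability and the file name are assumptions).

    # Usage sketch; assumes the ish_parser package is installed and that
    # "isd-sample.txt" (a made-up name) is a raw NOAA ISD/ISH file.
    from ish_parser.ish_parser import ish_parser

    parser = ish_parser()
    with open("isd-sample.txt") as f:
        parser.loads(f.read())

    for report in parser.get_observations():
        print(report.formatted())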
haydenth/ish_parser
ish_parser/Speed.py
Speed.get_miles
def get_miles(self): ''' convert the measurement to miles per hour ''' if self._obs_value in self.MISSING: return 'MISSING' if self._obs_units == self.METERSPERSECOND: return round(2.23694 * self._obs_value, 4)
python
def get_miles(self): ''' convert the measurement to miles per hour ''' if self._obs_value in self.MISSING: return 'MISSING' if self._obs_units == self.METERSPERSECOND: return round(2.23694 * self._obs_value, 4)
[ "def", "get_miles", "(", "self", ")", ":", "if", "self", ".", "_obs_value", "in", "self", ".", "MISSING", ":", "return", "'MISSING'", "if", "self", ".", "_obs_units", "==", "self", ".", "METERSPERSECOND", ":", "return", "round", "(", "2.23694", "*", "self", ".", "_obs_value", ",", "4", ")" ]
convert the measurement to miles per hour
[ "convert", "the", "measurement", "to", "inches" ]
train
https://github.com/haydenth/ish_parser/blob/98fe3b3d0e5c672598be878d1715f214d5077869/ish_parser/Speed.py#L10-L15
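The 2.23694 constant is the number of miles per hour in one metre per second; a quick standalone check:

    # Standalone check of the m/s -> mph factor used in get_miles above.
    MPS_TO_MPH = 2.23694

    def mps_to_mph(metres_per_second):
        return round(MPS_TO_MPH * metres_per_second, 4)

    print(mps_to_mph(10.0))  # 22.3694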
subdownloader/subdownloader
scripts/distribution/translation_generator.py
TranslationGenerator.do_pot
def do_pot(self): """ Sync the template with the python code. """ files_to_translate = [] log.debug("Collecting python sources for pot ...") for source_path in self._source_paths: for source_path in self._iter_suffix(path=source_path, suffix=".py"): log.debug("... add to pot: {source}".format(source=str(source_path))) files_to_translate.append(str(source_path)) for system_file in self.SYSTEM_SOURCE_FILES: files_to_translate.append(str(self._system_path / system_file)) # FIXME: use separate domain for system source translations? Nerge them when generating mo's? log.debug("Finished collection sources.") pot_path = (self._po_path / self._basename).with_suffix(".pot") command = ["xgettext", "--keyword=_", "--keyword=_translate", "--output={output}".format(output=str(pot_path))] command.extend(files_to_translate) check_call(command) log.debug("pot file \"{pot}\" created!".format(pot=str(pot_path))) pot_copy_path = self._mo_path / pot_path.name log.debug("Copying pot file to mo path: {pot_copy_path}".format(pot_copy_path=str(pot_copy_path))) shutil.copy(str(pot_path), str(pot_copy_path))
python
def do_pot(self): """ Sync the template with the python code. """ files_to_translate = [] log.debug("Collecting python sources for pot ...") for source_path in self._source_paths: for source_path in self._iter_suffix(path=source_path, suffix=".py"): log.debug("... add to pot: {source}".format(source=str(source_path))) files_to_translate.append(str(source_path)) for system_file in self.SYSTEM_SOURCE_FILES: files_to_translate.append(str(self._system_path / system_file)) # FIXME: use separate domain for system source translations? Nerge them when generating mo's? log.debug("Finished collection sources.") pot_path = (self._po_path / self._basename).with_suffix(".pot") command = ["xgettext", "--keyword=_", "--keyword=_translate", "--output={output}".format(output=str(pot_path))] command.extend(files_to_translate) check_call(command) log.debug("pot file \"{pot}\" created!".format(pot=str(pot_path))) pot_copy_path = self._mo_path / pot_path.name log.debug("Copying pot file to mo path: {pot_copy_path}".format(pot_copy_path=str(pot_copy_path))) shutil.copy(str(pot_path), str(pot_copy_path))
[ "def", "do_pot", "(", "self", ")", ":", "files_to_translate", "=", "[", "]", "log", ".", "debug", "(", "\"Collecting python sources for pot ...\"", ")", "for", "source_path", "in", "self", ".", "_source_paths", ":", "for", "source_path", "in", "self", ".", "_iter_suffix", "(", "path", "=", "source_path", ",", "suffix", "=", "\".py\"", ")", ":", "log", ".", "debug", "(", "\"... add to pot: {source}\"", ".", "format", "(", "source", "=", "str", "(", "source_path", ")", ")", ")", "files_to_translate", ".", "append", "(", "str", "(", "source_path", ")", ")", "for", "system_file", "in", "self", ".", "SYSTEM_SOURCE_FILES", ":", "files_to_translate", ".", "append", "(", "str", "(", "self", ".", "_system_path", "/", "system_file", ")", ")", "# FIXME: use separate domain for system source translations? Nerge them when generating mo's?", "log", ".", "debug", "(", "\"Finished collection sources.\"", ")", "pot_path", "=", "(", "self", ".", "_po_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".pot\"", ")", "command", "=", "[", "\"xgettext\"", ",", "\"--keyword=_\"", ",", "\"--keyword=_translate\"", ",", "\"--output={output}\"", ".", "format", "(", "output", "=", "str", "(", "pot_path", ")", ")", "]", "command", ".", "extend", "(", "files_to_translate", ")", "check_call", "(", "command", ")", "log", ".", "debug", "(", "\"pot file \\\"{pot}\\\" created!\"", ".", "format", "(", "pot", "=", "str", "(", "pot_path", ")", ")", ")", "pot_copy_path", "=", "self", ".", "_mo_path", "/", "pot_path", ".", "name", "log", ".", "debug", "(", "\"Copying pot file to mo path: {pot_copy_path}\"", ".", "format", "(", "pot_copy_path", "=", "str", "(", "pot_copy_path", ")", ")", ")", "shutil", ".", "copy", "(", "str", "(", "pot_path", ")", ",", "str", "(", "pot_copy_path", ")", ")" ]
Sync the template with the python code.
[ "Sync", "the", "template", "with", "the", "python", "code", "." ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/scripts/distribution/translation_generator.py#L47-L70
subdownloader/subdownloader
scripts/distribution/translation_generator.py
TranslationGenerator.do_po
def do_po(self): """ Update all po files with the data in the pot reference file. """ log.debug("Start updating po files ...") pot_path = (self._po_path / self._basename).with_suffix(".pot") for po_dir_path in self._iter_po_dir(): po_path = (po_dir_path / self._basename).with_suffix(".po") if po_path.exists(): log.debug("update {po}:".format(po=str(po_path))) check_call(["msgmerge", "-U", str(po_path), str(pot_path)]) else: log.debug("create {po}:".format(po=str(po_path))) check_call(["msginit", "-i", str(pot_path), "-o", str(po_path), "--no-translator"]) po_copy_path = self._mo_path / po_path.parent.name / po_path.name po_copy_path.parent.mkdir(exist_ok=True) log.debug("Copying po file to mo path: {po_copy_path}".format(po_copy_path=str(po_copy_path))) shutil.copy(str(po_path), str(po_copy_path)) log.debug("All po files updated")
python
def do_po(self): """ Update all po files with the data in the pot reference file. """ log.debug("Start updating po files ...") pot_path = (self._po_path / self._basename).with_suffix(".pot") for po_dir_path in self._iter_po_dir(): po_path = (po_dir_path / self._basename).with_suffix(".po") if po_path.exists(): log.debug("update {po}:".format(po=str(po_path))) check_call(["msgmerge", "-U", str(po_path), str(pot_path)]) else: log.debug("create {po}:".format(po=str(po_path))) check_call(["msginit", "-i", str(pot_path), "-o", str(po_path), "--no-translator"]) po_copy_path = self._mo_path / po_path.parent.name / po_path.name po_copy_path.parent.mkdir(exist_ok=True) log.debug("Copying po file to mo path: {po_copy_path}".format(po_copy_path=str(po_copy_path))) shutil.copy(str(po_path), str(po_copy_path)) log.debug("All po files updated")
[ "def", "do_po", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Start updating po files ...\"", ")", "pot_path", "=", "(", "self", ".", "_po_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".pot\"", ")", "for", "po_dir_path", "in", "self", ".", "_iter_po_dir", "(", ")", ":", "po_path", "=", "(", "po_dir_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".po\"", ")", "if", "po_path", ".", "exists", "(", ")", ":", "log", ".", "debug", "(", "\"update {po}:\"", ".", "format", "(", "po", "=", "str", "(", "po_path", ")", ")", ")", "check_call", "(", "[", "\"msgmerge\"", ",", "\"-U\"", ",", "str", "(", "po_path", ")", ",", "str", "(", "pot_path", ")", "]", ")", "else", ":", "log", ".", "debug", "(", "\"create {po}:\"", ".", "format", "(", "po", "=", "str", "(", "po_path", ")", ")", ")", "check_call", "(", "[", "\"msginit\"", ",", "\"-i\"", ",", "str", "(", "pot_path", ")", ",", "\"-o\"", ",", "str", "(", "po_path", ")", ",", "\"--no-translator\"", "]", ")", "po_copy_path", "=", "self", ".", "_mo_path", "/", "po_path", ".", "parent", ".", "name", "/", "po_path", ".", "name", "po_copy_path", ".", "parent", ".", "mkdir", "(", "exist_ok", "=", "True", ")", "log", ".", "debug", "(", "\"Copying po file to mo path: {po_copy_path}\"", ".", "format", "(", "po_copy_path", "=", "str", "(", "po_copy_path", ")", ")", ")", "shutil", ".", "copy", "(", "str", "(", "po_path", ")", ",", "str", "(", "po_copy_path", ")", ")", "log", ".", "debug", "(", "\"All po files updated\"", ")" ]
Update all po files with the data in the pot reference file.
[ "Update", "all", "po", "files", "with", "the", "data", "in", "the", "pot", "reference", "file", "." ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/scripts/distribution/translation_generator.py#L78-L97
subdownloader/subdownloader
scripts/distribution/translation_generator.py
TranslationGenerator.do_mo
def do_mo(self): """ Generate mo files for all po files. """ log.debug("Start updating mo files ...") for po_dir_path in self._iter_po_dir(): po_path = (po_dir_path / self._basename).with_suffix(".po") lc_path = self._mo_path / po_dir_path.name / "LC_MESSAGES" lc_path.mkdir(parents=True, exist_ok=True) mo_path = (lc_path / self._basename).with_suffix(".mo") log.debug("Creating from {po}: {mo}".format(po=str(po_path), mo=str(mo_path))) check_call(["msgfmt", str(po_path), "-o", str(mo_path)]) log.debug("All mo files updated")
python
def do_mo(self): """ Generate mo files for all po files. """ log.debug("Start updating mo files ...") for po_dir_path in self._iter_po_dir(): po_path = (po_dir_path / self._basename).with_suffix(".po") lc_path = self._mo_path / po_dir_path.name / "LC_MESSAGES" lc_path.mkdir(parents=True, exist_ok=True) mo_path = (lc_path / self._basename).with_suffix(".mo") log.debug("Creating from {po}: {mo}".format(po=str(po_path), mo=str(mo_path))) check_call(["msgfmt", str(po_path), "-o", str(mo_path)]) log.debug("All mo files updated")
[ "def", "do_mo", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Start updating mo files ...\"", ")", "for", "po_dir_path", "in", "self", ".", "_iter_po_dir", "(", ")", ":", "po_path", "=", "(", "po_dir_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".po\"", ")", "lc_path", "=", "self", ".", "_mo_path", "/", "po_dir_path", ".", "name", "/", "\"LC_MESSAGES\"", "lc_path", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "mo_path", "=", "(", "lc_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".mo\"", ")", "log", ".", "debug", "(", "\"Creating from {po}: {mo}\"", ".", "format", "(", "po", "=", "str", "(", "po_path", ")", ",", "mo", "=", "str", "(", "mo_path", ")", ")", ")", "check_call", "(", "[", "\"msgfmt\"", ",", "str", "(", "po_path", ")", ",", "\"-o\"", ",", "str", "(", "mo_path", ")", "]", ")", "log", ".", "debug", "(", "\"All mo files updated\"", ")" ]
Generate mo files for all po files.
[ "Generate", "mo", "files", "for", "all", "po", "files", "." ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/scripts/distribution/translation_generator.py#L99-L111
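Taken together, do_pot/do_po/do_mo wrap the standard gettext pipeline. A hedged sketch of the same pipeline driven directly from Python; all paths are hypothetical and the xgettext/msgmerge/msgfmt command-line tools must be installed.

    # Illustrative sketch of the gettext pipeline wrapped by do_pot/do_po/do_mo;
    # the paths are hypothetical and the gettext tools must be on PATH.
    from subprocess import check_call

    check_call(["xgettext", "--keyword=_", "--keyword=_translate",
                "--output=po/subdownloader.pot", "subdownloader/client/cli.py"])
    check_call(["msgmerge", "-U", "po/nl/subdownloader.po", "po/subdownloader.pot"])
    check_call(["msgfmt", "po/nl/subdownloader.po",
                "-o", "mo/nl/LC_MESSAGES/subdownloader.mo"])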
cenkalti/kuyruk
kuyruk/worker.py
Worker.run
def run(self) -> None: """Runs the worker and consumes messages from RabbitMQ. Returns only after `shutdown()` is called. """ if self._logging_level: logging.basicConfig( level=getattr(logging, self._logging_level.upper()), format="%(levelname).1s %(name)s.%(funcName)s:%(lineno)d - %(message)s") signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._handle_sigterm) if platform.system() != 'Windows': # These features will not be available on Windows, but that is OK. # Read this issue for more details: # https://github.com/cenkalti/kuyruk/issues/54 signal.signal(signal.SIGHUP, self._handle_sighup) signal.signal(signal.SIGUSR1, self._handle_sigusr1) signal.signal(signal.SIGUSR2, self._handle_sigusr2) self._started_at = os.times().elapsed for t in self._threads: t.start() try: signals.worker_start.send(self.kuyruk, worker=self) self._consume_messages() signals.worker_shutdown.send(self.kuyruk, worker=self) finally: self.shutdown_pending.set() for t in self._threads: t.join() logger.debug("End run worker")
python
def run(self) -> None: """Runs the worker and consumes messages from RabbitMQ. Returns only after `shutdown()` is called. """ if self._logging_level: logging.basicConfig( level=getattr(logging, self._logging_level.upper()), format="%(levelname).1s %(name)s.%(funcName)s:%(lineno)d - %(message)s") signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._handle_sigterm) if platform.system() != 'Windows': # These features will not be available on Windows, but that is OK. # Read this issue for more details: # https://github.com/cenkalti/kuyruk/issues/54 signal.signal(signal.SIGHUP, self._handle_sighup) signal.signal(signal.SIGUSR1, self._handle_sigusr1) signal.signal(signal.SIGUSR2, self._handle_sigusr2) self._started_at = os.times().elapsed for t in self._threads: t.start() try: signals.worker_start.send(self.kuyruk, worker=self) self._consume_messages() signals.worker_shutdown.send(self.kuyruk, worker=self) finally: self.shutdown_pending.set() for t in self._threads: t.join() logger.debug("End run worker")
[ "def", "run", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_logging_level", ":", "logging", ".", "basicConfig", "(", "level", "=", "getattr", "(", "logging", ",", "self", ".", "_logging_level", ".", "upper", "(", ")", ")", ",", "format", "=", "\"%(levelname).1s %(name)s.%(funcName)s:%(lineno)d - %(message)s\"", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "self", ".", "_handle_sigint", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_handle_sigterm", ")", "if", "platform", ".", "system", "(", ")", "!=", "'Windows'", ":", "# These features will not be available on Windows, but that is OK.", "# Read this issue for more details:", "# https://github.com/cenkalti/kuyruk/issues/54", "signal", ".", "signal", "(", "signal", ".", "SIGHUP", ",", "self", ".", "_handle_sighup", ")", "signal", ".", "signal", "(", "signal", ".", "SIGUSR1", ",", "self", ".", "_handle_sigusr1", ")", "signal", ".", "signal", "(", "signal", ".", "SIGUSR2", ",", "self", ".", "_handle_sigusr2", ")", "self", ".", "_started_at", "=", "os", ".", "times", "(", ")", ".", "elapsed", "for", "t", "in", "self", ".", "_threads", ":", "t", ".", "start", "(", ")", "try", ":", "signals", ".", "worker_start", ".", "send", "(", "self", ".", "kuyruk", ",", "worker", "=", "self", ")", "self", ".", "_consume_messages", "(", ")", "signals", ".", "worker_shutdown", ".", "send", "(", "self", ".", "kuyruk", ",", "worker", "=", "self", ")", "finally", ":", "self", ".", "shutdown_pending", ".", "set", "(", ")", "for", "t", "in", "self", ".", "_threads", ":", "t", ".", "join", "(", ")", "logger", ".", "debug", "(", "\"End run worker\"", ")" ]
Runs the worker and consumes messages from RabbitMQ. Returns only after `shutdown()` is called.
[ "Runs", "the", "worker", "and", "consumes", "messages", "from", "RabbitMQ", ".", "Returns", "only", "after", "shutdown", "()", "is", "called", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L81-L115
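A minimal standalone sketch of the shutdown-signal pattern run() relies on: handlers set a threading.Event, and the POSIX-only signals are guarded by a platform check (the 1-second polling loop is illustrative, not kuyruk's consume loop).

    # Standalone sketch of the signal-driven shutdown pattern in Worker.run().
    import platform
    import signal
    import threading

    shutdown_pending = threading.Event()

    def _handle_stop(signum, frame):
        shutdown_pending.set()

    signal.signal(signal.SIGINT, _handle_stop)
    signal.signal(signal.SIGTERM, _handle_stop)
    if platform.system() != "Windows":
        signal.signal(signal.SIGHUP, _handle_stop)  # not available on Windows

    while not shutdown_pending.wait(timeout=1.0):
        pass  # a real worker would consume one message per iteration here
    print("shutdown requested")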
cenkalti/kuyruk
kuyruk/worker.py
Worker._process_message
def _process_message(self, message: amqp.Message) -> None: """Processes the message received from the queue.""" if self.shutdown_pending.is_set(): return try: if isinstance(message.body, bytes): message.body = message.body.decode() description = json.loads(message.body) except Exception: logger.error("Cannot decode message. Dropping. Message: %r", message.body) traceback.print_exc() message.channel.basic_reject(message.delivery_tag, requeue=False) else: logger.info("Processing task: %r", description) self._process_description(message, description)
python
def _process_message(self, message: amqp.Message) -> None: """Processes the message received from the queue.""" if self.shutdown_pending.is_set(): return try: if isinstance(message.body, bytes): message.body = message.body.decode() description = json.loads(message.body) except Exception: logger.error("Cannot decode message. Dropping. Message: %r", message.body) traceback.print_exc() message.channel.basic_reject(message.delivery_tag, requeue=False) else: logger.info("Processing task: %r", description) self._process_description(message, description)
[ "def", "_process_message", "(", "self", ",", "message", ":", "amqp", ".", "Message", ")", "->", "None", ":", "if", "self", ".", "shutdown_pending", ".", "is_set", "(", ")", ":", "return", "try", ":", "if", "isinstance", "(", "message", ".", "body", ",", "bytes", ")", ":", "message", ".", "body", "=", "message", ".", "body", ".", "decode", "(", ")", "description", "=", "json", ".", "loads", "(", "message", ".", "body", ")", "except", "Exception", ":", "logger", ".", "error", "(", "\"Cannot decode message. Dropping. Message: %r\"", ",", "message", ".", "body", ")", "traceback", ".", "print_exc", "(", ")", "message", ".", "channel", ".", "basic_reject", "(", "message", ".", "delivery_tag", ",", "requeue", "=", "False", ")", "else", ":", "logger", ".", "info", "(", "\"Processing task: %r\"", ",", "description", ")", "self", ".", "_process_description", "(", "message", ",", "description", ")" ]
Processes the message received from the queue.
[ "Processes", "the", "message", "received", "from", "the", "queue", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L175-L190
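A standalone sketch of the decode-or-reject pattern above; the broker reject/ack calls are replaced by prints and the task-description keys are made up.

    # Standalone sketch of the decode-or-reject pattern in _process_message.
    import json

    def process_body(body):
        try:
            if isinstance(body, bytes):
                body = body.decode()
            description = json.loads(body)
        except Exception:
            print("cannot decode message, rejecting without requeue")
            return
        print("processing task:", description)

    process_body(b'{"module": "tasks", "function": "echo", "args": ["hi"]}')
    process_body(b"\xffnot json")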
cenkalti/kuyruk
kuyruk/worker.py
Worker._apply_task
def _apply_task(task: Task, args: Tuple, kwargs: Dict[str, Any]) -> Any: """Logs the time spent while running the task.""" if args is None: args = () if kwargs is None: kwargs = {} start = monotonic() try: return task.apply(*args, **kwargs) finally: delta = monotonic() - start logger.info("%s finished in %i seconds." % (task.name, delta))
python
def _apply_task(task: Task, args: Tuple, kwargs: Dict[str, Any]) -> Any: """Logs the time spent while running the task.""" if args is None: args = () if kwargs is None: kwargs = {} start = monotonic() try: return task.apply(*args, **kwargs) finally: delta = monotonic() - start logger.info("%s finished in %i seconds." % (task.name, delta))
[ "def", "_apply_task", "(", "task", ":", "Task", ",", "args", ":", "Tuple", ",", "kwargs", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Any", ":", "if", "args", "is", "None", ":", "args", "=", "(", ")", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "start", "=", "monotonic", "(", ")", "try", ":", "return", "task", ".", "apply", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "delta", "=", "monotonic", "(", ")", "-", "start", "logger", ".", "info", "(", "\"%s finished in %i seconds.\"", "%", "(", "task", ".", "name", ",", "delta", ")", ")" ]
Logs the time spent while running the task.
[ "Logs", "the", "time", "spent", "while", "running", "the", "task", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L292-L304
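A standalone sketch of the try/finally timing pattern used by _apply_task. Note that the original "%i seconds" format truncates sub-second runs to 0; the sketch logs fractional seconds instead.

    # Standalone sketch of the try/finally timing pattern in _apply_task.
    from time import monotonic, sleep

    def timed_call(func, *args, **kwargs):
        start = monotonic()
        try:
            return func(*args, **kwargs)
        finally:
            print("%s finished in %.3f seconds" % (func.__name__, monotonic() - start))

    timed_call(sleep, 0.1)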
cenkalti/kuyruk
kuyruk/worker.py
Worker._shutdown_timer
def _shutdown_timer(self) -> None: """Counts down from MAX_WORKER_RUN_TIME. When it reaches zero, shut down gracefully. """ remaining = self._max_run_time - self.uptime if not self.shutdown_pending.wait(remaining): logger.warning('Run time reached zero') self.shutdown()
python
def _shutdown_timer(self) -> None: """Counts down from MAX_WORKER_RUN_TIME. When it reaches zero, shut down gracefully. """ remaining = self._max_run_time - self.uptime if not self.shutdown_pending.wait(remaining): logger.warning('Run time reached zero') self.shutdown()
[ "def", "_shutdown_timer", "(", "self", ")", "->", "None", ":", "remaining", "=", "self", ".", "_max_run_time", "-", "self", ".", "uptime", "if", "not", "self", ".", "shutdown_pending", ".", "wait", "(", "remaining", ")", ":", "logger", ".", "warning", "(", "'Run time reached zero'", ")", "self", ".", "shutdown", "(", ")" ]
Counts down from MAX_WORKER_RUN_TIME. When it reaches zero, shut down gracefully.
[ "Counts", "down", "from", "MAX_WORKER_RUN_TIME", ".", "When", "it", "reaches", "zero", "sutdown", "gracefully", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L351-L359
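The countdown works because Event.wait(timeout) returns False when the timeout elapses without the event being set; a standalone sketch with a 2-second limit:

    # Standalone sketch of the Event.wait()-based countdown in _shutdown_timer.
    import threading

    shutdown_pending = threading.Event()
    max_run_time = 2.0  # seconds, illustrative

    def shutdown_timer():
        # wait() returns False if the timeout elapses before the event is set
        if not shutdown_pending.wait(max_run_time):
            print("run time reached zero, shutting down")
            shutdown_pending.set()

    threading.Thread(target=shutdown_timer).start()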
cenkalti/kuyruk
kuyruk/worker.py
Worker._handle_sigint
def _handle_sigint(self, signum: int, frame: Any) -> None: """Shutdown after processing current task.""" logger.warning("Catched SIGINT") self.shutdown()
python
def _handle_sigint(self, signum: int, frame: Any) -> None: """Shutdown after processing current task.""" logger.warning("Catched SIGINT") self.shutdown()
[ "def", "_handle_sigint", "(", "self", ",", "signum", ":", "int", ",", "frame", ":", "Any", ")", "->", "None", ":", "logger", ".", "warning", "(", "\"Catched SIGINT\"", ")", "self", ".", "shutdown", "(", ")" ]
Shutdown after processing current task.
[ "Shutdown", "after", "processing", "current", "task", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L366-L369
cenkalti/kuyruk
kuyruk/worker.py
Worker._handle_sighup
def _handle_sighup(self, signum: int, frame: Any) -> None: """Used internally to fail the task when connection to RabbitMQ is lost during the execution of the task. """ logger.warning("Catched SIGHUP") exc_info = self._heartbeat_exc_info self._heartbeat_exc_info = None # Format exception info to see in tools like Sentry. formatted_exception = ''.join(traceback.format_exception(*exc_info)) # noqa raise HeartbeatError(exc_info)
python
def _handle_sighup(self, signum: int, frame: Any) -> None: """Used internally to fail the task when connection to RabbitMQ is lost during the execution of the task. """ logger.warning("Catched SIGHUP") exc_info = self._heartbeat_exc_info self._heartbeat_exc_info = None # Format exception info to see in tools like Sentry. formatted_exception = ''.join(traceback.format_exception(*exc_info)) # noqa raise HeartbeatError(exc_info)
[ "def", "_handle_sighup", "(", "self", ",", "signum", ":", "int", ",", "frame", ":", "Any", ")", "->", "None", ":", "logger", ".", "warning", "(", "\"Catched SIGHUP\"", ")", "exc_info", "=", "self", ".", "_heartbeat_exc_info", "self", ".", "_heartbeat_exc_info", "=", "None", "# Format exception info to see in tools like Sentry.", "formatted_exception", "=", "''", ".", "join", "(", "traceback", ".", "format_exception", "(", "*", "exc_info", ")", ")", "# noqa", "raise", "HeartbeatError", "(", "exc_info", ")" ]
Used internally to fail the task when connection to RabbitMQ is lost during the execution of the task.
[ "Used", "internally", "to", "fail", "the", "task", "when", "connection", "to", "RabbitMQ", "is", "lost", "during", "the", "execution", "of", "the", "task", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L376-L386
cenkalti/kuyruk
kuyruk/worker.py
Worker._handle_sigusr1
def _handle_sigusr1(signum: int, frame: Any) -> None: """Print stacktrace.""" print('=' * 70) print(''.join(traceback.format_stack())) print('-' * 70)
python
def _handle_sigusr1(signum: int, frame: Any) -> None: """Print stacktrace.""" print('=' * 70) print(''.join(traceback.format_stack())) print('-' * 70)
[ "def", "_handle_sigusr1", "(", "signum", ":", "int", ",", "frame", ":", "Any", ")", "->", "None", ":", "print", "(", "'='", "*", "70", ")", "print", "(", "''", ".", "join", "(", "traceback", ".", "format_stack", "(", ")", ")", ")", "print", "(", "'-'", "*", "70", ")" ]
Print stacktrace.
[ "Print", "stacktrace", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L389-L393
cenkalti/kuyruk
kuyruk/worker.py
Worker._handle_sigusr2
def _handle_sigusr2(self, signum: int, frame: Any) -> None: """Drop current task.""" logger.warning("Catched SIGUSR2") if self.current_task: logger.warning("Dropping current task...") raise Discard
python
def _handle_sigusr2(self, signum: int, frame: Any) -> None: """Drop current task.""" logger.warning("Catched SIGUSR2") if self.current_task: logger.warning("Dropping current task...") raise Discard
[ "def", "_handle_sigusr2", "(", "self", ",", "signum", ":", "int", ",", "frame", ":", "Any", ")", "->", "None", ":", "logger", ".", "warning", "(", "\"Catched SIGUSR2\"", ")", "if", "self", ".", "current_task", ":", "logger", ".", "warning", "(", "\"Dropping current task...\"", ")", "raise", "Discard" ]
Drop current task.
[ "Drop", "current", "task", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/worker.py#L395-L400
subdownloader/subdownloader
subdownloader/client/configuration.py
configuration_get_default_folder
def configuration_get_default_folder(): """ Return the default folder where user-specific data is stored. This depends on the system on which Python is running, :return: path to the user-specific configuration data folder """ system = platform.system() if system == 'Linux': # https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html sys_config_path = Path(os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))) elif system == 'Windows': sys_config_path = Path(os.getenv('APPDATA', '')) else: log.error('Unknown system: "{system}" (using default configuration path)'.format(system=system)) sys_config_path = Path() log.debug('User-specific system configuration folder="{sys_config_path}"'.format( sys_config_path=sys_config_path)) sys_config = sys_config_path / PROJECT_TITLE log.debug('User-specific {project} configuration folder="{sys_config}"'.format( project=PROJECT_TITLE, sys_config=sys_config)) return sys_config
python
def configuration_get_default_folder(): """ Return the default folder where user-specific data is stored. This depends on the system on which Python is running, :return: path to the user-specific configuration data folder """ system = platform.system() if system == 'Linux': # https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html sys_config_path = Path(os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))) elif system == 'Windows': sys_config_path = Path(os.getenv('APPDATA', '')) else: log.error('Unknown system: "{system}" (using default configuration path)'.format(system=system)) sys_config_path = Path() log.debug('User-specific system configuration folder="{sys_config_path}"'.format( sys_config_path=sys_config_path)) sys_config = sys_config_path / PROJECT_TITLE log.debug('User-specific {project} configuration folder="{sys_config}"'.format( project=PROJECT_TITLE, sys_config=sys_config)) return sys_config
[ "def", "configuration_get_default_folder", "(", ")", ":", "system", "=", "platform", ".", "system", "(", ")", "if", "system", "==", "'Linux'", ":", "# https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html", "sys_config_path", "=", "Path", "(", "os", ".", "getenv", "(", "'XDG_CONFIG_HOME'", ",", "os", ".", "path", ".", "expanduser", "(", "\"~/.config\"", ")", ")", ")", "elif", "system", "==", "'Windows'", ":", "sys_config_path", "=", "Path", "(", "os", ".", "getenv", "(", "'APPDATA'", ",", "''", ")", ")", "else", ":", "log", ".", "error", "(", "'Unknown system: \"{system}\" (using default configuration path)'", ".", "format", "(", "system", "=", "system", ")", ")", "sys_config_path", "=", "Path", "(", ")", "log", ".", "debug", "(", "'User-specific system configuration folder=\"{sys_config_path}\"'", ".", "format", "(", "sys_config_path", "=", "sys_config_path", ")", ")", "sys_config", "=", "sys_config_path", "/", "PROJECT_TITLE", "log", ".", "debug", "(", "'User-specific {project} configuration folder=\"{sys_config}\"'", ".", "format", "(", "project", "=", "PROJECT_TITLE", ",", "sys_config", "=", "sys_config", ")", ")", "return", "sys_config" ]
Return the default folder where user-specific data is stored. This depends on the system on which Python is running, :return: path to the user-specific configuration data folder
[ "Return", "the", "default", "folder", "where", "user", "-", "specific", "data", "is", "stored", ".", "This", "depends", "of", "the", "system", "on", "which", "Python", "is", "running", ":", "return", ":", "path", "to", "the", "user", "-", "specific", "configuration", "data", "folder" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/configuration.py#L95-L115
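A standalone sketch of the same per-user configuration lookup; the default project name passed in is an assumption (PROJECT_TITLE's value is not shown in this record).

    # Standalone sketch of the XDG_CONFIG_HOME / APPDATA lookup above;
    # the default project name is an assumption.
    import os
    import platform
    from pathlib import Path

    def default_config_folder(project="SubDownloader"):
        system = platform.system()
        if system == "Linux":
            base = Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))
        elif system == "Windows":
            base = Path(os.getenv("APPDATA", ""))
        else:
            base = Path()  # fall back to the current directory elsewhere
        return base / project

    print(default_config_folder())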
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_locale
def from_locale(cls, locale): """ Create a new Language instance from a locale string :param locale: locale as string :return: Language instance with instance.locale() == locale if locale is valid else instance of Unknown Language """ locale = str(locale) if locale is 'unknown': return UnknownLanguage(locale) try: return cls._from_xyz('locale', locale) except NotALanguageException: log.warning('Unknown locale: {}'.format(locale)) return UnknownLanguage(locale)
python
def from_locale(cls, locale): """ Create a new Language instance from a locale string :param locale: locale as string :return: Language instance with instance.locale() == locale if locale is valid else instance of Unknown Language """ locale = str(locale) if locale is 'unknown': return UnknownLanguage(locale) try: return cls._from_xyz('locale', locale) except NotALanguageException: log.warning('Unknown locale: {}'.format(locale)) return UnknownLanguage(locale)
[ "def", "from_locale", "(", "cls", ",", "locale", ")", ":", "locale", "=", "str", "(", "locale", ")", "if", "locale", "is", "'unknown'", ":", "return", "UnknownLanguage", "(", "locale", ")", "try", ":", "return", "cls", ".", "_from_xyz", "(", "'locale'", ",", "locale", ")", "except", "NotALanguageException", ":", "log", ".", "warning", "(", "'Unknown locale: {}'", ".", "format", "(", "locale", ")", ")", "return", "UnknownLanguage", "(", "locale", ")" ]
Create a new Language instance from a locale string :param locale: locale as string :return: Language instance with instance.locale() == locale if locale is valid else instance of Unknown Language
[ "Create", "a", "new", "Language", "instance", "from", "a", "locale", "string", ":", "param", "locale", ":", "locale", "as", "string", ":", "return", ":", "Language", "instance", "with", "instance", ".", "locale", "()", "==", "locale", "if", "locale", "is", "valid", "else", "instance", "of", "Unknown", "Language" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L144-L157
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_xx
def from_xx(cls, xx): """ Create a new Language instance from a ISO639 string :param xx: ISO639 as string :return: Language instance with instance.xx() == xx if xx is valid else instance of UnknownLanguage """ xx = str(xx).lower() if xx is 'unknown': return UnknownLanguage(xx) try: return cls._from_xyz('ISO639', xx) except NotALanguageException: log.warning('Unknown ISO639: {}'.format(xx)) return UnknownLanguage(xx)
python
def from_xx(cls, xx): """ Create a new Language instance from an ISO639 string :param xx: ISO639 as string :return: Language instance with instance.xx() == xx if xx is valid else instance of UnknownLanguage """ xx = str(xx).lower() if xx == 'unknown': return UnknownLanguage(xx) try: return cls._from_xyz('ISO639', xx) except NotALanguageException: log.warning('Unknown ISO639: {}'.format(xx)) return UnknownLanguage(xx)
[ "def", "from_xx", "(", "cls", ",", "xx", ")", ":", "xx", "=", "str", "(", "xx", ")", ".", "lower", "(", ")", "if", "xx", "is", "'unknown'", ":", "return", "UnknownLanguage", "(", "xx", ")", "try", ":", "return", "cls", ".", "_from_xyz", "(", "'ISO639'", ",", "xx", ")", "except", "NotALanguageException", ":", "log", ".", "warning", "(", "'Unknown ISO639: {}'", ".", "format", "(", "xx", ")", ")", "return", "UnknownLanguage", "(", "xx", ")" ]
Create a new Language instance from an ISO639 string :param xx: ISO639 as string :return: Language instance with instance.xx() == xx if xx is valid else instance of UnknownLanguage
[ "Create", "a", "new", "Language", "instance", "from", "a", "ISO639", "string", ":", "param", "xx", ":", "ISO639", "as", "string", ":", "return", ":", "Language", "instance", "with", "instance", ".", "xx", "()", "==", "xx", "if", "xx", "is", "valid", "else", "instance", "of", "UnknownLanguage" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L160-L173
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_xxx
def from_xxx(cls, xxx): """ Create a new Language instance from a LanguageID string :param xxx: LanguageID as string :return: Language instance with instance.xxx() == xxx if xxx is valid else instance of UnknownLanguage """ xxx = str(xxx).lower() if xxx == 'unknown': return UnknownLanguage(xxx) try: return cls._from_xyz('LanguageID', xxx) except NotALanguageException: log.warning('Unknown LanguageId: {}'.format(xxx)) return UnknownLanguage(xxx)
python
def from_xxx(cls, xxx): """ Create a new Language instance from a LanguageID string :param xxx: LanguageID as string :return: Language instance with instance.xxx() == xxx if xxx is valid else instance of UnknownLanguage """ xxx = str(xxx).lower() if xxx == 'unknown': return UnknownLanguage(xxx) try: return cls._from_xyz('LanguageID', xxx) except NotALanguageException: log.warning('Unknown LanguageId: {}'.format(xxx)) return UnknownLanguage(xxx)
[ "def", "from_xxx", "(", "cls", ",", "xxx", ")", ":", "xxx", "=", "str", "(", "xxx", ")", ".", "lower", "(", ")", "if", "xxx", "is", "'unknown'", ":", "return", "UnknownLanguage", "(", "xxx", ")", "try", ":", "return", "cls", ".", "_from_xyz", "(", "'LanguageID'", ",", "xxx", ")", "except", "NotALanguageException", ":", "log", ".", "warning", "(", "'Unknown LanguageId: {}'", ".", "format", "(", "xxx", ")", ")", "return", "UnknownLanguage", "(", "xxx", ")" ]
Create a new Language instance from a LanguageID string :param xxx: LanguageID as string :return: Language instance with instance.xxx() == xxx if xxx is valid else instance of UnknownLanguage
[ "Create", "a", "new", "Language", "instance", "from", "a", "LanguageID", "string", ":", "param", "xxx", ":", "LanguageID", "as", "string", ":", "return", ":", "Language", "instance", "with", "instance", ".", "xxx", "()", "==", "xxx", "if", "xxx", "is", "valid", "else", "instance", "of", "UnknownLanguage" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L176-L189
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_name
def from_name(cls, name): """ Create a new Language instance from a name as string :param name: name as string :return: Language instance with instance.name() == name if name is valid else instance of UnknownLanguage """ name = str(name).lower() if name == 'unknown' or name == _('unknown'): return UnknownLanguage(name) try: return cls._from_xyz('LanguageName', name) except NotALanguageException: log.warning('Unknown LanguageName: {}'.format(name)) return UnknownLanguage(name)
python
def from_name(cls, name): """ Create a new Language instance from a name as string :param name: name as string :return: Language instance with instance.name() == name if name is valid else instance of UnknownLanguage """ name = str(name).lower() if name == 'unknown' or name == _('unknown'): return UnknownLanguage(name) try: return cls._from_xyz('LanguageName', name) except NotALanguageException: log.warning('Unknown LanguageName: {}'.format(name)) return UnknownLanguage(name)
[ "def", "from_name", "(", "cls", ",", "name", ")", ":", "name", "=", "str", "(", "name", ")", ".", "lower", "(", ")", "if", "name", "is", "'unknown'", "or", "name", "is", "_", "(", "'unknown'", ")", ":", "return", "UnknownLanguage", "(", "name", ")", "try", ":", "return", "cls", ".", "_from_xyz", "(", "'LanguageName'", ",", "name", ")", "except", "NotALanguageException", ":", "log", ".", "warning", "(", "'Unknown LanguageName: {}'", ".", "format", "(", "name", ")", ")", "return", "UnknownLanguage", "(", "name", ")" ]
Create a new Language instance from a name as string :param name: name as string :return: Language instance with instance.name() == name if name is valid else instance of UnknownLanguage
[ "Create", "a", "new", "Language", "instance", "from", "a", "name", "as", "string", ":", "param", "name", ":", "name", "as", "string", ":", "return", ":", "Language", "instance", "with", "instance", ".", "name", "()", "==", "name", "if", "name", "is", "valid", "else", "instance", "of", "UnknownLanguage" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L192-L205
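A hedged usage sketch covering the four Language constructors above (from_locale, from_xx, from_xxx, from_name), assuming the module path recorded in these entries and that English appears in the LANGUAGES table; unrecognized input falls back to UnknownLanguage instead of raising:

from subdownloader.languages.language import Language, UnknownLanguage

english = Language.from_xx('en')         # lookup by ISO639 code
also_en = Language.from_name('English')  # lookup by language name (case-insensitive)

# An invalid locale does not raise; it yields an UnknownLanguage instance.
bogus = Language.from_locale('xx_YY')
print(isinstance(bogus, UnknownLanguage))  # expected: True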
subdownloader/subdownloader
subdownloader/languages/language.py
Language._from_xyz
def _from_xyz(cls, xyzkey, xyzvalue): """ Private helper function to create new Language instance. :param xyzkey: one of ('locale', 'ISO639', 'LanguageID', 'LanguageName') :param xyzvalue: corresponding value of xyzkey :return: Language instance """ if xyzvalue == 'unknown' or xyzvalue == _('unknown'): return UnknownLanguage(xyzvalue) for lang_id, lang_data in enumerate(LANGUAGES): for data_value in lang_data[xyzkey]: if xyzvalue == data_value.lower(): return cls(lang_id) raise NotALanguageException(xyzvalue, 'Illegal language {}: {}'.format(xyzkey, xyzvalue))
python
def _from_xyz(cls, xyzkey, xyzvalue): """ Private helper function to create new Language instance. :param xyzkey: one of ('locale', 'ISO639', 'LanguageID', 'LanguageName') :param xyzvalue: corresponding value of xyzkey :return: Language instance """ if xyzvalue == 'unknown' or xyzvalue == _('unknown'): return UnknownLanguage(xyzvalue) for lang_id, lang_data in enumerate(LANGUAGES): for data_value in lang_data[xyzkey]: if xyzvalue == data_value.lower(): return cls(lang_id) raise NotALanguageException(xyzvalue, 'Illegal language {}: {}'.format(xyzkey, xyzvalue))
[ "def", "_from_xyz", "(", "cls", ",", "xyzkey", ",", "xyzvalue", ")", ":", "if", "xyzvalue", "==", "'unknown'", "or", "xyzvalue", "==", "_", "(", "'unknown'", ")", ":", "return", "UnknownLanguage", "(", "xyzvalue", ")", "for", "lang_id", ",", "lang_data", "in", "enumerate", "(", "LANGUAGES", ")", ":", "for", "data_value", "in", "lang_data", "[", "xyzkey", "]", ":", "if", "xyzvalue", "==", "data_value", ".", "lower", "(", ")", ":", "return", "cls", "(", "lang_id", ")", "raise", "NotALanguageException", "(", "xyzvalue", ",", "'Illegal language {}: {}'", ".", "format", "(", "xyzkey", ",", "xyzvalue", ")", ")" ]
Private helper function to create new Language instance. :param xyzkey: one of ('locale', 'ISO639', 'LanguageID', 'LanguageName') :param xyzvalue: corresponding value of xyzkey :return: Language instance
[ "Private", "helper", "function", "to", "create", "new", "Language", "instance", ".", ":", "param", "xyzkey", ":", "one", "of", "(", "locale", "ISO639", "LanguageID", "LanguageName", ")", ":", "param", "xyzvalue", ":", "corresponding", "value", "of", "xyzkey", ":", "return", ":", "Language", "instance" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L208-L221
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_unknown
def from_unknown(cls, value, xx=False, xxx=False, locale=False, name=False): """ Try to create a Language instance having only some limited data about the Language. If no corresponding Language is found, a NotALanguageException is thrown. :param value: data known about the language as string :param xx: True if the value may be a locale :param xxx: True if the value may be a LanguageID :param locale: True if the value may be a locale :param name: True if the value may be a LanguageName :return: Language Instance if a matching Language was found """ # Use 2 lists instead of dict ==> order known keys = ['ISO639', 'LanguageID', 'locale', 'LanguageName'] truefalses = [xx, xxx, locale, name] value = value.lower() for key, doKey in zip(keys, truefalses): if doKey: try: return cls._from_xyz(key, value) except NotALanguageException: pass raise NotALanguageException(value, 'Illegal language "{}"'.format(value))
python
def from_unknown(cls, value, xx=False, xxx=False, locale=False, name=False): """ Try to create a Language instance having only some limited data about the Language. If no corresponding Language is found, a NotALanguageException is thrown. :param value: data known about the language as string :param xx: True if the value may be a locale :param xxx: True if the value may be a LanguageID :param locale: True if the value may be a locale :param name: True if the value may be a LanguageName :return: Language Instance if a matching Language was found """ # Use 2 lists instead of dict ==> order known keys = ['ISO639', 'LanguageID', 'locale', 'LanguageName'] truefalses = [xx, xxx, locale, name] value = value.lower() for key, doKey in zip(keys, truefalses): if doKey: try: return cls._from_xyz(key, value) except NotALanguageException: pass raise NotALanguageException(value, 'Illegal language "{}"'.format(value))
[ "def", "from_unknown", "(", "cls", ",", "value", ",", "xx", "=", "False", ",", "xxx", "=", "False", ",", "locale", "=", "False", ",", "name", "=", "False", ")", ":", "# Use 2 lists instead of dict ==> order known", "keys", "=", "[", "'ISO639'", ",", "'LanguageID'", ",", "'locale'", ",", "'LanguageName'", "]", "truefalses", "=", "[", "xx", ",", "xxx", ",", "locale", ",", "name", "]", "value", "=", "value", ".", "lower", "(", ")", "for", "key", ",", "doKey", "in", "zip", "(", "keys", ",", "truefalses", ")", ":", "if", "doKey", ":", "try", ":", "return", "cls", ".", "_from_xyz", "(", "key", ",", "value", ")", "except", "NotALanguageException", ":", "pass", "raise", "NotALanguageException", "(", "value", ",", "'Illegal language \"{}\"'", ".", "format", "(", "value", ")", ")" ]
Try to create a Language instance having only some limited data about the Language. If no corresponding Language is found, a NotALanguageException is thrown. :param value: data known about the language as string :param xx: True if the value may be an ISO639 code :param xxx: True if the value may be a LanguageID :param locale: True if the value may be a locale :param name: True if the value may be a LanguageName :return: Language Instance if a matching Language was found
[ "Try", "to", "create", "a", "Language", "instance", "having", "only", "some", "limited", "data", "about", "the", "Language", ".", "If", "no", "corresponding", "Language", "is", "found", "a", "NotALanguageException", "is", "thrown", ".", ":", "param", "value", ":", "data", "known", "about", "the", "language", "as", "string", ":", "param", "xx", ":", "True", "if", "the", "value", "may", "be", "a", "locale", ":", "param", "xxx", ":", "True", "if", "the", "value", "may", "be", "a", "LanguageID", ":", "param", "locale", ":", "True", "if", "the", "value", "may", "be", "a", "locale", ":", "param", "name", ":", "True", "if", "the", "value", "may", "be", "a", "LanguageName", ":", "return", ":", "Language", "Instance", "if", "a", "matching", "Language", "was", "found" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L224-L245
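A short sketch of from_unknown, which tries only the lookups enabled by its keyword flags, in the fixed order ISO639, LanguageID, locale, LanguageName, and raises NotALanguageException when nothing matches; the values are illustrative:

from subdownloader.languages.language import Language, NotALanguageException

try:
    # Only the ISO639 and name lookups are attempted here.
    lang = Language.from_unknown('nl', xx=True, name=True)
except NotALanguageException:
    lang = None  # none of the enabled keys matched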
subdownloader/subdownloader
subdownloader/languages/language.py
Language.from_file
def from_file(cls, filepath, chunk_size=None): """ Try do determine the language of a text file. :param filepath: string file path :param chunk_size: amount of bytes of file to read to determine language :return: Language instance if detection succeeded, otherwise return UnknownLanguage """ log.debug('Language.from_file: "{}", chunk={} ...'.format(filepath, chunk_size)) with filepath.open('rb') as f: data = f.read(-1 if chunk_size is None else chunk_size) data_ascii = asciify(data) lang_xx = langdetect_detect(data_ascii) lang = cls.from_xx(lang_xx) log.debug('... result language={}'.format(lang)) return lang
python
def from_file(cls, filepath, chunk_size=None): """ Try do determine the language of a text file. :param filepath: string file path :param chunk_size: amount of bytes of file to read to determine language :return: Language instance if detection succeeded, otherwise return UnknownLanguage """ log.debug('Language.from_file: "{}", chunk={} ...'.format(filepath, chunk_size)) with filepath.open('rb') as f: data = f.read(-1 if chunk_size is None else chunk_size) data_ascii = asciify(data) lang_xx = langdetect_detect(data_ascii) lang = cls.from_xx(lang_xx) log.debug('... result language={}'.format(lang)) return lang
[ "def", "from_file", "(", "cls", ",", "filepath", ",", "chunk_size", "=", "None", ")", ":", "log", ".", "debug", "(", "'Language.from_file: \"{}\", chunk={} ...'", ".", "format", "(", "filepath", ",", "chunk_size", ")", ")", "with", "filepath", ".", "open", "(", "'rb'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", "-", "1", "if", "chunk_size", "is", "None", "else", "chunk_size", ")", "data_ascii", "=", "asciify", "(", "data", ")", "lang_xx", "=", "langdetect_detect", "(", "data_ascii", ")", "lang", "=", "cls", ".", "from_xx", "(", "lang_xx", ")", "log", ".", "debug", "(", "'... result language={}'", ".", "format", "(", "lang", ")", ")", "return", "lang" ]
Try to determine the language of a text file. :param filepath: string file path :param chunk_size: number of bytes of file to read to determine language :return: Language instance if detection succeeded, otherwise return UnknownLanguage
[ "Try", "do", "determine", "the", "language", "of", "a", "text", "file", ".", ":", "param", "filepath", ":", "string", "file", "path", ":", "param", "chunk_size", ":", "amount", "of", "bytes", "of", "file", "to", "read", "to", "determine", "language", ":", "return", ":", "Language", "instance", "if", "detection", "succeeded", "otherwise", "return", "UnknownLanguage" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/languages/language.py#L248-L262
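A sketch of detecting a subtitle file's language with from_file; the path and chunk size are made up, and accuracy depends on the langdetect call used internally:

from pathlib import Path
from subdownloader.languages.language import Language

subtitle = Path('/tmp/example.srt')  # hypothetical file
# Only read the first 4 KiB so large files stay cheap to probe.
lang = Language.from_file(subtitle, chunk_size=4096)
print(lang.name(), lang.xx())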
subdownloader/subdownloader
subdownloader/client/gui/searchFileWidget.py
SearchFileWidget.onFolderTreeClicked
def onFolderTreeClicked(self, proxyIndex): """What to do when a Folder in the tree is clicked""" if not proxyIndex.isValid(): return index = self.proxyFileModel.mapToSource(proxyIndex) settings = QSettings() folder_path = self.fileModel.filePath(index) settings.setValue('mainwindow/workingDirectory', folder_path)
python
def onFolderTreeClicked(self, proxyIndex): """What to do when a Folder in the tree is clicked""" if not proxyIndex.isValid(): return index = self.proxyFileModel.mapToSource(proxyIndex) settings = QSettings() folder_path = self.fileModel.filePath(index) settings.setValue('mainwindow/workingDirectory', folder_path)
[ "def", "onFolderTreeClicked", "(", "self", ",", "proxyIndex", ")", ":", "if", "not", "proxyIndex", ".", "isValid", "(", ")", ":", "return", "index", "=", "self", ".", "proxyFileModel", ".", "mapToSource", "(", "proxyIndex", ")", "settings", "=", "QSettings", "(", ")", "folder_path", "=", "self", ".", "fileModel", ".", "filePath", "(", "index", ")", "settings", ".", "setValue", "(", "'mainwindow/workingDirectory'", ",", "folder_path", ")" ]
What to do when a Folder in the tree is clicked
[ "What", "to", "do", "when", "a", "Folder", "in", "the", "tree", "is", "clicked" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/gui/searchFileWidget.py#L214-L222
cenkalti/kuyruk
kuyruk/task.py
Task.send_to_queue
def send_to_queue( self, args: Tuple=(), kwargs: Dict[str, Any]={}, host: str=None, wait_result: Union[int, float]=None, message_ttl: Union[int, float]=None, ) -> Any: """ Sends a message to the queue. A worker will run the task's function when it receives the message. :param args: Arguments that will be passed to task on execution. :param kwargs: Keyword arguments that will be passed to task on execution. :param host: Send this task to specific host. ``host`` will be appended to the queue name. If ``host`` is "localhost", hostname of the server will be appended to the queue name. :param wait_result: Wait for result from worker for ``wait_result`` seconds. If timeout occurs, :class:`~kuyruk.exceptions.ResultTimeout` is raised. If excecption occurs in worker, :class:`~kuyruk.exceptions.RemoteException` is raised. :param message_ttl: If set, message will be destroyed in queue after ``message_ttl`` seconds. :return: Result from worker if ``wait_result`` is set, else :const:`None`. """ if self.kuyruk.config.EAGER: # Run the task in current process result = self.apply(*args, **kwargs) return result if wait_result else None logger.debug("Task.send_to_queue args=%r, kwargs=%r", args, kwargs) queue = self._queue_for_host(host) description = self._get_description(args, kwargs) self._send_signal(signals.task_presend, args=args, kwargs=kwargs, description=description) body = json.dumps(description) msg = amqp.Message(body=body) if wait_result: # Use direct reply-to feature from RabbitMQ: # https://www.rabbitmq.com/direct-reply-to.html msg.properties['reply_to'] = 'amq.rabbitmq.reply-to' if message_ttl: msg.properties['expiration'] = str(int(message_ttl * 1000)) with self.kuyruk.channel() as ch: if wait_result: result = Result(ch.connection) ch.basic_consume(queue='amq.rabbitmq.reply-to', no_ack=True, callback=result.process_message) ch.queue_declare(queue=queue, durable=True, auto_delete=False) ch.basic_publish(msg, exchange="", routing_key=queue) self._send_signal(signals.task_postsend, args=args, kwargs=kwargs, description=description) if wait_result: return result.wait(wait_result)
python
def send_to_queue( self, args: Tuple=(), kwargs: Dict[str, Any]={}, host: str=None, wait_result: Union[int, float]=None, message_ttl: Union[int, float]=None, ) -> Any: """ Sends a message to the queue. A worker will run the task's function when it receives the message. :param args: Arguments that will be passed to task on execution. :param kwargs: Keyword arguments that will be passed to task on execution. :param host: Send this task to specific host. ``host`` will be appended to the queue name. If ``host`` is "localhost", hostname of the server will be appended to the queue name. :param wait_result: Wait for result from worker for ``wait_result`` seconds. If timeout occurs, :class:`~kuyruk.exceptions.ResultTimeout` is raised. If excecption occurs in worker, :class:`~kuyruk.exceptions.RemoteException` is raised. :param message_ttl: If set, message will be destroyed in queue after ``message_ttl`` seconds. :return: Result from worker if ``wait_result`` is set, else :const:`None`. """ if self.kuyruk.config.EAGER: # Run the task in current process result = self.apply(*args, **kwargs) return result if wait_result else None logger.debug("Task.send_to_queue args=%r, kwargs=%r", args, kwargs) queue = self._queue_for_host(host) description = self._get_description(args, kwargs) self._send_signal(signals.task_presend, args=args, kwargs=kwargs, description=description) body = json.dumps(description) msg = amqp.Message(body=body) if wait_result: # Use direct reply-to feature from RabbitMQ: # https://www.rabbitmq.com/direct-reply-to.html msg.properties['reply_to'] = 'amq.rabbitmq.reply-to' if message_ttl: msg.properties['expiration'] = str(int(message_ttl * 1000)) with self.kuyruk.channel() as ch: if wait_result: result = Result(ch.connection) ch.basic_consume(queue='amq.rabbitmq.reply-to', no_ack=True, callback=result.process_message) ch.queue_declare(queue=queue, durable=True, auto_delete=False) ch.basic_publish(msg, exchange="", routing_key=queue) self._send_signal(signals.task_postsend, args=args, kwargs=kwargs, description=description) if wait_result: return result.wait(wait_result)
[ "def", "send_to_queue", "(", "self", ",", "args", ":", "Tuple", "=", "(", ")", ",", "kwargs", ":", "Dict", "[", "str", ",", "Any", "]", "=", "{", "}", ",", "host", ":", "str", "=", "None", ",", "wait_result", ":", "Union", "[", "int", ",", "float", "]", "=", "None", ",", "message_ttl", ":", "Union", "[", "int", ",", "float", "]", "=", "None", ",", ")", "->", "Any", ":", "if", "self", ".", "kuyruk", ".", "config", ".", "EAGER", ":", "# Run the task in current process", "result", "=", "self", ".", "apply", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "result", "if", "wait_result", "else", "None", "logger", ".", "debug", "(", "\"Task.send_to_queue args=%r, kwargs=%r\"", ",", "args", ",", "kwargs", ")", "queue", "=", "self", ".", "_queue_for_host", "(", "host", ")", "description", "=", "self", ".", "_get_description", "(", "args", ",", "kwargs", ")", "self", ".", "_send_signal", "(", "signals", ".", "task_presend", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ",", "description", "=", "description", ")", "body", "=", "json", ".", "dumps", "(", "description", ")", "msg", "=", "amqp", ".", "Message", "(", "body", "=", "body", ")", "if", "wait_result", ":", "# Use direct reply-to feature from RabbitMQ:", "# https://www.rabbitmq.com/direct-reply-to.html", "msg", ".", "properties", "[", "'reply_to'", "]", "=", "'amq.rabbitmq.reply-to'", "if", "message_ttl", ":", "msg", ".", "properties", "[", "'expiration'", "]", "=", "str", "(", "int", "(", "message_ttl", "*", "1000", ")", ")", "with", "self", ".", "kuyruk", ".", "channel", "(", ")", "as", "ch", ":", "if", "wait_result", ":", "result", "=", "Result", "(", "ch", ".", "connection", ")", "ch", ".", "basic_consume", "(", "queue", "=", "'amq.rabbitmq.reply-to'", ",", "no_ack", "=", "True", ",", "callback", "=", "result", ".", "process_message", ")", "ch", ".", "queue_declare", "(", "queue", "=", "queue", ",", "durable", "=", "True", ",", "auto_delete", "=", "False", ")", "ch", ".", "basic_publish", "(", "msg", ",", "exchange", "=", "\"\"", ",", "routing_key", "=", "queue", ")", "self", ".", "_send_signal", "(", "signals", ".", "task_postsend", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ",", "description", "=", "description", ")", "if", "wait_result", ":", "return", "result", ".", "wait", "(", "wait_result", ")" ]
Sends a message to the queue. A worker will run the task's function when it receives the message. :param args: Arguments that will be passed to task on execution. :param kwargs: Keyword arguments that will be passed to task on execution. :param host: Send this task to a specific host. ``host`` will be appended to the queue name. If ``host`` is "localhost", hostname of the server will be appended to the queue name. :param wait_result: Wait for result from worker for ``wait_result`` seconds. If timeout occurs, :class:`~kuyruk.exceptions.ResultTimeout` is raised. If an exception occurs in the worker, :class:`~kuyruk.exceptions.RemoteException` is raised. :param message_ttl: If set, message will be destroyed in queue after ``message_ttl`` seconds. :return: Result from worker if ``wait_result`` is set, else :const:`None`.
[ "Sends", "a", "message", "to", "the", "queue", ".", "A", "worker", "will", "run", "the", "task", "s", "function", "when", "it", "receives", "the", "message", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/task.py#L69-L130
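A hedged sketch of publishing a task with send_to_queue and waiting for the worker's reply; the Kuyruk app and task definition follow the library's usual decorator pattern, and the 10-second timeout is arbitrary:

from kuyruk import Kuyruk

kuyruk = Kuyruk()

@kuyruk.task()
def add(a, b):
    return a + b

# Publishes a message to the task's queue and blocks up to 10 seconds
# for the result via RabbitMQ's direct reply-to; with EAGER config the
# function simply runs in-process instead.
result = add.send_to_queue(args=(2, 3), wait_result=10)
print(result)  # 5; None when wait_result is not given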
cenkalti/kuyruk
kuyruk/task.py
Task._get_description
def _get_description(self, args: Tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]: """Return the dictionary to be sent to the queue.""" return { 'id': uuid1().hex, 'args': args, 'kwargs': kwargs, 'module': self._module_name, 'function': self.f.__name__, 'sender_hostname': socket.gethostname(), 'sender_pid': os.getpid(), 'sender_cmd': ' '.join(sys.argv), 'sender_timestamp': datetime.utcnow().isoformat()[:19], }
python
def _get_description(self, args: Tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]: """Return the dictionary to be sent to the queue.""" return { 'id': uuid1().hex, 'args': args, 'kwargs': kwargs, 'module': self._module_name, 'function': self.f.__name__, 'sender_hostname': socket.gethostname(), 'sender_pid': os.getpid(), 'sender_cmd': ' '.join(sys.argv), 'sender_timestamp': datetime.utcnow().isoformat()[:19], }
[ "def", "_get_description", "(", "self", ",", "args", ":", "Tuple", ",", "kwargs", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "return", "{", "'id'", ":", "uuid1", "(", ")", ".", "hex", ",", "'args'", ":", "args", ",", "'kwargs'", ":", "kwargs", ",", "'module'", ":", "self", ".", "_module_name", ",", "'function'", ":", "self", ".", "f", ".", "__name__", ",", "'sender_hostname'", ":", "socket", ".", "gethostname", "(", ")", ",", "'sender_pid'", ":", "os", ".", "getpid", "(", ")", ",", "'sender_cmd'", ":", "' '", ".", "join", "(", "sys", ".", "argv", ")", ",", "'sender_timestamp'", ":", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", "[", ":", "19", "]", ",", "}" ]
Return the dictionary to be sent to the queue.
[ "Return", "the", "dictionary", "to", "be", "sent", "to", "the", "queue", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/task.py#L139-L151
cenkalti/kuyruk
kuyruk/task.py
Task.apply
def apply(self, *args: Any, **kwargs: Any) -> Any: """Called by workers to run the wrapped function. You may call it yourself if you want to run the task in current process without sending to the queue. If task has a `retry` property it will be retried on failure. If task has a `max_run_time` property the task will not be allowed to run more than that. """ def send_signal(sig: Signal, **extra: Any) -> None: self._send_signal(sig, args=args, kwargs=kwargs, **extra) logger.debug("Applying %r, args=%r, kwargs=%r", self, args, kwargs) send_signal(signals.task_preapply) try: tries = 1 + self.retry while 1: tries -= 1 send_signal(signals.task_prerun) try: with time_limit(self.max_run_time or 0): return self.f(*args, **kwargs) except Exception: send_signal(signals.task_error, exc_info=sys.exc_info()) if tries <= 0: raise else: break finally: send_signal(signals.task_postrun) except Exception: send_signal(signals.task_failure, exc_info=sys.exc_info()) raise else: send_signal(signals.task_success) finally: send_signal(signals.task_postapply)
python
def apply(self, *args: Any, **kwargs: Any) -> Any: """Called by workers to run the wrapped function. You may call it yourself if you want to run the task in current process without sending to the queue. If task has a `retry` property it will be retried on failure. If task has a `max_run_time` property the task will not be allowed to run more than that. """ def send_signal(sig: Signal, **extra: Any) -> None: self._send_signal(sig, args=args, kwargs=kwargs, **extra) logger.debug("Applying %r, args=%r, kwargs=%r", self, args, kwargs) send_signal(signals.task_preapply) try: tries = 1 + self.retry while 1: tries -= 1 send_signal(signals.task_prerun) try: with time_limit(self.max_run_time or 0): return self.f(*args, **kwargs) except Exception: send_signal(signals.task_error, exc_info=sys.exc_info()) if tries <= 0: raise else: break finally: send_signal(signals.task_postrun) except Exception: send_signal(signals.task_failure, exc_info=sys.exc_info()) raise else: send_signal(signals.task_success) finally: send_signal(signals.task_postapply)
[ "def", "apply", "(", "self", ",", "*", "args", ":", "Any", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Any", ":", "def", "send_signal", "(", "sig", ":", "Signal", ",", "*", "*", "extra", ":", "Any", ")", "->", "None", ":", "self", ".", "_send_signal", "(", "sig", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ",", "*", "*", "extra", ")", "logger", ".", "debug", "(", "\"Applying %r, args=%r, kwargs=%r\"", ",", "self", ",", "args", ",", "kwargs", ")", "send_signal", "(", "signals", ".", "task_preapply", ")", "try", ":", "tries", "=", "1", "+", "self", ".", "retry", "while", "1", ":", "tries", "-=", "1", "send_signal", "(", "signals", ".", "task_prerun", ")", "try", ":", "with", "time_limit", "(", "self", ".", "max_run_time", "or", "0", ")", ":", "return", "self", ".", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", ":", "send_signal", "(", "signals", ".", "task_error", ",", "exc_info", "=", "sys", ".", "exc_info", "(", ")", ")", "if", "tries", "<=", "0", ":", "raise", "else", ":", "break", "finally", ":", "send_signal", "(", "signals", ".", "task_postrun", ")", "except", "Exception", ":", "send_signal", "(", "signals", ".", "task_failure", ",", "exc_info", "=", "sys", ".", "exc_info", "(", ")", ")", "raise", "else", ":", "send_signal", "(", "signals", ".", "task_success", ")", "finally", ":", "send_signal", "(", "signals", ".", "task_postapply", ")" ]
Called by workers to run the wrapped function. You may call it yourself if you want to run the task in the current process without sending it to the queue. If the task has a `retry` property, it will be retried on failure. If the task has a `max_run_time` property, the task will not be allowed to run longer than that.
[ "Called", "by", "workers", "to", "run", "the", "wrapped", "function", ".", "You", "may", "call", "it", "yourself", "if", "you", "want", "to", "run", "the", "task", "in", "current", "process", "without", "sending", "to", "the", "queue", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/task.py#L156-L194
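apply can also be called directly to run the task in the current process, skipping the broker entirely; this reuses the hypothetical add task from the previous sketch:

# Retry and max_run_time settings still apply, but no message is published.
value = add.apply(2, 3)
assert value == 5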
cenkalti/kuyruk
kuyruk/task.py
Task._module_name
def _module_name(self) -> str: """Module name of the wrapped function.""" name = self.f.__module__ if name == '__main__': return importer.main_module_name() return name
python
def _module_name(self) -> str: """Module name of the wrapped function.""" name = self.f.__module__ if name == '__main__': return importer.main_module_name() return name
[ "def", "_module_name", "(", "self", ")", "->", "str", ":", "name", "=", "self", ".", "f", ".", "__module__", "if", "name", "==", "'__main__'", ":", "return", "importer", ".", "main_module_name", "(", ")", "return", "name" ]
Module name of the wrapped function.
[ "Module", "name", "of", "the", "wrapped", "function", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/task.py#L205-L210
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_install
def i18n_install(lc=None): """ Install internationalization support for the clients using the specified locale. If there is no support for the locale, the default locale will be used. As last resort, a null translator will be installed. :param lc: locale to install. If None, the system default locale will be used. """ log.debug('i18n_install( {lc} ) called.'.format(lc=lc)) if lc is None: lc = i18n_system_locale() if lc is None: log.debug('i18n_install(): installing NullTranslations') translator = gettext.NullTranslations() else: child_locales = i18n_support_locale(lc) # Call i18n_support_locale to log the supported locales log.debug('i18n_install(): installing gettext.translation(domain={domain}, localedir={localedir}, ' 'languages={languages}, fallback={fallback})'.format(domain=project.PROJECT_TITLE.lower(), localedir=i18n_get_path(), languages=child_locales, fallback=True)) translator = gettext.translation( domain=project.PROJECT_TITLE.lower(), localedir=str(i18n_get_path()), languages=child_locales, fallback=True) translator.install(names=['ngettext'])
python
def i18n_install(lc=None): """ Install internationalization support for the clients using the specified locale. If there is no support for the locale, the default locale will be used. As last resort, a null translator will be installed. :param lc: locale to install. If None, the system default locale will be used. """ log.debug('i18n_install( {lc} ) called.'.format(lc=lc)) if lc is None: lc = i18n_system_locale() if lc is None: log.debug('i18n_install(): installing NullTranslations') translator = gettext.NullTranslations() else: child_locales = i18n_support_locale(lc) # Call i18n_support_locale to log the supported locales log.debug('i18n_install(): installing gettext.translation(domain={domain}, localedir={localedir}, ' 'languages={languages}, fallback={fallback})'.format(domain=project.PROJECT_TITLE.lower(), localedir=i18n_get_path(), languages=child_locales, fallback=True)) translator = gettext.translation( domain=project.PROJECT_TITLE.lower(), localedir=str(i18n_get_path()), languages=child_locales, fallback=True) translator.install(names=['ngettext'])
[ "def", "i18n_install", "(", "lc", "=", "None", ")", ":", "log", ".", "debug", "(", "'i18n_install( {lc} ) called.'", ".", "format", "(", "lc", "=", "lc", ")", ")", "if", "lc", "is", "None", ":", "lc", "=", "i18n_system_locale", "(", ")", "if", "lc", "is", "None", ":", "log", ".", "debug", "(", "'i18n_install(): installing NullTranslations'", ")", "translator", "=", "gettext", ".", "NullTranslations", "(", ")", "else", ":", "child_locales", "=", "i18n_support_locale", "(", "lc", ")", "# Call i18n_support_locale to log the supported locales", "log", ".", "debug", "(", "'i18n_install(): installing gettext.translation(domain={domain}, localedir={localedir}, '", "'languages={languages}, fallback={fallback})'", ".", "format", "(", "domain", "=", "project", ".", "PROJECT_TITLE", ".", "lower", "(", ")", ",", "localedir", "=", "i18n_get_path", "(", ")", ",", "languages", "=", "child_locales", ",", "fallback", "=", "True", ")", ")", "translator", "=", "gettext", ".", "translation", "(", "domain", "=", "project", ".", "PROJECT_TITLE", ".", "lower", "(", ")", ",", "localedir", "=", "str", "(", "i18n_get_path", "(", ")", ")", ",", "languages", "=", "child_locales", ",", "fallback", "=", "True", ")", "translator", ".", "install", "(", "names", "=", "[", "'ngettext'", "]", ")" ]
Install internationalization support for the clients using the specified locale. If there is no support for the locale, the default locale will be used. As a last resort, a null translator will be installed. :param lc: locale to install. If None, the system default locale will be used.
[ "Install", "internationalization", "support", "for", "the", "clients", "using", "the", "specified", "locale", ".", "If", "there", "is", "no", "support", "for", "the", "locale", "the", "default", "locale", "will", "be", "used", ".", "As", "last", "resort", "a", "null", "translator", "will", "be", "installed", ".", ":", "param", "lc", ":", "locale", "to", "install", ".", "If", "None", "the", "system", "default", "locale", "will", "be", "used", "." ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L17-L41
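A sketch of installing translations at client start-up; the locale string is illustrative, and gettext falls back to a do-nothing translator when no matching .mo file ships with the program:

from subdownloader.client.internationalization import i18n_install

i18n_install(lc='nl_NL')   # or i18n_install() to use the system locale

# translator.install() puts _() and ngettext() into builtins,
# so translated strings can be requested anywhere afterwards.
print(_('Hello'))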
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_system_locale
def i18n_system_locale(): """ Return the system locale :return: the system locale (as a string) """ log.debug('i18n_system_locale() called') lc, encoding = locale.getlocale() log.debug('locale.getlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding)) if lc is None: lc, encoding = locale.getdefaultlocale() log.debug('locale.getdefaultlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding)) return lc
python
def i18n_system_locale(): """ Return the system locale :return: the system locale (as a string) """ log.debug('i18n_system_locale() called') lc, encoding = locale.getlocale() log.debug('locale.getlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding)) if lc is None: lc, encoding = locale.getdefaultlocale() log.debug('locale.getdefaultlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding)) return lc
[ "def", "i18n_system_locale", "(", ")", ":", "log", ".", "debug", "(", "'i18n_system_locale() called'", ")", "lc", ",", "encoding", "=", "locale", ".", "getlocale", "(", ")", "log", ".", "debug", "(", "'locale.getlocale() = (lc=\"{lc}\", encoding=\"{encoding}).'", ".", "format", "(", "lc", "=", "lc", ",", "encoding", "=", "encoding", ")", ")", "if", "lc", "is", "None", ":", "lc", ",", "encoding", "=", "locale", ".", "getdefaultlocale", "(", ")", "log", ".", "debug", "(", "'locale.getdefaultlocale() = (lc=\"{lc}\", encoding=\"{encoding}).'", ".", "format", "(", "lc", "=", "lc", ",", "encoding", "=", "encoding", ")", ")", "return", "lc" ]
Return the system locale :return: the system locale (as a string)
[ "Return", "the", "system", "locale", ":", "return", ":", "the", "system", "locale", "(", "as", "a", "string", ")" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L44-L55
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_locale_fallbacks_calculate
def i18n_locale_fallbacks_calculate(lc): """ Calculate all child locales from a locale. e.g. for locale="pt_BR.us-ascii", returns ["pt_BR.us-ascii", "pt_BR.us", "pt_BR", "pt"] :param lc: locale for which the child locales are needed :return: all child locales (including the parameter lc) """ log.debug('i18n_locale_fallbacks_calculate( locale="{locale}" ) called'.format(locale=lc)) locales = [] lc_original = lc while lc: locales.append(lc) rindex = max([lc.rfind(separator) for separator in ['@', '_', '-', '.']]) if rindex == -1: break lc = lc[:rindex] log.debug('i18n_locale_fallbacks_calculate( lc="{lc}" ) = {locales}'.format(lc=lc_original, locales=locales)) return locales
python
def i18n_locale_fallbacks_calculate(lc): """ Calculate all child locales from a locale. e.g. for locale="pt_BR.us-ascii", returns ["pt_BR.us-ascii", "pt_BR.us", "pt_BR", "pt"] :param lc: locale for which the child locales are needed :return: all child locales (including the parameter lc) """ log.debug('i18n_locale_fallbacks_calculate( locale="{locale}" ) called'.format(locale=lc)) locales = [] lc_original = lc while lc: locales.append(lc) rindex = max([lc.rfind(separator) for separator in ['@', '_', '-', '.']]) if rindex == -1: break lc = lc[:rindex] log.debug('i18n_locale_fallbacks_calculate( lc="{lc}" ) = {locales}'.format(lc=lc_original, locales=locales)) return locales
[ "def", "i18n_locale_fallbacks_calculate", "(", "lc", ")", ":", "log", ".", "debug", "(", "'i18n_locale_fallbacks_calculate( locale=\"{locale}\" ) called'", ".", "format", "(", "locale", "=", "lc", ")", ")", "locales", "=", "[", "]", "lc_original", "=", "lc", "while", "lc", ":", "locales", ".", "append", "(", "lc", ")", "rindex", "=", "max", "(", "[", "lc", ".", "rfind", "(", "separator", ")", "for", "separator", "in", "[", "'@'", ",", "'_'", ",", "'-'", ",", "'.'", "]", "]", ")", "if", "rindex", "==", "-", "1", ":", "break", "lc", "=", "lc", "[", ":", "rindex", "]", "log", ".", "debug", "(", "'i18n_locale_fallbacks_calculate( lc=\"{lc}\" ) = {locales}'", ".", "format", "(", "lc", "=", "lc_original", ",", "locales", "=", "locales", ")", ")", "return", "locales" ]
Calculate all child locales from a locale. e.g. for locale="pt_BR.us-ascii", returns ["pt_BR.us-ascii", "pt_BR.us", "pt_BR", "pt"] :param lc: locale for which the child locales are needed :return: all child locales (including the parameter lc)
[ "Calculate", "all", "child", "locales", "from", "a", "locale", ".", "e", ".", "g", ".", "for", "locale", "=", "pt_BR", ".", "us", "-", "ascii", "returns", "[", "pt_BR", ".", "us", "-", "ascii", "pt_BR", ".", "us", "pt_BR", "pt", "]", ":", "param", "lc", ":", "locale", "for", "which", "the", "child", "locales", "are", "needed", ":", "return", ":", "all", "child", "locales", "(", "including", "the", "parameter", "lc", ")" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L58-L75
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_support_locale
def i18n_support_locale(lc_parent): """ Find out whether lc is supported. Returns all child locales (and eventually lc) which do have support. :param lc_parent: Locale for which we want to know the child locales that are supported :return: list of supported locales """ log.debug('i18n_support_locale( locale="{locale}" ) called'.format(locale=lc_parent)) lc_childs = i18n_locale_fallbacks_calculate(lc_parent) locales = [] locale_path = i18n_get_path() mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower()) for lc in lc_childs: lc_mo_path = locale_path / lc / 'LC_MESSAGES' / mo_file log.debug('Locale data "{lc_mo_path}" exists? ...'.format(lc_mo_path=lc_mo_path)) if lc_mo_path.is_file(): log.debug('... Yes! "{locale_path}" contains {mo_file}.'.format(locale_path=locale_path, mo_file=mo_file)) locales.append(lc) else: log.debug('... No') log.debug('i18n_support_locale( lc="{lc}" ) = {locales}'.format(lc=lc_parent, locales=locales)) return locales
python
def i18n_support_locale(lc_parent): """ Find out whether lc is supported. Returns all child locales (and eventually lc) which do have support. :param lc_parent: Locale for which we want to know the child locales that are supported :return: list of supported locales """ log.debug('i18n_support_locale( locale="{locale}" ) called'.format(locale=lc_parent)) lc_childs = i18n_locale_fallbacks_calculate(lc_parent) locales = [] locale_path = i18n_get_path() mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower()) for lc in lc_childs: lc_mo_path = locale_path / lc / 'LC_MESSAGES' / mo_file log.debug('Locale data "{lc_mo_path}" exists? ...'.format(lc_mo_path=lc_mo_path)) if lc_mo_path.is_file(): log.debug('... Yes! "{locale_path}" contains {mo_file}.'.format(locale_path=locale_path, mo_file=mo_file)) locales.append(lc) else: log.debug('... No') log.debug('i18n_support_locale( lc="{lc}" ) = {locales}'.format(lc=lc_parent, locales=locales)) return locales
[ "def", "i18n_support_locale", "(", "lc_parent", ")", ":", "log", ".", "debug", "(", "'i18n_support_locale( locale=\"{locale}\" ) called'", ".", "format", "(", "locale", "=", "lc_parent", ")", ")", "lc_childs", "=", "i18n_locale_fallbacks_calculate", "(", "lc_parent", ")", "locales", "=", "[", "]", "locale_path", "=", "i18n_get_path", "(", ")", "mo_file", "=", "'{project}.mo'", ".", "format", "(", "project", "=", "project", ".", "PROJECT_TITLE", ".", "lower", "(", ")", ")", "for", "lc", "in", "lc_childs", ":", "lc_mo_path", "=", "locale_path", "/", "lc", "/", "'LC_MESSAGES'", "/", "mo_file", "log", ".", "debug", "(", "'Locale data \"{lc_mo_path}\" exists? ...'", ".", "format", "(", "lc_mo_path", "=", "lc_mo_path", ")", ")", "if", "lc_mo_path", ".", "is_file", "(", ")", ":", "log", ".", "debug", "(", "'... Yes! \"{locale_path}\" contains {mo_file}.'", ".", "format", "(", "locale_path", "=", "locale_path", ",", "mo_file", "=", "mo_file", ")", ")", "locales", ".", "append", "(", "lc", ")", "else", ":", "log", ".", "debug", "(", "'... No'", ")", "log", ".", "debug", "(", "'i18n_support_locale( lc=\"{lc}\" ) = {locales}'", ".", "format", "(", "lc", "=", "lc_parent", ",", "locales", "=", "locales", ")", ")", "return", "locales" ]
Find out whether lc_parent is supported. Returns all child locales (and possibly lc_parent itself) which do have support. :param lc_parent: Locale for which we want to know the child locales that are supported :return: list of supported locales
[ "Find", "out", "whether", "lc", "is", "supported", ".", "Returns", "all", "child", "locales", "(", "and", "eventually", "lc", ")", "which", "do", "have", "support", ".", ":", "param", "lc_parent", ":", "Locale", "for", "which", "we", "want", "to", "know", "the", "child", "locales", "that", "are", "supported", ":", "return", ":", "list", "of", "supported", "locales" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L78-L101
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_get_path
def i18n_get_path(): """ Get path to the internationalization data. :return: path as a string """ local_locale_path = client_get_path() / 'locale' if platform.system() == 'Linux': if local_locale_path.exists(): return local_locale_path else: return Path('/usr/share/locale') else: return local_locale_path
python
def i18n_get_path(): """ Get path to the internationalization data. :return: path as a string """ local_locale_path = client_get_path() / 'locale' if platform.system() == 'Linux': if local_locale_path.exists(): return local_locale_path else: return Path('/usr/share/locale') else: return local_locale_path
[ "def", "i18n_get_path", "(", ")", ":", "local_locale_path", "=", "client_get_path", "(", ")", "/", "'locale'", "if", "platform", ".", "system", "(", ")", "==", "'Linux'", ":", "if", "local_locale_path", ".", "exists", "(", ")", ":", "return", "local_locale_path", "else", ":", "return", "Path", "(", "'/usr/share/locale'", ")", "else", ":", "return", "local_locale_path" ]
Get path to the internationalization data. :return: path as a pathlib.Path
[ "Get", "path", "to", "the", "internationalization", "data", ".", ":", "return", ":", "path", "as", "a", "string" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L104-L116
subdownloader/subdownloader
subdownloader/client/internationalization.py
i18n_get_supported_locales
def i18n_get_supported_locales(): """ List all locales that have internationalization data for this program :return: List of locales """ locale_path = i18n_get_path() log.debug('Scanning translation files .mo in locale path: {}'.format(locale_path)) langs = [] mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower()) for lc in locale_path.iterdir(): lc_mo_path = lc / 'LC_MESSAGES' / mo_file if lc_mo_path.exists(): langs.append(lc.name) log.debug('Detected: {langs}'.format(langs=langs)) return langs
python
def i18n_get_supported_locales(): """ List all locales that have internationalization data for this program :return: List of locales """ locale_path = i18n_get_path() log.debug('Scanning translation files .mo in locale path: {}'.format(locale_path)) langs = [] mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower()) for lc in locale_path.iterdir(): lc_mo_path = lc / 'LC_MESSAGES' / mo_file if lc_mo_path.exists(): langs.append(lc.name) log.debug('Detected: {langs}'.format(langs=langs)) return langs
[ "def", "i18n_get_supported_locales", "(", ")", ":", "locale_path", "=", "i18n_get_path", "(", ")", "log", ".", "debug", "(", "'Scanning translation files .mo in locale path: {}'", ".", "format", "(", "locale_path", ")", ")", "langs", "=", "[", "]", "mo_file", "=", "'{project}.mo'", ".", "format", "(", "project", "=", "project", ".", "PROJECT_TITLE", ".", "lower", "(", ")", ")", "for", "lc", "in", "locale_path", ".", "iterdir", "(", ")", ":", "lc_mo_path", "=", "lc", "/", "'LC_MESSAGES'", "/", "mo_file", "if", "lc_mo_path", ".", "exists", "(", ")", ":", "langs", ".", "append", "(", "lc", ".", "name", ")", "log", ".", "debug", "(", "'Detected: {langs}'", ".", "format", "(", "langs", "=", "langs", ")", ")", "return", "langs" ]
List all locales that have internationalization data for this program :return: List of locales
[ "List", "all", "locales", "that", "have", "internationalization", "data", "for", "this", "program", ":", "return", ":", "List", "of", "locales" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/internationalization.py#L119-L133
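A sketch combining the i18n helpers above to report which translations ship with the program and whether the current system locale is covered:

from subdownloader.client.internationalization import (
    i18n_get_supported_locales,
    i18n_support_locale,
    i18n_system_locale,
)

print('Shipped translations:', i18n_get_supported_locales())

lc = i18n_system_locale()
if lc:
    # An empty list means neither the locale nor any of its fallbacks has a .mo file.
    print('Usable fallbacks for', lc, ':', i18n_support_locale(lc))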
cenkalti/kuyruk
kuyruk/config.py
Config.from_object
def from_object(self, obj: Union[str, Any]) -> None: """Load values from an object.""" if isinstance(obj, str): obj = importer.import_object_str(obj) for key in dir(obj): if key.isupper(): value = getattr(obj, key) self._setattr(key, value) logger.info("Config is loaded from object: %r", obj)
python
def from_object(self, obj: Union[str, Any]) -> None: """Load values from an object.""" if isinstance(obj, str): obj = importer.import_object_str(obj) for key in dir(obj): if key.isupper(): value = getattr(obj, key) self._setattr(key, value) logger.info("Config is loaded from object: %r", obj)
[ "def", "from_object", "(", "self", ",", "obj", ":", "Union", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "obj", "=", "importer", ".", "import_object_str", "(", "obj", ")", "for", "key", "in", "dir", "(", "obj", ")", ":", "if", "key", ".", "isupper", "(", ")", ":", "value", "=", "getattr", "(", "obj", ",", "key", ")", "self", ".", "_setattr", "(", "key", ",", "value", ")", "logger", ".", "info", "(", "\"Config is loaded from object: %r\"", ",", "obj", ")" ]
Load values from an object.
[ "Load", "values", "from", "an", "object", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/config.py#L62-L72
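A sketch of loading Kuyruk settings from an object with from_object; only uppercase attributes are read, EAGER is taken from the Task code shown earlier, and RABBIT_HOST is assumed to be one of the library's config keys:

from kuyruk import Kuyruk

class Settings:
    EAGER = True               # run tasks in-process instead of queueing
    RABBIT_HOST = 'localhost'  # assumed key name from kuyruk's default config
    comment = 'lowercase attributes like this one are ignored'

kuyruk = Kuyruk()
kuyruk.config.from_object(Settings)  # a dotted path string such as 'myapp.settings' also works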
cenkalti/kuyruk
kuyruk/config.py
Config.from_dict
def from_dict(self, d: Dict[str, Any]) -> None: """Load values from a dict.""" for key, value in d.items(): if key.isupper(): self._setattr(key, value) logger.info("Config is loaded from dict: %r", d)
python
def from_dict(self, d: Dict[str, Any]) -> None: """Load values from a dict.""" for key, value in d.items(): if key.isupper(): self._setattr(key, value) logger.info("Config is loaded from dict: %r", d)
[ "def", "from_dict", "(", "self", ",", "d", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "for", "key", ",", "value", "in", "d", ".", "items", "(", ")", ":", "if", "key", ".", "isupper", "(", ")", ":", "self", ".", "_setattr", "(", "key", ",", "value", ")", "logger", ".", "info", "(", "\"Config is loaded from dict: %r\"", ",", "d", ")" ]
Load values from a dict.
[ "Load", "values", "from", "a", "dict", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/config.py#L74-L80
cenkalti/kuyruk
kuyruk/config.py
Config.from_pyfile
def from_pyfile(self, filename: str) -> None: """Load values from a Python file.""" globals_ = {} # type: Dict[str, Any] locals_ = {} # type: Dict[str, Any] with open(filename, "rb") as f: exec(compile(f.read(), filename, 'exec'), globals_, locals_) for key, value in locals_.items(): if (key.isupper() and not isinstance(value, types.ModuleType)): self._setattr(key, value) logger.info("Config is loaded from file: %s", filename)
python
def from_pyfile(self, filename: str) -> None: """Load values from a Python file.""" globals_ = {} # type: Dict[str, Any] locals_ = {} # type: Dict[str, Any] with open(filename, "rb") as f: exec(compile(f.read(), filename, 'exec'), globals_, locals_) for key, value in locals_.items(): if (key.isupper() and not isinstance(value, types.ModuleType)): self._setattr(key, value) logger.info("Config is loaded from file: %s", filename)
[ "def", "from_pyfile", "(", "self", ",", "filename", ":", "str", ")", "->", "None", ":", "globals_", "=", "{", "}", "# type: Dict[str, Any]", "locals_", "=", "{", "}", "# type: Dict[str, Any]", "with", "open", "(", "filename", ",", "\"rb\"", ")", "as", "f", ":", "exec", "(", "compile", "(", "f", ".", "read", "(", ")", ",", "filename", ",", "'exec'", ")", ",", "globals_", ",", "locals_", ")", "for", "key", ",", "value", "in", "locals_", ".", "items", "(", ")", ":", "if", "(", "key", ".", "isupper", "(", ")", "and", "not", "isinstance", "(", "value", ",", "types", ".", "ModuleType", ")", ")", ":", "self", ".", "_setattr", "(", "key", ",", "value", ")", "logger", ".", "info", "(", "\"Config is loaded from file: %s\"", ",", "filename", ")" ]
Load values from a Python file.
[ "Load", "values", "from", "a", "Python", "file", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/config.py#L90-L101
cenkalti/kuyruk
kuyruk/config.py
Config.from_env_vars
def from_env_vars(self) -> None: """Load values from environment variables. Keys must start with `KUYRUK_`.""" for key, value in os.environ.items(): if key.startswith('KUYRUK_'): key = key[7:] if hasattr(Config, key): try: value = ast.literal_eval(value) except (ValueError, SyntaxError): pass self._setattr(key, value)
python
def from_env_vars(self) -> None: """Load values from environment variables. Keys must start with `KUYRUK_`.""" for key, value in os.environ.items(): if key.startswith('KUYRUK_'): key = key[7:] if hasattr(Config, key): try: value = ast.literal_eval(value) except (ValueError, SyntaxError): pass self._setattr(key, value)
[ "def", "from_env_vars", "(", "self", ")", "->", "None", ":", "for", "key", ",", "value", "in", "os", ".", "environ", ".", "items", "(", ")", ":", "if", "key", ".", "startswith", "(", "'KUYRUK_'", ")", ":", "key", "=", "key", "[", "7", ":", "]", "if", "hasattr", "(", "Config", ",", "key", ")", ":", "try", ":", "value", "=", "ast", ".", "literal_eval", "(", "value", ")", "except", "(", "ValueError", ",", "SyntaxError", ")", ":", "pass", "self", ".", "_setattr", "(", "key", ",", "value", ")" ]
Load values from environment variables. Keys must start with `KUYRUK_`.
[ "Load", "values", "from", "environment", "variables", ".", "Keys", "must", "start", "with", "KUYRUK_", "." ]
train
https://github.com/cenkalti/kuyruk/blob/c99d66be9d8fb077610f2fa883d5a1d268b42f04/kuyruk/config.py#L103-L115
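A sketch of the environment-variable loader: keys need the KUYRUK_ prefix and must match an existing Config attribute, and values go through ast.literal_eval so numbers and booleans arrive typed:

import os
from kuyruk import Kuyruk

os.environ['KUYRUK_EAGER'] = 'True'  # parsed to the boolean True, not the string

kuyruk = Kuyruk()
kuyruk.config.from_env_vars()
print(kuyruk.config.EAGER)  # True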
jonbretman/jinja-to-js
jinja_to_js/__init__.py
option
def option(current_kwargs, **kwargs): """ Context manager for temporarily setting a keyword argument and then restoring it to whatever it was before. """ tmp_kwargs = dict((key, current_kwargs.get(key)) for key, value in kwargs.items()) current_kwargs.update(kwargs) yield current_kwargs.update(tmp_kwargs)
python
def option(current_kwargs, **kwargs): """ Context manager for temporarily setting a keyword argument and then restoring it to whatever it was before. """ tmp_kwargs = dict((key, current_kwargs.get(key)) for key, value in kwargs.items()) current_kwargs.update(kwargs) yield current_kwargs.update(tmp_kwargs)
[ "def", "option", "(", "current_kwargs", ",", "*", "*", "kwargs", ")", ":", "tmp_kwargs", "=", "dict", "(", "(", "key", ",", "current_kwargs", ".", "get", "(", "key", ")", ")", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ")", "current_kwargs", ".", "update", "(", "kwargs", ")", "yield", "current_kwargs", ".", "update", "(", "tmp_kwargs", ")" ]
Context manager for temporarily setting a keyword argument and then restoring it to whatever it was before.
[ "Context", "manager", "for", "temporarily", "setting", "a", "keyword", "argument", "and", "then", "restoring", "it", "to", "whatever", "it", "was", "before", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L105-L114
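A sketch of the option helper; its docstring describes it as a context manager, so this assumes it is wrapped with contextlib.contextmanager in the surrounding module, and the option name is made up for illustration:

kwargs = {'depth': 0}

with option(kwargs, depth=1):
    assert kwargs['depth'] == 1   # temporarily overridden inside the block

assert kwargs['depth'] == 0       # restored on exit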
jonbretman/jinja-to-js
jinja_to_js/__init__.py
is_method_call
def is_method_call(node, method_name): """ Returns True if `node` is a method call for `method_name`. `method_name` can be either a string or an iterable of strings. """ if not isinstance(node, nodes.Call): return False if isinstance(node.node, nodes.Getattr): # e.g. foo.bar() method = node.node.attr elif isinstance(node.node, nodes.Name): # e.g. bar() method = node.node.name elif isinstance(node.node, nodes.Getitem): # e.g. foo["bar"]() method = node.node.arg.value else: return False if isinstance(method_name, (list, tuple)): return method in method_name return method == method_name
python
def is_method_call(node, method_name): """ Returns True if `node` is a method call for `method_name`. `method_name` can be either a string or an iterable of strings. """ if not isinstance(node, nodes.Call): return False if isinstance(node.node, nodes.Getattr): # e.g. foo.bar() method = node.node.attr elif isinstance(node.node, nodes.Name): # e.g. bar() method = node.node.name elif isinstance(node.node, nodes.Getitem): # e.g. foo["bar"]() method = node.node.arg.value else: return False if isinstance(method_name, (list, tuple)): return method in method_name return method == method_name
[ "def", "is_method_call", "(", "node", ",", "method_name", ")", ":", "if", "not", "isinstance", "(", "node", ",", "nodes", ".", "Call", ")", ":", "return", "False", "if", "isinstance", "(", "node", ".", "node", ",", "nodes", ".", "Getattr", ")", ":", "# e.g. foo.bar()", "method", "=", "node", ".", "node", ".", "attr", "elif", "isinstance", "(", "node", ".", "node", ",", "nodes", ".", "Name", ")", ":", "# e.g. bar()", "method", "=", "node", ".", "node", ".", "name", "elif", "isinstance", "(", "node", ".", "node", ",", "nodes", ".", "Getitem", ")", ":", "# e.g. foo[\"bar\"]()", "method", "=", "node", ".", "node", ".", "arg", ".", "value", "else", ":", "return", "False", "if", "isinstance", "(", "method_name", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "method", "in", "method_name", "return", "method", "==", "method_name" ]
Returns True if `node` is a method call for `method_name`. `method_name` can be either a string or an iterable of strings.
[ "Returns", "True", "if", "node", "is", "a", "method", "call", "for", "method_name", ".", "method_name", "can", "be", "either", "a", "string", "or", "an", "iterable", "of", "strings", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L117-L144
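A hedged usage sketch for is_method_call, assuming jinja2 is installed and that the function is importable from the jinja_to_js package as the module path above suggests:

    from jinja2 import Environment, nodes
    from jinja_to_js import is_method_call  # assumed import path

    call_node = next(Environment().parse('{{ foo.bar() }}').find_all(nodes.Call))

    print(is_method_call(call_node, 'bar'))           # True: foo.bar() is a call to "bar"
    print(is_method_call(call_node, ('bar', 'baz')))  # True: any name in the iterable matches
    print(is_method_call(call_node, 'baz'))           # False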
jonbretman/jinja-to-js
jinja_to_js/__init__.py
is_loop_helper
def is_loop_helper(node): """ Returns True is node is a loop helper e.g. {{ loop.index }} or {{ loop.first }} """ return hasattr(node, 'node') and isinstance(node.node, nodes.Name) and node.node.name == 'loop'
python
def is_loop_helper(node): """ Returns True is node is a loop helper e.g. {{ loop.index }} or {{ loop.first }} """ return hasattr(node, 'node') and isinstance(node.node, nodes.Name) and node.node.name == 'loop'
[ "def", "is_loop_helper", "(", "node", ")", ":", "return", "hasattr", "(", "node", ",", "'node'", ")", "and", "isinstance", "(", "node", ".", "node", ",", "nodes", ".", "Name", ")", "and", "node", ".", "node", ".", "name", "==", "'loop'" ]
Returns True is node is a loop helper e.g. {{ loop.index }} or {{ loop.first }}
[ "Returns", "True", "is", "node", "is", "a", "loop", "helper", "e", ".", "g", ".", "{{", "loop", ".", "index", "}}", "or", "{{", "loop", ".", "first", "}}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L147-L151
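A similar sketch for is_loop_helper, under the same import assumptions:

    from jinja2 import Environment, nodes
    from jinja_to_js import is_loop_helper  # assumed import path

    template_ast = Environment().parse('{% for item in items %}{{ loop.first }}{% endfor %}')
    getattr_node = next(template_ast.find_all(nodes.Getattr))
    print(is_loop_helper(getattr_node))  # True: the attribute access is on the special "loop" name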
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS.get_output
def get_output(self): """ Returns the generated JavaScript code. Returns: str """ # generate the JS function string template_function = TEMPLATE_WRAPPER.format( function_name=self.js_function_name, template_code=self.output.getvalue() ).strip() # get the correct module format template module_format = JS_MODULE_FORMATS[self.js_module_format] # generate the module code return module_format(self.dependencies, template_function)
python
def get_output(self): """ Returns the generated JavaScript code. Returns: str """ # generate the JS function string template_function = TEMPLATE_WRAPPER.format( function_name=self.js_function_name, template_code=self.output.getvalue() ).strip() # get the correct module format template module_format = JS_MODULE_FORMATS[self.js_module_format] # generate the module code return module_format(self.dependencies, template_function)
[ "def", "get_output", "(", "self", ")", ":", "# generate the JS function string", "template_function", "=", "TEMPLATE_WRAPPER", ".", "format", "(", "function_name", "=", "self", ".", "js_function_name", ",", "template_code", "=", "self", ".", "output", ".", "getvalue", "(", ")", ")", ".", "strip", "(", ")", "# get the correct module format template", "module_format", "=", "JS_MODULE_FORMATS", "[", "self", ".", "js_module_format", "]", "# generate the module code", "return", "module_format", "(", "self", ".", "dependencies", ",", "template_function", ")" ]
Returns the generated JavaScript code. Returns: str
[ "Returns", "the", "generated", "JavaScript", "code", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L251-L268
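get_output is the compiler's final step. Going by the constructor keywords visible in the _process_extends record further down, top-level use might look roughly like this; the paths and the 'es6' module format value are assumptions, not confirmed by this dump:

    from jinja_to_js import JinjaToJS  # the class these records come from

    compiler = JinjaToJS(template_root='templates',    # hypothetical template directory
                         template_name='index.jinja',  # hypothetical template file
                         js_module_format='es6')       # assumed to be a valid JS_MODULE_FORMATS key
    print(compiler.get_output())  # the generated JavaScript module as a string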
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._get_depencency_var_name
def _get_depencency_var_name(self, dependency): """ Returns the variable name assigned to the given dependency or None if the dependency has not yet been registered. Args: dependency (str): Thet dependency that needs to be imported. Returns: str or None """ for dep_path, var_name in self.dependencies: if dep_path == dependency: return var_name
python
def _get_depencency_var_name(self, dependency): """ Returns the variable name assigned to the given dependency or None if the dependency has not yet been registered. Args: dependency (str): Thet dependency that needs to be imported. Returns: str or None """ for dep_path, var_name in self.dependencies: if dep_path == dependency: return var_name
[ "def", "_get_depencency_var_name", "(", "self", ",", "dependency", ")", ":", "for", "dep_path", ",", "var_name", "in", "self", ".", "dependencies", ":", "if", "dep_path", "==", "dependency", ":", "return", "var_name" ]
Returns the variable name assigned to the given dependency or None if the dependency has not yet been registered. Args: dependency (str): Thet dependency that needs to be imported. Returns: str or None
[ "Returns", "the", "variable", "name", "assigned", "to", "the", "given", "dependency", "or", "None", "if", "the", "dependency", "has", "not", "yet", "been", "registered", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L270-L283
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._add_dependency
def _add_dependency(self, dependency, var_name=None): """ Adds the given dependency and returns the variable name to use to access it. If `var_name` is not given then a random one will be created. Args: dependency (str): var_name (str, optional): Returns: str """ if var_name is None: var_name = next(self.temp_var_names) # Don't add duplicate dependencies if (dependency, var_name) not in self.dependencies: self.dependencies.append((dependency, var_name)) return var_name
python
def _add_dependency(self, dependency, var_name=None): """ Adds the given dependency and returns the variable name to use to access it. If `var_name` is not given then a random one will be created. Args: dependency (str): var_name (str, optional): Returns: str """ if var_name is None: var_name = next(self.temp_var_names) # Don't add duplicate dependencies if (dependency, var_name) not in self.dependencies: self.dependencies.append((dependency, var_name)) return var_name
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Adds the given dependency and returns the variable name to use to access it. If `var_name` is not given then a random one will be created. Args: dependency (str): var_name (str, optional): Returns: str
[ "Adds", "the", "given", "dependency", "and", "returns", "the", "variable", "name", "to", "use", "to", "access", "it", ".", "If", "var_name", "is", "not", "given", "then", "a", "random", "one", "will", "be", "created", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L285-L302
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_extends
def _process_extends(self, node, **kwargs): """ Processes an extends block e.g. `{% extends "some/template.jinja" %}` """ # find all the blocks in this template for b in self.ast.find_all(nodes.Block): # if not already in `child_blocks` then this is the first time a # block with this name has been encountered. if b.name not in self.child_blocks: self.child_blocks[b.name] = b else: # otherwise we have seen this block before, so we need to find the last # super_block and add the block from this template to the end. block = self.child_blocks.get(b.name) while hasattr(block, 'super_block'): block = block.super_block block.super_block = b # load the parent template parent_template = JinjaToJS(template_root=self.template_root, template_name=node.template.value, js_module_format=self.js_module_format, runtime_path=self.runtime_path, include_prefix=self.include_prefix, include_ext=self.include_ext, child_blocks=self.child_blocks, dependencies=self.dependencies) # add the parent templates output to the current output self.output.write(parent_template.output.getvalue()) # Raise an exception so we stop parsing this template raise ExtendsException
python
def _process_extends(self, node, **kwargs): """ Processes an extends block e.g. `{% extends "some/template.jinja" %}` """ # find all the blocks in this template for b in self.ast.find_all(nodes.Block): # if not already in `child_blocks` then this is the first time a # block with this name has been encountered. if b.name not in self.child_blocks: self.child_blocks[b.name] = b else: # otherwise we have seen this block before, so we need to find the last # super_block and add the block from this template to the end. block = self.child_blocks.get(b.name) while hasattr(block, 'super_block'): block = block.super_block block.super_block = b # load the parent template parent_template = JinjaToJS(template_root=self.template_root, template_name=node.template.value, js_module_format=self.js_module_format, runtime_path=self.runtime_path, include_prefix=self.include_prefix, include_ext=self.include_ext, child_blocks=self.child_blocks, dependencies=self.dependencies) # add the parent templates output to the current output self.output.write(parent_template.output.getvalue()) # Raise an exception so we stop parsing this template raise ExtendsException
[ "def", "_process_extends", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "# find all the blocks in this template", "for", "b", "in", "self", ".", "ast", ".", "find_all", "(", "nodes", ".", "Block", ")", ":", "# if not already in `child_blocks` then this is the first time a", "# block with this name has been encountered.", "if", "b", ".", "name", "not", "in", "self", ".", "child_blocks", ":", "self", ".", "child_blocks", "[", "b", ".", "name", "]", "=", "b", "else", ":", "# otherwise we have seen this block before, so we need to find the last", "# super_block and add the block from this template to the end.", "block", "=", "self", ".", "child_blocks", ".", "get", "(", "b", ".", "name", ")", "while", "hasattr", "(", "block", ",", "'super_block'", ")", ":", "block", "=", "block", ".", "super_block", "block", ".", "super_block", "=", "b", "# load the parent template", "parent_template", "=", "JinjaToJS", "(", "template_root", "=", "self", ".", "template_root", ",", "template_name", "=", "node", ".", "template", ".", "value", ",", "js_module_format", "=", "self", ".", "js_module_format", ",", "runtime_path", "=", "self", ".", "runtime_path", ",", "include_prefix", "=", "self", ".", "include_prefix", ",", "include_ext", "=", "self", ".", "include_ext", ",", "child_blocks", "=", "self", ".", "child_blocks", ",", "dependencies", "=", "self", ".", "dependencies", ")", "# add the parent templates output to the current output", "self", ".", "output", ".", "write", "(", "parent_template", ".", "output", ".", "getvalue", "(", ")", ")", "# Raise an exception so we stop parsing this template", "raise", "ExtendsException" ]
Processes an extends block e.g. `{% extends "some/template.jinja" %}`
[ "Processes", "an", "extends", "block", "e", ".", "g", ".", "{", "%", "extends", "some", "/", "template", ".", "jinja", "%", "}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L312-L347
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_block
def _process_block(self, node, **kwargs): """ Processes a block e.g. `{% block my_block %}{% endblock %}` """ # check if this node already has a 'super_block' attribute if not hasattr(node, 'super_block'): # since it doesn't it must be the last block in the inheritance chain node.super_block = None # see if there has been a child block defined - if there is this # will be the first block in the inheritance chain child_block = self.child_blocks.get(node.name) if child_block: # we have child nodes so we need to set `node` as the # super of the last one in the chain last_block = child_block while hasattr(last_block, 'super_block'): last_block = child_block.super_block # once we have found it, set this node as it's super block last_block.super_block = node # this is the node we want to process as it's the first in the inheritance chain node = child_block # process the block passing the it's super along, if this block # calls super() it will be handled by `_process_call` for n in node.body: self._process_node(n, super_block=node.super_block, **kwargs)
python
def _process_block(self, node, **kwargs): """ Processes a block e.g. `{% block my_block %}{% endblock %}` """ # check if this node already has a 'super_block' attribute if not hasattr(node, 'super_block'): # since it doesn't it must be the last block in the inheritance chain node.super_block = None # see if there has been a child block defined - if there is this # will be the first block in the inheritance chain child_block = self.child_blocks.get(node.name) if child_block: # we have child nodes so we need to set `node` as the # super of the last one in the chain last_block = child_block while hasattr(last_block, 'super_block'): last_block = child_block.super_block # once we have found it, set this node as it's super block last_block.super_block = node # this is the node we want to process as it's the first in the inheritance chain node = child_block # process the block passing the it's super along, if this block # calls super() it will be handled by `_process_call` for n in node.body: self._process_node(n, super_block=node.super_block, **kwargs)
[ "def", "_process_block", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "# check if this node already has a 'super_block' attribute", "if", "not", "hasattr", "(", "node", ",", "'super_block'", ")", ":", "# since it doesn't it must be the last block in the inheritance chain", "node", ".", "super_block", "=", "None", "# see if there has been a child block defined - if there is this", "# will be the first block in the inheritance chain", "child_block", "=", "self", ".", "child_blocks", ".", "get", "(", "node", ".", "name", ")", "if", "child_block", ":", "# we have child nodes so we need to set `node` as the", "# super of the last one in the chain", "last_block", "=", "child_block", "while", "hasattr", "(", "last_block", ",", "'super_block'", ")", ":", "last_block", "=", "child_block", ".", "super_block", "# once we have found it, set this node as it's super block", "last_block", ".", "super_block", "=", "node", "# this is the node we want to process as it's the first in the inheritance chain", "node", "=", "child_block", "# process the block passing the it's super along, if this block", "# calls super() it will be handled by `_process_call`", "for", "n", "in", "node", ".", "body", ":", "self", ".", "_process_node", "(", "n", ",", "super_block", "=", "node", ".", "super_block", ",", "*", "*", "kwargs", ")" ]
Processes a block e.g. `{% block my_block %}{% endblock %}`
[ "Processes", "a", "block", "e", ".", "g", ".", "{", "%", "block", "my_block", "%", "}", "{", "%", "endblock", "%", "}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L349-L381
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_output
def _process_output(self, node, **kwargs): """ Processes an output node, which will contain things like `Name` and `TemplateData` nodes. """ for n in node.nodes: self._process_node(n, **kwargs)
python
def _process_output(self, node, **kwargs): """ Processes an output node, which will contain things like `Name` and `TemplateData` nodes. """ for n in node.nodes: self._process_node(n, **kwargs)
[ "def", "_process_output", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "for", "n", "in", "node", ".", "nodes", ":", "self", ".", "_process_node", "(", "n", ",", "*", "*", "kwargs", ")" ]
Processes an output node, which will contain things like `Name` and `TemplateData` nodes.
[ "Processes", "an", "output", "node", "which", "will", "contain", "things", "like", "Name", "and", "TemplateData", "nodes", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L383-L388
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_templatedata
def _process_templatedata(self, node, **_): """ Processes a `TemplateData` node, this is just a bit of as-is text to be written to the output. """ # escape double quotes value = re.sub('"', r'\\"', node.data) # escape new lines value = re.sub('\n', r'\\n', value) # append value to the result self.output.write('__result += "' + value + '";')
python
def _process_templatedata(self, node, **_): """ Processes a `TemplateData` node, this is just a bit of as-is text to be written to the output. """ # escape double quotes value = re.sub('"', r'\\"', node.data) # escape new lines value = re.sub('\n', r'\\n', value) # append value to the result self.output.write('__result += "' + value + '";')
[ "def", "_process_templatedata", "(", "self", ",", "node", ",", "*", "*", "_", ")", ":", "# escape double quotes", "value", "=", "re", ".", "sub", "(", "'\"'", ",", "r'\\\\\"'", ",", "node", ".", "data", ")", "# escape new lines", "value", "=", "re", ".", "sub", "(", "'\\n'", ",", "r'\\\\n'", ",", "value", ")", "# append value to the result", "self", ".", "output", ".", "write", "(", "'__result += \"'", "+", "value", "+", "'\";'", ")" ]
Processes a `TemplateData` node, this is just a bit of as-is text to be written to the output.
[ "Processes", "a", "TemplateData", "node", "this", "is", "just", "a", "bit", "of", "as", "-", "is", "text", "to", "be", "written", "to", "the", "output", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L390-L403
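The escaping in _process_templatedata is plain re.sub; a tiny stdlib-only check of what it produces for text containing quotes and newlines:

    import re

    data = 'She said "hi"\nand left.'
    value = re.sub('"', r'\\"', data)    # escape double quotes for the generated JS string literal
    value = re.sub('\n', r'\\n', value)  # escape newlines the same way
    print('__result += "' + value + '";')
    # __result += "She said \"hi\"\nand left.";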
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_name
def _process_name(self, node, **kwargs): """ Processes a `Name` node. Some examples of `Name` nodes: {{ foo }} -> 'foo' is a Name {% if foo }} -> 'foo' is a Name """ with self._interpolation(): with self._python_bool_wrapper(**kwargs): if node.name not in self.stored_names and node.ctx != 'store': self.output.write(self.context_name) self.output.write('.') if node.ctx == 'store': self.stored_names.add(node.name) self.output.write(node.name)
python
def _process_name(self, node, **kwargs): """ Processes a `Name` node. Some examples of `Name` nodes: {{ foo }} -> 'foo' is a Name {% if foo }} -> 'foo' is a Name """ with self._interpolation(): with self._python_bool_wrapper(**kwargs): if node.name not in self.stored_names and node.ctx != 'store': self.output.write(self.context_name) self.output.write('.') if node.ctx == 'store': self.stored_names.add(node.name) self.output.write(node.name)
[ "def", "_process_name", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_interpolation", "(", ")", ":", "with", "self", ".", "_python_bool_wrapper", "(", "*", "*", "kwargs", ")", ":", "if", "node", ".", "name", "not", "in", "self", ".", "stored_names", "and", "node", ".", "ctx", "!=", "'store'", ":", "self", ".", "output", ".", "write", "(", "self", ".", "context_name", ")", "self", ".", "output", ".", "write", "(", "'.'", ")", "if", "node", ".", "ctx", "==", "'store'", ":", "self", ".", "stored_names", ".", "add", "(", "node", ".", "name", ")", "self", ".", "output", ".", "write", "(", "node", ".", "name", ")" ]
Processes a `Name` node. Some examples of `Name` nodes: {{ foo }} -> 'foo' is a Name {% if foo }} -> 'foo' is a Name
[ "Processes", "a", "Name", "node", ".", "Some", "examples", "of", "Name", "nodes", ":", "{{", "foo", "}}", "-", ">", "foo", "is", "a", "Name", "{", "%", "if", "foo", "}}", "-", ">", "foo", "is", "a", "Name" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L405-L422
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_getattr
def _process_getattr(self, node, **kwargs): """ Processes a `GetAttr` node. e.g. {{ foo.bar }} """ with self._interpolation(): with self._python_bool_wrapper(**kwargs) as new_kwargs: if is_loop_helper(node): self._process_loop_helper(node, **new_kwargs) else: self._process_node(node.node, **new_kwargs) self.output.write('.') self.output.write(node.attr)
python
def _process_getattr(self, node, **kwargs): """ Processes a `GetAttr` node. e.g. {{ foo.bar }} """ with self._interpolation(): with self._python_bool_wrapper(**kwargs) as new_kwargs: if is_loop_helper(node): self._process_loop_helper(node, **new_kwargs) else: self._process_node(node.node, **new_kwargs) self.output.write('.') self.output.write(node.attr)
[ "def", "_process_getattr", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_interpolation", "(", ")", ":", "with", "self", ".", "_python_bool_wrapper", "(", "*", "*", "kwargs", ")", "as", "new_kwargs", ":", "if", "is_loop_helper", "(", "node", ")", ":", "self", ".", "_process_loop_helper", "(", "node", ",", "*", "*", "new_kwargs", ")", "else", ":", "self", ".", "_process_node", "(", "node", ".", "node", ",", "*", "*", "new_kwargs", ")", "self", ".", "output", ".", "write", "(", "'.'", ")", "self", ".", "output", ".", "write", "(", "node", ".", "attr", ")" ]
Processes a `GetAttr` node. e.g. {{ foo.bar }}
[ "Processes", "a", "GetAttr", "node", ".", "e", ".", "g", ".", "{{", "foo", ".", "bar", "}}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L424-L436
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_getitem
def _process_getitem(self, node, **kwargs): """ Processes a `GetItem` node e.g. {{ foo["bar"] }} """ with self._interpolation(): with self._python_bool_wrapper(**kwargs) as new_kwargs: self._process_node(node.node, **new_kwargs) if isinstance(node.arg, nodes.Slice): self.output.write('.slice(') if node.arg.step is not None: raise Exception('The step argument is not supported when slicing.') if node.arg.start is None: self.output.write('0') else: self._process_node(node.arg.start, **new_kwargs) if node.arg.stop is None: self.output.write(')') else: self.output.write(',') self._process_node(node.arg.stop, **new_kwargs) self.output.write(')') else: self.output.write('[') self._process_node(node.arg, **new_kwargs) self.output.write(']')
python
def _process_getitem(self, node, **kwargs): """ Processes a `GetItem` node e.g. {{ foo["bar"] }} """ with self._interpolation(): with self._python_bool_wrapper(**kwargs) as new_kwargs: self._process_node(node.node, **new_kwargs) if isinstance(node.arg, nodes.Slice): self.output.write('.slice(') if node.arg.step is not None: raise Exception('The step argument is not supported when slicing.') if node.arg.start is None: self.output.write('0') else: self._process_node(node.arg.start, **new_kwargs) if node.arg.stop is None: self.output.write(')') else: self.output.write(',') self._process_node(node.arg.stop, **new_kwargs) self.output.write(')') else: self.output.write('[') self._process_node(node.arg, **new_kwargs) self.output.write(']')
[ "def", "_process_getitem", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_interpolation", "(", ")", ":", "with", "self", ".", "_python_bool_wrapper", "(", "*", "*", "kwargs", ")", "as", "new_kwargs", ":", "self", ".", "_process_node", "(", "node", ".", "node", ",", "*", "*", "new_kwargs", ")", "if", "isinstance", "(", "node", ".", "arg", ",", "nodes", ".", "Slice", ")", ":", "self", ".", "output", ".", "write", "(", "'.slice('", ")", "if", "node", ".", "arg", ".", "step", "is", "not", "None", ":", "raise", "Exception", "(", "'The step argument is not supported when slicing.'", ")", "if", "node", ".", "arg", ".", "start", "is", "None", ":", "self", ".", "output", ".", "write", "(", "'0'", ")", "else", ":", "self", ".", "_process_node", "(", "node", ".", "arg", ".", "start", ",", "*", "*", "new_kwargs", ")", "if", "node", ".", "arg", ".", "stop", "is", "None", ":", "self", ".", "output", ".", "write", "(", "')'", ")", "else", ":", "self", ".", "output", ".", "write", "(", "','", ")", "self", ".", "_process_node", "(", "node", ".", "arg", ".", "stop", ",", "*", "*", "new_kwargs", ")", "self", ".", "output", ".", "write", "(", "')'", ")", "else", ":", "self", ".", "output", ".", "write", "(", "'['", ")", "self", ".", "_process_node", "(", "node", ".", "arg", ",", "*", "*", "new_kwargs", ")", "self", ".", "output", ".", "write", "(", "']'", ")" ]
Processes a `GetItem` node e.g. {{ foo["bar"] }}
[ "Processes", "a", "GetItem", "node", "e", ".", "g", ".", "{{", "foo", "[", "bar", "]", "}}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L438-L467
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_for
def _process_for(self, node, **kwargs): """ Processes a for loop. e.g. {% for number in numbers %} {{ number }} {% endfor %} {% for key, value in somemap.items() %} {{ key }} -> {{ value }} {% %} """ # since a for loop can introduce new names into the context # we need to remember the ones that existed outside the loop previous_stored_names = self.stored_names.copy() with self._execution(): self.output.write('__runtime.each(') if is_method_call(node.iter, dict.keys.__name__): self.output.write('Object.keys(') self._process_node(node.iter, **kwargs) if is_method_call(node.iter, dict.keys.__name__): self.output.write(')') self.output.write(',') self.output.write('function') self.output.write('(') # javascript iterations put the value first, then the key if isinstance(node.target, nodes.Tuple): if len(node.target.items) > 2: raise Exception('De-structuring more than 2 items is not supported.') for i, item in enumerate(reversed(node.target.items)): self._process_node(item, **kwargs) if i < len(node.target.items) - 1: self.output.write(',') else: self._process_node(node.target, **kwargs) self.output.write(')') self.output.write('{') if node.test: self.output.write('if (!(') self._process_node(node.test, **kwargs) self.output.write(')) { return; }') assigns = node.target.items if isinstance(node.target, nodes.Tuple) else [node.target] with self._scoped_variables(assigns, **kwargs): for n in node.body: self._process_node(n, **kwargs) with self._execution(): self.output.write('}') self.output.write(')') self.output.write(';') # restore the stored names self.stored_names = previous_stored_names
python
def _process_for(self, node, **kwargs): """ Processes a for loop. e.g. {% for number in numbers %} {{ number }} {% endfor %} {% for key, value in somemap.items() %} {{ key }} -> {{ value }} {% %} """ # since a for loop can introduce new names into the context # we need to remember the ones that existed outside the loop previous_stored_names = self.stored_names.copy() with self._execution(): self.output.write('__runtime.each(') if is_method_call(node.iter, dict.keys.__name__): self.output.write('Object.keys(') self._process_node(node.iter, **kwargs) if is_method_call(node.iter, dict.keys.__name__): self.output.write(')') self.output.write(',') self.output.write('function') self.output.write('(') # javascript iterations put the value first, then the key if isinstance(node.target, nodes.Tuple): if len(node.target.items) > 2: raise Exception('De-structuring more than 2 items is not supported.') for i, item in enumerate(reversed(node.target.items)): self._process_node(item, **kwargs) if i < len(node.target.items) - 1: self.output.write(',') else: self._process_node(node.target, **kwargs) self.output.write(')') self.output.write('{') if node.test: self.output.write('if (!(') self._process_node(node.test, **kwargs) self.output.write(')) { return; }') assigns = node.target.items if isinstance(node.target, nodes.Tuple) else [node.target] with self._scoped_variables(assigns, **kwargs): for n in node.body: self._process_node(n, **kwargs) with self._execution(): self.output.write('}') self.output.write(')') self.output.write(';') # restore the stored names self.stored_names = previous_stored_names
[ "def", "_process_for", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "# since a for loop can introduce new names into the context", "# we need to remember the ones that existed outside the loop", "previous_stored_names", "=", "self", ".", "stored_names", ".", "copy", "(", ")", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'__runtime.each('", ")", "if", "is_method_call", "(", "node", ".", "iter", ",", "dict", ".", "keys", ".", "__name__", ")", ":", "self", ".", "output", ".", "write", "(", "'Object.keys('", ")", "self", ".", "_process_node", "(", "node", ".", "iter", ",", "*", "*", "kwargs", ")", "if", "is_method_call", "(", "node", ".", "iter", ",", "dict", ".", "keys", ".", "__name__", ")", ":", "self", ".", "output", ".", "write", "(", "')'", ")", "self", ".", "output", ".", "write", "(", "','", ")", "self", ".", "output", ".", "write", "(", "'function'", ")", "self", ".", "output", ".", "write", "(", "'('", ")", "# javascript iterations put the value first, then the key", "if", "isinstance", "(", "node", ".", "target", ",", "nodes", ".", "Tuple", ")", ":", "if", "len", "(", "node", ".", "target", ".", "items", ")", ">", "2", ":", "raise", "Exception", "(", "'De-structuring more than 2 items is not supported.'", ")", "for", "i", ",", "item", "in", "enumerate", "(", "reversed", "(", "node", ".", "target", ".", "items", ")", ")", ":", "self", ".", "_process_node", "(", "item", ",", "*", "*", "kwargs", ")", "if", "i", "<", "len", "(", "node", ".", "target", ".", "items", ")", "-", "1", ":", "self", ".", "output", ".", "write", "(", "','", ")", "else", ":", "self", ".", "_process_node", "(", "node", ".", "target", ",", "*", "*", "kwargs", ")", "self", ".", "output", ".", "write", "(", "')'", ")", "self", ".", "output", ".", "write", "(", "'{'", ")", "if", "node", ".", "test", ":", "self", ".", "output", ".", "write", "(", "'if (!('", ")", "self", ".", "_process_node", "(", "node", ".", "test", ",", "*", "*", "kwargs", ")", "self", ".", "output", ".", "write", "(", "')) { return; }'", ")", "assigns", "=", "node", ".", "target", ".", "items", "if", "isinstance", "(", "node", ".", "target", ",", "nodes", ".", "Tuple", ")", "else", "[", "node", ".", "target", "]", "with", "self", ".", "_scoped_variables", "(", "assigns", ",", "*", "*", "kwargs", ")", ":", "for", "n", "in", "node", ".", "body", ":", "self", ".", "_process_node", "(", "n", ",", "*", "*", "kwargs", ")", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'}'", ")", "self", ".", "output", ".", "write", "(", "')'", ")", "self", ".", "output", ".", "write", "(", "';'", ")", "# restore the stored names", "self", ".", "stored_names", "=", "previous_stored_names" ]
Processes a for loop. e.g. {% for number in numbers %} {{ number }} {% endfor %} {% for key, value in somemap.items() %} {{ key }} -> {{ value }} {% %}
[ "Processes", "a", "for", "loop", ".", "e", ".", "g", ".", "{", "%", "for", "number", "in", "numbers", "%", "}", "{{", "number", "}}", "{", "%", "endfor", "%", "}", "{", "%", "for", "key", "value", "in", "somemap", ".", "items", "()", "%", "}", "{{", "key", "}}", "-", ">", "{{", "value", "}}", "{", "%", "%", "}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L469-L531
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_if
def _process_if(self, node, execute_end=None, **kwargs): """ Processes an if block e.g. `{% if foo %} do something {% endif %}` """ with self._execution(): self.output.write('if') self.output.write('(') with option(kwargs, use_python_bool_wrapper=True): self._process_node(node.test, **kwargs) self.output.write(')') self.output.write('{') # We accept an `execute_end` function as a keyword argument as this function is # recursive in the case of something like if-elif-elif-else. In these cases this # invocation of this function may have to close execution opened by a previous # invocation of this function. if execute_end: execute_end() # body for n in node.body: self._process_node(n, **kwargs) if not node.else_ and not node.elif_: # no else - just close the if with self._execution(): self.output.write('}') else: # either an else or an elif with self._execution() as execute_end: self.output.write('}') self.output.write(' else ') # check for elif for n in node.elif_: self._process_node(n, execute_end=execute_end, **kwargs) if node.elif_ and node.else_: self.output.write(' else ') # open up the body self.output.write('{') # process the body of the else for n in node.else_: self._process_node(n, **kwargs) # close the body with self._execution(): self.output.write('}')
python
def _process_if(self, node, execute_end=None, **kwargs): """ Processes an if block e.g. `{% if foo %} do something {% endif %}` """ with self._execution(): self.output.write('if') self.output.write('(') with option(kwargs, use_python_bool_wrapper=True): self._process_node(node.test, **kwargs) self.output.write(')') self.output.write('{') # We accept an `execute_end` function as a keyword argument as this function is # recursive in the case of something like if-elif-elif-else. In these cases this # invocation of this function may have to close execution opened by a previous # invocation of this function. if execute_end: execute_end() # body for n in node.body: self._process_node(n, **kwargs) if not node.else_ and not node.elif_: # no else - just close the if with self._execution(): self.output.write('}') else: # either an else or an elif with self._execution() as execute_end: self.output.write('}') self.output.write(' else ') # check for elif for n in node.elif_: self._process_node(n, execute_end=execute_end, **kwargs) if node.elif_ and node.else_: self.output.write(' else ') # open up the body self.output.write('{') # process the body of the else for n in node.else_: self._process_node(n, **kwargs) # close the body with self._execution(): self.output.write('}')
[ "def", "_process_if", "(", "self", ",", "node", ",", "execute_end", "=", "None", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'if'", ")", "self", ".", "output", ".", "write", "(", "'('", ")", "with", "option", "(", "kwargs", ",", "use_python_bool_wrapper", "=", "True", ")", ":", "self", ".", "_process_node", "(", "node", ".", "test", ",", "*", "*", "kwargs", ")", "self", ".", "output", ".", "write", "(", "')'", ")", "self", ".", "output", ".", "write", "(", "'{'", ")", "# We accept an `execute_end` function as a keyword argument as this function is", "# recursive in the case of something like if-elif-elif-else. In these cases this", "# invocation of this function may have to close execution opened by a previous", "# invocation of this function.", "if", "execute_end", ":", "execute_end", "(", ")", "# body", "for", "n", "in", "node", ".", "body", ":", "self", ".", "_process_node", "(", "n", ",", "*", "*", "kwargs", ")", "if", "not", "node", ".", "else_", "and", "not", "node", ".", "elif_", ":", "# no else - just close the if", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'}'", ")", "else", ":", "# either an else or an elif", "with", "self", ".", "_execution", "(", ")", "as", "execute_end", ":", "self", ".", "output", ".", "write", "(", "'}'", ")", "self", ".", "output", ".", "write", "(", "' else '", ")", "# check for elif", "for", "n", "in", "node", ".", "elif_", ":", "self", ".", "_process_node", "(", "n", ",", "execute_end", "=", "execute_end", ",", "*", "*", "kwargs", ")", "if", "node", ".", "elif_", "and", "node", ".", "else_", ":", "self", ".", "output", ".", "write", "(", "' else '", ")", "# open up the body", "self", ".", "output", ".", "write", "(", "'{'", ")", "# process the body of the else", "for", "n", "in", "node", ".", "else_", ":", "self", ".", "_process_node", "(", "n", ",", "*", "*", "kwargs", ")", "# close the body", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'}'", ")" ]
Processes an if block e.g. `{% if foo %} do something {% endif %}`
[ "Processes", "an", "if", "block", "e", ".", "g", ".", "{", "%", "if", "foo", "%", "}", "do", "something", "{", "%", "endif", "%", "}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L533-L586
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_math
def _process_math(self, node, math_operator=None, function=None, **kwargs): """ Processes a math node e.g. `Div`, `Sub`, `Add`, `Mul` etc... If `function` is provided the expression is wrapped in a call to that function. """ with self._interpolation(): if function: self.output.write(function) self.output.write('(') self._process_node(node.left, **kwargs) self.output.write(math_operator) self._process_node(node.right, **kwargs) if function: self.output.write(')')
python
def _process_math(self, node, math_operator=None, function=None, **kwargs): """ Processes a math node e.g. `Div`, `Sub`, `Add`, `Mul` etc... If `function` is provided the expression is wrapped in a call to that function. """ with self._interpolation(): if function: self.output.write(function) self.output.write('(') self._process_node(node.left, **kwargs) self.output.write(math_operator) self._process_node(node.right, **kwargs) if function: self.output.write(')')
[ "def", "_process_math", "(", "self", ",", "node", ",", "math_operator", "=", "None", ",", "function", "=", "None", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_interpolation", "(", ")", ":", "if", "function", ":", "self", ".", "output", ".", "write", "(", "function", ")", "self", ".", "output", ".", "write", "(", "'('", ")", "self", ".", "_process_node", "(", "node", ".", "left", ",", "*", "*", "kwargs", ")", "self", ".", "output", ".", "write", "(", "math_operator", ")", "self", ".", "_process_node", "(", "node", ".", "right", ",", "*", "*", "kwargs", ")", "if", "function", ":", "self", ".", "output", ".", "write", "(", "')'", ")" ]
Processes a math node e.g. `Div`, `Sub`, `Add`, `Mul` etc... If `function` is provided the expression is wrapped in a call to that function.
[ "Processes", "a", "math", "node", "e", ".", "g", ".", "Div", "Sub", "Add", "Mul", "etc", "...", "If", "function", "is", "provided", "the", "expression", "is", "wrapped", "in", "a", "call", "to", "that", "function", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L1010-L1026
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._process_loop_helper
def _process_loop_helper(self, node, **kwargs): """ Processes a loop helper e.g. {{ loop.first }} or {{ loop.index }} """ if node.attr == LOOP_HELPER_INDEX: self.output.write('(arguments[1] + 1)') elif node.attr == LOOP_HELPER_INDEX_0: self.output.write('arguments[1]') elif node.attr == LOOP_HELPER_FIRST: self.output.write('(arguments[1] == 0)') elif node.attr == LOOP_HELPER_LAST: self.output.write('(arguments[1] == arguments[2].length - 1)') elif node.attr == LOOP_HELPER_LENGTH: self.output.write('arguments[2].length')
python
def _process_loop_helper(self, node, **kwargs): """ Processes a loop helper e.g. {{ loop.first }} or {{ loop.index }} """ if node.attr == LOOP_HELPER_INDEX: self.output.write('(arguments[1] + 1)') elif node.attr == LOOP_HELPER_INDEX_0: self.output.write('arguments[1]') elif node.attr == LOOP_HELPER_FIRST: self.output.write('(arguments[1] == 0)') elif node.attr == LOOP_HELPER_LAST: self.output.write('(arguments[1] == arguments[2].length - 1)') elif node.attr == LOOP_HELPER_LENGTH: self.output.write('arguments[2].length')
[ "def", "_process_loop_helper", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "if", "node", ".", "attr", "==", "LOOP_HELPER_INDEX", ":", "self", ".", "output", ".", "write", "(", "'(arguments[1] + 1)'", ")", "elif", "node", ".", "attr", "==", "LOOP_HELPER_INDEX_0", ":", "self", ".", "output", ".", "write", "(", "'arguments[1]'", ")", "elif", "node", ".", "attr", "==", "LOOP_HELPER_FIRST", ":", "self", ".", "output", ".", "write", "(", "'(arguments[1] == 0)'", ")", "elif", "node", ".", "attr", "==", "LOOP_HELPER_LAST", ":", "self", ".", "output", ".", "write", "(", "'(arguments[1] == arguments[2].length - 1)'", ")", "elif", "node", ".", "attr", "==", "LOOP_HELPER_LENGTH", ":", "self", ".", "output", ".", "write", "(", "'arguments[2].length'", ")" ]
Processes a loop helper e.g. {{ loop.first }} or {{ loop.index }}
[ "Processes", "a", "loop", "helper", "e", ".", "g", ".", "{{", "loop", ".", "first", "}}", "or", "{{", "loop", ".", "index", "}}" ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L1028-L1042
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._execution
def _execution(self): """ Context manager for executing some JavaScript inside a template. """ did_start_executing = False if self.state == STATE_DEFAULT: did_start_executing = True self.state = STATE_EXECUTING def close(): if did_start_executing and self.state == STATE_EXECUTING: self.state = STATE_DEFAULT yield close close()
python
def _execution(self): """ Context manager for executing some JavaScript inside a template. """ did_start_executing = False if self.state == STATE_DEFAULT: did_start_executing = True self.state = STATE_EXECUTING def close(): if did_start_executing and self.state == STATE_EXECUTING: self.state = STATE_DEFAULT yield close close()
[ "def", "_execution", "(", "self", ")", ":", "did_start_executing", "=", "False", "if", "self", ".", "state", "==", "STATE_DEFAULT", ":", "did_start_executing", "=", "True", "self", ".", "state", "=", "STATE_EXECUTING", "def", "close", "(", ")", ":", "if", "did_start_executing", "and", "self", ".", "state", "==", "STATE_EXECUTING", ":", "self", ".", "state", "=", "STATE_DEFAULT", "yield", "close", "close", "(", ")" ]
Context manager for executing some JavaScript inside a template.
[ "Context", "manager", "for", "executing", "some", "JavaScript", "inside", "a", "template", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L1054-L1070
jonbretman/jinja-to-js
jinja_to_js/__init__.py
JinjaToJS._scoped_variables
def _scoped_variables(self, nodes_list, **kwargs): """ Context manager for creating scoped variables defined by the nodes in `nodes_list`. These variables will be added to the context, and when the context manager exits the context object will be restored to it's previous state. """ tmp_vars = [] for node in nodes_list: is_assign_node = isinstance(node, nodes.Assign) name = node.target.name if is_assign_node else node.name # create a temp variable name tmp_var = next(self.temp_var_names) # save previous context value with self._execution(): # save the current value of this name self.output.write('var %s = %s.%s;' % (tmp_var, self.context_name, name)) # add new value to context self.output.write('%s.%s = ' % (self.context_name, name)) if is_assign_node: self._process_node(node.node, **kwargs) else: self.output.write(node.name) self.output.write(';') tmp_vars.append((tmp_var, name)) yield # restore context for tmp_var, name in tmp_vars: with self._execution(): self.output.write('%s.%s = %s;' % (self.context_name, name, tmp_var))
python
def _scoped_variables(self, nodes_list, **kwargs): """ Context manager for creating scoped variables defined by the nodes in `nodes_list`. These variables will be added to the context, and when the context manager exits the context object will be restored to it's previous state. """ tmp_vars = [] for node in nodes_list: is_assign_node = isinstance(node, nodes.Assign) name = node.target.name if is_assign_node else node.name # create a temp variable name tmp_var = next(self.temp_var_names) # save previous context value with self._execution(): # save the current value of this name self.output.write('var %s = %s.%s;' % (tmp_var, self.context_name, name)) # add new value to context self.output.write('%s.%s = ' % (self.context_name, name)) if is_assign_node: self._process_node(node.node, **kwargs) else: self.output.write(node.name) self.output.write(';') tmp_vars.append((tmp_var, name)) yield # restore context for tmp_var, name in tmp_vars: with self._execution(): self.output.write('%s.%s = %s;' % (self.context_name, name, tmp_var))
[ "def", "_scoped_variables", "(", "self", ",", "nodes_list", ",", "*", "*", "kwargs", ")", ":", "tmp_vars", "=", "[", "]", "for", "node", "in", "nodes_list", ":", "is_assign_node", "=", "isinstance", "(", "node", ",", "nodes", ".", "Assign", ")", "name", "=", "node", ".", "target", ".", "name", "if", "is_assign_node", "else", "node", ".", "name", "# create a temp variable name", "tmp_var", "=", "next", "(", "self", ".", "temp_var_names", ")", "# save previous context value", "with", "self", ".", "_execution", "(", ")", ":", "# save the current value of this name", "self", ".", "output", ".", "write", "(", "'var %s = %s.%s;'", "%", "(", "tmp_var", ",", "self", ".", "context_name", ",", "name", ")", ")", "# add new value to context", "self", ".", "output", ".", "write", "(", "'%s.%s = '", "%", "(", "self", ".", "context_name", ",", "name", ")", ")", "if", "is_assign_node", ":", "self", ".", "_process_node", "(", "node", ".", "node", ",", "*", "*", "kwargs", ")", "else", ":", "self", ".", "output", ".", "write", "(", "node", ".", "name", ")", "self", ".", "output", ".", "write", "(", "';'", ")", "tmp_vars", ".", "append", "(", "(", "tmp_var", ",", "name", ")", ")", "yield", "# restore context", "for", "tmp_var", ",", "name", "in", "tmp_vars", ":", "with", "self", ".", "_execution", "(", ")", ":", "self", ".", "output", ".", "write", "(", "'%s.%s = %s;'", "%", "(", "self", ".", "context_name", ",", "name", ",", "tmp_var", ")", ")" ]
Context manager for creating scoped variables defined by the nodes in `nodes_list`. These variables will be added to the context, and when the context manager exits the context object will be restored to it's previous state.
[ "Context", "manager", "for", "creating", "scoped", "variables", "defined", "by", "the", "nodes", "in", "nodes_list", ".", "These", "variables", "will", "be", "added", "to", "the", "context", "and", "when", "the", "context", "manager", "exits", "the", "context", "object", "will", "be", "restored", "to", "it", "s", "previous", "state", "." ]
train
https://github.com/jonbretman/jinja-to-js/blob/0a784b10a83d37a3171c5797547e9fc460c51289/jinja_to_js/__init__.py#L1094-L1133
subdownloader/subdownloader
subdownloader/webutil.py
download_raw
def download_raw(url, local_path, callback): """ Download an url to a local file. :param url: url of the file to download :param local_path: path where the downloaded file should be saved :param callback: instance of ProgressCallback :return: True is succeeded """ log.debug('download_raw(url={url}, local_path={local_path})'.format(url=url, local_path=local_path)) raw_progress = RawDownloadProgress(callback) reporthook = raw_progress.get_report_hook() try: log.debug('urlretrieve(url={url}, local_path={local_path}) ...'.format(url=url, local_path=local_path)) urlretrieve(url=url, filename=local_path, reporthook=reporthook) log.debug('... SUCCEEDED') callback.finish(True) return True except URLError: log.exception('... FAILED') callback.finish(False) return False
python
def download_raw(url, local_path, callback): """ Download an url to a local file. :param url: url of the file to download :param local_path: path where the downloaded file should be saved :param callback: instance of ProgressCallback :return: True is succeeded """ log.debug('download_raw(url={url}, local_path={local_path})'.format(url=url, local_path=local_path)) raw_progress = RawDownloadProgress(callback) reporthook = raw_progress.get_report_hook() try: log.debug('urlretrieve(url={url}, local_path={local_path}) ...'.format(url=url, local_path=local_path)) urlretrieve(url=url, filename=local_path, reporthook=reporthook) log.debug('... SUCCEEDED') callback.finish(True) return True except URLError: log.exception('... FAILED') callback.finish(False) return False
[ "def", "download_raw", "(", "url", ",", "local_path", ",", "callback", ")", ":", "log", ".", "debug", "(", "'download_raw(url={url}, local_path={local_path})'", ".", "format", "(", "url", "=", "url", ",", "local_path", "=", "local_path", ")", ")", "raw_progress", "=", "RawDownloadProgress", "(", "callback", ")", "reporthook", "=", "raw_progress", ".", "get_report_hook", "(", ")", "try", ":", "log", ".", "debug", "(", "'urlretrieve(url={url}, local_path={local_path}) ...'", ".", "format", "(", "url", "=", "url", ",", "local_path", "=", "local_path", ")", ")", "urlretrieve", "(", "url", "=", "url", ",", "filename", "=", "local_path", ",", "reporthook", "=", "reporthook", ")", "log", ".", "debug", "(", "'... SUCCEEDED'", ")", "callback", ".", "finish", "(", "True", ")", "return", "True", "except", "URLError", ":", "log", ".", "exception", "(", "'... FAILED'", ")", "callback", ".", "finish", "(", "False", ")", "return", "False" ]
Download an url to a local file. :param url: url of the file to download :param local_path: path where the downloaded file should be saved :param callback: instance of ProgressCallback :return: True is succeeded
[ "Download", "an", "url", "to", "a", "local", "file", ".", ":", "param", "url", ":", "url", "of", "the", "file", "to", "download", ":", "param", "local_path", ":", "path", "where", "the", "downloaded", "file", "should", "be", "saved", ":", "param", "callback", ":", "instance", "of", "ProgressCallback", ":", "return", ":", "True", "is", "succeeded" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/webutil.py#L47-L67
subdownloader/subdownloader
subdownloader/webutil.py
RawDownloadProgress.get_report_hook
def get_report_hook(self): """ Return a callback function suitable for using reporthook argument of urllib(.request).urlretrieve :return: function object """ def report_hook(chunkNumber, chunkSize, totalSize): if totalSize != -1 and not self._callback.range_initialized(): log.debug('Initializing range: [{},{}]'.format(0, totalSize)) self._callback.set_range(0, totalSize) self._chunkNumber = chunkNumber self._total += chunkSize if self._total > totalSize: # The chunk size can be bigger than the file self._total = totalSize self._callback.update(self._total) return report_hook
python
def get_report_hook(self): """ Return a callback function suitable for using reporthook argument of urllib(.request).urlretrieve :return: function object """ def report_hook(chunkNumber, chunkSize, totalSize): if totalSize != -1 and not self._callback.range_initialized(): log.debug('Initializing range: [{},{}]'.format(0, totalSize)) self._callback.set_range(0, totalSize) self._chunkNumber = chunkNumber self._total += chunkSize if self._total > totalSize: # The chunk size can be bigger than the file self._total = totalSize self._callback.update(self._total) return report_hook
[ "def", "get_report_hook", "(", "self", ")", ":", "def", "report_hook", "(", "chunkNumber", ",", "chunkSize", ",", "totalSize", ")", ":", "if", "totalSize", "!=", "-", "1", "and", "not", "self", ".", "_callback", ".", "range_initialized", "(", ")", ":", "log", ".", "debug", "(", "'Initializing range: [{},{}]'", ".", "format", "(", "0", ",", "totalSize", ")", ")", "self", ".", "_callback", ".", "set_range", "(", "0", ",", "totalSize", ")", "self", ".", "_chunkNumber", "=", "chunkNumber", "self", ".", "_total", "+=", "chunkSize", "if", "self", ".", "_total", ">", "totalSize", ":", "# The chunk size can be bigger than the file", "self", ".", "_total", "=", "totalSize", "self", ".", "_callback", ".", "update", "(", "self", ".", "_total", ")", "return", "report_hook" ]
Return a callback function suitable for using reporthook argument of urllib(.request).urlretrieve :return: function object
[ "Return", "a", "callback", "function", "suitable", "for", "using", "reporthook", "argument", "of", "urllib", "(", ".", "request", ")", ".", "urlretrieve", ":", "return", ":", "function", "object" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/webutil.py#L83-L99
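The hook above follows urllib's reporthook protocol: it is called with the block number, block size, and total size (-1 when the server sends no Content-Length). A stdlib-only sketch of that protocol, independent of the callback classes in this dump; the URL and filename are placeholders:

    from urllib.request import urlretrieve

    def report_hook(block_num, block_size, total_size):
        downloaded = block_num * block_size
        if 0 < total_size < downloaded:
            downloaded = total_size  # the last block can overshoot the real file size
        print('downloaded {} of {} bytes'.format(downloaded, total_size))

    # urlretrieve('https://example.com/subtitle.zip', 'subtitle.zip', reporthook=report_hook)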
subdownloader/subdownloader
subdownloader/provider/__init__.py
window_iterator
def window_iterator(data, width): """ Instead of iterating element by element, get a number of elements at each iteration step. :param data: data to iterate on :param width: maximum number of elements to get in each iteration step :return: """ start = 0 while start < len(data): yield data[start:start+width] start += width
python
def window_iterator(data, width): """ Instead of iterating element by element, get a number of elements at each iteration step. :param data: data to iterate on :param width: maximum number of elements to get in each iteration step :return: """ start = 0 while start < len(data): yield data[start:start+width] start += width
[ "def", "window_iterator", "(", "data", ",", "width", ")", ":", "start", "=", "0", "while", "start", "<", "len", "(", "data", ")", ":", "yield", "data", "[", "start", ":", "start", "+", "width", "]", "start", "+=", "width" ]
Instead of iterating element by element, get a number of elements at each iteration step. :param data: data to iterate on :param width: maximum number of elements to get in each iteration step :return:
[ "Instead", "of", "iterating", "element", "by", "element", "get", "a", "number", "of", "elements", "at", "each", "iteration", "step", ".", ":", "param", "data", ":", "data", "to", "iterate", "on", ":", "param", "width", ":", "maximum", "number", "of", "elements", "to", "get", "in", "each", "iteration", "step", ":", "return", ":" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/provider/__init__.py#L5-L15
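window_iterator is fully self-contained, so it can be copied verbatim and exercised directly:

    def window_iterator(data, width):
        # Yield successive slices of at most `width` elements.
        start = 0
        while start < len(data):
            yield data[start:start + width]
            start += width

    print(list(window_iterator([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]
    print(list(window_iterator('abcdef', 3)))         # ['abc', 'def']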
subdownloader/subdownloader
subdownloader/callback.py
ProgressCallback.set_range
def set_range(self, minimum, maximum): """ Set a range. The range is passed unchanged to the rangeChanged member function. :param minimum: minimum value of the range (None if no percentage is required) :param maximum: maximum value of the range (None if no percentage is required) """ self._min = minimum self._max = maximum self.on_rangeChange(minimum, maximum)
python
def set_range(self, minimum, maximum): """ Set a range. The range is passed unchanged to the rangeChanged member function. :param minimum: minimum value of the range (None if no percentage is required) :param maximum: maximum value of the range (None if no percentage is required) """ self._min = minimum self._max = maximum self.on_rangeChange(minimum, maximum)
[ "def", "set_range", "(", "self", ",", "minimum", ",", "maximum", ")", ":", "self", ".", "_min", "=", "minimum", "self", ".", "_max", "=", "maximum", "self", ".", "on_rangeChange", "(", "minimum", ",", "maximum", ")" ]
Set a range. The range is passed unchanged to the rangeChanged member function. :param minimum: minimum value of the range (None if no percentage is required) :param maximum: maximum value of the range (None if no percentage is required)
[ "Set", "a", "range", ".", "The", "range", "is", "passed", "unchanged", "to", "the", "rangeChanged", "member", "function", ".", ":", "param", "minimum", ":", "minimum", "value", "of", "the", "range", "(", "None", "if", "no", "percentage", "is", "required", ")", ":", "param", "maximum", ":", "maximum", "value", "of", "the", "range", "(", "None", "if", "no", "percentage", "is", "required", ")" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/callback.py#L32-L41
subdownloader/subdownloader
subdownloader/callback.py
ProgressCallback.get_child_progress
def get_child_progress(self, parent_min, parent_max): """ Create a new child ProgressCallback. Minimum and maximum values of the child are mapped to parent_min and parent_max of this parent ProgressCallback. :param parent_min: minimum value of the child is mapped to parent_min of this parent ProgressCallback :param parent_max: maximum value of the child is mapped to parent_max of this parent ProgressCallback :return: instance of SubProgressCallback """ return SubProgressCallback(parent=self, parent_min=parent_min, parent_max=parent_max)
python
def get_child_progress(self, parent_min, parent_max):
        """
        Create a new child ProgressCallback. Minimum and maximum values of the child are mapped
        to parent_min and parent_max of this parent ProgressCallback.
        :param parent_min: minimum value of the child is mapped to parent_min of this parent ProgressCallback
        :param parent_max: maximum value of the child is mapped to parent_max of this parent ProgressCallback
        :return: instance of SubProgressCallback
        """
        return SubProgressCallback(parent=self, parent_min=parent_min, parent_max=parent_max)
[ "def", "get_child_progress", "(", "self", ",", "parent_min", ",", "parent_max", ")", ":", "return", "SubProgressCallback", "(", "parent", "=", "self", ",", "parent_min", "=", "parent_min", ",", "parent_max", "=", "parent_max", ")" ]
Create a new child ProgressCallback. Minimum and maximum values of the child are mapped to parent_min and parent_max of this parent ProgressCallback. :param parent_min: minimum value of the child is mapped to parent_min of this parent ProgressCallback :param parent_max: maximum value of the child is mapped to parent_max of this parent ProgressCallback :return: instance of SubProgressCallback
[ "Create", "a", "new", "child", "ProgressCallback", ".", "Minimum", "and", "maximum", "values", "of", "the", "child", "are", "mapped", "to", "parent_min", "and", "parent_max", "of", "this", "parent", "ProgressCallback", ".", ":", "param", "parent_min", ":", "minimum", "value", "of", "the", "child", "is", "mapped", "to", "parent_min", "of", "this", "parent", "ProgressCallback", ":", "param", "parent_max", ":", "maximum", "value", "of", "the", "child", "is", "mapped", "to", "parent_max", "of", "this", "parent", "ProgressCallback", ":", "return", ":", "instance", "of", "SubProgressCallback" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/callback.py#L50-L58
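To make the parent/child mapping concrete, a hedged example reusing the illustrative ConsoleProgress subclass from the sketch two records above; the concrete ranges are made up, and it is assumed the child's own range is set before it is updated:

parent = ConsoleProgress()
parent.set_range(0, 100)

child = parent.get_child_progress(parent_min=40, parent_max=60)
child.set_range(0, 100)

# The child's 0-100 scale is mapped onto the parent's 40-60 slice:
child.update(50)   # parent receives update(50.0), i.e. 40 + 0.5 * (60 - 40)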
subdownloader/subdownloader
subdownloader/callback.py
ProgressCallback.update
def update(self, value, *args, **kwargs):
        """
        Call this function to inform that an update is available.
        This function does NOT call finish when value == maximum.
        :param value: The current index/position of the action. (Should be, but need not be, in the range [min, max])
        :param args: extra positional arguments to pass on
        :param kwargs: extra keyword arguments to pass on
        """
        log.debug('update(value={value}, args={args}, kwargs={kwargs})'.format(value=value, args=args, kwargs=kwargs))
        self.on_update(value, *args, **kwargs)
python
def update(self, value, *args, **kwargs):
        """
        Call this function to inform that an update is available.
        This function does NOT call finish when value == maximum.
        :param value: The current index/position of the action. (Should be, but need not be, in the range [min, max])
        :param args: extra positional arguments to pass on
        :param kwargs: extra keyword arguments to pass on
        """
        log.debug('update(value={value}, args={args}, kwargs={kwargs})'.format(value=value, args=args, kwargs=kwargs))
        self.on_update(value, *args, **kwargs)
[ "def", "update", "(", "self", ",", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "log", ".", "debug", "(", "'update(value={value}, args={args}, kwargs={kwargs})'", ".", "format", "(", "value", "=", "value", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", ")", "self", ".", "on_update", "(", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Call this function to inform that an update is available. This function does NOT call finish when value == maximum. :param value: The current index/position of the action. (Should be, but need not be, in the range [min, max]) :param args: extra positional arguments to pass on :param kwargs: extra keyword arguments to pass on
[ "Call", "this", "function", "to", "inform", "that", "an", "update", "is", "available", ".", "This", "function", "does", "NOT", "call", "finish", "when", "value", "==", "maximum", ".", ":", "param", "value", ":", "The", "current", "index", "/", "position", "of", "the", "action", ".", "(", "Should", "be", "but", "need", "not", "be", "in", "the", "range", "[", "min", "max", "]", ")", ":", "param", "args", ":", "extra", "positional", "arguments", "to", "pass", "on", ":", "param", "kwargs", ":", "extra", "keyword", "arguments", "to", "pass", "on" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/callback.py#L60-L69
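Combined with set_range and finish, the typical driver loop looks like the following sketch; the work items and the callback instance are placeholders, not from the source:

files = ['a.srt', 'b.srt', 'c.srt']
callback = ConsoleProgress()           # any ProgressCallback subclass

callback.set_range(0, len(files))
for i, path in enumerate(files, start=1):
    # ... process path ...
    callback.update(i)
callback.finish()                      # update() never calls finish() for you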
subdownloader/subdownloader
subdownloader/callback.py
ProgressCallback.finish
def finish(self, *args, **kwargs):
        """
        Call this function to inform that the operation is finished.
        :param args: extra positional arguments to pass on
        :param kwargs: extra keyword arguments to pass on
        """
        log.debug('finish(args={args}, kwargs={kwargs})'.format(args=args, kwargs=kwargs))
        self.on_finish(*args, **kwargs)
python
def finish(self, *args, **kwargs):
        """
        Call this function to inform that the operation is finished.
        :param args: extra positional arguments to pass on
        :param kwargs: extra keyword arguments to pass on
        """
        log.debug('finish(args={args}, kwargs={kwargs})'.format(args=args, kwargs=kwargs))
        self.on_finish(*args, **kwargs)
[ "def", "finish", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "log", ".", "debug", "(", "'finish(args={args}, kwargs={kwargs})'", ".", "format", "(", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", ")", "self", ".", "on_finish", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Call this function to inform that the operation is finished. :param args: extra positional arguments to pass on :param kwargs: extra keyword arguments to pass on
[ "Call", "this", "function", "to", "inform", "that", "the", "operation", "is", "finished", ".", ":", "param", "args", ":", "extra", "positional", "arguments", "to", "pass", "on", ":", "param", "kwargs", ":", "extra", "keyword", "arguments", "to", "pass", "on" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/callback.py#L71-L78
subdownloader/subdownloader
subdownloader/callback.py
SubProgressCallback.on_update
def on_update(self, value, *args, **kwargs):
        """
        Inform the parent of progress.
        :param value: The value of this subprogresscallback
        :param args: Extra positional arguments
        :param kwargs: Extra keyword arguments
        """
        parent_value = self._parent_min
        if self._max != self._min:
            sub_progress = (value - self._min) / (self._max - self._min)
            parent_value = self._parent_min + sub_progress * (self._parent_max - self._parent_min)
        self._parent.update(parent_value, *args, **kwargs)
python
def on_update(self, value, *args, **kwargs):
        """
        Inform the parent of progress.
        :param value: The value of this subprogresscallback
        :param args: Extra positional arguments
        :param kwargs: Extra keyword arguments
        """
        parent_value = self._parent_min
        if self._max != self._min:
            sub_progress = (value - self._min) / (self._max - self._min)
            parent_value = self._parent_min + sub_progress * (self._parent_max - self._parent_min)
        self._parent.update(parent_value, *args, **kwargs)
[ "def", "on_update", "(", "self", ",", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "parent_value", "=", "self", ".", "_parent_min", "if", "self", ".", "_max", "!=", "self", ".", "_min", ":", "sub_progress", "=", "(", "value", "-", "self", ".", "_min", ")", "/", "(", "self", ".", "_max", "-", "self", ".", "_min", ")", "parent_value", "=", "self", ".", "_parent_min", "+", "sub_progress", "*", "(", "self", ".", "_parent_max", "-", "self", ".", "_parent_min", ")", "self", ".", "_parent", ".", "update", "(", "parent_value", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Inform the parent of progress. :param value: The value of this subprogresscallback :param args: Extra positional arguments :param kwargs: Extra keyword arguments
[ "Inform", "the", "parent", "of", "progress", ".", ":", "param", "value", ":", "The", "value", "of", "this", "subprogresscallback", ":", "param", "args", ":", "Extra", "positional", "arguments", ":", "param", "kwargs", ":", "Extra", "keyword", "arguments" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/callback.py#L145-L156
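The linear interpolation in on_update can be checked by hand; a short worked example with made-up numbers:

# child range [0, 4] mapped onto the parent slice [10, 30]
_min, _max = 0, 4
_parent_min, _parent_max = 10, 30
value = 3

sub_progress = (value - _min) / (_max - _min)                            # 0.75
parent_value = _parent_min + sub_progress * (_parent_max - _parent_min)
print(parent_value)                                                      # 25.0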
subdownloader/subdownloader
subdownloader/provider/SDService.py
SDService._login
def _login(self, username="", password=""):
        """Login to the Server using username/password,
        empty parameters mean an anonymous login.
        Returns True if login successful, and False if not.
        """
        self.log.debug("----------------")
        self.log.debug("Logging in (username: %s)..." % username)

        def run_query():
            return self._xmlrpc_server.LogIn(
                username, password, self.language, self.user_agent)
        info = self._safe_exec(run_query, None)
        if info is None:
            self._token = None
            return False

        self.log.debug("Login ended in %s with status: %s" % (info['seconds'], info['status']))
        if info['status'] == "200 OK":
            self.log.debug("Session ID: %s" % info['token'])
            self.log.debug("----------------")
            self._token = info['token']
            return True
        else:
            # force token reset
            self.log.debug("----------------")
            self._token = None
            return False
python
def _login(self, username="", password=""):
        """Login to the Server using username/password,
        empty parameters mean an anonymous login.
        Returns True if login successful, and False if not.
        """
        self.log.debug("----------------")
        self.log.debug("Logging in (username: %s)..." % username)

        def run_query():
            return self._xmlrpc_server.LogIn(
                username, password, self.language, self.user_agent)
        info = self._safe_exec(run_query, None)
        if info is None:
            self._token = None
            return False

        self.log.debug("Login ended in %s with status: %s" % (info['seconds'], info['status']))
        if info['status'] == "200 OK":
            self.log.debug("Session ID: %s" % info['token'])
            self.log.debug("----------------")
            self._token = info['token']
            return True
        else:
            # force token reset
            self.log.debug("----------------")
            self._token = None
            return False
[ "def", "_login", "(", "self", ",", "username", "=", "\"\"", ",", "password", "=", "\"\"", ")", ":", "self", ".", "log", ".", "debug", "(", "\"----------------\"", ")", "self", ".", "log", ".", "debug", "(", "\"Logging in (username: %s)...\"", "%", "username", ")", "def", "run_query", "(", ")", ":", "return", "self", ".", "_xmlrpc_server", ".", "LogIn", "(", "username", ",", "password", ",", "self", ".", "language", ",", "self", ".", "user_agent", ")", "info", "=", "self", ".", "_safe_exec", "(", "run_query", ",", "None", ")", "if", "info", "is", "None", ":", "self", ".", "_token", "=", "None", "return", "False", "self", ".", "log", ".", "debug", "(", "\"Login ended in %s with status: %s\"", "%", "(", "info", "[", "'seconds'", "]", ",", "info", "[", "'status'", "]", ")", ")", "if", "info", "[", "'status'", "]", "==", "\"200 OK\"", ":", "self", ".", "log", ".", "debug", "(", "\"Session ID: %s\"", "%", "info", "[", "'token'", "]", ")", "self", ".", "log", ".", "debug", "(", "\"----------------\"", ")", "self", ".", "_token", "=", "info", "[", "'token'", "]", "return", "True", "else", ":", "# force token reset", "self", ".", "log", ".", "debug", "(", "\"----------------\"", ")", "self", ".", "_token", "=", "None", "return", "False" ]
Login to the Server using username/password, empty parameters mean an anonymous login. Returns True if login successful, and False if not.
[ "Login", "to", "the", "Server", "using", "username", "/", "password", "empty", "parameters", "mean", "an", "anonymous", "login", ".", "Returns", "True", "if", "login", "successful", "and", "False", "if", "not", "." ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/provider/SDService.py#L248-L277
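A hedged sketch of how _login and _logout might be paired by calling code; constructing SDService with no arguments is an assumption, and the try/finally pairing is a usage suggestion rather than something taken from the repository:

service = SDService()

if service._login():          # empty username/password -> anonymous login
    try:
        pass                  # ... perform provider queries here ...
    finally:
        service._logout()     # always resets the session token
else:
    print('login failed')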
subdownloader/subdownloader
subdownloader/provider/SDService.py
SDService._logout
def _logout(self):
        """Logout from current session(token)
        This function doesn't return any boolean value, since it can 'fail'
        for anonymous logins
        """
        self.log.debug("Logging out from session ID: %s" % self._token)
        try:
            info = self._xmlrpc_server.LogOut(self._token)
            self.log.debug("Logout ended in %s with status: %s" % (info['seconds'], info['status']))
        except ProtocolError as e:
            self.log.debug("error in HTTP/HTTPS transport layer")
            raise
        except Fault as e:
            self.log.debug("error in xml-rpc server")
            raise
        except:
            self.log.exception("Connection to the server failed/other error")
            raise
        finally:
            # force token reset
            self._token = None
python
def _logout(self):
        """Logout from current session(token)
        This function doesn't return any boolean value, since it can 'fail'
        for anonymous logins
        """
        self.log.debug("Logging out from session ID: %s" % self._token)
        try:
            info = self._xmlrpc_server.LogOut(self._token)
            self.log.debug("Logout ended in %s with status: %s" % (info['seconds'], info['status']))
        except ProtocolError as e:
            self.log.debug("error in HTTP/HTTPS transport layer")
            raise
        except Fault as e:
            self.log.debug("error in xml-rpc server")
            raise
        except:
            self.log.exception("Connection to the server failed/other error")
            raise
        finally:
            # force token reset
            self._token = None
[ "def", "_logout", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Logging out from session ID: %s\"", "%", "self", ".", "_token", ")", "try", ":", "info", "=", "self", ".", "_xmlrpc_server", ".", "LogOut", "(", "self", ".", "_token", ")", "self", ".", "log", ".", "debug", "(", "\"Logout ended in %s with status: %s\"", "%", "(", "info", "[", "'seconds'", "]", ",", "info", "[", "'status'", "]", ")", ")", "except", "ProtocolError", "as", "e", ":", "self", ".", "log", ".", "debug", "(", "\"error in HTTP/HTTPS transport layer\"", ")", "raise", "except", "Fault", "as", "e", ":", "self", ".", "log", ".", "debug", "(", "\"error in xml-rpc server\"", ")", "raise", "except", ":", "self", ".", "log", ".", "exception", "(", "\"Connection to the server failed/other error\"", ")", "raise", "finally", ":", "# force token reset", "self", ".", "_token", "=", "None" ]
Logout from current session(token) This function doesn't return any boolean value, since it can 'fail' for anonymous logins
[ "Logout", "from", "current", "session", "(", "token", ")", "This", "function", "doesn", "t", "return", "any", "boolean", "value", "since", "it", "can", "fail", "for", "anonymous", "logins" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/provider/SDService.py#L288-L308
subdownloader/subdownloader
subdownloader/client/arguments.py
parse_arguments
def parse_arguments(args=None):
    """
    Parse the program arguments.
    :return: ArgumentSettings object with the parsed arguments
    """
    parser = get_argument_parser()

    # Autocomplete arguments
    autocomplete(parser)

    ns = parser.parse_args(args=args)

    return ArgumentSettings(
        program=ArgumentProgramSettings(
            log=ArgumentLogSettings(
                path=None,
                level=ns.loglevel,
            ),
            settings=ArgumentSettingsSettings(
                path=ns.settings_path,
            ),
            client=ArgumentClientSettings(
                type=ns.client_type,
                cli=ArgumentClientCliSettings(
                    interactive=False,
                ),
                gui=ArgumentClientGuiSettings(
                ),
            ),
        ),
        search=ArgumentSearchSettings(
            recursive=ns.recursive,
            working_directory=ns.video_path,
        ),
        filter=FilterSettings(
            languages=ns.languages,
        ),
        download=DownloadSettings(
            rename_strategy=ns.rename_strategy,
        ),
        providers=ns.providers,
        proxy=ns.proxy,
        test=ns.test,
    )
python
def parse_arguments(args=None):
    """
    Parse the program arguments.
    :return: ArgumentSettings object with the parsed arguments
    """
    parser = get_argument_parser()

    # Autocomplete arguments
    autocomplete(parser)

    ns = parser.parse_args(args=args)

    return ArgumentSettings(
        program=ArgumentProgramSettings(
            log=ArgumentLogSettings(
                path=None,
                level=ns.loglevel,
            ),
            settings=ArgumentSettingsSettings(
                path=ns.settings_path,
            ),
            client=ArgumentClientSettings(
                type=ns.client_type,
                cli=ArgumentClientCliSettings(
                    interactive=False,
                ),
                gui=ArgumentClientGuiSettings(
                ),
            ),
        ),
        search=ArgumentSearchSettings(
            recursive=ns.recursive,
            working_directory=ns.video_path,
        ),
        filter=FilterSettings(
            languages=ns.languages,
        ),
        download=DownloadSettings(
            rename_strategy=ns.rename_strategy,
        ),
        providers=ns.providers,
        proxy=ns.proxy,
        test=ns.test,
    )
[ "def", "parse_arguments", "(", "args", "=", "None", ")", ":", "parser", "=", "get_argument_parser", "(", ")", "# Autocomplete arguments", "autocomplete", "(", "parser", ")", "ns", "=", "parser", ".", "parse_args", "(", "args", "=", "args", ")", "return", "ArgumentSettings", "(", "program", "=", "ArgumentProgramSettings", "(", "log", "=", "ArgumentLogSettings", "(", "path", "=", "None", ",", "level", "=", "ns", ".", "loglevel", ",", ")", ",", "settings", "=", "ArgumentSettingsSettings", "(", "path", "=", "ns", ".", "settings_path", ",", ")", ",", "client", "=", "ArgumentClientSettings", "(", "type", "=", "ns", ".", "client_type", ",", "cli", "=", "ArgumentClientCliSettings", "(", "interactive", "=", "False", ",", ")", ",", "gui", "=", "ArgumentClientGuiSettings", "(", ")", ",", ")", ",", ")", ",", "search", "=", "ArgumentSearchSettings", "(", "recursive", "=", "ns", ".", "recursive", ",", "working_directory", "=", "ns", ".", "video_path", ",", ")", ",", "filter", "=", "FilterSettings", "(", "languages", "=", "ns", ".", "languages", ",", ")", ",", "download", "=", "DownloadSettings", "(", "rename_strategy", "=", "ns", ".", "rename_strategy", ",", ")", ",", "providers", "=", "ns", ".", "providers", ",", "proxy", "=", "ns", ".", "proxy", ",", "test", "=", "ns", ".", "test", ",", ")" ]
Parse the program arguments. :return: ArgumentSettings object with the parsed arguments
[ "Parse", "the", "program", "arguments", ".", ":", "return", ":", "ArgumentSettings", "object", "with", "the", "parsed", "arguments" ]
train
https://github.com/subdownloader/subdownloader/blob/bbccedd11b18d925ad4c062b5eb65981e24d0433/subdownloader/client/arguments.py#L18-L60
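A hedged usage sketch for parse_arguments: parsing an empty argument list yields the defaults defined by get_argument_parser (not shown in this record), and the attribute paths below assume the settings containers expose their fields as attributes, matching the constructor calls above:

settings = parse_arguments(args=[])

print(settings.program.client.type)        # selected client type
print(settings.filter.languages)           # requested subtitle languages
print(settings.search.working_directory)   # directory to scan for videos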