Columns:
    id                 int32    (values 0 to 252k)
    repo               string   (lengths 7 to 55)
    path               string   (lengths 4 to 127)
    func_name          string   (lengths 1 to 88)
    original_string    string   (lengths 75 to 19.8k)
    language           string   (1 distinct value)
    code               string   (lengths 75 to 19.8k)
    code_tokens        list
    docstring          string   (lengths 3 to 17.3k)
    docstring_tokens   list
    sha                string   (lengths 40 to 40)
    url                string   (lengths 87 to 242)
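Each record below carries these fields for a single Python function. As a minimal, hypothetical sketch of how rows in this shape can be consumed (the file name is a placeholder, not taken from this dump), they can be read as JSON Lines with the standard library alone:

import json

# Hypothetical input path; assumes one JSON object per line with the fields listed above.
with open("python_functions.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        # Pair each function's name and provenance with the first line of its docstring.
        summary = row["docstring"].splitlines()[0] if row["docstring"] else ""
        print(row["func_name"], row["repo"], row["url"])
        print("    " + summary)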
242,300
edeposit/edeposit.amqp.ltp
src/edeposit/amqp/ltp/info_composer.py
compose_info
python
def compose_info(root_dir, files, hash_fn, aleph_record, urn_nbn=None): """ Compose `info` XML file. Info example:: <?xml version="1.0" encoding="UTF-8" standalone="yes" ?> <info> <created>2014-07-31T10:58:53</created> <metadataversion>1.0</metadataversion> <packageid>c88f5a50-7b34-11e2-b930-005056827e51</packageid> <mainmets>mets.xml</mainmets> <titleid type="ccnb">cnb001852189</titleid> <titleid type="isbn">978-80-85979-89-6</titleid> <collection>edeposit</collection> <institution>nakladatelství Altar</institution> <creator>ABA001</creator> <size>1530226</size> <itemlist itemtotal="1"> <item>\data\Denik_zajatce_Sramek_CZ_v30f-font.epub</item> </itemlist> <checksum type="MD5" checksum="ce076548eaade33888005de5d4634a0d"> \MD5.md5 </checksum> </info> Args: root_dir (str): Absolute path to the root directory. files (list): Absolute paths to all ebook and metadata files. hash_fn (str): Absolute path to the MD5 file. aleph_record (str): String with Aleph record with metadata. Returns: str: XML string. """ # compute hash for hashfile with open(hash_fn) as f: hash_file_md5 = hashlib.md5(f.read()).hexdigest() schema_location = "http://www.ndk.cz/standardy-digitalizace/info11.xsd" document = odict[ "info": odict[ "@xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance", "@xsi:noNamespaceSchemaLocation": schema_location, "created": time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), "metadataversion": "1.0", "packageid": _path_to_id(root_dir), # not used in SIP # "mainmets": _get_localized_fn(metadata_fn, root_dir), "titleid": None, "collection": "edeposit", "institution": None, "creator": None, "size": _calc_dir_size(root_dir) / 1024, # size in kiB "itemlist": odict[ "@itemtotal": "2", "item": map( lambda x: _get_localized_fn(x, root_dir), files ) ], "checksum": odict[ "@type": "MD5", "@checksum": hash_file_md5, "#text": _get_localized_fn(hash_fn, root_dir) ], ] ] # get informations from MARC record record = MARCXMLRecord(aleph_record) # get publisher info publisher = unicode(record.get_publisher(), "utf-8") if record.get_publisher(None): document["info"]["institution"] = remove_hairs(publisher) # get <creator> info creator = record.getDataRecords("910", "a", False) alt_creator = record.getDataRecords("040", "d", False) document["info"]["creator"] = creator[0] if creator else alt_creator[-1] # collect informations for <titleid> tags isbns = record.get_ISBNs() ccnb = record.getDataRecords("015", "a", False) ccnb = ccnb[0] if ccnb else None if any([isbns, ccnb, urn_nbn]): # TODO: issn document["info"]["titleid"] = [] for isbn in isbns: document["info"]["titleid"].append({ "@type": "isbn", "#text": isbn }) if ccnb: document["info"]["titleid"].append({ "@type": "ccnb", "#text": ccnb }) if urn_nbn: document["info"]["titleid"].append({ "@type": "urnnbn", "#text": urn_nbn }) # TODO: later # if issn: # document["info"]["titleid"].append({ # "@type": "issn", # "#text": issn # }) # remove unset options unset_keys = [ key for key in document["info"] if key is None ] for key in unset_keys: del document[key] xml_document = xmltodict.unparse(document, pretty=True) return xml_document.encode("utf-8")
[ "def", "compose_info", "(", "root_dir", ",", "files", ",", "hash_fn", ",", "aleph_record", ",", "urn_nbn", "=", "None", ")", ":", "# compute hash for hashfile", "with", "open", "(", "hash_fn", ")", "as", "f", ":", "hash_file_md5", "=", "hashlib", ".", "md5", "(", "f", ".", "read", "(", ")", ")", ".", "hexdigest", "(", ")", "schema_location", "=", "\"http://www.ndk.cz/standardy-digitalizace/info11.xsd\"", "document", "=", "odict", "[", "\"info\"", ":", "odict", "[", "\"@xmlns:xsi\"", ":", "\"http://www.w3.org/2001/XMLSchema-instance\"", ",", "\"@xsi:noNamespaceSchemaLocation\"", ":", "schema_location", ",", "\"created\"", ":", "time", ".", "strftime", "(", "\"%Y-%m-%dT%H:%M:%S\"", ",", "time", ".", "gmtime", "(", ")", ")", ",", "\"metadataversion\"", ":", "\"1.0\"", ",", "\"packageid\"", ":", "_path_to_id", "(", "root_dir", ")", ",", "# not used in SIP", "# \"mainmets\": _get_localized_fn(metadata_fn, root_dir),", "\"titleid\"", ":", "None", ",", "\"collection\"", ":", "\"edeposit\"", ",", "\"institution\"", ":", "None", ",", "\"creator\"", ":", "None", ",", "\"size\"", ":", "_calc_dir_size", "(", "root_dir", ")", "/", "1024", ",", "# size in kiB", "\"itemlist\"", ":", "odict", "[", "\"@itemtotal\"", ":", "\"2\"", ",", "\"item\"", ":", "map", "(", "lambda", "x", ":", "_get_localized_fn", "(", "x", ",", "root_dir", ")", ",", "files", ")", "]", ",", "\"checksum\"", ":", "odict", "[", "\"@type\"", ":", "\"MD5\"", ",", "\"@checksum\"", ":", "hash_file_md5", ",", "\"#text\"", ":", "_get_localized_fn", "(", "hash_fn", ",", "root_dir", ")", "]", ",", "]", "]", "# get informations from MARC record", "record", "=", "MARCXMLRecord", "(", "aleph_record", ")", "# get publisher info", "publisher", "=", "unicode", "(", "record", ".", "get_publisher", "(", ")", ",", "\"utf-8\"", ")", "if", "record", ".", "get_publisher", "(", "None", ")", ":", "document", "[", "\"info\"", "]", "[", "\"institution\"", "]", "=", "remove_hairs", "(", "publisher", ")", "# get <creator> info", "creator", "=", "record", ".", "getDataRecords", "(", "\"910\"", ",", "\"a\"", ",", "False", ")", "alt_creator", "=", "record", ".", "getDataRecords", "(", "\"040\"", ",", "\"d\"", ",", "False", ")", "document", "[", "\"info\"", "]", "[", "\"creator\"", "]", "=", "creator", "[", "0", "]", "if", "creator", "else", "alt_creator", "[", "-", "1", "]", "# collect informations for <titleid> tags", "isbns", "=", "record", ".", "get_ISBNs", "(", ")", "ccnb", "=", "record", ".", "getDataRecords", "(", "\"015\"", ",", "\"a\"", ",", "False", ")", "ccnb", "=", "ccnb", "[", "0", "]", "if", "ccnb", "else", "None", "if", "any", "(", "[", "isbns", ",", "ccnb", ",", "urn_nbn", "]", ")", ":", "# TODO: issn", "document", "[", "\"info\"", "]", "[", "\"titleid\"", "]", "=", "[", "]", "for", "isbn", "in", "isbns", ":", "document", "[", "\"info\"", "]", "[", "\"titleid\"", "]", ".", "append", "(", "{", "\"@type\"", ":", "\"isbn\"", ",", "\"#text\"", ":", "isbn", "}", ")", "if", "ccnb", ":", "document", "[", "\"info\"", "]", "[", "\"titleid\"", "]", ".", "append", "(", "{", "\"@type\"", ":", "\"ccnb\"", ",", "\"#text\"", ":", "ccnb", "}", ")", "if", "urn_nbn", ":", "document", "[", "\"info\"", "]", "[", "\"titleid\"", "]", ".", "append", "(", "{", "\"@type\"", ":", "\"urnnbn\"", ",", "\"#text\"", ":", "urn_nbn", "}", ")", "# TODO: later", "# if issn:", "# document[\"info\"][\"titleid\"].append({", "# \"@type\": \"issn\",", "# \"#text\": issn", "# })", "# remove unset options", "unset_keys", "=", "[", "key", "for", "key", "in", "document", "[", "\"info\"", 
"]", "if", "key", "is", "None", "]", "for", "key", "in", "unset_keys", ":", "del", "document", "[", "key", "]", "xml_document", "=", "xmltodict", ".", "unparse", "(", "document", ",", "pretty", "=", "True", ")", "return", "xml_document", ".", "encode", "(", "\"utf-8\"", ")" ]
Compose `info` XML file. Info example:: <?xml version="1.0" encoding="UTF-8" standalone="yes" ?> <info> <created>2014-07-31T10:58:53</created> <metadataversion>1.0</metadataversion> <packageid>c88f5a50-7b34-11e2-b930-005056827e51</packageid> <mainmets>mets.xml</mainmets> <titleid type="ccnb">cnb001852189</titleid> <titleid type="isbn">978-80-85979-89-6</titleid> <collection>edeposit</collection> <institution>nakladatelství Altar</institution> <creator>ABA001</creator> <size>1530226</size> <itemlist itemtotal="1"> <item>\data\Denik_zajatce_Sramek_CZ_v30f-font.epub</item> </itemlist> <checksum type="MD5" checksum="ce076548eaade33888005de5d4634a0d"> \MD5.md5 </checksum> </info> Args: root_dir (str): Absolute path to the root directory. files (list): Absolute paths to all ebook and metadata files. hash_fn (str): Absolute path to the MD5 file. aleph_record (str): String with Aleph record with metadata. Returns: str: XML string.
[ "Compose", "info", "XML", "file", "." ]
df9ac7ec6cbdbeaaeed438ca66df75ea967b6d8e
https://github.com/edeposit/edeposit.amqp.ltp/blob/df9ac7ec6cbdbeaaeed438ca66df75ea967b6d8e/src/edeposit/amqp/ltp/info_composer.py#L81-L210
242,301
1and1/infrascope
src/infrascope/cli.py
run
python
def run(argv=None): """Main CLI entry point.""" cli = InfrascopeCLI() return cli.run(sys.argv[1:] if argv is None else argv)
[ "def", "run", "(", "argv", "=", "None", ")", ":", "cli", "=", "InfrascopeCLI", "(", ")", "return", "cli", ".", "run", "(", "sys", ".", "argv", "[", "1", ":", "]", "if", "argv", "is", "None", "else", "argv", ")" ]
Main CLI entry point.
[ "Main", "CLI", "entry", "point", "." ]
d7e291917e618a0a3cd6d5bfc20c6b5defd7550c
https://github.com/1and1/infrascope/blob/d7e291917e618a0a3cd6d5bfc20c6b5defd7550c/src/infrascope/cli.py#L58-L61
242,302
donovan-duplessis/pwnurl
pwnurl/app.py
create_app
python
def create_app(configobj=ProdConfig): """ Create and configure Flask Application """ app = Flask(__name__) app.config.from_object(configobj) configure_blueprints(app) configure_extensions(app) configure_callbacks(app) configure_filters(app) configure_error_handlers(app) return app
[ "def", "create_app", "(", "configobj", "=", "ProdConfig", ")", ":", "app", "=", "Flask", "(", "__name__", ")", "app", ".", "config", ".", "from_object", "(", "configobj", ")", "configure_blueprints", "(", "app", ")", "configure_extensions", "(", "app", ")", "configure_callbacks", "(", "app", ")", "configure_filters", "(", "app", ")", "configure_error_handlers", "(", "app", ")", "return", "app" ]
Create and configure Flask Application
[ "Create", "and", "configure", "Flask", "Application" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/app.py#L21-L31
242,303
donovan-duplessis/pwnurl
pwnurl/app.py
configure_extensions
python
def configure_extensions(app): """ Configure application extensions """ db.init_app(app) app.wsgi_app = ProxyFix(app.wsgi_app) assets.init_app(app) for asset in bundles: for (name, bundle) in asset.iteritems(): assets.register(name, bundle) login_manager.login_view = 'frontend.login' login_manager.login_message_category = 'info' @login_manager.user_loader def load_user(id): return User.query.get(int(id)) login_manager.init_app(app) cache.init_app(app) migrate.init_app(app, db) toolbar.init_app(app)
[ "def", "configure_extensions", "(", "app", ")", ":", "db", ".", "init_app", "(", "app", ")", "app", ".", "wsgi_app", "=", "ProxyFix", "(", "app", ".", "wsgi_app", ")", "assets", ".", "init_app", "(", "app", ")", "for", "asset", "in", "bundles", ":", "for", "(", "name", ",", "bundle", ")", "in", "asset", ".", "iteritems", "(", ")", ":", "assets", ".", "register", "(", "name", ",", "bundle", ")", "login_manager", ".", "login_view", "=", "'frontend.login'", "login_manager", ".", "login_message_category", "=", "'info'", "@", "login_manager", ".", "user_loader", "def", "load_user", "(", "id", ")", ":", "return", "User", ".", "query", ".", "get", "(", "int", "(", "id", ")", ")", "login_manager", ".", "init_app", "(", "app", ")", "cache", ".", "init_app", "(", "app", ")", "migrate", ".", "init_app", "(", "app", ",", "db", ")", "toolbar", ".", "init_app", "(", "app", ")" ]
Configure application extensions
[ "Configure", "application", "extensions" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/app.py#L48-L71
242,304
donovan-duplessis/pwnurl
pwnurl/app.py
configure_callbacks
python
def configure_callbacks(app): """ Configure application callbacks """ @app.before_request def before_request(): """ Retrieve menu configuration before every request (this will return cached version if possible, else reload from database. """ from flask import session #g.menusystem = helper.generate_menusystem() session['menusystem'] = helper.generate_menusystem() print session['menusystem']
[ "def", "configure_callbacks", "(", "app", ")", ":", "@", "app", ".", "before_request", "def", "before_request", "(", ")", ":", "\"\"\" Retrieve menu configuration before every request (this will return\n cached version if possible, else reload from database. \"\"\"", "from", "flask", "import", "session", "#g.menusystem = helper.generate_menusystem()", "session", "[", "'menusystem'", "]", "=", "helper", ".", "generate_menusystem", "(", ")", "print", "session", "[", "'menusystem'", "]" ]
Configure application callbacks
[ "Configure", "application", "callbacks" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/app.py#L74-L86
242,305
donovan-duplessis/pwnurl
pwnurl/app.py
configure_error_handlers
python
def configure_error_handlers(app): """ Configure application error handlers """ def render_error(error): return (render_template('errors/%s.html' % error.code, title=error_messages[error.code], code=error.code), error.code) for (errcode, title) in error_messages.iteritems(): app.errorhandler(errcode)(render_error)
[ "def", "configure_error_handlers", "(", "app", ")", ":", "def", "render_error", "(", "error", ")", ":", "return", "(", "render_template", "(", "'errors/%s.html'", "%", "error", ".", "code", ",", "title", "=", "error_messages", "[", "error", ".", "code", "]", ",", "code", "=", "error", ".", "code", ")", ",", "error", ".", "code", ")", "for", "(", "errcode", ",", "title", ")", "in", "error_messages", ".", "iteritems", "(", ")", ":", "app", ".", "errorhandler", "(", "errcode", ")", "(", "render_error", ")" ]
Configure application error handlers
[ "Configure", "application", "error", "handlers" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/app.py#L89-L97
242,306
krinj/k-util
k_util/logger.py
Logger._strip_colors
python
def _strip_colors(self, message: str) -> str: """ Remove all of the color tags from this message. """ for c in self.COLORS: message = message.replace(c, "") return message
[ "def", "_strip_colors", "(", "self", ",", "message", ":", "str", ")", "->", "str", ":", "for", "c", "in", "self", ".", "COLORS", ":", "message", "=", "message", ".", "replace", "(", "c", ",", "\"\"", ")", "return", "message" ]
Remove all of the color tags from this message.
[ "Remove", "all", "of", "the", "color", "tags", "from", "this", "message", "." ]
b118826b1d6f49ca4e1ca7327d5b171db332ac23
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/k_util/logger.py#L305-L309
242,307
50onRed/smr
smr/shared.py
add_str
python
def add_str(window, line_num, str): """ attempt to draw str on screen and ignore errors if they occur """ try: window.addstr(line_num, 0, str) except curses.error: pass
[ "def", "add_str", "(", "window", ",", "line_num", ",", "str", ")", ":", "try", ":", "window", ".", "addstr", "(", "line_num", ",", "0", ",", "str", ")", "except", "curses", ".", "error", ":", "pass" ]
attempt to draw str on screen and ignore errors if they occur
[ "attempt", "to", "draw", "str", "on", "screen", "and", "ignore", "errors", "if", "they", "occur" ]
999b33d86b6a900d7c4aadf03cf4a661acba9f1b
https://github.com/50onRed/smr/blob/999b33d86b6a900d7c4aadf03cf4a661acba9f1b/smr/shared.py#L39-L44
242,308
50onRed/smr
smr/shared.py
write_file_to_descriptor
python
def write_file_to_descriptor(input_queue, descriptor): """ get item from input_queue and write it to descriptor returns True if and only if it was successfully written """ try: file_name = input_queue.get(timeout=2) descriptor.write("{}\n".format(file_name)) descriptor.flush() input_queue.task_done() return True except Empty: # no more files in queue descriptor.close() return False except IOError: return False
[ "def", "write_file_to_descriptor", "(", "input_queue", ",", "descriptor", ")", ":", "try", ":", "file_name", "=", "input_queue", ".", "get", "(", "timeout", "=", "2", ")", "descriptor", ".", "write", "(", "\"{}\\n\"", ".", "format", "(", "file_name", ")", ")", "descriptor", ".", "flush", "(", ")", "input_queue", ".", "task_done", "(", ")", "return", "True", "except", "Empty", ":", "# no more files in queue", "descriptor", ".", "close", "(", ")", "return", "False", "except", "IOError", ":", "return", "False" ]
get item from input_queue and write it to descriptor returns True if and only if it was successfully written
[ "get", "item", "from", "input_queue", "and", "write", "it", "to", "descriptor", "returns", "True", "if", "and", "only", "if", "it", "was", "successfully", "written" ]
999b33d86b6a900d7c4aadf03cf4a661acba9f1b
https://github.com/50onRed/smr/blob/999b33d86b6a900d7c4aadf03cf4a661acba9f1b/smr/shared.py#L63-L79
242,309
jeffrimko/Auxly
lib/auxly/listy.py
smooth
python
def smooth(l): """Yields a generator which smooths all elements as if the given list was of depth 1. **Examples**: :: list(auxly.listy.smooth([1,[2,[3,[4]]]])) # [1, 2, 3, 4] """ if type(l) in [list, tuple]: for i in l: for j in smooth(i): yield j else: yield l
[ "def", "smooth", "(", "l", ")", ":", "if", "type", "(", "l", ")", "in", "[", "list", ",", "tuple", "]", ":", "for", "i", "in", "l", ":", "for", "j", "in", "smooth", "(", "i", ")", ":", "yield", "j", "else", ":", "yield", "l" ]
Yields a generator which smooths all elements as if the given list was of depth 1. **Examples**: :: list(auxly.listy.smooth([1,[2,[3,[4]]]])) # [1, 2, 3, 4]
[ "Yields", "a", "generator", "which", "smooths", "all", "elements", "as", "if", "the", "given", "list", "was", "of", "depth", "1", "." ]
5aae876bcb6ca117c81d904f9455764cdc78cd48
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/listy.py#L21-L35
242,310
CivicSpleen/ckcache
ckcache/s3.py
S3Cache.s3path
python
def s3path(self, rel_path): """Return the path as an S3 schema""" import urlparse path = self.path(rel_path, public_url=True) parts = list(urlparse.urlparse(path)) parts[0] = 's3' parts[1] = self.bucket_name return urlparse.urlunparse(parts)
[ "def", "s3path", "(", "self", ",", "rel_path", ")", ":", "import", "urlparse", "path", "=", "self", ".", "path", "(", "rel_path", ",", "public_url", "=", "True", ")", "parts", "=", "list", "(", "urlparse", ".", "urlparse", "(", "path", ")", ")", "parts", "[", "0", "]", "=", "'s3'", "parts", "[", "1", "]", "=", "self", ".", "bucket_name", "return", "urlparse", ".", "urlunparse", "(", "parts", ")" ]
Return the path as an S3 schema
[ "Return", "the", "path", "as", "an", "S3", "schema" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/s3.py#L153-L164
242,311
CivicSpleen/ckcache
ckcache/s3.py
S3Cache.get_stream
python
def get_stream(self, rel_path, cb=None, return_meta=False): """Return the object as a stream""" from boto.s3.key import Key from boto.exception import S3ResponseError import StringIO from . import MetadataFlo b = StringIO.StringIO() try: k = self._get_boto_key(rel_path) if not k: return None k.get_contents_to_file(b, cb=cb, num_cb=100) b.seek(0) if return_meta: d = k.metadata d['size'] = k.size d['etag'] = k.etag else: d = {} return MetadataFlo(b, d) except S3ResponseError as e: if e.status == 404: return None else: raise e
[ "def", "get_stream", "(", "self", ",", "rel_path", ",", "cb", "=", "None", ",", "return_meta", "=", "False", ")", ":", "from", "boto", ".", "s3", ".", "key", "import", "Key", "from", "boto", ".", "exception", "import", "S3ResponseError", "import", "StringIO", "from", ".", "import", "MetadataFlo", "b", "=", "StringIO", ".", "StringIO", "(", ")", "try", ":", "k", "=", "self", ".", "_get_boto_key", "(", "rel_path", ")", "if", "not", "k", ":", "return", "None", "k", ".", "get_contents_to_file", "(", "b", ",", "cb", "=", "cb", ",", "num_cb", "=", "100", ")", "b", ".", "seek", "(", "0", ")", "if", "return_meta", ":", "d", "=", "k", ".", "metadata", "d", "[", "'size'", "]", "=", "k", ".", "size", "d", "[", "'etag'", "]", "=", "k", ".", "etag", "else", ":", "d", "=", "{", "}", "return", "MetadataFlo", "(", "b", ",", "d", ")", "except", "S3ResponseError", "as", "e", ":", "if", "e", ".", "status", "==", "404", ":", "return", "None", "else", ":", "raise", "e" ]
Return the object as a stream
[ "Return", "the", "object", "as", "a", "stream" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/s3.py#L187-L218
242,312
CivicSpleen/ckcache
ckcache/s3.py
S3Cache.list
python
def list(self, path=None, with_metadata=False, include_partitions=False): '''Get a list of all of bundle files in the cache. Does not return partition files''' import json sub_path = self.prefix + '/' + path.strip('/') if path else self.prefix l = {} for e in self.bucket.list(sub_path): path = e.name.replace(self.prefix, '', 1).strip('/') if path.startswith('_') or path.startswith('meta'): continue # TODO 'include_partitions' doesn't make any sense outside of ambry if not include_partitions and path.count('/') > 1: continue # partition files if with_metadata: d = self.metadata(path) if d and 'identity' in d: d['identity'] = json.loads(d['identity']) else: d = {} d['caches'] = [self.repo_id] if path: l[path] = d return l
[ "def", "list", "(", "self", ",", "path", "=", "None", ",", "with_metadata", "=", "False", ",", "include_partitions", "=", "False", ")", ":", "import", "json", "sub_path", "=", "self", ".", "prefix", "+", "'/'", "+", "path", ".", "strip", "(", "'/'", ")", "if", "path", "else", "self", ".", "prefix", "l", "=", "{", "}", "for", "e", "in", "self", ".", "bucket", ".", "list", "(", "sub_path", ")", ":", "path", "=", "e", ".", "name", ".", "replace", "(", "self", ".", "prefix", ",", "''", ",", "1", ")", ".", "strip", "(", "'/'", ")", "if", "path", ".", "startswith", "(", "'_'", ")", "or", "path", ".", "startswith", "(", "'meta'", ")", ":", "continue", "# TODO 'include_partitions' doesn't make any sense outside of ambry", "if", "not", "include_partitions", "and", "path", ".", "count", "(", "'/'", ")", ">", "1", ":", "continue", "# partition files", "if", "with_metadata", ":", "d", "=", "self", ".", "metadata", "(", "path", ")", "if", "d", "and", "'identity'", "in", "d", ":", "d", "[", "'identity'", "]", "=", "json", ".", "loads", "(", "d", "[", "'identity'", "]", ")", "else", ":", "d", "=", "{", "}", "d", "[", "'caches'", "]", "=", "[", "self", ".", "repo_id", "]", "if", "path", ":", "l", "[", "path", "]", "=", "d", "return", "l" ]
Get a list of all of bundle files in the cache. Does not return partition files
[ "Get", "a", "list", "of", "all", "of", "bundle", "files", "in", "the", "cache", ".", "Does", "not", "return", "partition", "files" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/s3.py#L440-L470
242,313
klmitch/tendril
tendril/connection.py
Tendril._send_streamify
python
def _send_streamify(self, frame): """ Helper method to streamify a frame. """ # Get the state and framer state = self._send_framer_state framer = self._send_framer # Reset the state as needed state._reset(framer) # Now pass the frame through streamify() and return the result return framer.streamify(state, frame)
[ "def", "_send_streamify", "(", "self", ",", "frame", ")", ":", "# Get the state and framer", "state", "=", "self", ".", "_send_framer_state", "framer", "=", "self", ".", "_send_framer", "# Reset the state as needed", "state", ".", "_reset", "(", "framer", ")", "# Now pass the frame through streamify() and return the result", "return", "framer", ".", "streamify", "(", "state", ",", "frame", ")" ]
Helper method to streamify a frame.
[ "Helper", "method", "to", "streamify", "a", "frame", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L102-L115
242,314
klmitch/tendril
tendril/connection.py
Tendril._recv_frameify
python
def _recv_frameify(self, data): """ Helper method to frameify a stream. """ # Get the state and framer state = self._recv_framer_state framer = None # Grab off as many frames as we can frameify = None while True: # Check if we need to change framers if framer != self._recv_framer: # Notify the currently-running framer if frameify: try: frameify.throw(framers.FrameSwitch) except StopIteration: pass # Set up the new framer framer = self._recv_framer state._reset(framer) frameify = framer.frameify(state, data) data = '' # Now part of the state's buffer # Get the next frame try: frame = frameify.next() except StopIteration: # OK, we've extracted as many frames as we can break # OK, send the frame to the application if self._application: self._application.recv_frame(frame)
[ "def", "_recv_frameify", "(", "self", ",", "data", ")", ":", "# Get the state and framer", "state", "=", "self", ".", "_recv_framer_state", "framer", "=", "None", "# Grab off as many frames as we can", "frameify", "=", "None", "while", "True", ":", "# Check if we need to change framers", "if", "framer", "!=", "self", ".", "_recv_framer", ":", "# Notify the currently-running framer", "if", "frameify", ":", "try", ":", "frameify", ".", "throw", "(", "framers", ".", "FrameSwitch", ")", "except", "StopIteration", ":", "pass", "# Set up the new framer", "framer", "=", "self", ".", "_recv_framer", "state", ".", "_reset", "(", "framer", ")", "frameify", "=", "framer", ".", "frameify", "(", "state", ",", "data", ")", "data", "=", "''", "# Now part of the state's buffer", "# Get the next frame", "try", ":", "frame", "=", "frameify", ".", "next", "(", ")", "except", "StopIteration", ":", "# OK, we've extracted as many frames as we can", "break", "# OK, send the frame to the application", "if", "self", ".", "_application", ":", "self", ".", "_application", ".", "recv_frame", "(", "frame", ")" ]
Helper method to frameify a stream.
[ "Helper", "method", "to", "frameify", "a", "stream", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L117-L153
242,315
klmitch/tendril
tendril/connection.py
Tendril.closed
python
def closed(self, error=None): """ Notify the application that the connection has been closed. :param error: The exception which has caused the connection to be closed. If the connection has been closed due to an EOF, pass ``None``. """ if self._application: try: self._application.closed(error) except Exception: # Ignore exceptions from the notification pass
[ "def", "closed", "(", "self", ",", "error", "=", "None", ")", ":", "if", "self", ".", "_application", ":", "try", ":", "self", ".", "_application", ".", "closed", "(", "error", ")", "except", "Exception", ":", "# Ignore exceptions from the notification", "pass" ]
Notify the application that the connection has been closed. :param error: The exception which has caused the connection to be closed. If the connection has been closed due to an EOF, pass ``None``.
[ "Notify", "the", "application", "that", "the", "connection", "has", "been", "closed", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L169-L183
242,316
klmitch/tendril
tendril/connection.py
Tendril.send_framer
python
def send_framer(self, value): """ Set the framer in use for the sending side of the connection. The framer state will be reset next time the framer is used. """ if not isinstance(value, framers.Framer): raise ValueError("framer must be an instance of tendril.Framer") self._send_framer = value
[ "def", "send_framer", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "framers", ".", "Framer", ")", ":", "raise", "ValueError", "(", "\"framer must be an instance of tendril.Framer\"", ")", "self", ".", "_send_framer", "=", "value" ]
Set the framer in use for the sending side of the connection. The framer state will be reset next time the framer is used.
[ "Set", "the", "framer", "in", "use", "for", "the", "sending", "side", "of", "the", "connection", ".", "The", "framer", "state", "will", "be", "reset", "next", "time", "the", "framer", "is", "used", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L203-L212
242,317
klmitch/tendril
tendril/connection.py
Tendril.recv_framer
python
def recv_framer(self, value): """ Set the framer in use for the receiving side of the connection. The framer state will be reset next time the framer is used. """ if not isinstance(value, framers.Framer): raise ValueError("framer must be an instance of tendril.Framer") self._recv_framer = value
[ "def", "recv_framer", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "framers", ".", "Framer", ")", ":", "raise", "ValueError", "(", "\"framer must be an instance of tendril.Framer\"", ")", "self", ".", "_recv_framer", "=", "value" ]
Set the framer in use for the receiving side of the connection. The framer state will be reset next time the framer is used.
[ "Set", "the", "framer", "in", "use", "for", "the", "receiving", "side", "of", "the", "connection", ".", "The", "framer", "state", "will", "be", "reset", "next", "time", "the", "framer", "is", "used", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L243-L253
242,318
klmitch/tendril
tendril/connection.py
Tendril.framers
python
def framers(self, value): """ Set the framers in use for the connection. The framer states will be reset next time their respective framer is used. """ # Handle sequence values if isinstance(value, collections.Sequence): if len(value) != 2: raise ValueError('need exactly 2 values to unpack') elif (not isinstance(value[0], framers.Framer) or not isinstance(value[1], framers.Framer)): raise ValueError("framer must be an instance of " "tendril.Framer") self._send_framer, self._recv_framer = value # If we have a single value, assume it's a framer else: if not isinstance(value, framers.Framer): raise ValueError("framer must be an instance of " "tendril.Framer") self._send_framer = value self._recv_framer = value
[ "def", "framers", "(", "self", ",", "value", ")", ":", "# Handle sequence values", "if", "isinstance", "(", "value", ",", "collections", ".", "Sequence", ")", ":", "if", "len", "(", "value", ")", "!=", "2", ":", "raise", "ValueError", "(", "'need exactly 2 values to unpack'", ")", "elif", "(", "not", "isinstance", "(", "value", "[", "0", "]", ",", "framers", ".", "Framer", ")", "or", "not", "isinstance", "(", "value", "[", "1", "]", ",", "framers", ".", "Framer", ")", ")", ":", "raise", "ValueError", "(", "\"framer must be an instance of \"", "\"tendril.Framer\"", ")", "self", ".", "_send_framer", ",", "self", ".", "_recv_framer", "=", "value", "# If we have a single value, assume it's a framer", "else", ":", "if", "not", "isinstance", "(", "value", ",", "framers", ".", "Framer", ")", ":", "raise", "ValueError", "(", "\"framer must be an instance of \"", "\"tendril.Framer\"", ")", "self", ".", "_send_framer", "=", "value", "self", ".", "_recv_framer", "=", "value" ]
Set the framers in use for the connection. The framer states will be reset next time their respective framer is used.
[ "Set", "the", "framers", "in", "use", "for", "the", "connection", ".", "The", "framer", "states", "will", "be", "reset", "next", "time", "their", "respective", "framer", "is", "used", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L283-L307
242,319
klmitch/tendril
tendril/connection.py
Tendril.framers
python
def framers(self): """ Reset the framers in use for the connection to be a tendril.IdentityFramer. The framer states will be reset next time their respective framer is used. """ f = self.default_framer() self._send_framer = f self._recv_framer = f
[ "def", "framers", "(", "self", ")", ":", "f", "=", "self", ".", "default_framer", "(", ")", "self", ".", "_send_framer", "=", "f", "self", ".", "_recv_framer", "=", "f" ]
Reset the framers in use for the connection to be a tendril.IdentityFramer. The framer states will be reset next time their respective framer is used.
[ "Reset", "the", "framers", "in", "use", "for", "the", "connection", "to", "be", "a", "tendril", ".", "IdentityFramer", ".", "The", "framer", "states", "will", "be", "reset", "next", "time", "their", "respective", "framer", "is", "used", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L310-L319
242,320
klmitch/tendril
tendril/connection.py
Tendril.application
python
def application(self, value): """Update the application.""" # Always allow None if value is None: self._application = None return # Check that the state is valid if not isinstance(value, application.Application): raise ValueError("application must be an instance of " "tendril.Application") self._application = value
[ "def", "application", "(", "self", ",", "value", ")", ":", "# Always allow None", "if", "value", "is", "None", ":", "self", ".", "_application", "=", "None", "return", "# Check that the state is valid", "if", "not", "isinstance", "(", "value", ",", "application", ".", "Application", ")", ":", "raise", "ValueError", "(", "\"application must be an instance of \"", "\"tendril.Application\"", ")", "self", ".", "_application", "=", "value" ]
Update the application.
[ "Update", "the", "application", "." ]
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/connection.py#L337-L350
242,321
pop/pageup
pageup/pageup.py
build
python
def build(): """ Builds pages given template.jinja, style.css, and content.rst produces index.html. """ test_files() with open('content.rst') as f: content = publish_parts(f.read(), writer_name='html') title = content['title'] body = content['html_body'].replace('\n',' ') with open('template.jinja', 'r') as f: loader = FileSystemLoader(getcwd()) env= Environment(loader=loader) template = env.get_template('template.jinja') page = template.render(title=title, content=body) with open('index.html', 'w') as f: f.write(page)
[ "def", "build", "(", ")", ":", "test_files", "(", ")", "with", "open", "(", "'content.rst'", ")", "as", "f", ":", "content", "=", "publish_parts", "(", "f", ".", "read", "(", ")", ",", "writer_name", "=", "'html'", ")", "title", "=", "content", "[", "'title'", "]", "body", "=", "content", "[", "'html_body'", "]", ".", "replace", "(", "'\\n'", ",", "' '", ")", "with", "open", "(", "'template.jinja'", ",", "'r'", ")", "as", "f", ":", "loader", "=", "FileSystemLoader", "(", "getcwd", "(", ")", ")", "env", "=", "Environment", "(", "loader", "=", "loader", ")", "template", "=", "env", ".", "get_template", "(", "'template.jinja'", ")", "page", "=", "template", ".", "render", "(", "title", "=", "title", ",", "content", "=", "body", ")", "with", "open", "(", "'index.html'", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "page", ")" ]
Builds pages given template.jinja, style.css, and content.rst produces index.html.
[ "Builds", "pages", "given", "template", ".", "jinja", "style", ".", "css", "and", "content", ".", "rst", "produces", "index", ".", "html", "." ]
e78471d50517e1779e6e2a5ea961f2a2def7e5e8
https://github.com/pop/pageup/blob/e78471d50517e1779e6e2a5ea961f2a2def7e5e8/pageup/pageup.py#L34-L53
242,322
pop/pageup
pageup/pageup.py
init
python
def init(directory=None): """ Initializes a new site in the `directory` Current working dir if directory is None. """ if directory is not None and not path.exists(directory): makedirs(directory) else: print('%s already exists, populating with template files' % (directory)) directory = '' if not path.isfile(path.join(directory,'style.css')): grab('style.css', directory) print('Added sample style') if not path.isfile(path.join(directory,'template.jinja')): grab('template.jinja', directory) print('Added sample template.jinja') if not path.isfile(path.join(directory,'content.rst')): grab('content.rst', directory) print('Added sample content.rst')
[ "def", "init", "(", "directory", "=", "None", ")", ":", "if", "directory", "is", "not", "None", "and", "not", "path", ".", "exists", "(", "directory", ")", ":", "makedirs", "(", "directory", ")", "else", ":", "print", "(", "'%s already exists, populating with template files'", "%", "(", "directory", ")", ")", "directory", "=", "''", "if", "not", "path", ".", "isfile", "(", "path", ".", "join", "(", "directory", ",", "'style.css'", ")", ")", ":", "grab", "(", "'style.css'", ",", "directory", ")", "print", "(", "'Added sample style'", ")", "if", "not", "path", ".", "isfile", "(", "path", ".", "join", "(", "directory", ",", "'template.jinja'", ")", ")", ":", "grab", "(", "'template.jinja'", ",", "directory", ")", "print", "(", "'Added sample template.jinja'", ")", "if", "not", "path", ".", "isfile", "(", "path", ".", "join", "(", "directory", ",", "'content.rst'", ")", ")", ":", "grab", "(", "'content.rst'", ",", "directory", ")", "print", "(", "'Added sample content.rst'", ")" ]
Initializes a new site in the `directory` Current working dir if directory is None.
[ "Initializes", "a", "new", "site", "in", "the", "directory", "Current", "working", "dir", "if", "directory", "is", "None", "." ]
e78471d50517e1779e6e2a5ea961f2a2def7e5e8
https://github.com/pop/pageup/blob/e78471d50517e1779e6e2a5ea961f2a2def7e5e8/pageup/pageup.py#L56-L75
242,323
defnull/contexter
contexter.py
ExitStack.callback
python
def callback(self, callback, *args, **kwds): """ Registers an arbitrary callback and arguments. Cannot suppress exceptions. """ return self << _CloseDummy(callback, args, kwds)
[ "def", "callback", "(", "self", ",", "callback", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "return", "self", "<<", "_CloseDummy", "(", "callback", ",", "args", ",", "kwds", ")" ]
Registers an arbitrary callback and arguments. Cannot suppress exceptions.
[ "Registers", "an", "arbitrary", "callback", "and", "arguments", "." ]
12113375bd748f40a8a26e59847dbe291b957c0d
https://github.com/defnull/contexter/blob/12113375bd748f40a8a26e59847dbe291b957c0d/contexter.py#L151-L156
242,324
defnull/contexter
contexter.py
ExitStack.pop_all
python
def pop_all(self): """ Preserve the context stack by transferring it to a new instance """ ret = ExitStack() ret._context_stack.append(self._context_stack.pop()) self._context_stack.append([])
[ "def", "pop_all", "(", "self", ")", ":", "ret", "=", "ExitStack", "(", ")", "ret", ".", "_context_stack", ".", "append", "(", "self", ".", "_context_stack", ".", "pop", "(", ")", ")", "self", ".", "_context_stack", ".", "append", "(", "[", "]", ")" ]
Preserve the context stack by transferring it to a new instance
[ "Preserve", "the", "context", "stack", "by", "transferring", "it", "to", "a", "new", "instance" ]
12113375bd748f40a8a26e59847dbe291b957c0d
https://github.com/defnull/contexter/blob/12113375bd748f40a8a26e59847dbe291b957c0d/contexter.py#L158-L162
242,325
briancappello/flask-sqlalchemy-bundle
flask_sqlalchemy_bundle/base_model.py
BaseModel.update
python
def update(self, **kwargs): """Update fields on the model. :param kwargs: The model attribute values to update the model with. """ self.validate(**kwargs) for attr, value in kwargs.items(): setattr(self, attr, value) return self
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "validate", "(", "*", "*", "kwargs", ")", "for", "attr", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "value", ")", "return", "self" ]
Update fields on the model. :param kwargs: The model attribute values to update the model with.
[ "Update", "fields", "on", "the", "model", "." ]
8150896787907ef0001839b5a6ef303edccb9b6c
https://github.com/briancappello/flask-sqlalchemy-bundle/blob/8150896787907ef0001839b5a6ef303edccb9b6c/flask_sqlalchemy_bundle/base_model.py#L76-L84
242,326
briancappello/flask-sqlalchemy-bundle
flask_sqlalchemy_bundle/base_model.py
BaseModel.validate
python
def validate(cls, partial=True, **kwargs): """ Validate kwargs before setting attributes on the model """ data = kwargs if not partial: data = dict(**kwargs, **{col.name: None for col in cls.__table__.c if col.name not in kwargs}) errors = defaultdict(list) for name, value in data.items(): for validator in cls._get_validators(name): try: validator(value) except ValidationError as e: e.model = cls e.column = name errors[name].append(str(e)) if errors: raise ValidationErrors(errors)
[ "def", "validate", "(", "cls", ",", "partial", "=", "True", ",", "*", "*", "kwargs", ")", ":", "data", "=", "kwargs", "if", "not", "partial", ":", "data", "=", "dict", "(", "*", "*", "kwargs", ",", "*", "*", "{", "col", ".", "name", ":", "None", "for", "col", "in", "cls", ".", "__table__", ".", "c", "if", "col", ".", "name", "not", "in", "kwargs", "}", ")", "errors", "=", "defaultdict", "(", "list", ")", "for", "name", ",", "value", "in", "data", ".", "items", "(", ")", ":", "for", "validator", "in", "cls", ".", "_get_validators", "(", "name", ")", ":", "try", ":", "validator", "(", "value", ")", "except", "ValidationError", "as", "e", ":", "e", ".", "model", "=", "cls", "e", ".", "column", "=", "name", "errors", "[", "name", "]", ".", "append", "(", "str", "(", "e", ")", ")", "if", "errors", ":", "raise", "ValidationErrors", "(", "errors", ")" ]
Validate kwargs before setting attributes on the model
[ "Validate", "kwargs", "before", "setting", "attributes", "on", "the", "model" ]
8150896787907ef0001839b5a6ef303edccb9b6c
https://github.com/briancappello/flask-sqlalchemy-bundle/blob/8150896787907ef0001839b5a6ef303edccb9b6c/flask_sqlalchemy_bundle/base_model.py#L87-L107
242,327
racker/torment
torment/fixtures/__init__.py
of
python
def of(fixture_classes: Iterable[type], context: Union[None, 'torment.TestContext'] = None) -> Iterable['torment.fixtures.Fixture']: '''Obtain all Fixture objects of the provided classes. **Parameters** :``fixture_classes``: classes inheriting from ``torment.fixtures.Fixture`` :``context``: a ``torment.TestContext`` to initialize Fixtures with **Return Value(s)** Instantiated ``torment.fixtures.Fixture`` objects for each individual fixture class that inherits from one of the provided classes. ''' classes = list(copy.copy(fixture_classes)) fixtures = [] # type: Iterable[torment.fixtures.Fixture] while len(classes): current = classes.pop() subclasses = current.__subclasses__() if len(subclasses): classes.extend(subclasses) elif current not in fixture_classes: fixtures.append(current(context)) return fixtures
[ "def", "of", "(", "fixture_classes", ":", "Iterable", "[", "type", "]", ",", "context", ":", "Union", "[", "None", ",", "'torment.TestContext'", "]", "=", "None", ")", "->", "Iterable", "[", "'torment.fixtures.Fixture'", "]", ":", "classes", "=", "list", "(", "copy", ".", "copy", "(", "fixture_classes", ")", ")", "fixtures", "=", "[", "]", "# type: Iterable[torment.fixtures.Fixture]", "while", "len", "(", "classes", ")", ":", "current", "=", "classes", ".", "pop", "(", ")", "subclasses", "=", "current", ".", "__subclasses__", "(", ")", "if", "len", "(", "subclasses", ")", ":", "classes", ".", "extend", "(", "subclasses", ")", "elif", "current", "not", "in", "fixture_classes", ":", "fixtures", ".", "append", "(", "current", "(", "context", ")", ")", "return", "fixtures" ]
Obtain all Fixture objects of the provided classes. **Parameters** :``fixture_classes``: classes inheriting from ``torment.fixtures.Fixture`` :``context``: a ``torment.TestContext`` to initialize Fixtures with **Return Value(s)** Instantiated ``torment.fixtures.Fixture`` objects for each individual fixture class that inherits from one of the provided classes.
[ "Obtain", "all", "Fixture", "objects", "of", "the", "provided", "classes", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L293-L320
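Note: the `of` routine in the record above only instantiates leaf subclasses of the bases it is given. A minimal standalone sketch of that subclass walk, using throwaway class names rather than real torment fixtures:

class Base(object): pass            # stands in for a Fixture base passed to of()
class Leaf1(Base): pass
class Mid(Base): pass
class Leaf2(Mid): pass              # only Leaf1 and Leaf2 would be instantiated

queue, leaves = [Base], []
while queue:
    current = queue.pop()
    subclasses = current.__subclasses__()
    if subclasses:
        queue.extend(subclasses)    # intermediate classes are expanded, not kept
    elif current not in [Base]:
        leaves.append(current)      # of() would call current(context) here
print([cls.__name__ for cls in leaves])   # ['Leaf2', 'Leaf1']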
242,328
racker/torment
torment/fixtures/__init__.py
register
def register(namespace, base_classes: Tuple[type], properties: Dict[str, Any]) -> None: '''Register a Fixture class in namespace with the given properties. Creates a Fixture class (not object) and inserts it into the provided namespace. The properties is a dict but allows functions to reference other properties and acts like a small DSL (domain specific language). This is really just a declarative way to compose data about a test fixture and make it repeatable. Files calling this function are expected to house one or more Fixtures and have a name that ends with a UUID without its hyphens. For example: foo_38de9ceec5694c96ace90c9ca37e5bcb.py. This UUID is used to uniquely track the Fixture through the test suite and allow Fixtures to scale without concern. **Parameters** :``namespace``: dictionary to insert the generated class into :``base_classes``: list of classes the new class should inherit :``properties``: dictionary of properties with their values Properties can have the following forms: :functions: invoked with the Fixture as it's argument :classes: instantiated without any arguments (unless it subclasses ``torment.fixtures.Fixture`` in which case it's passed context) :literals: any standard python type (i.e. int, str, dict) .. note:: function execution may error (this will be emitted as a logging event). functions will continually be tried until they resolve or the same set of functions is continually erroring. These functions that failed to resolve are left in tact for later processing. Properties by the following names also have defined behavior: :description: added to the Fixture's description as an addendum :error: must be a dictionary with three keys: :class: class to instantiate (usually an exception) :args: arguments to pass to class initialization :kwargs: keyword arguments to pass to class initialization :mocks: dictionary mapping mock symbols to corresponding values Properties by the following names are reserved and should not be used: * name ''' # ensure we have a clean copy of the data # and won't stomp on re-uses elsewhere in # someone's code props = copy.deepcopy(properties) desc = props.pop('description', None) # type: Union[str, None] caller_frame = inspect.stack()[1] caller_file = caller_frame[1] caller_module = inspect.getmodule(caller_frame[0]) my_uuid = uuid.UUID(os.path.basename(caller_file).replace('.py', '').rsplit('_', 1)[-1]) class_name = _unique_class_name(namespace, my_uuid) @property def description(self) -> str: _ = super(self.__class__, self).description if desc is not None: _ += '—' + desc return _ def __init__(self, context: 'torment.TestContext') -> None: super(self.__class__, self).__init__(context) functions = {} for name, value in props.items(): if name == 'error': self.error = value['class'](*value.get('args', ()), **value.get('kwargs', {})) continue if inspect.isclass(value): if issubclass(value, Fixture): value = value(self.context) else: value = value() if inspect.isfunction(value): functions[name] = value continue setattr(self, name, value) _resolve_functions(functions, self) self.initialize() def setup(self) -> None: if hasattr(self, 'mocks'): logger.debug('self.mocks: %s', self.mocks) for mock_symbol, mock_result in self.mocks.items(): if _find_mocker(mock_symbol, self.context)(): _prepare_mock(self.context, mock_symbol, **mock_result) super(self.__class__, self).setup() namespace[class_name] = type(class_name, base_classes, { 'description': description, '__init__': __init__, '__module__': caller_module, 'setup': setup, 'uuid': my_uuid, })
python
def register(namespace, base_classes: Tuple[type], properties: Dict[str, Any]) -> None: '''Register a Fixture class in namespace with the given properties. Creates a Fixture class (not object) and inserts it into the provided namespace. The properties is a dict but allows functions to reference other properties and acts like a small DSL (domain specific language). This is really just a declarative way to compose data about a test fixture and make it repeatable. Files calling this function are expected to house one or more Fixtures and have a name that ends with a UUID without its hyphens. For example: foo_38de9ceec5694c96ace90c9ca37e5bcb.py. This UUID is used to uniquely track the Fixture through the test suite and allow Fixtures to scale without concern. **Parameters** :``namespace``: dictionary to insert the generated class into :``base_classes``: list of classes the new class should inherit :``properties``: dictionary of properties with their values Properties can have the following forms: :functions: invoked with the Fixture as it's argument :classes: instantiated without any arguments (unless it subclasses ``torment.fixtures.Fixture`` in which case it's passed context) :literals: any standard python type (i.e. int, str, dict) .. note:: function execution may error (this will be emitted as a logging event). functions will continually be tried until they resolve or the same set of functions is continually erroring. These functions that failed to resolve are left in tact for later processing. Properties by the following names also have defined behavior: :description: added to the Fixture's description as an addendum :error: must be a dictionary with three keys: :class: class to instantiate (usually an exception) :args: arguments to pass to class initialization :kwargs: keyword arguments to pass to class initialization :mocks: dictionary mapping mock symbols to corresponding values Properties by the following names are reserved and should not be used: * name ''' # ensure we have a clean copy of the data # and won't stomp on re-uses elsewhere in # someone's code props = copy.deepcopy(properties) desc = props.pop('description', None) # type: Union[str, None] caller_frame = inspect.stack()[1] caller_file = caller_frame[1] caller_module = inspect.getmodule(caller_frame[0]) my_uuid = uuid.UUID(os.path.basename(caller_file).replace('.py', '').rsplit('_', 1)[-1]) class_name = _unique_class_name(namespace, my_uuid) @property def description(self) -> str: _ = super(self.__class__, self).description if desc is not None: _ += '—' + desc return _ def __init__(self, context: 'torment.TestContext') -> None: super(self.__class__, self).__init__(context) functions = {} for name, value in props.items(): if name == 'error': self.error = value['class'](*value.get('args', ()), **value.get('kwargs', {})) continue if inspect.isclass(value): if issubclass(value, Fixture): value = value(self.context) else: value = value() if inspect.isfunction(value): functions[name] = value continue setattr(self, name, value) _resolve_functions(functions, self) self.initialize() def setup(self) -> None: if hasattr(self, 'mocks'): logger.debug('self.mocks: %s', self.mocks) for mock_symbol, mock_result in self.mocks.items(): if _find_mocker(mock_symbol, self.context)(): _prepare_mock(self.context, mock_symbol, **mock_result) super(self.__class__, self).setup() namespace[class_name] = type(class_name, base_classes, { 'description': description, '__init__': __init__, '__module__': caller_module, 'setup': setup, 'uuid': my_uuid, })
[ "def", "register", "(", "namespace", ",", "base_classes", ":", "Tuple", "[", "type", "]", ",", "properties", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "# ensure we have a clean copy of the data", "# and won't stomp on re-uses elsewhere in", "# someone's code", "props", "=", "copy", ".", "deepcopy", "(", "properties", ")", "desc", "=", "props", ".", "pop", "(", "'description'", ",", "None", ")", "# type: Union[str, None]", "caller_frame", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "caller_file", "=", "caller_frame", "[", "1", "]", "caller_module", "=", "inspect", ".", "getmodule", "(", "caller_frame", "[", "0", "]", ")", "my_uuid", "=", "uuid", ".", "UUID", "(", "os", ".", "path", ".", "basename", "(", "caller_file", ")", ".", "replace", "(", "'.py'", ",", "''", ")", ".", "rsplit", "(", "'_'", ",", "1", ")", "[", "-", "1", "]", ")", "class_name", "=", "_unique_class_name", "(", "namespace", ",", "my_uuid", ")", "@", "property", "def", "description", "(", "self", ")", "->", "str", ":", "_", "=", "super", "(", "self", ".", "__class__", ",", "self", ")", ".", "description", "if", "desc", "is", "not", "None", ":", "_", "+=", "'—' +", "d", "sc", "return", "_", "def", "__init__", "(", "self", ",", "context", ":", "'torment.TestContext'", ")", "->", "None", ":", "super", "(", "self", ".", "__class__", ",", "self", ")", ".", "__init__", "(", "context", ")", "functions", "=", "{", "}", "for", "name", ",", "value", "in", "props", ".", "items", "(", ")", ":", "if", "name", "==", "'error'", ":", "self", ".", "error", "=", "value", "[", "'class'", "]", "(", "*", "value", ".", "get", "(", "'args'", ",", "(", ")", ")", ",", "*", "*", "value", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "continue", "if", "inspect", ".", "isclass", "(", "value", ")", ":", "if", "issubclass", "(", "value", ",", "Fixture", ")", ":", "value", "=", "value", "(", "self", ".", "context", ")", "else", ":", "value", "=", "value", "(", ")", "if", "inspect", ".", "isfunction", "(", "value", ")", ":", "functions", "[", "name", "]", "=", "value", "continue", "setattr", "(", "self", ",", "name", ",", "value", ")", "_resolve_functions", "(", "functions", ",", "self", ")", "self", ".", "initialize", "(", ")", "def", "setup", "(", "self", ")", "->", "None", ":", "if", "hasattr", "(", "self", ",", "'mocks'", ")", ":", "logger", ".", "debug", "(", "'self.mocks: %s'", ",", "self", ".", "mocks", ")", "for", "mock_symbol", ",", "mock_result", "in", "self", ".", "mocks", ".", "items", "(", ")", ":", "if", "_find_mocker", "(", "mock_symbol", ",", "self", ".", "context", ")", "(", ")", ":", "_prepare_mock", "(", "self", ".", "context", ",", "mock_symbol", ",", "*", "*", "mock_result", ")", "super", "(", "self", ".", "__class__", ",", "self", ")", ".", "setup", "(", ")", "namespace", "[", "class_name", "]", "=", "type", "(", "class_name", ",", "base_classes", ",", "{", "'description'", ":", "description", ",", "'__init__'", ":", "__init__", ",", "'__module__'", ":", "caller_module", ",", "'setup'", ":", "setup", ",", "'uuid'", ":", "my_uuid", ",", "}", ")" ]
Register a Fixture class in namespace with the given properties. Creates a Fixture class (not object) and inserts it into the provided namespace. The properties argument is a dict but allows functions to reference other properties and acts like a small DSL (domain specific language). This is really just a declarative way to compose data about a test fixture and make it repeatable. Files calling this function are expected to house one or more Fixtures and have a name that ends with a UUID without its hyphens. For example: foo_38de9ceec5694c96ace90c9ca37e5bcb.py. This UUID is used to uniquely track the Fixture through the test suite and allow Fixtures to scale without concern. **Parameters** :``namespace``: dictionary to insert the generated class into :``base_classes``: list of classes the new class should inherit :``properties``: dictionary of properties with their values Properties can have the following forms: :functions: invoked with the Fixture as its argument :classes: instantiated without any arguments (unless it subclasses ``torment.fixtures.Fixture`` in which case it's passed context) :literals: any standard Python type (e.g. int, str, dict) .. note:: function execution may error (this will be emitted as a logging event). functions will continually be tried until they resolve or the same set of functions is continually erroring. These functions that failed to resolve are left intact for later processing. Properties by the following names also have defined behavior: :description: added to the Fixture's description as an addendum :error: must be a dictionary with three keys: :class: class to instantiate (usually an exception) :args: arguments to pass to class initialization :kwargs: keyword arguments to pass to class initialization :mocks: dictionary mapping mock symbols to corresponding values Properties by the following names are reserved and should not be used: * name
[ "Register", "a", "Fixture", "class", "in", "namespace", "with", "the", "given", "properties", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L323-L438
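Note: a hypothetical fixture module built on the `register` factory above. The property names (`parameters`, `expected`) and the base class are illustrative only, and the file itself would need the UUID-suffixed name the docstring describes (e.g. foo_38de9ceec5694c96ace90c9ca37e5bcb.py), so this is a sketch rather than a standalone script:

from torment import fixtures

def expected(fixture):                      # function property: called with the Fixture,
    return fixture.parameters['value'] * 2  # may reference other properties lazily

fixtures.register(globals(), (fixtures.Fixture,), {
    'description': 'doubles the input value',   # appended to the generated description
    'parameters': {'value': 21},                # literal property, copied onto the Fixture
    'expected': expected,                       # resolved by _resolve_functions at init
})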
242,329
racker/torment
torment/fixtures/__init__.py
_prepare_mock
def _prepare_mock(context: 'torment.contexts.TestContext', symbol: str, return_value = None, side_effect = None) -> None: '''Sets return value or side effect of symbol's mock in context. .. seealso:: :py:func:`_find_mocker` **Parameters** :``context``: the search context :``symbol``: the symbol to be located :``return_value``: pass through to mock ``return_value`` :``side_effect``: pass through to mock ``side_effect`` ''' methods = symbol.split('.') index = len(methods) mock = None while index > 0: name = 'mocked_' + '_'.join(methods[:index]).lower() logger.debug('name: %s', name) if hasattr(context, name): mock = getattr(context, name) break index -= 1 logger.debug('mock: %s', mock) if mock is not None: mock = functools.reduce(getattr, methods[index:], mock) logger.debug('mock: %s', mock) if return_value is not None: mock.return_value = return_value if side_effect is not None: mock.side_effect = side_effect mock.reset_mock()
python
def _prepare_mock(context: 'torment.contexts.TestContext', symbol: str, return_value = None, side_effect = None) -> None: '''Sets return value or side effect of symbol's mock in context. .. seealso:: :py:func:`_find_mocker` **Parameters** :``context``: the search context :``symbol``: the symbol to be located :``return_value``: pass through to mock ``return_value`` :``side_effect``: pass through to mock ``side_effect`` ''' methods = symbol.split('.') index = len(methods) mock = None while index > 0: name = 'mocked_' + '_'.join(methods[:index]).lower() logger.debug('name: %s', name) if hasattr(context, name): mock = getattr(context, name) break index -= 1 logger.debug('mock: %s', mock) if mock is not None: mock = functools.reduce(getattr, methods[index:], mock) logger.debug('mock: %s', mock) if return_value is not None: mock.return_value = return_value if side_effect is not None: mock.side_effect = side_effect mock.reset_mock()
[ "def", "_prepare_mock", "(", "context", ":", "'torment.contexts.TestContext'", ",", "symbol", ":", "str", ",", "return_value", "=", "None", ",", "side_effect", "=", "None", ")", "->", "None", ":", "methods", "=", "symbol", ".", "split", "(", "'.'", ")", "index", "=", "len", "(", "methods", ")", "mock", "=", "None", "while", "index", ">", "0", ":", "name", "=", "'mocked_'", "+", "'_'", ".", "join", "(", "methods", "[", ":", "index", "]", ")", ".", "lower", "(", ")", "logger", ".", "debug", "(", "'name: %s'", ",", "name", ")", "if", "hasattr", "(", "context", ",", "name", ")", ":", "mock", "=", "getattr", "(", "context", ",", "name", ")", "break", "index", "-=", "1", "logger", ".", "debug", "(", "'mock: %s'", ",", "mock", ")", "if", "mock", "is", "not", "None", ":", "mock", "=", "functools", ".", "reduce", "(", "getattr", ",", "methods", "[", "index", ":", "]", ",", "mock", ")", "logger", ".", "debug", "(", "'mock: %s'", ",", "mock", ")", "if", "return_value", "is", "not", "None", ":", "mock", ".", "return_value", "=", "return_value", "if", "side_effect", "is", "not", "None", ":", "mock", ".", "side_effect", "=", "side_effect", "mock", ".", "reset_mock", "(", ")" ]
Sets return value or side effect of symbol's mock in context. .. seealso:: :py:func:`_find_mocker` **Parameters** :``context``: the search context :``symbol``: the symbol to be located :``return_value``: pass through to mock ``return_value`` :``side_effect``: pass through to mock ``side_effect``
[ "Sets", "return", "value", "or", "side", "effect", "of", "symbol", "s", "mock", "in", "context", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L441-L482
242,330
racker/torment
torment/fixtures/__init__.py
_find_mocker
def _find_mocker(symbol: str, context: 'torment.contexts.TestContext') -> Callable[[], bool]: '''Find method within the context that mocks symbol. Given a symbol (i.e. ``tornado.httpclient.AsyncHTTPClient.fetch``), find the shortest ``mock_`` method that resembles the symbol. Resembles means the lowercased and periods replaced with underscores. If no match is found, a dummy function (only returns False) is returned. **Parameters** :``symbol``: the symbol to be located :``context``: the search context **Return Value(s)** The method used to mock the symbol. **Examples** Assuming the symbol is ``tornado.httpclient.AsyncHTTPClient.fetch``, the first of the following methods would be returned: * ``mock_tornado`` * ``mock_tornado_httpclient`` * ``mock_tornado_httpclient_asynchttpclient`` * ``mock_tornado_httpclient_asynchttpclient_fetch`` ''' components = [] method = None for component in symbol.split('.'): components.append(component.lower()) name = '_'.join([ 'mock' ] + components) if hasattr(context, name): method = getattr(context, name) break if method is None: logger.warn('no mocker for %s', symbol) def noop(*args, **kwargs): return False method = noop return method
python
def _find_mocker(symbol: str, context: 'torment.contexts.TestContext') -> Callable[[], bool]: '''Find method within the context that mocks symbol. Given a symbol (i.e. ``tornado.httpclient.AsyncHTTPClient.fetch``), find the shortest ``mock_`` method that resembles the symbol. Resembles means the lowercased and periods replaced with underscores. If no match is found, a dummy function (only returns False) is returned. **Parameters** :``symbol``: the symbol to be located :``context``: the search context **Return Value(s)** The method used to mock the symbol. **Examples** Assuming the symbol is ``tornado.httpclient.AsyncHTTPClient.fetch``, the first of the following methods would be returned: * ``mock_tornado`` * ``mock_tornado_httpclient`` * ``mock_tornado_httpclient_asynchttpclient`` * ``mock_tornado_httpclient_asynchttpclient_fetch`` ''' components = [] method = None for component in symbol.split('.'): components.append(component.lower()) name = '_'.join([ 'mock' ] + components) if hasattr(context, name): method = getattr(context, name) break if method is None: logger.warn('no mocker for %s', symbol) def noop(*args, **kwargs): return False method = noop return method
[ "def", "_find_mocker", "(", "symbol", ":", "str", ",", "context", ":", "'torment.contexts.TestContext'", ")", "->", "Callable", "[", "[", "]", ",", "bool", "]", ":", "components", "=", "[", "]", "method", "=", "None", "for", "component", "in", "symbol", ".", "split", "(", "'.'", ")", ":", "components", ".", "append", "(", "component", ".", "lower", "(", ")", ")", "name", "=", "'_'", ".", "join", "(", "[", "'mock'", "]", "+", "components", ")", "if", "hasattr", "(", "context", ",", "name", ")", ":", "method", "=", "getattr", "(", "context", ",", "name", ")", "break", "if", "method", "is", "None", ":", "logger", ".", "warn", "(", "'no mocker for %s'", ",", "symbol", ")", "def", "noop", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "False", "method", "=", "noop", "return", "method" ]
Find method within the context that mocks symbol. Given a symbol (e.g. ``tornado.httpclient.AsyncHTTPClient.fetch``), find the shortest ``mock_`` method that resembles the symbol. Resembles means the symbol lowercased, with periods replaced by underscores. If no match is found, a dummy function (only returns False) is returned. **Parameters** :``symbol``: the symbol to be located :``context``: the search context **Return Value(s)** The method used to mock the symbol. **Examples** Assuming the symbol is ``tornado.httpclient.AsyncHTTPClient.fetch``, the first of the following methods would be returned: * ``mock_tornado`` * ``mock_tornado_httpclient`` * ``mock_tornado_httpclient_asynchttpclient`` * ``mock_tornado_httpclient_asynchttpclient_fetch``
[ "Find", "method", "within", "the", "context", "that", "mocks", "symbol", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L485-L534
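Note: the lookup order described in the docstring can be reproduced with a few lines of plain Python; this standalone sketch just prints the candidate method names `_find_mocker` probes on the context:

symbol = 'tornado.httpclient.AsyncHTTPClient.fetch'
components = []
for component in symbol.split('.'):
    components.append(component.lower())
    print('_'.join(['mock'] + components))
# mock_tornado
# mock_tornado_httpclient
# mock_tornado_httpclient_asynchttpclient
# mock_tornado_httpclient_asynchttpclient_fetch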
242,331
racker/torment
torment/fixtures/__init__.py
_resolve_functions
def _resolve_functions(functions: Dict[str, Callable[[Any], Any]], fixture: Fixture) -> None: '''Apply functions and collect values as properties on fixture. Call functions and apply their values as properteis on fixture. Functions will continue to get applied until no more functions resolve. All unresolved functions are logged and the last exception to have occurred is also logged. This function does not return but adds the results to fixture directly. **Parameters** :``functions``: dict mapping function names (property names) to callable functions :``fixture``: Fixture to add values to ''' exc_info = last_function = None function_count = float('inf') while function_count > len(functions): function_count = len(functions) for name, function in copy.copy(functions).items(): try: setattr(fixture, name, copy.deepcopy(function(fixture))) del functions[name] except: exc_info = sys.exc_info() logger.debug('name: %s', name) logger.debug('exc_info: %s', exc_info) last_function = name if len(functions): logger.warning('unprocessed Fixture properties: %s', ','.join(functions.keys())) logger.warning('last exception from %s.%s:', fixture.name, last_function, exc_info = exc_info) setattr(fixture, '_last_resolver_exception', ( last_function, exc_info, )) for name, function in copy.copy(functions).items(): setattr(fixture, name, function)
python
def _resolve_functions(functions: Dict[str, Callable[[Any], Any]], fixture: Fixture) -> None: '''Apply functions and collect values as properties on fixture. Call functions and apply their values as properteis on fixture. Functions will continue to get applied until no more functions resolve. All unresolved functions are logged and the last exception to have occurred is also logged. This function does not return but adds the results to fixture directly. **Parameters** :``functions``: dict mapping function names (property names) to callable functions :``fixture``: Fixture to add values to ''' exc_info = last_function = None function_count = float('inf') while function_count > len(functions): function_count = len(functions) for name, function in copy.copy(functions).items(): try: setattr(fixture, name, copy.deepcopy(function(fixture))) del functions[name] except: exc_info = sys.exc_info() logger.debug('name: %s', name) logger.debug('exc_info: %s', exc_info) last_function = name if len(functions): logger.warning('unprocessed Fixture properties: %s', ','.join(functions.keys())) logger.warning('last exception from %s.%s:', fixture.name, last_function, exc_info = exc_info) setattr(fixture, '_last_resolver_exception', ( last_function, exc_info, )) for name, function in copy.copy(functions).items(): setattr(fixture, name, function)
[ "def", "_resolve_functions", "(", "functions", ":", "Dict", "[", "str", ",", "Callable", "[", "[", "Any", "]", ",", "Any", "]", "]", ",", "fixture", ":", "Fixture", ")", "->", "None", ":", "exc_info", "=", "last_function", "=", "None", "function_count", "=", "float", "(", "'inf'", ")", "while", "function_count", ">", "len", "(", "functions", ")", ":", "function_count", "=", "len", "(", "functions", ")", "for", "name", ",", "function", "in", "copy", ".", "copy", "(", "functions", ")", ".", "items", "(", ")", ":", "try", ":", "setattr", "(", "fixture", ",", "name", ",", "copy", ".", "deepcopy", "(", "function", "(", "fixture", ")", ")", ")", "del", "functions", "[", "name", "]", "except", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "logger", ".", "debug", "(", "'name: %s'", ",", "name", ")", "logger", ".", "debug", "(", "'exc_info: %s'", ",", "exc_info", ")", "last_function", "=", "name", "if", "len", "(", "functions", ")", ":", "logger", ".", "warning", "(", "'unprocessed Fixture properties: %s'", ",", "','", ".", "join", "(", "functions", ".", "keys", "(", ")", ")", ")", "logger", ".", "warning", "(", "'last exception from %s.%s:'", ",", "fixture", ".", "name", ",", "last_function", ",", "exc_info", "=", "exc_info", ")", "setattr", "(", "fixture", ",", "'_last_resolver_exception'", ",", "(", "last_function", ",", "exc_info", ",", ")", ")", "for", "name", ",", "function", "in", "copy", ".", "copy", "(", "functions", ")", ".", "items", "(", ")", ":", "setattr", "(", "fixture", ",", "name", ",", "function", ")" ]
Apply functions and collect values as properties on fixture. Call functions and apply their values as properties on fixture. Functions will continue to get applied until no more functions resolve. All unresolved functions are logged and the last exception to have occurred is also logged. This function does not return but adds the results to fixture directly. **Parameters** :``functions``: dict mapping function names (property names) to callable functions :``fixture``: Fixture to add values to
[ "Apply", "functions", "and", "collect", "values", "as", "properties", "on", "fixture", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L537-L579
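Note: why the resolution loop is iterative is easiest to see with two properties where one depends on the other. This is a standalone sketch of the same retry-until-stable idea, not the torment code itself:

import copy

class Holder(object):
    pass

fixture = Holder()
functions = {
    'total': lambda f: sum(f.items),   # fails on the first pass: 'items' not set yet
    'items': lambda f: [1, 2, 3],
}
count = float('inf')
while count > len(functions):
    count = len(functions)
    for name, fn in copy.copy(functions).items():
        try:
            setattr(fixture, name, copy.deepcopy(fn(fixture)))
            del functions[name]
        except Exception:
            pass                        # retried on the next pass
print(fixture.items, fixture.total)     # [1, 2, 3] 6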
242,332
racker/torment
torment/fixtures/__init__.py
_unique_class_name
def _unique_class_name(namespace: Dict[str, Any], uuid: uuid.UUID) -> str: '''Generate unique to namespace name for a class using uuid. **Parameters** :``namespace``: the namespace to verify uniqueness against :``uuid``: the "unique" portion of the name **Return Value(s)** A unique string (in namespace) using uuid. ''' count = 0 name = original_name = 'f_' + uuid.hex while name in namespace: count += 1 name = original_name + '_' + str(count) return name
python
def _unique_class_name(namespace: Dict[str, Any], uuid: uuid.UUID) -> str: '''Generate unique to namespace name for a class using uuid. **Parameters** :``namespace``: the namespace to verify uniqueness against :``uuid``: the "unique" portion of the name **Return Value(s)** A unique string (in namespace) using uuid. ''' count = 0 name = original_name = 'f_' + uuid.hex while name in namespace: count += 1 name = original_name + '_' + str(count) return name
[ "def", "_unique_class_name", "(", "namespace", ":", "Dict", "[", "str", ",", "Any", "]", ",", "uuid", ":", "uuid", ".", "UUID", ")", "->", "str", ":", "count", "=", "0", "name", "=", "original_name", "=", "'f_'", "+", "uuid", ".", "hex", "while", "name", "in", "namespace", ":", "count", "+=", "1", "name", "=", "original_name", "+", "'_'", "+", "str", "(", "count", ")", "return", "name" ]
Generate a class name that is unique within namespace, using uuid. **Parameters** :``namespace``: the namespace to verify uniqueness against :``uuid``: the "unique" portion of the name **Return Value(s)** A unique string (in namespace) using uuid.
[ "Generate", "unique", "to", "namespace", "name", "for", "a", "class", "using", "uuid", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L582-L603
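Note: a small standalone illustration of the naming scheme above: 'f_' plus the UUID hex, with a numeric suffix appended on collision:

import uuid

u = uuid.UUID('38de9cee-c569-4c96-ace9-0c9ca37e5bcb')
namespace = {'f_' + u.hex: object}          # pretend the plain name is already taken
name = original = 'f_' + u.hex
count = 0
while name in namespace:
    count += 1
    name = original + '_' + str(count)
print(name)                                 # f_38de9ceec5694c96ace90c9ca37e5bcb_1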
242,333
racker/torment
torment/fixtures/__init__.py
ErrorFixture.run
def run(self) -> None: '''Calls sibling with exception expectation.''' with self.context.assertRaises(self.error.__class__) as error: super().run() self.exception = error.exception
python
def run(self) -> None: '''Calls sibling with exception expectation.''' with self.context.assertRaises(self.error.__class__) as error: super().run() self.exception = error.exception
[ "def", "run", "(", "self", ")", "->", "None", ":", "with", "self", ".", "context", ".", "assertRaises", "(", "self", ".", "error", ".", "__class__", ")", "as", "error", ":", "super", "(", ")", ".", "run", "(", ")", "self", ".", "exception", "=", "error", ".", "exception" ]
Calls sibling with exception expectation.
[ "Calls", "sibling", "with", "exception", "expectation", "." ]
bd5d2f978324bf9b7360edfae76d853b226c63e1
https://github.com/racker/torment/blob/bd5d2f978324bf9b7360edfae76d853b226c63e1/torment/fixtures/__init__.py#L283-L289
242,334
jalanb/pysyte
pysyte/getch.py
cache_keys
def cache_keys(keys): """Allow debugging via PyCharm""" d = known_keys() known_names = dict(zip(d.values(), d.keys())) for k in keys: i = (ord(k),) if len(k) == 1 else known_names[k] _key_cache.insert(0, i)
python
def cache_keys(keys): """Allow debugging via PyCharm""" d = known_keys() known_names = dict(zip(d.values(), d.keys())) for k in keys: i = (ord(k),) if len(k) == 1 else known_names[k] _key_cache.insert(0, i)
[ "def", "cache_keys", "(", "keys", ")", ":", "d", "=", "known_keys", "(", ")", "known_names", "=", "dict", "(", "zip", "(", "d", ".", "values", "(", ")", ",", "d", ".", "keys", "(", ")", ")", ")", "for", "k", "in", "keys", ":", "i", "=", "(", "ord", "(", "k", ")", ",", ")", "if", "len", "(", "k", ")", "==", "1", "else", "known_names", "[", "k", "]", "_key_cache", ".", "insert", "(", "0", ",", "i", ")" ]
Allow debugging via PyCharm
[ "Allow", "debugging", "via", "PyCharm" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/getch.py#L92-L98
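Note: a usage sketch, assuming pysyte is installed. Keys seeded through `cache_keys` are consumed by `get_key` before any terminal read, which is what makes stepping through key handling in a debugger practical:

from pysyte import getch

getch.cache_keys('abc')      # pre-seed three plain keys
print(getch.get_key())       # 'a' - served from the cache, no terminal read needed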
242,335
jalanb/pysyte
pysyte/getch.py
_get_keycodes
def _get_keycodes(): """Read keypress giving a tuple of key codes A 'key code' is the ordinal value of characters read For example, pressing 'A' will give (65,) """ try: return _key_cache.pop() except IndexError: pass result = [] terminators = 'ABCDFHPQRS~' with TerminalContext(): code = get_ord() result.append(code) if code == 27: with TimerContext(0.1) as timer: code = get_ord() if not timer.timed_out: result.append(code) result.append(get_ord()) if 64 < result[-1] < 69: pass elif result[1] == 91: while True: code = get_ord() result.append(code) if chr(code) in terminators: break return tuple(result)
python
def _get_keycodes(): """Read keypress giving a tuple of key codes A 'key code' is the ordinal value of characters read For example, pressing 'A' will give (65,) """ try: return _key_cache.pop() except IndexError: pass result = [] terminators = 'ABCDFHPQRS~' with TerminalContext(): code = get_ord() result.append(code) if code == 27: with TimerContext(0.1) as timer: code = get_ord() if not timer.timed_out: result.append(code) result.append(get_ord()) if 64 < result[-1] < 69: pass elif result[1] == 91: while True: code = get_ord() result.append(code) if chr(code) in terminators: break return tuple(result)
[ "def", "_get_keycodes", "(", ")", ":", "try", ":", "return", "_key_cache", ".", "pop", "(", ")", "except", "IndexError", ":", "pass", "result", "=", "[", "]", "terminators", "=", "'ABCDFHPQRS~'", "with", "TerminalContext", "(", ")", ":", "code", "=", "get_ord", "(", ")", "result", ".", "append", "(", "code", ")", "if", "code", "==", "27", ":", "with", "TimerContext", "(", "0.1", ")", "as", "timer", ":", "code", "=", "get_ord", "(", ")", "if", "not", "timer", ".", "timed_out", ":", "result", ".", "append", "(", "code", ")", "result", ".", "append", "(", "get_ord", "(", ")", ")", "if", "64", "<", "result", "[", "-", "1", "]", "<", "69", ":", "pass", "elif", "result", "[", "1", "]", "==", "91", ":", "while", "True", ":", "code", "=", "get_ord", "(", ")", "result", ".", "append", "(", "code", ")", "if", "chr", "(", "code", ")", "in", "terminators", ":", "break", "return", "tuple", "(", "result", ")" ]
Read keypress giving a tuple of key codes A 'key code' is the ordinal value of characters read For example, pressing 'A' will give (65,)
[ "Read", "keypress", "giving", "a", "tuple", "of", "key", "codes" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/getch.py#L101-L131
242,336
jalanb/pysyte
pysyte/getch.py
get_key
def get_key(): """Get a key from the keyboard as a string A 'key' will be a single char, or the name of an extended key """ character_name = chr codes = _get_keycodes() if len(codes) == 1: code = codes[0] if code >= 32: return character_name(code) return control_key_name(code) return get_extended_key_name(codes)
python
def get_key(): """Get a key from the keyboard as a string A 'key' will be a single char, or the name of an extended key """ character_name = chr codes = _get_keycodes() if len(codes) == 1: code = codes[0] if code >= 32: return character_name(code) return control_key_name(code) return get_extended_key_name(codes)
[ "def", "get_key", "(", ")", ":", "character_name", "=", "chr", "codes", "=", "_get_keycodes", "(", ")", "if", "len", "(", "codes", ")", "==", "1", ":", "code", "=", "codes", "[", "0", "]", "if", "code", ">=", "32", ":", "return", "character_name", "(", "code", ")", "return", "control_key_name", "(", "code", ")", "return", "get_extended_key_name", "(", "codes", ")" ]
Get a key from the keyboard as a string A 'key' will be a single char, or the name of an extended key
[ "Get", "a", "key", "from", "the", "keyboard", "as", "a", "string" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/getch.py#L155-L167
242,337
unitedstack/steth
stetho/agent/api.py
AgentApi.check_ports_on_br
def check_ports_on_br(self, bridge='br-ex', ports=['eth3']): """Check ports exist on bridge. ovs-vsctl list-ports bridge """ LOG.info("RPC: check_ports_on_br bridge: %s, ports: %s" % (bridge, ports)) cmd = ['ovs-vsctl', 'list-ports', bridge] stdcode, stdout = agent_utils.execute(cmd, root=True) data = dict() if stdcode == 0: for port in ports: if port in stdout: data[port] = True stdout.remove(port) else: data[port] = False return agent_utils.make_response(code=stdcode, data=data) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
python
def check_ports_on_br(self, bridge='br-ex', ports=['eth3']): """Check ports exist on bridge. ovs-vsctl list-ports bridge """ LOG.info("RPC: check_ports_on_br bridge: %s, ports: %s" % (bridge, ports)) cmd = ['ovs-vsctl', 'list-ports', bridge] stdcode, stdout = agent_utils.execute(cmd, root=True) data = dict() if stdcode == 0: for port in ports: if port in stdout: data[port] = True stdout.remove(port) else: data[port] = False return agent_utils.make_response(code=stdcode, data=data) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
[ "def", "check_ports_on_br", "(", "self", ",", "bridge", "=", "'br-ex'", ",", "ports", "=", "[", "'eth3'", "]", ")", ":", "LOG", ".", "info", "(", "\"RPC: check_ports_on_br bridge: %s, ports: %s\"", "%", "(", "bridge", ",", "ports", ")", ")", "cmd", "=", "[", "'ovs-vsctl'", ",", "'list-ports'", ",", "bridge", "]", "stdcode", ",", "stdout", "=", "agent_utils", ".", "execute", "(", "cmd", ",", "root", "=", "True", ")", "data", "=", "dict", "(", ")", "if", "stdcode", "==", "0", ":", "for", "port", "in", "ports", ":", "if", "port", "in", "stdout", ":", "data", "[", "port", "]", "=", "True", "stdout", ".", "remove", "(", "port", ")", "else", ":", "data", "[", "port", "]", "=", "False", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ",", "data", "=", "data", ")", "# execute failed.", "message", "=", "stdout", ".", "pop", "(", "0", ")", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ",", "message", "=", "message", ")" ]
Check ports exist on bridge. ovs-vsctl list-ports bridge
[ "Check", "ports", "exist", "on", "bridge", "." ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L27-L48
242,338
unitedstack/steth
stetho/agent/api.py
AgentApi.ping
def ping(self, ips, boardcast=False, count=2, timeout=2, interface=None): """Ping host or broadcast. ping host -c 2 -W 2 """ cmd = ['ping', '-c', str(count), '-W', str(timeout)] True if not interface else cmd.extend(['-I', interface]) True if not boardcast else cmd.append('-b') # Batch create subprocess data = dict() try: for ip in ips: stdcode, stdout = agent_utils.execute(cmd + [ip]) if stdcode: data[ip] = 100 else: pattern = r',\s([0-9]+)%\spacket\sloss' data[ip] = re.search(pattern, stdout[-2]).groups()[0] return agent_utils.make_response(code=0, data=data) except Exception as e: message = e.message return agent_utils.make_response(code=1, message=message)
python
def ping(self, ips, boardcast=False, count=2, timeout=2, interface=None): """Ping host or broadcast. ping host -c 2 -W 2 """ cmd = ['ping', '-c', str(count), '-W', str(timeout)] True if not interface else cmd.extend(['-I', interface]) True if not boardcast else cmd.append('-b') # Batch create subprocess data = dict() try: for ip in ips: stdcode, stdout = agent_utils.execute(cmd + [ip]) if stdcode: data[ip] = 100 else: pattern = r',\s([0-9]+)%\spacket\sloss' data[ip] = re.search(pattern, stdout[-2]).groups()[0] return agent_utils.make_response(code=0, data=data) except Exception as e: message = e.message return agent_utils.make_response(code=1, message=message)
[ "def", "ping", "(", "self", ",", "ips", ",", "boardcast", "=", "False", ",", "count", "=", "2", ",", "timeout", "=", "2", ",", "interface", "=", "None", ")", ":", "cmd", "=", "[", "'ping'", ",", "'-c'", ",", "str", "(", "count", ")", ",", "'-W'", ",", "str", "(", "timeout", ")", "]", "True", "if", "not", "interface", "else", "cmd", ".", "extend", "(", "[", "'-I'", ",", "interface", "]", ")", "True", "if", "not", "boardcast", "else", "cmd", ".", "append", "(", "'-b'", ")", "# Batch create subprocess", "data", "=", "dict", "(", ")", "try", ":", "for", "ip", "in", "ips", ":", "stdcode", ",", "stdout", "=", "agent_utils", ".", "execute", "(", "cmd", "+", "[", "ip", "]", ")", "if", "stdcode", ":", "data", "[", "ip", "]", "=", "100", "else", ":", "pattern", "=", "r',\\s([0-9]+)%\\spacket\\sloss'", "data", "[", "ip", "]", "=", "re", ".", "search", "(", "pattern", ",", "stdout", "[", "-", "2", "]", ")", ".", "groups", "(", ")", "[", "0", "]", "return", "agent_utils", ".", "make_response", "(", "code", "=", "0", ",", "data", "=", "data", ")", "except", "Exception", "as", "e", ":", "message", "=", "e", ".", "message", "return", "agent_utils", ".", "make_response", "(", "code", "=", "1", ",", "message", "=", "message", ")" ]
Ping host or broadcast. ping host -c 2 -W 2
[ "Ping", "host", "or", "broadcast", "." ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L50-L72
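Note: the packet-loss figure is pulled out of the ping summary with the regex shown above; a standalone check of that pattern against a typical Linux summary line:

import re

pattern = r',\s([0-9]+)%\spacket\sloss'
line = '2 packets transmitted, 2 received, 0% packet loss, time 1001ms'
print(re.search(pattern, line).groups()[0])   # '0'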
242,339
unitedstack/steth
stetho/agent/api.py
AgentApi.add_vlan_to_interface
def add_vlan_to_interface(self, interface, vlan_id): """Add vlan interface. ip link add link eth0 name eth0.10 type vlan id 10 """ subif = '%s.%s' % (interface, vlan_id) vlan_id = '%s' % vlan_id cmd = ['ip', 'link', 'add', 'link', interface, 'name', subif, 'type', 'vlan', 'id', vlan_id] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: return agent_utils.make_response(code=stdcode) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
python
def add_vlan_to_interface(self, interface, vlan_id): """Add vlan interface. ip link add link eth0 name eth0.10 type vlan id 10 """ subif = '%s.%s' % (interface, vlan_id) vlan_id = '%s' % vlan_id cmd = ['ip', 'link', 'add', 'link', interface, 'name', subif, 'type', 'vlan', 'id', vlan_id] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: return agent_utils.make_response(code=stdcode) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
[ "def", "add_vlan_to_interface", "(", "self", ",", "interface", ",", "vlan_id", ")", ":", "subif", "=", "'%s.%s'", "%", "(", "interface", ",", "vlan_id", ")", "vlan_id", "=", "'%s'", "%", "vlan_id", "cmd", "=", "[", "'ip'", ",", "'link'", ",", "'add'", ",", "'link'", ",", "interface", ",", "'name'", ",", "subif", ",", "'type'", ",", "'vlan'", ",", "'id'", ",", "vlan_id", "]", "stdcode", ",", "stdout", "=", "agent_utils", ".", "execute", "(", "cmd", ",", "root", "=", "True", ")", "if", "stdcode", "==", "0", ":", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ")", "# execute failed.", "message", "=", "stdout", ".", "pop", "(", "0", ")", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ",", "message", "=", "message", ")" ]
Add vlan interface. ip link add link eth0 name eth0.10 type vlan id 10
[ "Add", "vlan", "interface", "." ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L74-L88
242,340
unitedstack/steth
stetho/agent/api.py
AgentApi.get_interface
def get_interface(self, interface='eth0'): """Interface info. ifconfig interface """ LOG.info("RPC: get_interface interfae: %s" % interface) code, message, data = agent_utils.get_interface(interface) return agent_utils.make_response(code, message, data)
python
def get_interface(self, interface='eth0'): """Interface info. ifconfig interface """ LOG.info("RPC: get_interface interfae: %s" % interface) code, message, data = agent_utils.get_interface(interface) return agent_utils.make_response(code, message, data)
[ "def", "get_interface", "(", "self", ",", "interface", "=", "'eth0'", ")", ":", "LOG", ".", "info", "(", "\"RPC: get_interface interfae: %s\"", "%", "interface", ")", "code", ",", "message", ",", "data", "=", "agent_utils", ".", "get_interface", "(", "interface", ")", "return", "agent_utils", ".", "make_response", "(", "code", ",", "message", ",", "data", ")" ]
Interface info. ifconfig interface
[ "Interface", "info", "." ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L90-L97
242,341
unitedstack/steth
stetho/agent/api.py
AgentApi.setup_link
def setup_link(self, interface, cidr): """Setup a link. ip addr add dev interface ip link set dev interface up """ # clear old ipaddr in interface cmd = ['ip', 'addr', 'flush', 'dev', interface] agent_utils.execute(cmd, root=True) ip = IPNetwork(cidr) cmd = ['ip', 'addr', 'add', cidr, 'broadcast', str(ip.broadcast), 'dev', interface] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: cmd = ['ip', 'link', 'set', 'dev', interface, 'up'] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: return agent_utils.make_response(code=stdcode) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
python
def setup_link(self, interface, cidr): """Setup a link. ip addr add dev interface ip link set dev interface up """ # clear old ipaddr in interface cmd = ['ip', 'addr', 'flush', 'dev', interface] agent_utils.execute(cmd, root=True) ip = IPNetwork(cidr) cmd = ['ip', 'addr', 'add', cidr, 'broadcast', str(ip.broadcast), 'dev', interface] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: cmd = ['ip', 'link', 'set', 'dev', interface, 'up'] stdcode, stdout = agent_utils.execute(cmd, root=True) if stdcode == 0: return agent_utils.make_response(code=stdcode) # execute failed. message = stdout.pop(0) return agent_utils.make_response(code=stdcode, message=message)
[ "def", "setup_link", "(", "self", ",", "interface", ",", "cidr", ")", ":", "# clear old ipaddr in interface", "cmd", "=", "[", "'ip'", ",", "'addr'", ",", "'flush'", ",", "'dev'", ",", "interface", "]", "agent_utils", ".", "execute", "(", "cmd", ",", "root", "=", "True", ")", "ip", "=", "IPNetwork", "(", "cidr", ")", "cmd", "=", "[", "'ip'", ",", "'addr'", ",", "'add'", ",", "cidr", ",", "'broadcast'", ",", "str", "(", "ip", ".", "broadcast", ")", ",", "'dev'", ",", "interface", "]", "stdcode", ",", "stdout", "=", "agent_utils", ".", "execute", "(", "cmd", ",", "root", "=", "True", ")", "if", "stdcode", "==", "0", ":", "cmd", "=", "[", "'ip'", ",", "'link'", ",", "'set'", ",", "'dev'", ",", "interface", ",", "'up'", "]", "stdcode", ",", "stdout", "=", "agent_utils", ".", "execute", "(", "cmd", ",", "root", "=", "True", ")", "if", "stdcode", "==", "0", ":", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ")", "# execute failed.", "message", "=", "stdout", ".", "pop", "(", "0", ")", "return", "agent_utils", ".", "make_response", "(", "code", "=", "stdcode", ",", "message", "=", "message", ")" ]
Setup a link. ip addr add dev interface ip link set dev interface up
[ "Setup", "a", "link", "." ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L99-L119
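Note: the broadcast address handed to `ip addr add` is derived from the CIDR. A small sketch assuming `IPNetwork` comes from the netaddr package, which matches the `.broadcast` usage above:

from netaddr import IPNetwork

ip = IPNetwork('192.168.10.5/24')
print(str(ip.broadcast))          # 192.168.10.255
# the resulting commands resemble:
#   ip addr add 192.168.10.5/24 broadcast 192.168.10.255 dev eth1
#   ip link set dev eth1 up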
242,342
unitedstack/steth
stetho/agent/api.py
AgentApi.setup_iperf_server
def setup_iperf_server(self, protocol='TCP', port=5001, window=None): """iperf -s """ iperf = iperf_driver.IPerfDriver() try: data = iperf.start_server(protocol='TCP', port=5001, window=None) return agent_utils.make_response(code=0, data=data) except: message = 'Start iperf server failed!' return agent_utils.make_response(code=1, message=message)
python
def setup_iperf_server(self, protocol='TCP', port=5001, window=None): """iperf -s """ iperf = iperf_driver.IPerfDriver() try: data = iperf.start_server(protocol='TCP', port=5001, window=None) return agent_utils.make_response(code=0, data=data) except: message = 'Start iperf server failed!' return agent_utils.make_response(code=1, message=message)
[ "def", "setup_iperf_server", "(", "self", ",", "protocol", "=", "'TCP'", ",", "port", "=", "5001", ",", "window", "=", "None", ")", ":", "iperf", "=", "iperf_driver", ".", "IPerfDriver", "(", ")", "try", ":", "data", "=", "iperf", ".", "start_server", "(", "protocol", "=", "'TCP'", ",", "port", "=", "5001", ",", "window", "=", "None", ")", "return", "agent_utils", ".", "make_response", "(", "code", "=", "0", ",", "data", "=", "data", ")", "except", ":", "message", "=", "'Start iperf server failed!'", "return", "agent_utils", ".", "make_response", "(", "code", "=", "1", ",", "message", "=", "message", ")" ]
iperf -s
[ "iperf", "-", "s" ]
955884ceebf3bdc474c93cc5cf555e67d16458f1
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/api.py#L132-L141
242,343
DecBayComp/RWA-python
rwa/generic.py
isreference
def isreference(a): """ Tell whether a variable is an object reference. Due to garbage collection, some objects happen to get the id of a distinct variable. As a consequence, linking is not ready yet and `isreference` returns ``False``. """ return False return id(a) != id(copy.copy(a)) check = ('__dict__', '__slots__') for attr in check: try: getattr(a, attr) except (SystemExit, KeyboardInterrupt): raise except: pass else: return True return False
python
def isreference(a): """ Tell whether a variable is an object reference. Due to garbage collection, some objects happen to get the id of a distinct variable. As a consequence, linking is not ready yet and `isreference` returns ``False``. """ return False return id(a) != id(copy.copy(a)) check = ('__dict__', '__slots__') for attr in check: try: getattr(a, attr) except (SystemExit, KeyboardInterrupt): raise except: pass else: return True return False
[ "def", "isreference", "(", "a", ")", ":", "return", "False", "return", "id", "(", "a", ")", "!=", "id", "(", "copy", ".", "copy", "(", "a", ")", ")", "check", "=", "(", "'__dict__'", ",", "'__slots__'", ")", "for", "attr", "in", "check", ":", "try", ":", "getattr", "(", "a", ",", "attr", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "pass", "else", ":", "return", "True", "return", "False" ]
Tell whether a variable is an object reference. Due to garbage collection, some objects happen to get the id of a distinct variable. As a consequence, linking is not ready yet and `isreference` returns ``False``.
[ "Tell", "whether", "a", "variable", "is", "an", "object", "reference", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L24-L43
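Note: the `id(a) != id(copy.copy(a))` heuristic that the docstring's garbage-collection caveat refers to behaves as follows in a quick standalone check (the shipped function currently short-circuits to False before reaching it):

import copy

print(id(5) != id(copy.copy(5)))        # False: immutables copy to the same object

class Obj(object):
    pass

o = Obj()
print(id(o) != id(copy.copy(o)))        # True: instances copy to a distinct object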
242,344
DecBayComp/RWA-python
rwa/generic.py
lookup_type
def lookup_type(storable_type): """ Look for the Python type that corresponds to a storable type name. """ if storable_type.startswith('Python'): _, module_name = storable_type.split('.', 1) else: module_name = storable_type #type_name, module_name = \ names = [ _name[::-1] for _name in module_name[::-1].split('.', 1) ] if names[1:]: type_name, module_name = names else: type_name = names[0] return eval(type_name) try: module = importlib.import_module(module_name) python_type = getattr(module, type_name) except (ImportError, AttributeError): python_type = None return python_type
python
def lookup_type(storable_type): """ Look for the Python type that corresponds to a storable type name. """ if storable_type.startswith('Python'): _, module_name = storable_type.split('.', 1) else: module_name = storable_type #type_name, module_name = \ names = [ _name[::-1] for _name in module_name[::-1].split('.', 1) ] if names[1:]: type_name, module_name = names else: type_name = names[0] return eval(type_name) try: module = importlib.import_module(module_name) python_type = getattr(module, type_name) except (ImportError, AttributeError): python_type = None return python_type
[ "def", "lookup_type", "(", "storable_type", ")", ":", "if", "storable_type", ".", "startswith", "(", "'Python'", ")", ":", "_", ",", "module_name", "=", "storable_type", ".", "split", "(", "'.'", ",", "1", ")", "else", ":", "module_name", "=", "storable_type", "#type_name, module_name = \\", "names", "=", "[", "_name", "[", ":", ":", "-", "1", "]", "for", "_name", "in", "module_name", "[", ":", ":", "-", "1", "]", ".", "split", "(", "'.'", ",", "1", ")", "]", "if", "names", "[", "1", ":", "]", ":", "type_name", ",", "module_name", "=", "names", "else", ":", "type_name", "=", "names", "[", "0", "]", "return", "eval", "(", "type_name", ")", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "module_name", ")", "python_type", "=", "getattr", "(", "module", ",", "type_name", ")", "except", "(", "ImportError", ",", "AttributeError", ")", ":", "python_type", "=", "None", "return", "python_type" ]
Look for the Python type that corresponds to a storable type name.
[ "Look", "for", "the", "Python", "type", "that", "corresponds", "to", "a", "storable", "type", "name", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L46-L66
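Note: a usage sketch, assuming rwa.generic is importable and that 'Python.collections.deque' is a representative storable type name (the 'Python.' prefix is stripped and the remainder split into module and type):

from rwa.generic import lookup_type

print(lookup_type('Python.collections.deque'))   # <class 'collections.deque'>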
242,345
DecBayComp/RWA-python
rwa/generic.py
poke
def poke(exposes): """ Default serializer factory. Arguments: exposes (iterable): attributes to serialized. Returns: callable: serializer (`poke` routine). """ def _poke(store, objname, obj, container, visited=None, _stack=None): try: sub_container = store.newContainer(objname, obj, container) except (SystemExit, KeyboardInterrupt): raise except: raise ValueError('generic poke not supported by store') #_stack = _add_to_stack(_stack, objname) for iobjname in exposes: try: iobj = getattr(obj, iobjname) except AttributeError: pass else: store.poke(iobjname, iobj, sub_container, visited=visited, \ _stack=_stack) return _poke
python
def poke(exposes): """ Default serializer factory. Arguments: exposes (iterable): attributes to serialized. Returns: callable: serializer (`poke` routine). """ def _poke(store, objname, obj, container, visited=None, _stack=None): try: sub_container = store.newContainer(objname, obj, container) except (SystemExit, KeyboardInterrupt): raise except: raise ValueError('generic poke not supported by store') #_stack = _add_to_stack(_stack, objname) for iobjname in exposes: try: iobj = getattr(obj, iobjname) except AttributeError: pass else: store.poke(iobjname, iobj, sub_container, visited=visited, \ _stack=_stack) return _poke
[ "def", "poke", "(", "exposes", ")", ":", "def", "_poke", "(", "store", ",", "objname", ",", "obj", ",", "container", ",", "visited", "=", "None", ",", "_stack", "=", "None", ")", ":", "try", ":", "sub_container", "=", "store", ".", "newContainer", "(", "objname", ",", "obj", ",", "container", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "raise", "ValueError", "(", "'generic poke not supported by store'", ")", "#_stack = _add_to_stack(_stack, objname)", "for", "iobjname", "in", "exposes", ":", "try", ":", "iobj", "=", "getattr", "(", "obj", ",", "iobjname", ")", "except", "AttributeError", ":", "pass", "else", ":", "store", ".", "poke", "(", "iobjname", ",", "iobj", ",", "sub_container", ",", "visited", "=", "visited", ",", "_stack", "=", "_stack", ")", "return", "_poke" ]
Default serializer factory. Arguments: exposes (iterable): attributes to be serialized. Returns: callable: serializer (`poke` routine).
[ "Default", "serializer", "factory", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L466-L494
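Note: what the generated `_poke` routine does is easiest to see against a throwaway store. The `StubStore` below is purely illustrative (real stores back `newContainer`/`poke` with HDF5 or similar backends) and the sketch assumes rwa.generic is importable:

from rwa.generic import poke

class StubStore(object):
    def newContainer(self, objname, obj, container):
        container[objname] = {}
        return container[objname]
    def poke(self, name, value, container, visited=None, _stack=None):
        container[name] = value

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

_poke = poke(('x', 'y'))                  # serializer exposing the x and y attributes
root = {}
_poke(StubStore(), 'origin', Point(1, 2), root)
print(root)                               # {'origin': {'x': 1, 'y': 2}}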
242,346
DecBayComp/RWA-python
rwa/generic.py
poke_assoc
def poke_assoc(store, objname, assoc, container, visited=None, _stack=None): """ Serialize association lists. """ try: sub_container = store.newContainer(objname, assoc, container) except (SystemExit, KeyboardInterrupt): raise except: raise ValueError('generic poke not supported by store') escape_keys = assoc and not all(isinstance(iobjname, strtypes) for iobjname,_ in assoc) reported_item_counter = 0 escaped_key_counter = 0 try: if escape_keys: store.setRecordAttr('key', 'escaped', sub_container) verbose = store.verbose # save state for obj in assoc: store.poke(str(escaped_key_counter), obj, sub_container, \ visited=visited, _stack=_stack) escaped_key_counter += 1 if store.verbose: reported_item_counter += 1 if reported_item_counter == 9: store.verbose = False print('...') store.verbose = verbose # restore state else: for iobjname, iobj in assoc: store.poke(iobjname, iobj, sub_container, visited=visited, \ _stack=_stack) except TypeError as e: msg = 'wrong type for keys in associative list' if e.args[0].startswith(msg): raise else: raise TypeError("{}:\n\t{}".format(msg, e.args[0]))
python
def poke_assoc(store, objname, assoc, container, visited=None, _stack=None): """ Serialize association lists. """ try: sub_container = store.newContainer(objname, assoc, container) except (SystemExit, KeyboardInterrupt): raise except: raise ValueError('generic poke not supported by store') escape_keys = assoc and not all(isinstance(iobjname, strtypes) for iobjname,_ in assoc) reported_item_counter = 0 escaped_key_counter = 0 try: if escape_keys: store.setRecordAttr('key', 'escaped', sub_container) verbose = store.verbose # save state for obj in assoc: store.poke(str(escaped_key_counter), obj, sub_container, \ visited=visited, _stack=_stack) escaped_key_counter += 1 if store.verbose: reported_item_counter += 1 if reported_item_counter == 9: store.verbose = False print('...') store.verbose = verbose # restore state else: for iobjname, iobj in assoc: store.poke(iobjname, iobj, sub_container, visited=visited, \ _stack=_stack) except TypeError as e: msg = 'wrong type for keys in associative list' if e.args[0].startswith(msg): raise else: raise TypeError("{}:\n\t{}".format(msg, e.args[0]))
[ "def", "poke_assoc", "(", "store", ",", "objname", ",", "assoc", ",", "container", ",", "visited", "=", "None", ",", "_stack", "=", "None", ")", ":", "try", ":", "sub_container", "=", "store", ".", "newContainer", "(", "objname", ",", "assoc", ",", "container", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "raise", "ValueError", "(", "'generic poke not supported by store'", ")", "escape_keys", "=", "assoc", "and", "not", "all", "(", "isinstance", "(", "iobjname", ",", "strtypes", ")", "for", "iobjname", ",", "_", "in", "assoc", ")", "reported_item_counter", "=", "0", "escaped_key_counter", "=", "0", "try", ":", "if", "escape_keys", ":", "store", ".", "setRecordAttr", "(", "'key'", ",", "'escaped'", ",", "sub_container", ")", "verbose", "=", "store", ".", "verbose", "# save state", "for", "obj", "in", "assoc", ":", "store", ".", "poke", "(", "str", "(", "escaped_key_counter", ")", ",", "obj", ",", "sub_container", ",", "visited", "=", "visited", ",", "_stack", "=", "_stack", ")", "escaped_key_counter", "+=", "1", "if", "store", ".", "verbose", ":", "reported_item_counter", "+=", "1", "if", "reported_item_counter", "==", "9", ":", "store", ".", "verbose", "=", "False", "print", "(", "'...'", ")", "store", ".", "verbose", "=", "verbose", "# restore state", "else", ":", "for", "iobjname", ",", "iobj", "in", "assoc", ":", "store", ".", "poke", "(", "iobjname", ",", "iobj", ",", "sub_container", ",", "visited", "=", "visited", ",", "_stack", "=", "_stack", ")", "except", "TypeError", "as", "e", ":", "msg", "=", "'wrong type for keys in associative list'", "if", "e", ".", "args", "[", "0", "]", ".", "startswith", "(", "msg", ")", ":", "raise", "else", ":", "raise", "TypeError", "(", "\"{}:\\n\\t{}\"", ".", "format", "(", "msg", ",", "e", ".", "args", "[", "0", "]", ")", ")" ]
Serialize association lists.
[ "Serialize", "association", "lists", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L496-L532
242,347
DecBayComp/RWA-python
rwa/generic.py
default_peek
def default_peek(python_type, exposes): """ Autoserializer factory. Works best in Python 3. Arguments: python_type (type): type constructor. exposes (iterable): sequence of attributes. Returns: callable: deserializer (`peek` routine). """ with_args = False make = python_type try: make() except (SystemExit, KeyboardInterrupt): raise except: make = lambda: python_type.__new__(python_type) try: make() except (SystemExit, KeyboardInterrupt): raise except: make = lambda args: python_type.__new__(python_type, *args) with_args = True def missing(attr): return AttributeError("can't set attribute '{}' ({})".format(attr, python_type)) if with_args: def peek(store, container, _stack=None): state = [] for attr in exposes: # force order instead of iterating over `container` #print((attr, attr in container)) # debugging if attr in container: state.append(store.peek(attr, container, _stack=_stack)) else: state.append(None) return make(state) elif '__dict__' in exposes: def peek(store, container, _stack=None): obj = make() for attr in container: val = store.peek(attr, container, _stack=_stack) try: setattr(obj, attr, val) except AttributeError: raise missing(attr) return obj else: def peek(store, container, _stack=None): obj = make() for attr in exposes: # force order instead of iterating over `container` #print((attr, attr in container)) # debugging if attr in container: val = store.peek(attr, container, _stack=_stack) else: val = None try: setattr(obj, attr, val) except AttributeError: raise missing(attr) return obj return peek
python
def default_peek(python_type, exposes): """ Autoserializer factory. Works best in Python 3. Arguments: python_type (type): type constructor. exposes (iterable): sequence of attributes. Returns: callable: deserializer (`peek` routine). """ with_args = False make = python_type try: make() except (SystemExit, KeyboardInterrupt): raise except: make = lambda: python_type.__new__(python_type) try: make() except (SystemExit, KeyboardInterrupt): raise except: make = lambda args: python_type.__new__(python_type, *args) with_args = True def missing(attr): return AttributeError("can't set attribute '{}' ({})".format(attr, python_type)) if with_args: def peek(store, container, _stack=None): state = [] for attr in exposes: # force order instead of iterating over `container` #print((attr, attr in container)) # debugging if attr in container: state.append(store.peek(attr, container, _stack=_stack)) else: state.append(None) return make(state) elif '__dict__' in exposes: def peek(store, container, _stack=None): obj = make() for attr in container: val = store.peek(attr, container, _stack=_stack) try: setattr(obj, attr, val) except AttributeError: raise missing(attr) return obj else: def peek(store, container, _stack=None): obj = make() for attr in exposes: # force order instead of iterating over `container` #print((attr, attr in container)) # debugging if attr in container: val = store.peek(attr, container, _stack=_stack) else: val = None try: setattr(obj, attr, val) except AttributeError: raise missing(attr) return obj return peek
[ "def", "default_peek", "(", "python_type", ",", "exposes", ")", ":", "with_args", "=", "False", "make", "=", "python_type", "try", ":", "make", "(", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "make", "=", "lambda", ":", "python_type", ".", "__new__", "(", "python_type", ")", "try", ":", "make", "(", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "make", "=", "lambda", "args", ":", "python_type", ".", "__new__", "(", "python_type", ",", "*", "args", ")", "with_args", "=", "True", "def", "missing", "(", "attr", ")", ":", "return", "AttributeError", "(", "\"can't set attribute '{}' ({})\"", ".", "format", "(", "attr", ",", "python_type", ")", ")", "if", "with_args", ":", "def", "peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "state", "=", "[", "]", "for", "attr", "in", "exposes", ":", "# force order instead of iterating over `container`", "#print((attr, attr in container)) # debugging", "if", "attr", "in", "container", ":", "state", ".", "append", "(", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", ")", "else", ":", "state", ".", "append", "(", "None", ")", "return", "make", "(", "state", ")", "elif", "'__dict__'", "in", "exposes", ":", "def", "peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "obj", "=", "make", "(", ")", "for", "attr", "in", "container", ":", "val", "=", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", "try", ":", "setattr", "(", "obj", ",", "attr", ",", "val", ")", "except", "AttributeError", ":", "raise", "missing", "(", "attr", ")", "return", "obj", "else", ":", "def", "peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "obj", "=", "make", "(", ")", "for", "attr", "in", "exposes", ":", "# force order instead of iterating over `container`", "#print((attr, attr in container)) # debugging", "if", "attr", "in", "container", ":", "val", "=", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", "else", ":", "val", "=", "None", "try", ":", "setattr", "(", "obj", ",", "attr", ",", "val", ")", "except", "AttributeError", ":", "raise", "missing", "(", "attr", ")", "return", "obj", "return", "peek" ]
Autoserializer factory. Works best in Python 3. Arguments: python_type (type): type constructor. exposes (iterable): sequence of attributes. Returns: callable: deserializer (`peek` routine).
[ "Autoserializer", "factory", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L536-L604
242,348
DecBayComp/RWA-python
rwa/generic.py
unsafe_peek
def unsafe_peek(init): """ Deserialize all the attributes available in the container and pass them in the same order as they come in the container. This is a factory function; returns the actual `peek` routine. Arguments: init: type constructor. Returns: callable: deserializer (`peek` routine). """ def peek(store, container, _stack=None): return init(*[ store.peek(attr, container, _stack=_stack) for attr in container ]) return peek
python
def unsafe_peek(init): """ Deserialize all the attributes available in the container and pass them in the same order as they come in the container. This is a factory function; returns the actual `peek` routine. Arguments: init: type constructor. Returns: callable: deserializer (`peek` routine). """ def peek(store, container, _stack=None): return init(*[ store.peek(attr, container, _stack=_stack) for attr in container ]) return peek
[ "def", "unsafe_peek", "(", "init", ")", ":", "def", "peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "return", "init", "(", "*", "[", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", "for", "attr", "in", "container", "]", ")", "return", "peek" ]
Deserialize all the attributes available in the container and pass them in the same order as they come in the container. This is a factory function; returns the actual `peek` routine. Arguments: init: type constructor. Returns: callable: deserializer (`peek` routine).
[ "Deserialize", "all", "the", "attributes", "available", "in", "the", "container", "and", "pass", "them", "in", "the", "same", "order", "as", "they", "come", "in", "the", "container", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L606-L624
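A minimal usage sketch for unsafe_peek, assuming RWA-python is installed and exposes the function at rwa.generic (as the path above suggests); the DictStore and Point classes are illustrative stand-ins for a real store/container pair.

from collections import namedtuple
from rwa.generic import unsafe_peek  # assumed import path, per rwa/generic.py above

Point = namedtuple('Point', ('x', 'y'))

class DictStore:
    # Toy stand-in: a container is a plain dict and peek is a key lookup.
    def peek(self, attr, container, _stack=None):
        return container[attr]

peek_point = unsafe_peek(Point)
# Attribute order in the container drives the positional argument order,
# which is exactly why this factory is labelled "unsafe".
print(peek_point(DictStore(), {'x': 1.0, 'y': 2.0}))  # Point(x=1.0, y=2.0)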
242,349
DecBayComp/RWA-python
rwa/generic.py
peek_with_kwargs
def peek_with_kwargs(init, args=[]): """ Make datatypes passing keyworded arguments to the constructor. This is a factory function; returns the actual `peek` routine. Arguments: init (callable): type constructor. args (iterable): arguments NOT to be keyworded; order does matter. Returns: callable: deserializer (`peek` routine). All the peeked attributes that are not referenced in `args` are passed to `init` as keyworded arguments. """ def peek(store, container, _stack=None): return init(\ *[ store.peek(attr, container, _stack=_stack) for attr in args ], \ **dict([ (attr, store.peek(attr, container, _stack=_stack)) \ for attr in container if attr not in args ])) return peek
python
def peek_with_kwargs(init, args=[]): """ Make datatypes passing keyworded arguments to the constructor. This is a factory function; returns the actual `peek` routine. Arguments: init (callable): type constructor. args (iterable): arguments NOT to be keyworded; order does matter. Returns: callable: deserializer (`peek` routine). All the peeked attributes that are not referenced in `args` are passed to `init` as keyworded arguments. """ def peek(store, container, _stack=None): return init(\ *[ store.peek(attr, container, _stack=_stack) for attr in args ], \ **dict([ (attr, store.peek(attr, container, _stack=_stack)) \ for attr in container if attr not in args ])) return peek
[ "def", "peek_with_kwargs", "(", "init", ",", "args", "=", "[", "]", ")", ":", "def", "peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "return", "init", "(", "*", "[", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", "for", "attr", "in", "args", "]", ",", "*", "*", "dict", "(", "[", "(", "attr", ",", "store", ".", "peek", "(", "attr", ",", "container", ",", "_stack", "=", "_stack", ")", ")", "for", "attr", "in", "container", "if", "attr", "not", "in", "args", "]", ")", ")", "return", "peek" ]
Deserializer factory for datatypes that pass keyword arguments to the constructor. This is a factory function; returns the actual `peek` routine. Arguments: init (callable): type constructor. args (iterable): arguments NOT to be passed as keywords; order does matter. Returns: callable: deserializer (`peek` routine). All the peeked attributes that are not referenced in `args` are passed to `init` as keyword arguments.
[ "Make", "datatypes", "passing", "keyworded", "arguments", "to", "the", "constructor", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L626-L650
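A hedged sketch of how peek_with_kwargs splits a record between positional and keyword arguments, reusing the toy dict-backed store idea from the previous sketch; the Span class and its field names are illustrative.

from rwa.generic import peek_with_kwargs  # assumed import path, per rwa/generic.py above

class Span:
    def __init__(self, start, stop, label=None):
        self.start, self.stop, self.label = start, stop, label

class DictStore:
    def peek(self, attr, container, _stack=None):
        return container[attr]

# 'start' and 'stop' are passed positionally, in that order; every other
# attribute found in the container ('label' here) becomes a keyword argument.
peek_span = peek_with_kwargs(Span, ['start', 'stop'])
span = peek_span(DictStore(), {'start': 3, 'stop': 7, 'label': 'exon'})
print(span.start, span.stop, span.label)  # 3 7 exon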
242,350
DecBayComp/RWA-python
rwa/generic.py
peek
def peek(init, exposes, debug=False): """ Default deserializer factory. Arguments: init (callable): type constructor. exposes (iterable): attributes to be peeked and passed to `init`. Returns: callable: deserializer (`peek` routine). """ def _peek(store, container, _stack=None): args = [ store.peek(objname, container, _stack=_stack) \ for objname in exposes ] if debug: print(args) return init(*args) return _peek
python
def peek(init, exposes, debug=False): """ Default deserializer factory. Arguments: init (callable): type constructor. exposes (iterable): attributes to be peeked and passed to `init`. Returns: callable: deserializer (`peek` routine). """ def _peek(store, container, _stack=None): args = [ store.peek(objname, container, _stack=_stack) \ for objname in exposes ] if debug: print(args) return init(*args) return _peek
[ "def", "peek", "(", "init", ",", "exposes", ",", "debug", "=", "False", ")", ":", "def", "_peek", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "args", "=", "[", "store", ".", "peek", "(", "objname", ",", "container", ",", "_stack", "=", "_stack", ")", "for", "objname", "in", "exposes", "]", "if", "debug", ":", "print", "(", "args", ")", "return", "init", "(", "*", "args", ")", "return", "_peek" ]
Default deserializer factory. Arguments: init (callable): type constructor. exposes (iterable): attributes to be peeked and passed to `init`. Returns: callable: deserializer (`peek` routine).
[ "Default", "deserializer", "factory", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L655-L675
242,351
DecBayComp/RWA-python
rwa/generic.py
peek_assoc
def peek_assoc(store, container, _stack=None): """ Deserialize association lists. """ assoc = [] try: if store.getRecordAttr('key', container) == 'escaped': for i in container: assoc.append(store.peek(i, container, _stack=_stack)) else: for i in container: assoc.append((store.strRecord(i, container), store.peek(i, container, _stack=_stack))) #print(assoc) # debugging except TypeError as e: try: for i in container: pass raise e except TypeError: raise TypeError("container is not iterable; peek is not compatible\n\t{}".format(e.args[0])) return assoc
python
def peek_assoc(store, container, _stack=None): """ Deserialize association lists. """ assoc = [] try: if store.getRecordAttr('key', container) == 'escaped': for i in container: assoc.append(store.peek(i, container, _stack=_stack)) else: for i in container: assoc.append((store.strRecord(i, container), store.peek(i, container, _stack=_stack))) #print(assoc) # debugging except TypeError as e: try: for i in container: pass raise e except TypeError: raise TypeError("container is not iterable; peek is not compatible\n\t{}".format(e.args[0])) return assoc
[ "def", "peek_assoc", "(", "store", ",", "container", ",", "_stack", "=", "None", ")", ":", "assoc", "=", "[", "]", "try", ":", "if", "store", ".", "getRecordAttr", "(", "'key'", ",", "container", ")", "==", "'escaped'", ":", "for", "i", "in", "container", ":", "assoc", ".", "append", "(", "store", ".", "peek", "(", "i", ",", "container", ",", "_stack", "=", "_stack", ")", ")", "else", ":", "for", "i", "in", "container", ":", "assoc", ".", "append", "(", "(", "store", ".", "strRecord", "(", "i", ",", "container", ")", ",", "store", ".", "peek", "(", "i", ",", "container", ",", "_stack", "=", "_stack", ")", ")", ")", "#print(assoc) # debugging", "except", "TypeError", "as", "e", ":", "try", ":", "for", "i", "in", "container", ":", "pass", "raise", "e", "except", "TypeError", ":", "raise", "TypeError", "(", "\"container is not iterable; peek is not compatible\\n\\t{}\"", ".", "format", "(", "e", ".", "args", "[", "0", "]", ")", ")", "return", "assoc" ]
Deserialize association lists.
[ "Deserialize", "association", "lists", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L677-L697
242,352
DecBayComp/RWA-python
rwa/generic.py
most_exposes
def most_exposes(python_type): """ Core engine for the automatic generation of storable instances. Finds the attributes exposed by the objects of a given type. Mostly Python3-only. Does not handle types which `__new__` method requires extra arguments either. Arguments: python_type (type): object type. Returns: list: attributes exposed. """ _exposes = set() try: # list all standard class attributes and methods: do_not_expose = set(python_type.__dir__(object) + \ ['__slots__', '__module__', '__weakref__']) # may raise `AttributeError` empty = python_type.__new__(python_type) # may raise `TypeError` except AttributeError: # Py2 does not have `__dir__` try: _exposes = python_type.__slots__ except AttributeError: pass except TypeError: # `__new__` requires input arguments for _workaround in storable_workarounds: try: _exposes = _workaround(python_type) except (SystemExit, KeyboardInterrupt): raise except: pass else: break else: # note that slots from parent classes are not in `__dict__` (like all slots) # and - in principle - not in `__slots__` either. all_members = empty.__dir__() # all slots are supposed to appear in this list for attr in all_members: if attr in do_not_expose: # note that '__dict__' is in `do_not_expose` (comes from `object`) continue try: # identify the methods and properties getattr(empty, attr) except AttributeError as e: # then `attr` might be a slot # properties can still throw an `AttributeError`; # try to filter some more out if e.args: msg = e.args[0] if msg == attr or msg.endswith("' object has no attribute '{}'".format(attr)): _exposes.add(attr) except (SystemExit, KeyboardInterrupt): raise except: pass for attr in ('__dict__',): if attr in all_members: _exposes.add(attr) return list(_exposes)
python
def most_exposes(python_type): """ Core engine for the automatic generation of storable instances. Finds the attributes exposed by the objects of a given type. Mostly Python3-only. Does not handle types which `__new__` method requires extra arguments either. Arguments: python_type (type): object type. Returns: list: attributes exposed. """ _exposes = set() try: # list all standard class attributes and methods: do_not_expose = set(python_type.__dir__(object) + \ ['__slots__', '__module__', '__weakref__']) # may raise `AttributeError` empty = python_type.__new__(python_type) # may raise `TypeError` except AttributeError: # Py2 does not have `__dir__` try: _exposes = python_type.__slots__ except AttributeError: pass except TypeError: # `__new__` requires input arguments for _workaround in storable_workarounds: try: _exposes = _workaround(python_type) except (SystemExit, KeyboardInterrupt): raise except: pass else: break else: # note that slots from parent classes are not in `__dict__` (like all slots) # and - in principle - not in `__slots__` either. all_members = empty.__dir__() # all slots are supposed to appear in this list for attr in all_members: if attr in do_not_expose: # note that '__dict__' is in `do_not_expose` (comes from `object`) continue try: # identify the methods and properties getattr(empty, attr) except AttributeError as e: # then `attr` might be a slot # properties can still throw an `AttributeError`; # try to filter some more out if e.args: msg = e.args[0] if msg == attr or msg.endswith("' object has no attribute '{}'".format(attr)): _exposes.add(attr) except (SystemExit, KeyboardInterrupt): raise except: pass for attr in ('__dict__',): if attr in all_members: _exposes.add(attr) return list(_exposes)
[ "def", "most_exposes", "(", "python_type", ")", ":", "_exposes", "=", "set", "(", ")", "try", ":", "# list all standard class attributes and methods:", "do_not_expose", "=", "set", "(", "python_type", ".", "__dir__", "(", "object", ")", "+", "[", "'__slots__'", ",", "'__module__'", ",", "'__weakref__'", "]", ")", "# may raise `AttributeError`", "empty", "=", "python_type", ".", "__new__", "(", "python_type", ")", "# may raise `TypeError`", "except", "AttributeError", ":", "# Py2 does not have `__dir__`", "try", ":", "_exposes", "=", "python_type", ".", "__slots__", "except", "AttributeError", ":", "pass", "except", "TypeError", ":", "# `__new__` requires input arguments", "for", "_workaround", "in", "storable_workarounds", ":", "try", ":", "_exposes", "=", "_workaround", "(", "python_type", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "pass", "else", ":", "break", "else", ":", "# note that slots from parent classes are not in `__dict__` (like all slots)", "# and - in principle - not in `__slots__` either.", "all_members", "=", "empty", ".", "__dir__", "(", ")", "# all slots are supposed to appear in this list", "for", "attr", "in", "all_members", ":", "if", "attr", "in", "do_not_expose", ":", "# note that '__dict__' is in `do_not_expose` (comes from `object`)", "continue", "try", ":", "# identify the methods and properties", "getattr", "(", "empty", ",", "attr", ")", "except", "AttributeError", "as", "e", ":", "# then `attr` might be a slot", "# properties can still throw an `AttributeError`;", "# try to filter some more out", "if", "e", ".", "args", ":", "msg", "=", "e", ".", "args", "[", "0", "]", "if", "msg", "==", "attr", "or", "msg", ".", "endswith", "(", "\"' object has no attribute '{}'\"", ".", "format", "(", "attr", ")", ")", ":", "_exposes", ".", "add", "(", "attr", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "pass", "for", "attr", "in", "(", "'__dict__'", ",", ")", ":", "if", "attr", "in", "all_members", ":", "_exposes", ".", "add", "(", "attr", ")", "return", "list", "(", "_exposes", ")" ]
Core engine for the automatic generation of storable instances. Finds the attributes exposed by the objects of a given type. Mostly Python 3-only. Does not handle types whose `__new__` method requires extra arguments either. Arguments: python_type (type): object type. Returns: list: attributes exposed.
[ "Core", "engine", "for", "the", "automatic", "generation", "of", "storable", "instances", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L703-L766
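A small sketch of what most_exposes reports for a slotted class on Python 3; Segment is illustrative and the printed result is the expected, not a verified, output.

from rwa.generic import most_exposes  # assumed import path, per rwa/generic.py above

class Segment(object):
    __slots__ = ('start', 'stop')

# Unset slots raise AttributeError on the empty instance, which is how
# most_exposes distinguishes data attributes from methods and properties.
print(sorted(most_exposes(Segment)))  # expected: ['start', 'stop']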
242,353
DecBayComp/RWA-python
rwa/generic.py
default_storable
def default_storable(python_type, exposes=None, version=None, storable_type=None, peek=default_peek): """ Default mechanics for building the storable instance for a type. Arguments: python_type (type): type. exposes (iterable): attributes exposed by the type. version (tuple): version number. storable_type (str): universal string identifier for the type. peek (callable): peeking routine. Returns: Storable: storable instance. """ if not exposes: for extension in expose_extensions: try: exposes = extension(python_type) except (SystemExit, KeyboardInterrupt): raise except: pass else: if exposes: break if not exposes: raise AttributeError('`exposes` required for type: {!r}'.format(python_type)) return Storable(python_type, key=storable_type, \ handlers=StorableHandler(version=version, exposes=exposes, \ poke=poke(exposes), peek=peek(python_type, exposes)))
python
def default_storable(python_type, exposes=None, version=None, storable_type=None, peek=default_peek): """ Default mechanics for building the storable instance for a type. Arguments: python_type (type): type. exposes (iterable): attributes exposed by the type. version (tuple): version number. storable_type (str): universal string identifier for the type. peek (callable): peeking routine. Returns: Storable: storable instance. """ if not exposes: for extension in expose_extensions: try: exposes = extension(python_type) except (SystemExit, KeyboardInterrupt): raise except: pass else: if exposes: break if not exposes: raise AttributeError('`exposes` required for type: {!r}'.format(python_type)) return Storable(python_type, key=storable_type, \ handlers=StorableHandler(version=version, exposes=exposes, \ poke=poke(exposes), peek=peek(python_type, exposes)))
[ "def", "default_storable", "(", "python_type", ",", "exposes", "=", "None", ",", "version", "=", "None", ",", "storable_type", "=", "None", ",", "peek", "=", "default_peek", ")", ":", "if", "not", "exposes", ":", "for", "extension", "in", "expose_extensions", ":", "try", ":", "exposes", "=", "extension", "(", "python_type", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "pass", "else", ":", "if", "exposes", ":", "break", "if", "not", "exposes", ":", "raise", "AttributeError", "(", "'`exposes` required for type: {!r}'", ".", "format", "(", "python_type", ")", ")", "return", "Storable", "(", "python_type", ",", "key", "=", "storable_type", ",", "handlers", "=", "StorableHandler", "(", "version", "=", "version", ",", "exposes", "=", "exposes", ",", "poke", "=", "poke", "(", "exposes", ")", ",", "peek", "=", "peek", "(", "python_type", ",", "exposes", ")", ")", ")" ]
Default mechanics for building the storable instance for a type. Arguments: python_type (type): type. exposes (iterable): attributes exposed by the type. version (tuple): version number. storable_type (str): universal string identifier for the type. peek (callable): peeking routine. Returns: Storable: storable instance.
[ "Default", "mechanics", "for", "building", "the", "storable", "instance", "for", "a", "type", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L779-L815
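A hedged usage sketch for default_storable; Point3D is an illustrative class, and passing exposes explicitly sidesteps the automatic discovery that most_exposes performs.

from rwa.generic import default_storable  # assumed import path, per rwa/generic.py above

class Point3D(object):
    __slots__ = ('x', 'y', 'z')

# With exposes given, no introspection is attempted; the returned Storable
# carries a handler whose poke/peek write and read exactly these attributes.
point_storable = default_storable(Point3D, exposes=('x', 'y', 'z'))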
242,354
DecBayComp/RWA-python
rwa/generic.py
not_storable
def not_storable(_type): """ Helper for tagging unserializable types. Arguments: _type (type): type to be ignored. Returns: Storable: storable instance that does not poke. """ return Storable(_type, handlers=StorableHandler(poke=fake_poke, peek=fail_peek(_type)))
python
def not_storable(_type): """ Helper for tagging unserializable types. Arguments: _type (type): type to be ignored. Returns: Storable: storable instance that does not poke. """ return Storable(_type, handlers=StorableHandler(poke=fake_poke, peek=fail_peek(_type)))
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]
Helper for tagging unserializable types. Arguments: _type (type): type to be ignored. Returns: Storable: storable instance that does not poke.
[ "Helper", "for", "tagging", "unserializable", "types", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L883-L896
242,355
DecBayComp/RWA-python
rwa/generic.py
force_auto
def force_auto(service, _type): """ Helper for forcing autoserialization of a datatype with already registered explicit storable instance. Arguments: service (StorableService): active storable service. _type (type): type to be autoserialized. **Not tested** """ storable = service.byPythonType(_type, istype=True) version = max(handler.version[0] for handler in storable.handlers) + 1 _storable = default_storable(_type, version=(version, )) storable.handlers.append(_storable.handlers[0])
python
def force_auto(service, _type): """ Helper for forcing autoserialization of a datatype with already registered explicit storable instance. Arguments: service (StorableService): active storable service. _type (type): type to be autoserialized. **Not tested** """ storable = service.byPythonType(_type, istype=True) version = max(handler.version[0] for handler in storable.handlers) + 1 _storable = default_storable(_type, version=(version, )) storable.handlers.append(_storable.handlers[0])
[ "def", "force_auto", "(", "service", ",", "_type", ")", ":", "storable", "=", "service", ".", "byPythonType", "(", "_type", ",", "istype", "=", "True", ")", "version", "=", "max", "(", "handler", ".", "version", "[", "0", "]", "for", "handler", "in", "storable", ".", "handlers", ")", "+", "1", "_storable", "=", "default_storable", "(", "_type", ",", "version", "=", "(", "version", ",", ")", ")", "storable", ".", "handlers", ".", "append", "(", "_storable", ".", "handlers", "[", "0", "]", ")" ]
Helper for forcing autoserialization of a datatype with already registered explicit storable instance. Arguments: service (StorableService): active storable service. _type (type): type to be autoserialized. **Not tested**
[ "Helper", "for", "forcing", "autoserialization", "of", "a", "datatype", "with", "already", "registered", "explicit", "storable", "instance", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L900-L916
242,356
DecBayComp/RWA-python
rwa/generic.py
poke_native
def poke_native(getstate): """ Serializer factory for types which state can be natively serialized. Arguments: getstate (callable): takes an object and returns the object's state to be passed to `pokeNative`. Returns: callable: serializer (`poke` routine). """ def poke(service, objname, obj, container, visited=None, _stack=None): service.pokeNative(objname, getstate(obj), container) return poke
python
def poke_native(getstate): """ Serializer factory for types which state can be natively serialized. Arguments: getstate (callable): takes an object and returns the object's state to be passed to `pokeNative`. Returns: callable: serializer (`poke` routine). """ def poke(service, objname, obj, container, visited=None, _stack=None): service.pokeNative(objname, getstate(obj), container) return poke
[ "def", "poke_native", "(", "getstate", ")", ":", "def", "poke", "(", "service", ",", "objname", ",", "obj", ",", "container", ",", "visited", "=", "None", ",", "_stack", "=", "None", ")", ":", "service", ".", "pokeNative", "(", "objname", ",", "getstate", "(", "obj", ")", ",", "container", ")", "return", "poke" ]
Serializer factory for types whose state can be natively serialized. Arguments: getstate (callable): takes an object and returns the object's state to be passed to `pokeNative`. Returns: callable: serializer (`poke` routine).
[ "Serializer", "factory", "for", "types", "which", "state", "can", "be", "natively", "serialized", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L940-L956
242,357
DecBayComp/RWA-python
rwa/generic.py
peek_native
def peek_native(make): """ Deserializer factory for types which state can be natively serialized. Arguments: make (callable): type constructor. Returns: callable: deserializer (`peek` routine) """ def peek(service, container, _stack=None): return make(service.peekNative(container)) return peek
python
def peek_native(make): """ Deserializer factory for types which state can be natively serialized. Arguments: make (callable): type constructor. Returns: callable: deserializer (`peek` routine) """ def peek(service, container, _stack=None): return make(service.peekNative(container)) return peek
[ "def", "peek_native", "(", "make", ")", ":", "def", "peek", "(", "service", ",", "container", ",", "_stack", "=", "None", ")", ":", "return", "make", "(", "service", ".", "peekNative", "(", "container", ")", ")", "return", "peek" ]
Deserializer factory for types whose state can be natively serialized. Arguments: make (callable): type constructor. Returns: callable: deserializer (`peek` routine).
[ "Deserializer", "factory", "for", "types", "which", "state", "can", "be", "natively", "serialized", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L958-L973
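A paired sketch for poke_native (previous record) and peek_native with a toy service in place of a real HDF5-backed one; ToyService and the datetime round-trip are illustrative assumptions (Python 3.7+ for fromisoformat).

import datetime
from rwa.generic import poke_native, peek_native  # assumed import path, per rwa/generic.py above

class ToyService:
    # Toy stand-in: a container is a dict and a native record is the raw value.
    def pokeNative(self, objname, state, container):
        container[objname] = state
    def peekNative(self, record):
        return record

poke_dt = poke_native(lambda dt: dt.isoformat())         # object -> native state
peek_dt = peek_native(datetime.datetime.fromisoformat)   # native state -> object

container = {}
poke_dt(ToyService(), 'when', datetime.datetime(2020, 1, 2, 3, 4), container)
print(container['when'])                          # '2020-01-02T03:04:00'
print(peek_dt(ToyService(), container['when']))   # 2020-01-02 03:04:00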
242,358
DecBayComp/RWA-python
rwa/generic.py
handler
def handler(init, exposes, version=None): """ Simple handler with default `peek` and `poke` procedures. Arguments: init (callable): type constructor. exposes (iterable): attributes to be (de-)serialized. version (tuple): version number. Returns: StorableHandler: storable handler. """ return StorableHandler(poke=poke(exposes), peek=peek(init, exposes), version=version)
python
def handler(init, exposes, version=None): """ Simple handler with default `peek` and `poke` procedures. Arguments: init (callable): type constructor. exposes (iterable): attributes to be (de-)serialized. version (tuple): version number. Returns: StorableHandler: storable handler. """ return StorableHandler(poke=poke(exposes), peek=peek(init, exposes), version=version)
[ "def", "handler", "(", "init", ",", "exposes", ",", "version", "=", "None", ")", ":", "return", "StorableHandler", "(", "poke", "=", "poke", "(", "exposes", ")", ",", "peek", "=", "peek", "(", "init", ",", "exposes", ")", ",", "version", "=", "version", ")" ]
Simple handler with default `peek` and `poke` procedures. Arguments: init (callable): type constructor. exposes (iterable): attributes to be (de-)serialized. version (tuple): version number. Returns: StorableHandler: storable handler.
[ "Simple", "handler", "with", "default", "peek", "and", "poke", "procedures", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L981-L997
242,359
DecBayComp/RWA-python
rwa/generic.py
namedtuple_storable
def namedtuple_storable(namedtuple, *args, **kwargs): """ Storable factory for named tuples. """ return default_storable(namedtuple, namedtuple._fields, *args, **kwargs)
python
def namedtuple_storable(namedtuple, *args, **kwargs): """ Storable factory for named tuples. """ return default_storable(namedtuple, namedtuple._fields, *args, **kwargs)
[ "def", "namedtuple_storable", "(", "namedtuple", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "default_storable", "(", "namedtuple", ",", "namedtuple", ".", "_fields", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Storable factory for named tuples.
[ "Storable", "factory", "for", "named", "tuples", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L1000-L1004
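A short hedged sketch: namedtuple_storable simply forwards _fields to default_storable, so registering a named tuple is a one-liner. Rect is illustrative.

from collections import namedtuple
from rwa.generic import namedtuple_storable  # assumed import path, per rwa/generic.py above

Rect = namedtuple('Rect', ('width', 'height'))
rect_storable = namedtuple_storable(Rect)  # exposes Rect._fields, i.e. ('width', 'height')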
242,360
DecBayComp/RWA-python
rwa/generic.py
GenericStore.pokeVisited
def pokeVisited(self, objname, obj, record, existing, visited=None, _stack=None, **kwargs): """ Serialize an already serialized object. If the underlying store supports linking, this is the place where to make links. The default implementation delegates to :meth:`pokeStorable` or :meth:`pokeNative`. Arguments: objname (any): record reference. obj (any): object to be serialized. existing (any): absolute reference of the record which the object was already serialized into. visited (dict): already serialized objects. _stack (CallStack): stack of parent object names. """ if self.hasPythonType(obj): storable = self.byPythonType(obj).asVersion() self.pokeStorable(storable, objname, obj, record, visited=visited, \ _stack=_stack, **kwargs) else: try: self.pokeNative(objname, obj, record) except (SystemExit, KeyboardInterrupt): raise except: self.dump_stack(_stack) raise
python
def pokeVisited(self, objname, obj, record, existing, visited=None, _stack=None, **kwargs): """ Serialize an already serialized object. If the underlying store supports linking, this is the place where to make links. The default implementation delegates to :meth:`pokeStorable` or :meth:`pokeNative`. Arguments: objname (any): record reference. obj (any): object to be serialized. existing (any): absolute reference of the record which the object was already serialized into. visited (dict): already serialized objects. _stack (CallStack): stack of parent object names. """ if self.hasPythonType(obj): storable = self.byPythonType(obj).asVersion() self.pokeStorable(storable, objname, obj, record, visited=visited, \ _stack=_stack, **kwargs) else: try: self.pokeNative(objname, obj, record) except (SystemExit, KeyboardInterrupt): raise except: self.dump_stack(_stack) raise
[ "def", "pokeVisited", "(", "self", ",", "objname", ",", "obj", ",", "record", ",", "existing", ",", "visited", "=", "None", ",", "_stack", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "hasPythonType", "(", "obj", ")", ":", "storable", "=", "self", ".", "byPythonType", "(", "obj", ")", ".", "asVersion", "(", ")", "self", ".", "pokeStorable", "(", "storable", ",", "objname", ",", "obj", ",", "record", ",", "visited", "=", "visited", ",", "_stack", "=", "_stack", ",", "*", "*", "kwargs", ")", "else", ":", "try", ":", "self", ".", "pokeNative", "(", "objname", ",", "obj", ",", "record", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "self", ".", "dump_stack", "(", "_stack", ")", "raise" ]
Serialize an already serialized object. If the underlying store supports linking, this is the place where to make links. The default implementation delegates to :meth:`pokeStorable` or :meth:`pokeNative`. Arguments: objname (any): record reference. obj (any): object to be serialized. existing (any): absolute reference of the record which the object was already serialized into. visited (dict): already serialized objects. _stack (CallStack): stack of parent object names.
[ "Serialize", "an", "already", "serialized", "object", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L276-L309
242,361
DecBayComp/RWA-python
rwa/generic.py
GenericStore.defaultStorable
def defaultStorable(self, python_type=None, storable_type=None, version=None, **kwargs): """ Generate a default storable instance. Arguments: python_type (type): Python type of the object. storable_type (str): storable type name. version (tuple): version number of the storable handler. Returns: StorableHandler: storable instance. Extra keyword arguments are passed to :meth:`registerStorable`. """ if python_type is None: python_type = lookup_type(storable_type) if self.verbose: print('generating storable instance for type: {}'.format(python_type)) self.storables.registerStorable(default_storable(python_type, \ version=version, storable_type=storable_type), **kwargs) return self.byPythonType(python_type, True).asVersion(version)
python
def defaultStorable(self, python_type=None, storable_type=None, version=None, **kwargs): """ Generate a default storable instance. Arguments: python_type (type): Python type of the object. storable_type (str): storable type name. version (tuple): version number of the storable handler. Returns: StorableHandler: storable instance. Extra keyword arguments are passed to :meth:`registerStorable`. """ if python_type is None: python_type = lookup_type(storable_type) if self.verbose: print('generating storable instance for type: {}'.format(python_type)) self.storables.registerStorable(default_storable(python_type, \ version=version, storable_type=storable_type), **kwargs) return self.byPythonType(python_type, True).asVersion(version)
[ "def", "defaultStorable", "(", "self", ",", "python_type", "=", "None", ",", "storable_type", "=", "None", ",", "version", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "python_type", "is", "None", ":", "python_type", "=", "lookup_type", "(", "storable_type", ")", "if", "self", ".", "verbose", ":", "print", "(", "'generating storable instance for type: {}'", ".", "format", "(", "python_type", ")", ")", "self", ".", "storables", ".", "registerStorable", "(", "default_storable", "(", "python_type", ",", "version", "=", "version", ",", "storable_type", "=", "storable_type", ")", ",", "*", "*", "kwargs", ")", "return", "self", ".", "byPythonType", "(", "python_type", ",", "True", ")", ".", "asVersion", "(", "version", ")" ]
Generate a default storable instance. Arguments: python_type (type): Python type of the object. storable_type (str): storable type name. version (tuple): version number of the storable handler. Returns: StorableHandler: storable instance. Extra keyword arguments are passed to :meth:`registerStorable`.
[ "Generate", "a", "default", "storable", "instance", "." ]
734a52e15a0e8c244d84d74acf3fd64721074732
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/generic.py#L438-L462
242,362
zeromake/aiko
aiko/request.py
RequestParameters.get
def get(self, name: str, default: Any = None) -> Any: """Return the first value, either the default or actual""" return super().get(name, [default])[0]
python
def get(self, name: str, default: Any = None) -> Any: """Return the first value, either the default or actual""" return super().get(name, [default])[0]
[ "def", "get", "(", "self", ",", "name", ":", "str", ",", "default", ":", "Any", "=", "None", ")", "->", "Any", ":", "return", "super", "(", ")", ".", "get", "(", "name", ",", "[", "default", "]", ")", "[", "0", "]" ]
Return the first value, either the default or actual
[ "Return", "the", "first", "value", "either", "the", "default", "or", "actual" ]
53b246fa88652466a9e38ac3d1a99a6198195b0f
https://github.com/zeromake/aiko/blob/53b246fa88652466a9e38ac3d1a99a6198195b0f/aiko/request.py#L31-L33
242,363
zeromake/aiko
aiko/request.py
RequestParameters.getlist
def getlist(self, name: str, default: Any = None) -> List[Any]: """Return the entire list""" return super().get(name, default)
python
def getlist(self, name: str, default: Any = None) -> List[Any]: """Return the entire list""" return super().get(name, default)
[ "def", "getlist", "(", "self", ",", "name", ":", "str", ",", "default", ":", "Any", "=", "None", ")", "->", "List", "[", "Any", "]", ":", "return", "super", "(", ")", ".", "get", "(", "name", ",", "default", ")" ]
Return the entire list
[ "Return", "the", "entire", "list" ]
53b246fa88652466a9e38ac3d1a99a6198195b0f
https://github.com/zeromake/aiko/blob/53b246fa88652466a9e38ac3d1a99a6198195b0f/aiko/request.py#L35-L37
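A self-contained sketch of the semantics of the two accessors above; the real RequestParameters in aiko holds multi-valued query parameters, so each value is a list, which is mimicked here with a plain dict subclass.

from typing import Any, List

class RequestParameters(dict):
    # Dict whose values are lists, as produced by parsing a query string.
    def get(self, name: str, default: Any = None) -> Any:
        return super().get(name, [default])[0]
    def getlist(self, name: str, default: Any = None) -> List[Any]:
        return super().get(name, default)

params = RequestParameters({'tag': ['python', 'asyncio']})
print(params.get('tag'))       # 'python' -- only the first value
print(params.getlist('tag'))   # ['python', 'asyncio'] -- the whole list
print(params.get('missing'))   # None -- the default, wrapped and then unwrapped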
242,364
mrstephenneal/dirutility
dirutility/walk/filter.py
PathFilters.validate
def validate(self, path): """Run path against filter sets and return True if all pass""" # Exclude hidden files and folders with '.' prefix if os.path.basename(path).startswith('.'): return False # Check that current path level is more than min path and less than max path if not self.check_level(path): return False if self.filters: if not self._level_filters(path): return False # Force include and exclude iterations to be strings in case of integer filters # Handle exclusions if self.to_exclude: if any(str(ex).lower() in path.lower() for ex in self.to_exclude): return False # Handle inclusions if self.to_include: if not any(str(inc).lower() in path.lower() for inc in self.to_include): return False return True
python
def validate(self, path): """Run path against filter sets and return True if all pass""" # Exclude hidden files and folders with '.' prefix if os.path.basename(path).startswith('.'): return False # Check that current path level is more than min path and less than max path if not self.check_level(path): return False if self.filters: if not self._level_filters(path): return False # Force include and exclude iterations to be strings in case of integer filters # Handle exclusions if self.to_exclude: if any(str(ex).lower() in path.lower() for ex in self.to_exclude): return False # Handle inclusions if self.to_include: if not any(str(inc).lower() in path.lower() for inc in self.to_include): return False return True
[ "def", "validate", "(", "self", ",", "path", ")", ":", "# Exclude hidden files and folders with '.' prefix", "if", "os", ".", "path", ".", "basename", "(", "path", ")", ".", "startswith", "(", "'.'", ")", ":", "return", "False", "# Check that current path level is more than min path and less than max path", "if", "not", "self", ".", "check_level", "(", "path", ")", ":", "return", "False", "if", "self", ".", "filters", ":", "if", "not", "self", ".", "_level_filters", "(", "path", ")", ":", "return", "False", "# Force include and exclude iterations to be strings in case of integer filters", "# Handle exclusions", "if", "self", ".", "to_exclude", ":", "if", "any", "(", "str", "(", "ex", ")", ".", "lower", "(", ")", "in", "path", ".", "lower", "(", ")", "for", "ex", "in", "self", ".", "to_exclude", ")", ":", "return", "False", "# Handle inclusions", "if", "self", ".", "to_include", ":", "if", "not", "any", "(", "str", "(", "inc", ")", ".", "lower", "(", ")", "in", "path", ".", "lower", "(", ")", "for", "inc", "in", "self", ".", "to_include", ")", ":", "return", "False", "return", "True" ]
Run path against filter sets and return True if all pass
[ "Run", "path", "against", "filter", "sets", "and", "return", "True", "if", "all", "pass" ]
339378659e2d7e09c53acfc51c5df745bb0cd517
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/walk/filter.py#L49-L74
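A standalone illustration of the case-insensitive include/exclude matching that validate applies (the hidden-file and path-level checks are omitted); the filter values are illustrative.

to_exclude = ['.git', 'node_modules']
to_include = ['src']

def passes(path):
    # Exclusions win: any excluded substring rejects the path outright.
    if any(str(ex).lower() in path.lower() for ex in to_exclude):
        return False
    # Inclusions: at least one included substring must appear (when any are set).
    if to_include and not any(str(inc).lower() in path.lower() for inc in to_include):
        return False
    return True

print(passes('project/src/main.py'))        # True
print(passes('project/node_modules/x.js'))  # False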
242,365
emin63/eyap
eyap/__init__.py
Make.comment_thread
def comment_thread(cls, backend, *args, **kwargs): """Create a comment thread for the desired backend. :arg backend: String name of backend (e.g., 'file', 'github', 'redis', etc.). :arg *args, **kwargs: Arguments to be passed to contructor for that backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- :returns: A CommentThread sub-class for the given backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- PURPOSE: Some simple syntatic sugar for creating the desired backend. """ ct_cls = cls._known_backends.get(backend) if not ct_cls: return None return ct_cls(*args, **kwargs)
python
def comment_thread(cls, backend, *args, **kwargs): """Create a comment thread for the desired backend. :arg backend: String name of backend (e.g., 'file', 'github', 'redis', etc.). :arg *args, **kwargs: Arguments to be passed to contructor for that backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- :returns: A CommentThread sub-class for the given backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- PURPOSE: Some simple syntatic sugar for creating the desired backend. """ ct_cls = cls._known_backends.get(backend) if not ct_cls: return None return ct_cls(*args, **kwargs)
[ "def", "comment_thread", "(", "cls", ",", "backend", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ct_cls", "=", "cls", ".", "_known_backends", ".", "get", "(", "backend", ")", "if", "not", "ct_cls", ":", "return", "None", "return", "ct_cls", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Create a comment thread for the desired backend. :arg backend: String name of backend (e.g., 'file', 'github', 'redis', etc.). :arg *args, **kwargs: Arguments to be passed to the constructor for that backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- :returns: A CommentThread sub-class for the given backend. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- PURPOSE: Some simple syntactic sugar for creating the desired backend.
[ "Create", "a", "comment", "thread", "for", "the", "desired", "backend", "." ]
a610761973b478ca0e864e970be05ce29d5994a5
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/__init__.py#L123-L145
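A minimal sketch of the dispatch pattern comment_thread implements: look up a class in a name-to-class registry, return None for unknown backends, otherwise construct it with the remaining arguments. The registry contents and the FileThread constructor are hypothetical; the real mapping lives in Make._known_backends.

class FileThread:
    # Hypothetical stand-in for a file-backed CommentThread subclass.
    def __init__(self, path):
        self.path = path

_known_backends = {'file': FileThread}

def comment_thread(backend, *args, **kwargs):
    ct_cls = _known_backends.get(backend)
    if not ct_cls:
        return None
    return ct_cls(*args, **kwargs)

print(comment_thread('file', '/tmp/thread.json'))  # a FileThread instance
print(comment_thread('unknown'))                   # None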
242,366
political-memory/django-representatives-votes
representatives_votes/contrib/francedata/import_dossiers.py
find_dossier
def find_dossier(data): ''' Find dossier with reference matching either 'ref_an' or 'ref_sen', create it if not found. Ensure its reference is 'ref_an' if both fields are present. ''' changed = False dossier = None reffield = None for field in [k for k in ('ref_an', 'ref_sen') if k in data]: try: dossier = Dossier.objects.get(reference=data[field]) reffield = field break except Dossier.DoesNotExist: pass if dossier is None: reffield = 'ref_an' if 'ref_an' in data else 'ref_sen' dossier = Dossier(reference=data[reffield]) logger.debug('Created dossier %s' % data[reffield]) changed = True if 'ref_an' in data and reffield != 'ref_an': logger.debug('Changed dossier reference to %s' % data['ref_an']) dossier.reference = data['ref_an'] changed = True return dossier, changed
python
def find_dossier(data): ''' Find dossier with reference matching either 'ref_an' or 'ref_sen', create it if not found. Ensure its reference is 'ref_an' if both fields are present. ''' changed = False dossier = None reffield = None for field in [k for k in ('ref_an', 'ref_sen') if k in data]: try: dossier = Dossier.objects.get(reference=data[field]) reffield = field break except Dossier.DoesNotExist: pass if dossier is None: reffield = 'ref_an' if 'ref_an' in data else 'ref_sen' dossier = Dossier(reference=data[reffield]) logger.debug('Created dossier %s' % data[reffield]) changed = True if 'ref_an' in data and reffield != 'ref_an': logger.debug('Changed dossier reference to %s' % data['ref_an']) dossier.reference = data['ref_an'] changed = True return dossier, changed
[ "def", "find_dossier", "(", "data", ")", ":", "changed", "=", "False", "dossier", "=", "None", "reffield", "=", "None", "for", "field", "in", "[", "k", "for", "k", "in", "(", "'ref_an'", ",", "'ref_sen'", ")", "if", "k", "in", "data", "]", ":", "try", ":", "dossier", "=", "Dossier", ".", "objects", ".", "get", "(", "reference", "=", "data", "[", "field", "]", ")", "reffield", "=", "field", "break", "except", "Dossier", ".", "DoesNotExist", ":", "pass", "if", "dossier", "is", "None", ":", "reffield", "=", "'ref_an'", "if", "'ref_an'", "in", "data", "else", "'ref_sen'", "dossier", "=", "Dossier", "(", "reference", "=", "data", "[", "reffield", "]", ")", "logger", ".", "debug", "(", "'Created dossier %s'", "%", "data", "[", "reffield", "]", ")", "changed", "=", "True", "if", "'ref_an'", "in", "data", "and", "reffield", "!=", "'ref_an'", ":", "logger", ".", "debug", "(", "'Changed dossier reference to %s'", "%", "data", "[", "'ref_an'", "]", ")", "dossier", ".", "reference", "=", "data", "[", "'ref_an'", "]", "changed", "=", "True", "return", "dossier", ",", "changed" ]
Find dossier with reference matching either 'ref_an' or 'ref_sen', create it if not found. Ensure its reference is 'ref_an' if both fields are present.
[ "Find", "dossier", "with", "reference", "matching", "either", "ref_an", "or", "ref_sen", "create", "it", "if", "not", "found", ".", "Ensure", "its", "reference", "is", "ref_an", "if", "both", "fields", "are", "present", "." ]
97f9f29deefec05f18e51f6c38270b791b921bac
https://github.com/political-memory/django-representatives-votes/blob/97f9f29deefec05f18e51f6c38270b791b921bac/representatives_votes/contrib/francedata/import_dossiers.py#L36-L66
242,367
donovan-duplessis/pwnurl
pwnurl/models/base.py
BaseModel.saveform
def saveform(cls, form): """ Create and save form model data to database """ columns = dict() for name, field in cls.form_fields.iteritems(): columns[name] = getattr(form, field).data instance = cls(**columns) return instance.save()
python
def saveform(cls, form): """ Create and save form model data to database """ columns = dict() for name, field in cls.form_fields.iteritems(): columns[name] = getattr(form, field).data instance = cls(**columns) return instance.save()
[ "def", "saveform", "(", "cls", ",", "form", ")", ":", "columns", "=", "dict", "(", ")", "for", "name", ",", "field", "in", "cls", ".", "form_fields", ".", "iteritems", "(", ")", ":", "columns", "[", "name", "]", "=", "getattr", "(", "form", ",", "field", ")", ".", "data", "instance", "=", "cls", "(", "*", "*", "columns", ")", "return", "instance", ".", "save", "(", ")" ]
Create and save form model data to database
[ "Create", "and", "save", "form", "model", "data", "to", "database" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/models/base.py#L28-L35
242,368
donovan-duplessis/pwnurl
pwnurl/models/base.py
BaseModel.get_by_id
def get_by_id(cls, id): """ Get model by identifier """ if any((isinstance(id, basestring) and id.isdigit(), isinstance(id, (int, float)))): return cls.query.get(int(id)) return None
python
def get_by_id(cls, id): """ Get model by identifier """ if any((isinstance(id, basestring) and id.isdigit(), isinstance(id, (int, float)))): return cls.query.get(int(id)) return None
[ "def", "get_by_id", "(", "cls", ",", "id", ")", ":", "if", "any", "(", "(", "isinstance", "(", "id", ",", "basestring", ")", "and", "id", ".", "isdigit", "(", ")", ",", "isinstance", "(", "id", ",", "(", "int", ",", "float", ")", ")", ")", ")", ":", "return", "cls", ".", "query", ".", "get", "(", "int", "(", "id", ")", ")", "return", "None" ]
Get model by identifier
[ "Get", "model", "by", "identifier" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/models/base.py#L38-L44
242,369
donovan-duplessis/pwnurl
pwnurl/models/base.py
BaseModel.update
def update(self, commit=True, **kwargs): """ Update model attributes and save to database """ for (attr, value) in kwargs.iteritems(): setattr(self, attr, value) return commit and self.save() or self
python
def update(self, commit=True, **kwargs): """ Update model attributes and save to database """ for (attr, value) in kwargs.iteritems(): setattr(self, attr, value) return commit and self.save() or self
[ "def", "update", "(", "self", ",", "commit", "=", "True", ",", "*", "*", "kwargs", ")", ":", "for", "(", "attr", ",", "value", ")", "in", "kwargs", ".", "iteritems", "(", ")", ":", "setattr", "(", "self", ",", "attr", ",", "value", ")", "return", "commit", "and", "self", ".", "save", "(", ")", "or", "self" ]
Update model attributes and save to database
[ "Update", "model", "attributes", "and", "save", "to", "database" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/models/base.py#L53-L58
242,370
donovan-duplessis/pwnurl
pwnurl/models/base.py
BaseModel.save
def save(self, commit=True): """ Save model to database """ db.session.add(self) if commit: db.session.commit() return self
python
def save(self, commit=True): """ Save model to database """ db.session.add(self) if commit: db.session.commit() return self
[ "def", "save", "(", "self", ",", "commit", "=", "True", ")", ":", "db", ".", "session", ".", "add", "(", "self", ")", "if", "commit", ":", "db", ".", "session", ".", "commit", "(", ")", "return", "self" ]
Save model to database
[ "Save", "model", "to", "database" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/models/base.py#L60-L66
242,371
donovan-duplessis/pwnurl
pwnurl/models/base.py
BaseModel.delete
def delete(self, commit=True): """ Delete model from database """ db.session.delete(self) return commit and db.session.commit()
python
def delete(self, commit=True): """ Delete model from database """ db.session.delete(self) return commit and db.session.commit()
[ "def", "delete", "(", "self", ",", "commit", "=", "True", ")", ":", "db", ".", "session", ".", "delete", "(", "self", ")", "return", "commit", "and", "db", ".", "session", ".", "commit", "(", ")" ]
Delete model from database
[ "Delete", "model", "from", "database" ]
a13e27694f738228d186ea437b4d15ef5a925a87
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/models/base.py#L68-L72
242,372
jpablo128/simplystatic
simplystatic/s2site.py
verify_dir_structure
def verify_dir_structure(full_path): '''Check if given directory to see if it is usable by s2. Checks that all required directories exist under the given directory, and also checks that they are writable. ''' if full_path == None: return False r = True for d2c in PREDEFINED_DIR_NAMES: #if d2c == "s2": # d2c = ".s2" cp2c = os.path.join(full_path, d2c) #complete path to check if not os.path.isdir(cp2c): r = False break else: #exists, let's check it's writable if not os.access(cp2c, os.W_OK): r = False break return r
python
def verify_dir_structure(full_path): '''Check if given directory to see if it is usable by s2. Checks that all required directories exist under the given directory, and also checks that they are writable. ''' if full_path == None: return False r = True for d2c in PREDEFINED_DIR_NAMES: #if d2c == "s2": # d2c = ".s2" cp2c = os.path.join(full_path, d2c) #complete path to check if not os.path.isdir(cp2c): r = False break else: #exists, let's check it's writable if not os.access(cp2c, os.W_OK): r = False break return r
[ "def", "verify_dir_structure", "(", "full_path", ")", ":", "if", "full_path", "==", "None", ":", "return", "False", "r", "=", "True", "for", "d2c", "in", "PREDEFINED_DIR_NAMES", ":", "#if d2c == \"s2\":", "# d2c = \".s2\"", "cp2c", "=", "os", ".", "path", ".", "join", "(", "full_path", ",", "d2c", ")", "#complete path to check", "if", "not", "os", ".", "path", ".", "isdir", "(", "cp2c", ")", ":", "r", "=", "False", "break", "else", ":", "#exists, let's check it's writable", "if", "not", "os", ".", "access", "(", "cp2c", ",", "os", ".", "W_OK", ")", ":", "r", "=", "False", "break", "return", "r" ]
Check the given directory to see if it is usable by s2. Checks that all required directories exist under the given directory, and also checks that they are writable.
[ "Check", "if", "given", "directory", "to", "see", "if", "it", "is", "usable", "by", "s2", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L32-L53
242,373
jpablo128/simplystatic
simplystatic/s2site.py
dir_param_valid
def dir_param_valid(d): '''True if d is a string and it's an existing directory.''' r = True if not isinstance(d, str) : r = False raise TypeError if not os.path.isdir(d): r = False raise ValueError return r
python
def dir_param_valid(d): '''True if d is a string and it's an existing directory.''' r = True if not isinstance(d, str) : r = False raise TypeError if not os.path.isdir(d): r = False raise ValueError return r
[ "def", "dir_param_valid", "(", "d", ")", ":", "r", "=", "True", "if", "not", "isinstance", "(", "d", ",", "str", ")", ":", "r", "=", "False", "raise", "TypeError", "if", "not", "os", ".", "path", ".", "isdir", "(", "d", ")", ":", "r", "=", "False", "raise", "ValueError", "return", "r" ]
True if d is a string and it's an existing directory.
[ "True", "if", "d", "is", "a", "string", "and", "it", "s", "an", "existing", "directory", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L55-L64
242,374
jpablo128/simplystatic
simplystatic/s2site.py
dir_empty
def dir_empty(d): '''Return True if given directory is empty, false otherwise.''' flist = glob.glob(os.path.join(d,'*')) return (len(flist) == 0)
python
def dir_empty(d): '''Return True if given directory is empty, false otherwise.''' flist = glob.glob(os.path.join(d,'*')) return (len(flist) == 0)
[ "def", "dir_empty", "(", "d", ")", ":", "flist", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "d", ",", "'*'", ")", ")", "return", "(", "len", "(", "flist", ")", "==", "0", ")" ]
Return True if given directory is empty, false otherwise.
[ "Return", "True", "if", "given", "directory", "is", "empty", "false", "otherwise", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L66-L69
242,375
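One behavioural detail of the dir_empty record above worth noting: glob('*') does not match dotfiles, so a directory that contains only hidden entries counts as empty. The short sketch below demonstrates this and shows an os.scandir variant that also sees hidden entries, if that behaviour were wanted instead.

import glob
import os
import tempfile

d = tempfile.mkdtemp()
open(os.path.join(d, '.hidden'), 'w').close()

# The glob-based test misses dotfiles, so the directory looks empty here.
print(len(glob.glob(os.path.join(d, '*'))) == 0)   # True

# os.scandir sees hidden entries as well.
with os.scandir(d) as it:
    print(not any(it))                              # False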
jpablo128/simplystatic
simplystatic/s2site.py
is_base_dir
def is_base_dir(d): '''True if the dir is valid and it contains a dir called s2''' if not dir_param_valid(d): # pragma: no cover raise else: mfn = os.path.join(d,'s2') #marker name. it must be a directory. return os.path.isdir(mfn)
python
def is_base_dir(d): '''True if the dir is valid and it contains a dir called s2''' if not dir_param_valid(d): # pragma: no cover raise else: mfn = os.path.join(d,'s2') #marker name. it must be a directory. return os.path.isdir(mfn)
[ "def", "is_base_dir", "(", "d", ")", ":", "if", "not", "dir_param_valid", "(", "d", ")", ":", "# pragma: no cover", "raise", "else", ":", "mfn", "=", "os", ".", "path", ".", "join", "(", "d", ",", "'s2'", ")", "#marker name. it must be a directory.", "return", "os", ".", "path", ".", "isdir", "(", "mfn", ")" ]
True if the dir is valid and it contains a dir called s2
[ "True", "if", "the", "dir", "is", "valid", "and", "it", "contains", "a", "dir", "called", "s2" ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L71-L77
242,376
jpablo128/simplystatic
simplystatic/s2site.py
discover_base_dir
def discover_base_dir(start_dir):
    '''Return start_dir or the parent dir that has the s2 marker.

    Starting from the specified directory, and going up the parent chain,
    check each directory to see if it's a base_dir (contains the "marker"
    directory *s2*) and return it. Otherwise, return the start_dir.
    '''
    if is_base_dir(start_dir):
        return start_dir
    pcl = start_dir.split('/')  #path component list
    found_base_dir = None
    for i in range(1, len(pcl)+1):
        d2c = '/'.join(pcl[:-i])
        if (d2c == ''):
            d2c = '/'
        if is_base_dir(d2c):
            found_base_dir = d2c
            break
    return found_base_dir
python
def discover_base_dir(start_dir): '''Return start_dir or the parent dir that has the s2 marker. Starting from the specified directory, and going up the parent chain, check each directory to see if it's a base_dir (contains the "marker" directory *s2*) and return it. Otherwise, return the start_dir. ''' if is_base_dir(start_dir): return start_dir pcl = start_dir.split('/') #path component list found_base_dir = None for i in range(1, len(pcl)+1): d2c = '/'.join(pcl[:-i]) if (d2c == ''): d2c = '/' if is_base_dir(d2c): found_base_dir = d2c break return found_base_dir
[ "def", "discover_base_dir", "(", "start_dir", ")", ":", "if", "is_base_dir", "(", "start_dir", ")", ":", "return", "start_dir", "pcl", "=", "start_dir", ".", "split", "(", "'/'", ")", "#path component list", "found_base_dir", "=", "None", "for", "i", "in", "range", "(", "1", ",", "len", "(", "pcl", ")", "+", "1", ")", ":", "d2c", "=", "'/'", ".", "join", "(", "pcl", "[", ":", "-", "i", "]", ")", "if", "(", "d2c", "==", "''", ")", ":", "d2c", "=", "'/'", "if", "is_base_dir", "(", "d2c", ")", ":", "found_base_dir", "=", "d2c", "break", "return", "found_base_dir" ]
Return start_dir or the parent dir that has the s2 marker. Starting from the specified directory, and going up the parent chain, check each directory to see if it's a base_dir (contains the "marker" directory *s2*) and return it. Otherwise, return the start_dir.
[ "Return", "start_dir", "or", "the", "parent", "dir", "that", "has", "the", "s2", "marker", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L79-L99
242,377
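The discover_base_dir record above walks up the path by splitting on '/'. A portable way to express the same "walk up until a marker directory is found" idea uses os.path.dirname; the sketch below is illustrative only and replaces is_base_dir with a direct marker check rather than calling the library.

import os

def find_marker_dir(start_dir, marker='s2'):
    """Walk up from start_dir; return the first ancestor containing `marker`, else None."""
    current = os.path.abspath(start_dir)
    while True:
        if os.path.isdir(os.path.join(current, marker)):
            return current
        parent = os.path.dirname(current)
        if parent == current:   # reached the filesystem root
            return None
        current = parent

# Example: find_marker_dir('/some/site/source/posts') would return '/some/site'
# if '/some/site/s2' exists.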
jpablo128/simplystatic
simplystatic/s2site.py
package_data_location
def package_data_location(): '''Get the locations of themes distributed with this package. Just finds if there are templates, and returns a dictionary with the corresponding values. ''' pkg_dir = os.path.split(__file__)[0] pkg_data_dir = os.path.join(pkg_dir,'data') return pkg_data_dir
python
def package_data_location(): '''Get the locations of themes distributed with this package. Just finds if there are templates, and returns a dictionary with the corresponding values. ''' pkg_dir = os.path.split(__file__)[0] pkg_data_dir = os.path.join(pkg_dir,'data') return pkg_data_dir
[ "def", "package_data_location", "(", ")", ":", "pkg_dir", "=", "os", ".", "path", ".", "split", "(", "__file__", ")", "[", "0", "]", "pkg_data_dir", "=", "os", ".", "path", ".", "join", "(", "pkg_dir", ",", "'data'", ")", "return", "pkg_data_dir" ]
Get the locations of themes distributed with this package. Just finds if there are templates, and returns a dictionary with the corresponding values.
[ "Get", "the", "locations", "of", "themes", "distributed", "with", "this", "package", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L107-L116
242,378
jpablo128/simplystatic
simplystatic/s2site.py
Site._set_directories
def _set_directories(self): '''Initialize variables based on evidence about the directories.''' if self._dirs['initial'] == None: self._dirs['base'] = discover_base_dir(self._dirs['run']) else: self._dirs['base'] = discover_base_dir(self._dirs['initial']) # now, if 'base' is None (no base directory was found) then the only # allowed operation is init self._update_dirs_on_base() # we might have set the directory variables fine, but the tree # might not exist yet. _tree_ready is a flag for that. self._tree_ready = verify_dir_structure(self._dirs['base']) if self._tree_ready: self._read_site_config()
python
def _set_directories(self): '''Initialize variables based on evidence about the directories.''' if self._dirs['initial'] == None: self._dirs['base'] = discover_base_dir(self._dirs['run']) else: self._dirs['base'] = discover_base_dir(self._dirs['initial']) # now, if 'base' is None (no base directory was found) then the only # allowed operation is init self._update_dirs_on_base() # we might have set the directory variables fine, but the tree # might not exist yet. _tree_ready is a flag for that. self._tree_ready = verify_dir_structure(self._dirs['base']) if self._tree_ready: self._read_site_config()
[ "def", "_set_directories", "(", "self", ")", ":", "if", "self", ".", "_dirs", "[", "'initial'", "]", "==", "None", ":", "self", ".", "_dirs", "[", "'base'", "]", "=", "discover_base_dir", "(", "self", ".", "_dirs", "[", "'run'", "]", ")", "else", ":", "self", ".", "_dirs", "[", "'base'", "]", "=", "discover_base_dir", "(", "self", ".", "_dirs", "[", "'initial'", "]", ")", "# now, if 'base' is None (no base directory was found) then the only", "# allowed operation is init ", "self", ".", "_update_dirs_on_base", "(", ")", "# we might have set the directory variables fine, but the tree", "# might not exist yet. _tree_ready is a flag for that.", "self", ".", "_tree_ready", "=", "verify_dir_structure", "(", "self", ".", "_dirs", "[", "'base'", "]", ")", "if", "self", ".", "_tree_ready", ":", "self", ".", "_read_site_config", "(", ")" ]
Initialize variables based on evidence about the directories.
[ "Initialize", "variables", "based", "on", "evidence", "about", "the", "directories", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L207-L220
242,379
jpablo128/simplystatic
simplystatic/s2site.py
Site._update_dirs_on_base
def _update_dirs_on_base(self): '''Fill up the names of dirs based on the contents of 'base'.''' if self._dirs['base'] != None: for d in self._predefined_dir_names: dstr = d #if d == "s2": # dstr = '.'+d self._dirs[d] = os.path.join(self._dirs['base'], dstr)
python
def _update_dirs_on_base(self): '''Fill up the names of dirs based on the contents of 'base'.''' if self._dirs['base'] != None: for d in self._predefined_dir_names: dstr = d #if d == "s2": # dstr = '.'+d self._dirs[d] = os.path.join(self._dirs['base'], dstr)
[ "def", "_update_dirs_on_base", "(", "self", ")", ":", "if", "self", ".", "_dirs", "[", "'base'", "]", "!=", "None", ":", "for", "d", "in", "self", ".", "_predefined_dir_names", ":", "dstr", "=", "d", "#if d == \"s2\":", "# dstr = '.'+d", "self", ".", "_dirs", "[", "d", "]", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_dirs", "[", "'base'", "]", ",", "dstr", ")" ]
Fill up the names of dirs based on the contents of 'base'.
[ "Fill", "up", "the", "names", "of", "dirs", "based", "on", "the", "contents", "of", "base", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L222-L229
242,380
jpablo128/simplystatic
simplystatic/s2site.py
Site.init_structure
def init_structure(self):
    '''Initialize a directory to serve as a Simply Static site.

    Initialization is done on the base_dir (base_dir is set upon __init__,
    so it has a value when this method is called), and it is only performed
    if base_dir is empty and it is writeable.

    This operation creates the directories, copies any existing templates
    to the source_dir and common_dir, and creates the default configuration
    file within the directory s2
    '''
    if self._dirs['base'] != None:  # pragma: no cover
        #there's a base here or up the chain
        raise ValueError  #cannot initialize
    else:
        if self._dirs['initial'] != None:  # pragma: no cover
            self._dirs['base'] = self._dirs['initial']
        else:  # pragma: no cover
            self._dirs['base'] = self._dirs['run']
        #now proceed
        self._update_dirs_on_base()
        if (not dir_empty(self._dirs['base'])) or \
           (not os.access(self._dirs['base'], os.W_OK)):
            raise ValueError
        # copy the dirs from package data to the base dir (common,themes)
        pdl = package_data_location()
        datadirs = glob.glob(os.path.join(pdl, "*"))
        for dd in datadirs:
            if os.path.isdir(dd):
                shutil.copytree(dd, os.path.join(self._dirs['base'],
                                                 os.path.split(dd)[1]))
        # create all predefined dirs that don't exist yet in base
        for d in self._dirs:
            if not d in ['initial', 'run', 'base']:
                if not os.path.isdir(self._dirs[d]):
                    os.mkdir(self._dirs[d])
        self._tree_ready = verify_dir_structure(self._dirs['base'])
        self.site_config = self._create_default_config()
python
def init_structure(self): '''Initialize a directory to serve as a Simply Static site. Initialization is done on the base_dir (base_dir is set upon __init__, so it has a value when this method is called), and it is only performed if base_dir is empty and it is writeable. This operation creates the directories, copies any existing templates to the source_dir and common_dir, and creates the default configuration file within the directory s2 ''' if self._dirs['base'] != None: # pragma: no cover #there's a base here or up the chain raise ValueError #cannot initialize else: if self._dirs['initial'] != None: # pragma: no cover self._dirs['base'] = self._dirs['initial'] else: # pragma: no cover self._dirs['base'] = self._dirs['run'] #now proceed self._update_dirs_on_base() if (not dir_empty(self._dirs['base']) ) or \ (not os.access(self._dirs['base'], os.W_OK)): raise ValueError # copy the dirs from package data to the base dir (common,themes) pdl = package_data_location() datadirs = glob.glob(os.path.join(pdl,"*")) for dd in datadirs: if os.path.isdir(dd): shutil.copytree(dd, os.path.join(self._dirs['base'], os.path.split(dd)[1])) # create all predefined dirs that don't exist yet in base for d in self._dirs: if not d in ['initial', 'run', 'base']: if not os.path.isdir(self._dirs[d]): os.mkdir(self._dirs[d]) self._tree_ready = verify_dir_structure(self._dirs['base']) self.site_config = self._create_default_config()
[ "def", "init_structure", "(", "self", ")", ":", "if", "self", ".", "_dirs", "[", "'base'", "]", "!=", "None", ":", "# pragma: no cover ", "#there's a base here or up the chain", "raise", "ValueError", "#cannot initialize", "else", ":", "if", "self", ".", "_dirs", "[", "'initial'", "]", "!=", "None", ":", "# pragma: no cover", "self", ".", "_dirs", "[", "'base'", "]", "=", "self", ".", "_dirs", "[", "'initial'", "]", "else", ":", "# pragma: no cover", "self", ".", "_dirs", "[", "'base'", "]", "=", "self", ".", "_dirs", "[", "'run'", "]", "#now proceed", "self", ".", "_update_dirs_on_base", "(", ")", "if", "(", "not", "dir_empty", "(", "self", ".", "_dirs", "[", "'base'", "]", ")", ")", "or", "(", "not", "os", ".", "access", "(", "self", ".", "_dirs", "[", "'base'", "]", ",", "os", ".", "W_OK", ")", ")", ":", "raise", "ValueError", "# copy the dirs from package data to the base dir (common,themes)", "pdl", "=", "package_data_location", "(", ")", "datadirs", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "pdl", ",", "\"*\"", ")", ")", "for", "dd", "in", "datadirs", ":", "if", "os", ".", "path", ".", "isdir", "(", "dd", ")", ":", "shutil", ".", "copytree", "(", "dd", ",", "os", ".", "path", ".", "join", "(", "self", ".", "_dirs", "[", "'base'", "]", ",", "os", ".", "path", ".", "split", "(", "dd", ")", "[", "1", "]", ")", ")", "# create all predefined dirs that don't exist yet in base", "for", "d", "in", "self", ".", "_dirs", ":", "if", "not", "d", "in", "[", "'initial'", ",", "'run'", ",", "'base'", "]", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "_dirs", "[", "d", "]", ")", ":", "os", ".", "mkdir", "(", "self", ".", "_dirs", "[", "d", "]", ")", "self", ".", "_tree_ready", "=", "verify_dir_structure", "(", "self", ".", "_dirs", "[", "'base'", "]", ")", "self", ".", "site_config", "=", "self", ".", "_create_default_config", "(", ")" ]
Initialize a directory to serve as a Simply Static site. Initialization is done on the base_dir (base_dir is set upon __init__, so it has a value when this method is called), and it is only performed if base_dir is empty and it is writeable. This operation creates the directories, copies any existing templates to the source_dir and common_dir, and creates the default configuration file within the directory s2
[ "Initialize", "a", "directory", "to", "serve", "as", "a", "Simply", "Static", "site", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L236-L275
242,381
jpablo128/simplystatic
simplystatic/s2site.py
Site.random_page
def random_page(self, title=None, content=None, creation_date=None, tags=None): '''Generate random page, write it and return the corresponding \ object.''' if title == None: title = util.random_title() if content == None: content = util.random_md_page() if creation_date == None: creation_date = util.random_date() if tags == None: tags = [] # yes, we pass self as a param. It's a ref to this site, that # is needed by the page p = s2page.Page(self, title) #here, set date and tags?? # date = util.random_date() p.content = content p.creation_date = creation_date p.tags = tags p.write() return p
python
def random_page(self, title=None, content=None, creation_date=None, tags=None): '''Generate random page, write it and return the corresponding \ object.''' if title == None: title = util.random_title() if content == None: content = util.random_md_page() if creation_date == None: creation_date = util.random_date() if tags == None: tags = [] # yes, we pass self as a param. It's a ref to this site, that # is needed by the page p = s2page.Page(self, title) #here, set date and tags?? # date = util.random_date() p.content = content p.creation_date = creation_date p.tags = tags p.write() return p
[ "def", "random_page", "(", "self", ",", "title", "=", "None", ",", "content", "=", "None", ",", "creation_date", "=", "None", ",", "tags", "=", "None", ")", ":", "if", "title", "==", "None", ":", "title", "=", "util", ".", "random_title", "(", ")", "if", "content", "==", "None", ":", "content", "=", "util", ".", "random_md_page", "(", ")", "if", "creation_date", "==", "None", ":", "creation_date", "=", "util", ".", "random_date", "(", ")", "if", "tags", "==", "None", ":", "tags", "=", "[", "]", "# yes, we pass self as a param. It's a ref to this site, that", "# is needed by the page", "p", "=", "s2page", ".", "Page", "(", "self", ",", "title", ")", "#here, set date and tags??", "# date = util.random_date()", "p", ".", "content", "=", "content", "p", ".", "creation_date", "=", "creation_date", "p", ".", "tags", "=", "tags", "p", ".", "write", "(", ")", "return", "p" ]
Generate random page, write it and return the corresponding \ object.
[ "Generate", "random", "page", "write", "it", "and", "return", "the", "corresponding", "\\", "object", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L474-L495
242,382
jpablo128/simplystatic
simplystatic/s2site.py
Site.page_exists_on_disk
def page_exists_on_disk(self, slug): '''Return true if post directory and post file both exist.''' r = False page_dir = os.path.join(self.dirs['source'], slug) page_file_name = os.path.join(page_dir, slug + '.md') if os.path.isdir(page_dir): if os.path.isfile(page_file_name): r = True return r
python
def page_exists_on_disk(self, slug): '''Return true if post directory and post file both exist.''' r = False page_dir = os.path.join(self.dirs['source'], slug) page_file_name = os.path.join(page_dir, slug + '.md') if os.path.isdir(page_dir): if os.path.isfile(page_file_name): r = True return r
[ "def", "page_exists_on_disk", "(", "self", ",", "slug", ")", ":", "r", "=", "False", "page_dir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dirs", "[", "'source'", "]", ",", "slug", ")", "page_file_name", "=", "os", ".", "path", ".", "join", "(", "page_dir", ",", "slug", "+", "'.md'", ")", "if", "os", ".", "path", ".", "isdir", "(", "page_dir", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "page_file_name", ")", ":", "r", "=", "True", "return", "r" ]
Return true if post directory and post file both exist.
[ "Return", "true", "if", "post", "directory", "and", "post", "file", "both", "exist", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L497-L506
242,383
jpablo128/simplystatic
simplystatic/s2site.py
Site.rename_page
def rename_page(self, old_slug, new_title): '''Load the page corresponding to the slug, and rename it.''' #load page p = s2page.Page(self, old_slug, isslug=True) p.rename(new_title)
python
def rename_page(self, old_slug, new_title): '''Load the page corresponding to the slug, and rename it.''' #load page p = s2page.Page(self, old_slug, isslug=True) p.rename(new_title)
[ "def", "rename_page", "(", "self", ",", "old_slug", ",", "new_title", ")", ":", "#load page", "p", "=", "s2page", ".", "Page", "(", "self", ",", "old_slug", ",", "isslug", "=", "True", ")", "p", ".", "rename", "(", "new_title", ")" ]
Load the page corresponding to the slug, and rename it.
[ "Load", "the", "page", "corresponding", "to", "the", "slug", "and", "rename", "it", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L508-L512
242,384
jpablo128/simplystatic
simplystatic/s2site.py
Site._wipe_www_page
def _wipe_www_page(self, slug): '''Remove all data in www about the page identified by slug.''' wd = os.path.join(self._dirs['www'], slug) if os.path.isdir(wd): # pragma: no cover shutil.rmtree(wd)
python
def _wipe_www_page(self, slug): '''Remove all data in www about the page identified by slug.''' wd = os.path.join(self._dirs['www'], slug) if os.path.isdir(wd): # pragma: no cover shutil.rmtree(wd)
[ "def", "_wipe_www_page", "(", "self", ",", "slug", ")", ":", "wd", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_dirs", "[", "'www'", "]", ",", "slug", ")", "if", "os", ".", "path", ".", "isdir", "(", "wd", ")", ":", "# pragma: no cover", "shutil", ".", "rmtree", "(", "wd", ")" ]
Remove all data in www about the page identified by slug.
[ "Remove", "all", "data", "in", "www", "about", "the", "page", "identified", "by", "slug", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L524-L528
242,385
jpablo128/simplystatic
simplystatic/s2site.py
Site._pages_to_generate
def _pages_to_generate(self): '''Return list of slugs that correspond to pages to generate.''' # right now it gets all the files. In theory, It should only # get what's changed... but the program is not doing that yet. all_pages = self.get_page_names() # keep only those whose status is published ptg = [] for slug in all_pages: p = s2page.Page(self, slug, isslug=True) if p.published: ptg.append({'slug': p.slug, 'title':p.title, 'date': p.creation_date }) # sort the ptg array in reverse chronological order of its entries. sptg = sorted(ptg, key=lambda x : x['date'],reverse=True) res = [ pinfo['slug'] for pinfo in sptg] return res
python
def _pages_to_generate(self): '''Return list of slugs that correspond to pages to generate.''' # right now it gets all the files. In theory, It should only # get what's changed... but the program is not doing that yet. all_pages = self.get_page_names() # keep only those whose status is published ptg = [] for slug in all_pages: p = s2page.Page(self, slug, isslug=True) if p.published: ptg.append({'slug': p.slug, 'title':p.title, 'date': p.creation_date }) # sort the ptg array in reverse chronological order of its entries. sptg = sorted(ptg, key=lambda x : x['date'],reverse=True) res = [ pinfo['slug'] for pinfo in sptg] return res
[ "def", "_pages_to_generate", "(", "self", ")", ":", "# right now it gets all the files. In theory, It should only", "# get what's changed... but the program is not doing that yet.", "all_pages", "=", "self", ".", "get_page_names", "(", ")", "# keep only those whose status is published", "ptg", "=", "[", "]", "for", "slug", "in", "all_pages", ":", "p", "=", "s2page", ".", "Page", "(", "self", ",", "slug", ",", "isslug", "=", "True", ")", "if", "p", ".", "published", ":", "ptg", ".", "append", "(", "{", "'slug'", ":", "p", ".", "slug", ",", "'title'", ":", "p", ".", "title", ",", "'date'", ":", "p", ".", "creation_date", "}", ")", "# sort the ptg array in reverse chronological order of its entries.", "sptg", "=", "sorted", "(", "ptg", ",", "key", "=", "lambda", "x", ":", "x", "[", "'date'", "]", ",", "reverse", "=", "True", ")", "res", "=", "[", "pinfo", "[", "'slug'", "]", "for", "pinfo", "in", "sptg", "]", "return", "res" ]
Return list of slugs that correspond to pages to generate.
[ "Return", "list", "of", "slugs", "that", "correspond", "to", "pages", "to", "generate", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L530-L546
242,386
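The sorting step in the _pages_to_generate record above is ordinary sorted() with a key and reverse=True. A tiny standalone illustration with dummy page dicts:

from datetime import date

pages = [
    {'slug': 'older-post',  'date': date(2013, 1, 5)},
    {'slug': 'newest-post', 'date': date(2014, 6, 1)},
    {'slug': 'middle-post', 'date': date(2013, 11, 20)},
]

# Reverse-chronological order, then keep only the slugs, as in the method above.
ordered = [p['slug'] for p in sorted(pages, key=lambda p: p['date'], reverse=True)]
print(ordered)   # ['newest-post', 'middle-post', 'older-post']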
jpablo128/simplystatic
simplystatic/s2site.py
Site._create_default_config
def _create_default_config(self): '''Create and write to disk a default site config file.''' # maybe I should read the default config from somewhere in the package? cfg = { 'site_title': '', 'site_subtitle': '', 'default_author': '', 'site_url': '', 'default_theme': 'blog1', 'default_template': 'main.html.tpl', 'fixed_frontpage': '' } file_name = os.path.join(self._dirs['s2'],'config.yml') f = open(file_name,'w') f.write(yaml.dump(cfg,default_flow_style=False)) f.close() return cfg
python
def _create_default_config(self): '''Create and write to disk a default site config file.''' # maybe I should read the default config from somewhere in the package? cfg = { 'site_title': '', 'site_subtitle': '', 'default_author': '', 'site_url': '', 'default_theme': 'blog1', 'default_template': 'main.html.tpl', 'fixed_frontpage': '' } file_name = os.path.join(self._dirs['s2'],'config.yml') f = open(file_name,'w') f.write(yaml.dump(cfg,default_flow_style=False)) f.close() return cfg
[ "def", "_create_default_config", "(", "self", ")", ":", "# maybe I should read the default config from somewhere in the package?", "cfg", "=", "{", "'site_title'", ":", "''", ",", "'site_subtitle'", ":", "''", ",", "'default_author'", ":", "''", ",", "'site_url'", ":", "''", ",", "'default_theme'", ":", "'blog1'", ",", "'default_template'", ":", "'main.html.tpl'", ",", "'fixed_frontpage'", ":", "''", "}", "file_name", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_dirs", "[", "'s2'", "]", ",", "'config.yml'", ")", "f", "=", "open", "(", "file_name", ",", "'w'", ")", "f", ".", "write", "(", "yaml", ".", "dump", "(", "cfg", ",", "default_flow_style", "=", "False", ")", ")", "f", ".", "close", "(", ")", "return", "cfg" ]
Create and write to disk a default site config file.
[ "Create", "and", "write", "to", "disk", "a", "default", "site", "config", "file", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L548-L565
242,387
jpablo128/simplystatic
simplystatic/s2site.py
Site._read_site_config
def _read_site_config(self): '''Read and return the site config, as a dictionary.''' file_name = os.path.join(self._dirs['s2'],'config.yml') if os.path.isfile(file_name): f = open(file_name,'r') cfg = yaml.load(f.read()) f.close() else: cfg = self._create_default_config() return cfg
python
def _read_site_config(self): '''Read and return the site config, as a dictionary.''' file_name = os.path.join(self._dirs['s2'],'config.yml') if os.path.isfile(file_name): f = open(file_name,'r') cfg = yaml.load(f.read()) f.close() else: cfg = self._create_default_config() return cfg
[ "def", "_read_site_config", "(", "self", ")", ":", "file_name", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_dirs", "[", "'s2'", "]", ",", "'config.yml'", ")", "if", "os", ".", "path", ".", "isfile", "(", "file_name", ")", ":", "f", "=", "open", "(", "file_name", ",", "'r'", ")", "cfg", "=", "yaml", ".", "load", "(", "f", ".", "read", "(", ")", ")", "f", ".", "close", "(", ")", "else", ":", "cfg", "=", "self", ".", "_create_default_config", "(", ")", "return", "cfg" ]
Read and return the site config, as a dictionary.
[ "Read", "and", "return", "the", "site", "config", "as", "a", "dictionary", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/simplystatic/s2site.py#L567-L576
242,388
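The _read_site_config record above calls yaml.load without an explicit Loader, which newer PyYAML releases warn about or reject because full load can construct arbitrary Python objects. A minimal round-trip sketch using safe_dump/safe_load and context managers, assuming PyYAML is installed; the field names are taken from the config record above:

import os
import tempfile
import yaml   # PyYAML

cfg = {'site_title': '', 'default_theme': 'blog1', 'default_template': 'main.html.tpl'}
path = os.path.join(tempfile.mkdtemp(), 'config.yml')

# Write the config; safe_dump restricts output to plain YAML types.
with open(path, 'w') as f:
    yaml.safe_dump(cfg, f, default_flow_style=False)

# Read it back; safe_load avoids constructing arbitrary Python objects.
with open(path) as f:
    loaded = yaml.safe_load(f)

print(loaded == cfg)   # True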
OiNutter/lean
lean/__init__.py
Lean.register
def register(template_class,*extensions): ''' Register a template for a given extension or range of extensions ''' for ext in extensions: ext = normalize(ext) if not Lean.template_mappings.has_key(ext): Lean.template_mappings[ext] = [] Lean.template_mappings[ext].insert(0,template_class) Lean.template_mappings[ext] = unique(Lean.template_mappings[ext])
python
def register(template_class,*extensions): ''' Register a template for a given extension or range of extensions ''' for ext in extensions: ext = normalize(ext) if not Lean.template_mappings.has_key(ext): Lean.template_mappings[ext] = [] Lean.template_mappings[ext].insert(0,template_class) Lean.template_mappings[ext] = unique(Lean.template_mappings[ext])
[ "def", "register", "(", "template_class", ",", "*", "extensions", ")", ":", "for", "ext", "in", "extensions", ":", "ext", "=", "normalize", "(", "ext", ")", "if", "not", "Lean", ".", "template_mappings", ".", "has_key", "(", "ext", ")", ":", "Lean", ".", "template_mappings", "[", "ext", "]", "=", "[", "]", "Lean", ".", "template_mappings", "[", "ext", "]", ".", "insert", "(", "0", ",", "template_class", ")", "Lean", ".", "template_mappings", "[", "ext", "]", "=", "unique", "(", "Lean", ".", "template_mappings", "[", "ext", "]", ")" ]
Register a template for a given extension or range of extensions
[ "Register", "a", "template", "for", "a", "given", "extension", "or", "range", "of", "extensions" ]
5d251f923acd44265ed401de14a9ead6752c543f
https://github.com/OiNutter/lean/blob/5d251f923acd44265ed401de14a9ead6752c543f/lean/__init__.py#L10-L18
242,389
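The Lean.register record above relies on dict.has_key, which was removed in Python 3. A hedged, stand-alone sketch of the same extension-to-handlers registry using setdefault follows; it uses a plain module-level dict and a simplified normalization rather than Lean's own normalize helper.

# Stand-alone registry keyed by file extension; most recently registered class wins.
template_mappings = {}

def register(template_class, *extensions):
    for ext in extensions:
        ext = ext.lstrip('.').lower()              # rough stand-in for lean's normalize()
        handlers = template_mappings.setdefault(ext, [])
        if template_class in handlers:
            handlers.remove(template_class)
        handlers.insert(0, template_class)         # newest registration takes priority

class MarkdownTemplate: pass
class TextileTemplate: pass

register(MarkdownTemplate, '.md', 'markdown')
register(TextileTemplate, 'md')
print([cls.__name__ for cls in template_mappings['md']])
# ['TextileTemplate', 'MarkdownTemplate']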
OiNutter/lean
lean/__init__.py
Lean.is_registered
def is_registered(ext): ''' Returns true when a template exists on an exact match of the provided file extension ''' return Lean.template_mappings.has_key(ext.lower()) and len(Lean.template_mappings[ext])
python
def is_registered(ext): ''' Returns true when a template exists on an exact match of the provided file extension ''' return Lean.template_mappings.has_key(ext.lower()) and len(Lean.template_mappings[ext])
[ "def", "is_registered", "(", "ext", ")", ":", "return", "Lean", ".", "template_mappings", ".", "has_key", "(", "ext", ".", "lower", "(", ")", ")", "and", "len", "(", "Lean", ".", "template_mappings", "[", "ext", "]", ")" ]
Returns true when a template exists on an exact match of the provided file extension
[ "Returns", "true", "when", "a", "template", "exists", "on", "an", "exact", "match", "of", "the", "provided", "file", "extension" ]
5d251f923acd44265ed401de14a9ead6752c543f
https://github.com/OiNutter/lean/blob/5d251f923acd44265ed401de14a9ead6752c543f/lean/__init__.py#L44-L46
242,390
OiNutter/lean
lean/__init__.py
Lean.load
def load(file,line=None,options={},block=None): ''' Create a new template for the given file using the file's extension to determine the the template mapping. ''' template_class = Lean.get_template(file) if template_class: return template_class(file,line,options,block) else: raise LookupError('No template engine registered for ' + os.path.basename(file))
python
def load(file,line=None,options={},block=None): ''' Create a new template for the given file using the file's extension to determine the the template mapping. ''' template_class = Lean.get_template(file) if template_class: return template_class(file,line,options,block) else: raise LookupError('No template engine registered for ' + os.path.basename(file))
[ "def", "load", "(", "file", ",", "line", "=", "None", ",", "options", "=", "{", "}", ",", "block", "=", "None", ")", ":", "template_class", "=", "Lean", ".", "get_template", "(", "file", ")", "if", "template_class", ":", "return", "template_class", "(", "file", ",", "line", ",", "options", ",", "block", ")", "else", ":", "raise", "LookupError", "(", "'No template engine registered for '", "+", "os", ".", "path", ".", "basename", "(", "file", ")", ")" ]
Create a new template for the given file using the file's extension to determine the the template mapping.
[ "Create", "a", "new", "template", "for", "the", "given", "file", "using", "the", "file", "s", "extension", "to", "determine", "the", "the", "template", "mapping", "." ]
5d251f923acd44265ed401de14a9ead6752c543f
https://github.com/OiNutter/lean/blob/5d251f923acd44265ed401de14a9ead6752c543f/lean/__init__.py#L49-L58
242,391
OiNutter/lean
lean/__init__.py
Lean.get_template
def get_template(file):
    '''
    Lookup a template class for the given filename or file extension. Return nil when no implementation is found.
    '''
    pattern = str(file).lower()
    while len(pattern) and not Lean.is_registered(pattern):
        pattern = os.path.basename(pattern)
        pattern = re.sub(r'^[^.]*\.?','',pattern)

    # Try to find a preferred engine.
    preferred_klass = Lean.preferred_mappings[pattern] if Lean.preferred_mappings.has_key(pattern) else None
    if preferred_klass:
        return preferred_klass

    # Fall back to the general list of mappings
    klasses = Lean.template_mappings[pattern]

    # Try to find an engine which is already loaded
    template = None
    for klass in klasses:
        if hasattr(klass,'is_engine_initialized') and callable(klass.is_engine_initialized):
            if klass.is_engine_initialized():
                template = klass
                break

    if template:
        return template

    # Try each of the classes until one succeeds. If all of them fails,
    # we'll raise the error of the first class.
    first_failure = None
    for klass in klasses:
        try:
            return klass
        except Exception, e:
            if not first_failure:
                first_failure = e

    if first_failure:
        raise Exception(first_failure)
python
def get_template(file): ''' Lookup a template class for the given filename or file extension. Return nil when no implementation is found. ''' pattern = str(file).lower() while len(pattern) and not Lean.is_registered(pattern): pattern = os.path.basename(pattern) pattern = re.sub(r'^[^.]*\.?','',pattern) # Try to find a preferred engine. preferred_klass = Lean.preferred_mappings[pattern] if Lean.preferred_mappings.has_key(pattern) else None if preferred_klass: return preferred_klass # Fall back to the general list of mappings klasses = Lean.template_mappings[pattern] # Try to find an engine which is already loaded template = None for klass in klasses: if hasattr(klass,'is_engine_initialized') and callable(klass.is_engine_initialized): if klass.is_engine_initialized(): template = klass break if template: return template # Try each of the classes until one succeeds. If all of them fails, # we'll raise the error of the first class. first_failure = None for klass in klasses: try: return klass except Exception, e: if not first_failure: first_failure = e if first_failure: raise Exception(first_failure)
[ "def", "get_template", "(", "file", ")", ":", "pattern", "=", "str", "(", "file", ")", ".", "lower", "(", ")", "while", "len", "(", "pattern", ")", "and", "not", "Lean", ".", "is_registered", "(", "pattern", ")", ":", "pattern", "=", "os", ".", "path", ".", "basename", "(", "pattern", ")", "pattern", "=", "re", ".", "sub", "(", "r'^[^.]*\\.?'", ",", "''", ",", "pattern", ")", "# Try to find a preferred engine.", "preferred_klass", "=", "Lean", ".", "preferred_mappings", "[", "pattern", "]", "if", "Lean", ".", "preferred_mappings", ".", "has_key", "(", "pattern", ")", "else", "None", "if", "preferred_klass", ":", "return", "preferred_klass", "# Fall back to the general list of mappings", "klasses", "=", "Lean", ".", "template_mappings", "[", "pattern", "]", "# Try to find an engine which is already loaded", "template", "=", "None", "for", "klass", "in", "klasses", ":", "if", "hasattr", "(", "klass", ",", "'is_engine_initialized'", ")", "and", "callable", "(", "klass", ".", "is_engine_initialized", ")", ":", "if", "klass", ".", "is_engine_initialized", "(", ")", ":", "template", "=", "klass", "break", "if", "template", ":", "return", "template", "# Try each of the classes until one succeeds. If all of them fails,", "# we'll raise the error of the first class.", "first_failure", "=", "None", "for", "klass", "in", "klasses", ":", "try", ":", "return", "klass", "except", "Exception", ",", "e", ":", "if", "not", "first_failure", ":", "first_failure", "=", "e", "if", "first_failure", ":", "raise", "Exception", "(", "first_failure", ")" ]
Lookup a template class for the given filename or file extension. Return nil when no implementation is found.
[ "Lookup", "a", "template", "class", "for", "the", "given", "filename", "or", "file", "extension", ".", "Return", "nil", "when", "no", "implementation", "is", "found", "." ]
5d251f923acd44265ed401de14a9ead6752c543f
https://github.com/OiNutter/lean/blob/5d251f923acd44265ed401de14a9ead6752c543f/lean/__init__.py#L61-L103
242,392
steenzout/python-sphinx
steenzout/sphinx/cli.py
generate
def generate(organization, package, destination):
    """Generates the Sphinx configuration and Makefile.

    Args:
        organization (str): the organization name.
        package (str): the package to be documented.
        destination (str): the destination directory.
    """
    gen = ResourceGenerator(organization, package)

    tmp = tempfile.NamedTemporaryFile(mode='w+t', delete=False)
    try:
        tmp.write(gen.conf())
    finally:
        tmp.close()
    shutil.copy(tmp.name, os.path.join(destination, 'conf.py'))

    tmp = tempfile.NamedTemporaryFile(mode='w+t', delete=False)
    try:
        tmp.write(gen.makefile())
    finally:
        tmp.close()
    shutil.copy(tmp.name, os.path.join(destination, 'Makefile'))
python
def generate(organization, package, destination): """Generates the Sphinx configuration and Makefile. Args: organization (str): the organization name. package (str): the package to be documented. destination (str): the destination directory. """ gen = ResourceGenerator(organization, package) tmp = tempfile.NamedTemporaryFile(mode='w+t', delete=False) try: tmp.write(gen.conf()) finally: tmp.close() shutil.copy(tmp.name, os.path.join(destination, 'conf.py')) tmp = tempfile.NamedTemporaryFile(mode='w+t', delete=False) try: tmp.write(gen.makefile()) finally: tmp.close() shutil.copy(tmp.name, os.path.join(destination, 'Makefile'))
[ "def", "generate", "(", "organization", ",", "package", ",", "destination", ")", ":", "gen", "=", "ResourceGenerator", "(", "organization", ",", "package", ")", "tmp", "=", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w+t'", ",", "delete", "=", "False", ")", "try", ":", "tmp", ".", "write", "(", "gen", ".", "conf", "(", ")", ")", "finally", ":", "tmp", ".", "close", "(", ")", "shutil", ".", "copy", "(", "tmp", ".", "name", ",", "os", ".", "path", ".", "join", "(", "destination", ",", "'conf.py'", ")", ")", "tmp", "=", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w+t'", ",", "delete", "=", "False", ")", "try", ":", "tmp", ".", "write", "(", "gen", ".", "makefile", "(", ")", ")", "finally", ":", "tmp", ".", "close", "(", ")", "shutil", ".", "copy", "(", "tmp", ".", "name", ",", "os", ".", "path", ".", "join", "(", "destination", ",", "'Makefile'", ")", ")" ]
Generates the Sphinx configuration and Makefile. Args: organization (str): the organization name. package (str): the package to be documented. destination (str): the destination directory.
[ "Generates", "the", "Sphinx", "configuration", "and", "Makefile", "." ]
b9767195fba74540c385fdf5f94cc4a24bc5e46d
https://github.com/steenzout/python-sphinx/blob/b9767195fba74540c385fdf5f94cc4a24bc5e46d/steenzout/sphinx/cli.py#L33-L57
242,393
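The generate record above writes each file through NamedTemporaryFile(delete=False), which leaves the temporary file behind after the copy. A minimal sketch of the same write-then-copy step with a context manager and explicit cleanup, using only the standard library; the helper name and sample content are illustrative, not part of the steenzout package:

import os
import shutil
import tempfile

def write_via_tempfile(text, dest_path):
    """Write text to a temp file, copy it into place, then remove the temp file."""
    with tempfile.NamedTemporaryFile(mode='w+t', delete=False) as tmp:
        tmp.write(text)
        tmp_name = tmp.name
    try:
        shutil.copy(tmp_name, dest_path)
    finally:
        os.unlink(tmp_name)   # avoid leaving stray files in the temp directory

dest_dir = tempfile.mkdtemp()
write_via_tempfile("# generated conf\n", os.path.join(dest_dir, 'conf.py'))
print(os.listdir(dest_dir))   # ['conf.py']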
elifesciences/elife-article
elifearticle/article.py
Article.get_self_uri
def get_self_uri(self, content_type): "return the first self uri with the content_type" try: return [self_uri for self_uri in self.self_uri_list if self_uri.content_type == content_type][0] except IndexError: return None
python
def get_self_uri(self, content_type): "return the first self uri with the content_type" try: return [self_uri for self_uri in self.self_uri_list if self_uri.content_type == content_type][0] except IndexError: return None
[ "def", "get_self_uri", "(", "self", ",", "content_type", ")", ":", "try", ":", "return", "[", "self_uri", "for", "self_uri", "in", "self", ".", "self_uri_list", "if", "self_uri", ".", "content_type", "==", "content_type", "]", "[", "0", "]", "except", "IndexError", ":", "return", "None" ]
return the first self uri with the content_type
[ "return", "the", "first", "self", "uri", "with", "the", "content_type" ]
99710c213cd81fe6fd1e5c150d6e20efe2d1e33b
https://github.com/elifesciences/elife-article/blob/99710c213cd81fe6fd1e5c150d6e20efe2d1e33b/elifearticle/article.py#L145-L151
242,394
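The get_self_uri record above builds a full list and indexes [0] inside a try/except. The same "first match or None" lookup can be written with next() and a generator, which stops at the first hit and needs no exception handling. The sketch below is stand-alone and uses a small namedtuple in place of the elifearticle classes.

from collections import namedtuple

SelfUri = namedtuple('SelfUri', ['content_type', 'xlink_href'])

uris = [
    SelfUri('pdf', 'https://example.org/article.pdf'),
    SelfUri('xml', 'https://example.org/article.xml'),
]

def get_self_uri(uri_list, content_type):
    """Return the first uri with the given content_type, or None."""
    return next((u for u in uri_list if u.content_type == content_type), None)

print(get_self_uri(uris, 'xml'))    # SelfUri(content_type='xml', ...)
print(get_self_uri(uris, 'tiff'))   # None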
elifesciences/elife-article
elifearticle/article.py
Article.pretty
def pretty(self): "sort values and format output for viewing and comparing in test scenarios" pretty_obj = OrderedDict() for key, value in sorted(iteritems(self.__dict__)): if value is None: pretty_obj[key] = None elif is_str_or_unicode(value): pretty_obj[key] = self.__dict__.get(key) elif isinstance(value, list): pretty_obj[key] = [] elif isinstance(value, dict): pretty_obj[key] = {} else: pretty_obj[key] = unicode_value(value) return pretty_obj
python
def pretty(self): "sort values and format output for viewing and comparing in test scenarios" pretty_obj = OrderedDict() for key, value in sorted(iteritems(self.__dict__)): if value is None: pretty_obj[key] = None elif is_str_or_unicode(value): pretty_obj[key] = self.__dict__.get(key) elif isinstance(value, list): pretty_obj[key] = [] elif isinstance(value, dict): pretty_obj[key] = {} else: pretty_obj[key] = unicode_value(value) return pretty_obj
[ "def", "pretty", "(", "self", ")", ":", "pretty_obj", "=", "OrderedDict", "(", ")", "for", "key", ",", "value", "in", "sorted", "(", "iteritems", "(", "self", ".", "__dict__", ")", ")", ":", "if", "value", "is", "None", ":", "pretty_obj", "[", "key", "]", "=", "None", "elif", "is_str_or_unicode", "(", "value", ")", ":", "pretty_obj", "[", "key", "]", "=", "self", ".", "__dict__", ".", "get", "(", "key", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "pretty_obj", "[", "key", "]", "=", "[", "]", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "pretty_obj", "[", "key", "]", "=", "{", "}", "else", ":", "pretty_obj", "[", "key", "]", "=", "unicode_value", "(", "value", ")", "return", "pretty_obj" ]
sort values and format output for viewing and comparing in test scenarios
[ "sort", "values", "and", "format", "output", "for", "viewing", "and", "comparing", "in", "test", "scenarios" ]
99710c213cd81fe6fd1e5c150d6e20efe2d1e33b
https://github.com/elifesciences/elife-article/blob/99710c213cd81fe6fd1e5c150d6e20efe2d1e33b/elifearticle/article.py#L153-L167
242,395
biocore/mustached-octo-ironman
moi/__init__.py
_support_directory
def _support_directory(): """Get the path of the support_files directory""" from os.path import join, dirname, abspath return join(dirname(abspath(__file__)), 'support_files')
python
def _support_directory(): """Get the path of the support_files directory""" from os.path import join, dirname, abspath return join(dirname(abspath(__file__)), 'support_files')
[ "def", "_support_directory", "(", ")", ":", "from", "os", ".", "path", "import", "join", ",", "dirname", ",", "abspath", "return", "join", "(", "dirname", "(", "abspath", "(", "__file__", ")", ")", ",", "'support_files'", ")" ]
Get the path of the support_files directory
[ "Get", "the", "path", "of", "the", "support_files", "directory" ]
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/__init__.py#L21-L24
242,396
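The _support_directory record above uses the common "path relative to __file__" idiom. A self-contained sketch of that idiom, plus a note on a newer alternative; the function name below is illustrative, not part of the moi package:

import os

def support_directory(package_file):
    """Return the support_files directory next to the given module file."""
    return os.path.join(os.path.dirname(os.path.abspath(package_file)), 'support_files')

print(support_directory(__file__))

# On Python 3.9+, importlib.resources.files('moi') / 'support_files' is an
# alternative that also works when the package is not installed as plain files;
# the package name here is taken from the record above.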
EnigmaBridge/client.py
ebclient/eb_create_uo.py
CreateUO.create_uo
def create_uo(self, configuration=None, tpl=None, keys=None, obj_type=None):
    """
    Create a new UserObject from the given template.

    :param configuration: EB configuration to use
    :param tpl: CreateUserObject template, contain misc settings
    :param keys: dictionary of keys, create_uo.KeyTypes.
        Communication keys, application key (if applicable).
    :param obj_type: optional field for easy object type entry - required
        flags are computed from keys dict and tpl.
    :return: UO - user object ready to use
    """
    if configuration is not None:
        self.configuration = configuration
    if tpl is not None:
        self.tpl = tpl
    if keys is not None:
        self.keys = keys
    if self.keys is None:
        self.keys = dict()

    # generate comm keys if not present
    TemplateProcessor.generate_comm_keys_if_not_present(self.keys)

    # obj_type infer
    if obj_type is not None:
        tpl_type = CreateUO.get_uo_type(obj_type,
                                        KeyTypes.COMM_ENC in self.keys,
                                        KeyTypes.APP_KEY in self.keys)
        self.tpl = CreateUO.set_type(self.tpl if self.tpl is not None else dict(), tpl_type)

    # Create template specifications, using local config and defaults.
    spec = CreateUO.get_template_request_spec(self.configuration)
    if self.tpl is not None:
        if isinstance(self.tpl, dict):
            spec = EBUtils.update(spec, self.tpl)
        else:
            raise ValueError('Unknown tpl format')

    # Fetch template for new UO.
    tpl_resp = CreateUO.template_request(self.configuration, spec)

    # Process the template, fill in the keys, do the crypto
    tpl_processor = TemplateProcessor(configuration=self.configuration,
                                      keys=self.keys,
                                      tpl_response=tpl_resp)
    tpl_req = tpl_processor.process()

    # Import the initialized UO
    self.import_resp = CreateUO.import_object(configuration=self.configuration, tpl=tpl_req)

    # Build UO
    uo = CreateUO.build_imported_object(configuration=self.configuration,
                                        tpl_import_req=tpl_req,
                                        import_resp=self.import_resp)
    return uo
python
def create_uo(self, configuration=None, tpl=None, keys=None, obj_type=None): """ Create a new UserObject from the given template. :param configuration: EB configuration to use :param tpl: CreateUserObject template, contain misc settings :param keys: dictionary of keys, create_uo.KeyTypes. Communication keys, application key (if applicable). :param obj_type: optional field for easy object type entry - required flags are computed from keys dict and tpl. :return: UO - user object ready to use """ if configuration is not None: self.configuration = configuration if tpl is not None: self.tpl = tpl if keys is not None: self.keys = keys if self.keys is None: self.keys = dict() # generate comm keys if not present TemplateProcessor.generate_comm_keys_if_not_present(self.keys) # obj_type infer if obj_type is not None: tpl_type = CreateUO.get_uo_type(obj_type, KeyTypes.COMM_ENC in self.keys, KeyTypes.APP_KEY in self.keys) self.tpl = CreateUO.set_type(self.tpl if self.tpl is not None else dict(), tpl_type) # Create template specifications, using local config and defaults. spec = CreateUO.get_template_request_spec(self.configuration) if self.tpl is not None: if isinstance(self.tpl, dict): spec = EBUtils.update(spec, self.tpl) else: raise ValueError('Unknown tpl format') # Fetch template for new UO. tpl_resp = CreateUO.template_request(self.configuration, spec) # Process the template, fill in the keys, do the crypto tpl_processor = TemplateProcessor(configuration=self.configuration, keys=self.keys, tpl_response=tpl_resp) tpl_req = tpl_processor.process() # Import the initialized UO self.import_resp = CreateUO.import_object(configuration=self.configuration, tpl=tpl_req) # Build UO uo = CreateUO.build_imported_object(configuration=self.configuration, tpl_import_req=tpl_req, import_resp=self.import_resp) return uo
[ "def", "create_uo", "(", "self", ",", "configuration", "=", "None", ",", "tpl", "=", "None", ",", "keys", "=", "None", ",", "obj_type", "=", "None", ")", ":", "if", "configuration", "is", "not", "None", ":", "self", ".", "configuration", "=", "configuration", "if", "tpl", "is", "not", "None", ":", "self", ".", "tpl", "=", "tpl", "if", "keys", "is", "not", "None", ":", "self", ".", "keys", "=", "keys", "if", "self", ".", "keys", "is", "None", ":", "self", ".", "keys", "=", "dict", "(", ")", "# generate comm keys if not present", "TemplateProcessor", ".", "generate_comm_keys_if_not_present", "(", "self", ".", "keys", ")", "# obj_type infer", "if", "obj_type", "is", "not", "None", ":", "tpl_type", "=", "CreateUO", ".", "get_uo_type", "(", "obj_type", ",", "KeyTypes", ".", "COMM_ENC", "in", "self", ".", "keys", ",", "KeyTypes", ".", "APP_KEY", "in", "self", ".", "keys", ")", "self", ".", "tpl", "=", "CreateUO", ".", "set_type", "(", "self", ".", "tpl", "if", "self", ".", "tpl", "is", "not", "None", "else", "dict", "(", ")", ",", "tpl_type", ")", "# Create template specifications, using local config and defaults.", "spec", "=", "CreateUO", ".", "get_template_request_spec", "(", "self", ".", "configuration", ")", "if", "self", ".", "tpl", "is", "not", "None", ":", "if", "isinstance", "(", "self", ".", "tpl", ",", "dict", ")", ":", "spec", "=", "EBUtils", ".", "update", "(", "spec", ",", "self", ".", "tpl", ")", "else", ":", "raise", "ValueError", "(", "'Unknown tpl format'", ")", "# Fetch template for new UO.", "tpl_resp", "=", "CreateUO", ".", "template_request", "(", "self", ".", "configuration", ",", "spec", ")", "# Process the template, fill in the keys, do the crypto", "tpl_processor", "=", "TemplateProcessor", "(", "configuration", "=", "self", ".", "configuration", ",", "keys", "=", "self", ".", "keys", ",", "tpl_response", "=", "tpl_resp", ")", "tpl_req", "=", "tpl_processor", ".", "process", "(", ")", "# Import the initialized UO", "self", ".", "import_resp", "=", "CreateUO", ".", "import_object", "(", "configuration", "=", "self", ".", "configuration", ",", "tpl", "=", "tpl_req", ")", "# Build UO", "uo", "=", "CreateUO", ".", "build_imported_object", "(", "configuration", "=", "self", ".", "configuration", ",", "tpl_import_req", "=", "tpl_req", ",", "import_resp", "=", "self", ".", "import_resp", ")", "return", "uo" ]
Create a new UserObject from the given template. :param configuration: EB configuration to use :param tpl: CreateUserObject template, contain misc settings :param keys: dictionary of keys, create_uo.KeyTypes. Communication keys, application key (if applicable). :param obj_type: optional field for easy object type entry - required flags are computed from keys dict and tpl. :return: UO - user object ready to use
[ "Create", "a", "new", "UserObject", "from", "the", "given", "template", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_create_uo.py#L36-L84
242,397
EnigmaBridge/client.py
ebclient/eb_create_uo.py
CreateUO.set_type
def set_type(spec, obj_type):
    """
    Updates type integer in the cerate UO specification.
    Type has to already have generations flags set correctly. Generation field is set accordingly.

    :param spec:
    :param obj_type:
    :return:
    """
    if spec is None:
        raise ValueError('Spec cannot be None')

    if TemplateFields.generation not in spec:
        spec[TemplateFields.generation] = {}

    spec[TemplateFields.generation][TemplateFields.commkey] = \
        Gen.CLIENT if (obj_type & (int(1) << TemplateFields.FLAG_COMM_GEN)) > 0 else Gen.LEGACY_RANDOM

    spec[TemplateFields.generation][TemplateFields.appkey] = \
        Gen.CLIENT if (obj_type & (int(1) << TemplateFields.FLAG_APP_GEN)) > 0 else Gen.LEGACY_RANDOM

    spec[TemplateFields.type] = "%x" % obj_type
    return spec
python
def set_type(spec, obj_type): """ Updates type integer in the cerate UO specification. Type has to already have generations flags set correctly. Generation field is set accordingly. :param spec: :param obj_type: :return: """ if spec is None: raise ValueError('Spec cannot be None') if TemplateFields.generation not in spec: spec[TemplateFields.generation] = {} spec[TemplateFields.generation][TemplateFields.commkey] = \ Gen.CLIENT if (obj_type & (int(1) << TemplateFields.FLAG_COMM_GEN)) > 0 else Gen.LEGACY_RANDOM spec[TemplateFields.generation][TemplateFields.appkey] = \ Gen.CLIENT if (obj_type & (int(1) << TemplateFields.FLAG_APP_GEN)) > 0 else Gen.LEGACY_RANDOM spec[TemplateFields.type] = "%x" % obj_type return spec
[ "def", "set_type", "(", "spec", ",", "obj_type", ")", ":", "if", "spec", "is", "None", ":", "raise", "ValueError", "(", "'Spec cannot be None'", ")", "if", "TemplateFields", ".", "generation", "not", "in", "spec", ":", "spec", "[", "TemplateFields", ".", "generation", "]", "=", "{", "}", "spec", "[", "TemplateFields", ".", "generation", "]", "[", "TemplateFields", ".", "commkey", "]", "=", "Gen", ".", "CLIENT", "if", "(", "obj_type", "&", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_COMM_GEN", ")", ")", ">", "0", "else", "Gen", ".", "LEGACY_RANDOM", "spec", "[", "TemplateFields", ".", "generation", "]", "[", "TemplateFields", ".", "appkey", "]", "=", "Gen", ".", "CLIENT", "if", "(", "obj_type", "&", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_APP_GEN", ")", ")", ">", "0", "else", "Gen", ".", "LEGACY_RANDOM", "spec", "[", "TemplateFields", ".", "type", "]", "=", "\"%x\"", "%", "obj_type", "return", "spec" ]
Updates type integer in the cerate UO specification. Type has to already have generations flags set correctly. Generation field is set accordingly. :param spec: :param obj_type: :return:
[ "Updates", "type", "integer", "in", "the", "cerate", "UO", "specification", ".", "Type", "has", "to", "already", "have", "generations", "flags", "set", "correctly", ".", "Generation", "field", "is", "set", "accordingly", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_create_uo.py#L165-L185
242,398
EnigmaBridge/client.py
ebclient/eb_create_uo.py
CreateUO.get_uo_type
def get_uo_type(obj_type, comm_keys_provided=True, app_keys_provided=True): """ Constructs UO type from the operation and keys provided, clears bits set ib obj_type before unless None is specified to the given parameters. :param obj_type: :param comm_keys_provided: :param app_keys_provided: :return: """ if comm_keys_provided is not None and comm_keys_provided == False: obj_type &= ~(int(1) << TemplateFields.FLAG_COMM_GEN) elif comm_keys_provided: obj_type |= (int(1) << TemplateFields.FLAG_COMM_GEN) if app_keys_provided is not None and app_keys_provided == False: obj_type &= ~(int(1) << TemplateFields.FLAG_APP_GEN) elif app_keys_provided: obj_type |= (int(1) << TemplateFields.FLAG_APP_GEN) return obj_type
python
def get_uo_type(obj_type, comm_keys_provided=True, app_keys_provided=True): """ Constructs UO type from the operation and keys provided, clears bits set ib obj_type before unless None is specified to the given parameters. :param obj_type: :param comm_keys_provided: :param app_keys_provided: :return: """ if comm_keys_provided is not None and comm_keys_provided == False: obj_type &= ~(int(1) << TemplateFields.FLAG_COMM_GEN) elif comm_keys_provided: obj_type |= (int(1) << TemplateFields.FLAG_COMM_GEN) if app_keys_provided is not None and app_keys_provided == False: obj_type &= ~(int(1) << TemplateFields.FLAG_APP_GEN) elif app_keys_provided: obj_type |= (int(1) << TemplateFields.FLAG_APP_GEN) return obj_type
[ "def", "get_uo_type", "(", "obj_type", ",", "comm_keys_provided", "=", "True", ",", "app_keys_provided", "=", "True", ")", ":", "if", "comm_keys_provided", "is", "not", "None", "and", "comm_keys_provided", "==", "False", ":", "obj_type", "&=", "~", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_COMM_GEN", ")", "elif", "comm_keys_provided", ":", "obj_type", "|=", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_COMM_GEN", ")", "if", "app_keys_provided", "is", "not", "None", "and", "app_keys_provided", "==", "False", ":", "obj_type", "&=", "~", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_APP_GEN", ")", "elif", "app_keys_provided", ":", "obj_type", "|=", "(", "int", "(", "1", ")", "<<", "TemplateFields", ".", "FLAG_APP_GEN", ")", "return", "obj_type" ]
Constructs UO type from the operation and keys provided, clears bits set ib obj_type before unless None is specified to the given parameters. :param obj_type: :param comm_keys_provided: :param app_keys_provided: :return:
[ "Constructs", "UO", "type", "from", "the", "operation", "and", "keys", "provided", "clears", "bits", "set", "ib", "obj_type", "before", "unless", "None", "is", "specified", "to", "the", "given", "parameters", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_create_uo.py#L188-L208
242,399
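The get_uo_type and set_type records above boil down to setting and clearing single bit flags in an integer type field. A self-contained sketch of that pattern; the flag positions below are illustrative stand-ins, since the real FLAG_COMM_GEN / FLAG_APP_GEN values live in TemplateFields and are not shown in the records.

# Illustrative flag positions only; the real constants live in TemplateFields.
FLAG_COMM_GEN = 20
FLAG_APP_GEN = 21

def with_flag(value, bit, enabled):
    """Set or clear a single bit flag in an integer type field."""
    return value | (1 << bit) if enabled else value & ~(1 << bit)

obj_type = 0x4
obj_type = with_flag(obj_type, FLAG_COMM_GEN, True)    # client supplies comm keys
obj_type = with_flag(obj_type, FLAG_APP_GEN, False)    # server generates app key
print(hex(obj_type))   # 0x100004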
EnigmaBridge/client.py
ebclient/eb_create_uo.py
CreateUO.template_request
def template_request(configuration, spec): """ Calls the get template request :param configuration: :param spec: :return: """ # Template request, nonce will be regenerated. req = CreateUO.get_template_request(configuration, spec) # Do the request with retry. caller = RequestCall(req) resp = caller.call() return resp
python
def template_request(configuration, spec): """ Calls the get template request :param configuration: :param spec: :return: """ # Template request, nonce will be regenerated. req = CreateUO.get_template_request(configuration, spec) # Do the request with retry. caller = RequestCall(req) resp = caller.call() return resp
[ "def", "template_request", "(", "configuration", ",", "spec", ")", ":", "# Template request, nonce will be regenerated.", "req", "=", "CreateUO", ".", "get_template_request", "(", "configuration", ",", "spec", ")", "# Do the request with retry.", "caller", "=", "RequestCall", "(", "req", ")", "resp", "=", "caller", ".", "call", "(", ")", "return", "resp" ]
Calls the get template request :param configuration: :param spec: :return:
[ "Calls", "the", "get", "template", "request" ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_create_uo.py#L237-L251