column            dtype          values
id                int32          0 – 252k
repo              stringlengths  7 – 55
path              stringlengths  4 – 127
func_name         stringlengths  1 – 88
original_string   stringlengths  75 – 19.8k
language          stringclasses  1 value
code              stringlengths  75 – 19.8k
code_tokens       list
docstring         stringlengths  3 – 17.3k
docstring_tokens  list
sha               stringlengths  40 – 40
url               stringlengths  87 – 242
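The rows that follow use the columns above. As a rough illustration only (not part of the dataset or of any published loader), a record represented as a plain Python dict could be sanity-checked against that layout as in the sketch below; the sample values are trimmed from the first row shown further down.

EXPECTED_COLUMNS = {
    "id", "repo", "path", "func_name", "original_string", "language",
    "code", "code_tokens", "docstring", "docstring_tokens", "sha", "url",
}

def validate_record(record):
    """Check that a record dict matches the schema table above."""
    missing = EXPECTED_COLUMNS - record.keys()
    if missing:
        raise ValueError("missing columns: %s" % sorted(missing))
    if record["language"] != "python":      # the only class listed under `language`
        raise ValueError("unexpected language: %r" % record["language"])
    if len(record["sha"]) != 40:            # sha is a fixed 40-character git hash
        raise ValueError("sha is not 40 characters")
    if not isinstance(record["code_tokens"], list):
        raise ValueError("code_tokens must be a list")
    return True

# Example values trimmed from the first record below.
sample = {
    "id": 8100,
    "repo": "barrust/mediawiki",
    "path": "mediawiki/mediawikipage.py",
    "func_name": "MediaWikiPage.__load",
    "original_string": "def __load(self, redirect=True, preload=False): ...",
    "language": "python",
    "code": "def __load(self, redirect=True, preload=False): ...",
    "code_tokens": ["def", "__load", "(", "self", ")"],
    "docstring": "load the basic page information",
    "docstring_tokens": ["load", "the", "basic", "page", "information"],
    "sha": "292e0be6c752409062dceed325d74839caf16a9b",
    "url": "https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L478-L507",
}
print(validate_record(sample))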
8,100
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage.__load
python
def __load(self, redirect=True, preload=False):
    """ load the basic page information """
    query_params = {
        "prop": "info|pageprops",
        "inprop": "url",
        "ppprop": "disambiguation",
        "redirects": "",
    }
    query_params.update(self.__title_query_param())

    request = self.mediawiki.wiki_request(query_params)

    query = request["query"]
    pageid = list(query["pages"].keys())[0]
    page = query["pages"][pageid]

    # determine result of the request
    # missing is present if the page is missing
    if "missing" in page:
        self._raise_page_error()
    # redirects is present in query if page is a redirect
    elif "redirects" in query:
        self._handle_redirect(redirect, preload, query, page)
    # if pageprops is returned, it must be a disambiguation error
    elif "pageprops" in page:
        self._raise_disambiguation_error(page, pageid)
    else:
        self.pageid = pageid
        self.title = page["title"]
        self.url = page["fullurl"]
[ "def", "__load", "(", "self", ",", "redirect", "=", "True", ",", "preload", "=", "False", ")", ":", "query_params", "=", "{", "\"prop\"", ":", "\"info|pageprops\"", ",", "\"inprop\"", ":", "\"url\"", ",", "\"ppprop\"", ":", "\"disambiguation\"", ",", "\"redirects\"", ":", "\"\"", ",", "}", "query_params", ".", "update", "(", "self", ".", "__title_query_param", "(", ")", ")", "request", "=", "self", ".", "mediawiki", ".", "wiki_request", "(", "query_params", ")", "query", "=", "request", "[", "\"query\"", "]", "pageid", "=", "list", "(", "query", "[", "\"pages\"", "]", ".", "keys", "(", ")", ")", "[", "0", "]", "page", "=", "query", "[", "\"pages\"", "]", "[", "pageid", "]", "# determine result of the request", "# missing is present if the page is missing", "if", "\"missing\"", "in", "page", ":", "self", ".", "_raise_page_error", "(", ")", "# redirects is present in query if page is a redirect", "elif", "\"redirects\"", "in", "query", ":", "self", ".", "_handle_redirect", "(", "redirect", ",", "preload", ",", "query", ",", "page", ")", "# if pageprops is returned, it must be a disambiguation error", "elif", "\"pageprops\"", "in", "page", ":", "self", ".", "_raise_disambiguation_error", "(", "page", ",", "pageid", ")", "else", ":", "self", ".", "pageid", "=", "pageid", "self", ".", "title", "=", "page", "[", "\"title\"", "]", "self", ".", "url", "=", "page", "[", "\"fullurl\"", "]" ]
load the basic page information
[ "load", "the", "basic", "page", "information" ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L478-L507
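As an aside on the record above: __load classifies the API response purely by which keys are present. The standalone sketch below mirrors that decision order on hand-written response dicts; the dicts are invented for illustration and the helper name classify_page_response is not part of the mediawiki package.

def classify_page_response(query):
    """Mirror __load's decision order on a mocked action=query response."""
    pageid = list(query["pages"].keys())[0]
    page = query["pages"][pageid]
    if "missing" in page:          # page does not exist
        return "page error"
    elif "redirects" in query:     # request was redirected
        return "redirect"
    elif "pageprops" in page:      # disambiguation marker requested via ppprop
        return "disambiguation"
    return "loaded"

# Hand-written, minimal responses (not real API output).
print(classify_page_response({"pages": {"-1": {"missing": ""}}}))                         # page error
print(classify_page_response({"pages": {"1": {"pageprops": {"disambiguation": ""}}}}))    # disambiguation
print(classify_page_response({"pages": {"1": {"title": "Python", "fullurl": "..."}}}))    # loaded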
8,101
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage._raise_page_error
python
def _raise_page_error(self):
    """ raise the correct type of page error """
    if hasattr(self, "title"):
        raise PageError(title=self.title)
    else:
        raise PageError(pageid=self.pageid)
[ "def", "_raise_page_error", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "\"title\"", ")", ":", "raise", "PageError", "(", "title", "=", "self", ".", "title", ")", "else", ":", "raise", "PageError", "(", "pageid", "=", "self", ".", "pageid", ")" ]
raise the correct type of page error
[ "raise", "the", "correct", "type", "of", "page", "error" ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L509-L514
8,102
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage._raise_disambiguation_error
python
def _raise_disambiguation_error(self, page, pageid):
    """ parse and throw a disambiguation error """
    query_params = {
        "prop": "revisions",
        "rvprop": "content",
        "rvparse": "",
        "rvlimit": 1,
    }
    query_params.update(self.__title_query_param())
    request = self.mediawiki.wiki_request(query_params)
    html = request["query"]["pages"][pageid]["revisions"][0]["*"]

    lis = BeautifulSoup(html, "html.parser").find_all("li")
    filtered_lis = [
        li for li in lis if "tocsection" not in "".join(li.get("class", list()))
    ]
    may_refer_to = [li.a.get_text() for li in filtered_lis if li.a]

    disambiguation = list()
    for lis_item in filtered_lis:
        item = lis_item.find_all("a")
        one_disambiguation = dict()
        one_disambiguation["description"] = lis_item.text
        if item and hasattr(item[0], "title"):
            one_disambiguation["title"] = item[0]["title"]
        else:
            # these are non-linked records so double up the text
            one_disambiguation["title"] = lis_item.text
        disambiguation.append(one_disambiguation)
    raise DisambiguationError(
        getattr(self, "title", page["title"]),
        may_refer_to,
        page["fullurl"],
        disambiguation,
    )
[ "def", "_raise_disambiguation_error", "(", "self", ",", "page", ",", "pageid", ")", ":", "query_params", "=", "{", "\"prop\"", ":", "\"revisions\"", ",", "\"rvprop\"", ":", "\"content\"", ",", "\"rvparse\"", ":", "\"\"", ",", "\"rvlimit\"", ":", "1", ",", "}", "query_params", ".", "update", "(", "self", ".", "__title_query_param", "(", ")", ")", "request", "=", "self", ".", "mediawiki", ".", "wiki_request", "(", "query_params", ")", "html", "=", "request", "[", "\"query\"", "]", "[", "\"pages\"", "]", "[", "pageid", "]", "[", "\"revisions\"", "]", "[", "0", "]", "[", "\"*\"", "]", "lis", "=", "BeautifulSoup", "(", "html", ",", "\"html.parser\"", ")", ".", "find_all", "(", "\"li\"", ")", "filtered_lis", "=", "[", "li", "for", "li", "in", "lis", "if", "\"tocsection\"", "not", "in", "\"\"", ".", "join", "(", "li", ".", "get", "(", "\"class\"", ",", "list", "(", ")", ")", ")", "]", "may_refer_to", "=", "[", "li", ".", "a", ".", "get_text", "(", ")", "for", "li", "in", "filtered_lis", "if", "li", ".", "a", "]", "disambiguation", "=", "list", "(", ")", "for", "lis_item", "in", "filtered_lis", ":", "item", "=", "lis_item", ".", "find_all", "(", "\"a\"", ")", "one_disambiguation", "=", "dict", "(", ")", "one_disambiguation", "[", "\"description\"", "]", "=", "lis_item", ".", "text", "if", "item", "and", "hasattr", "(", "item", "[", "0", "]", ",", "\"title\"", ")", ":", "one_disambiguation", "[", "\"title\"", "]", "=", "item", "[", "0", "]", "[", "\"title\"", "]", "else", ":", "# these are non-linked records so double up the text", "one_disambiguation", "[", "\"title\"", "]", "=", "lis_item", ".", "text", "disambiguation", ".", "append", "(", "one_disambiguation", ")", "raise", "DisambiguationError", "(", "getattr", "(", "self", ",", "\"title\"", ",", "page", "[", "\"title\"", "]", ")", ",", "may_refer_to", ",", "page", "[", "\"fullurl\"", "]", ",", "disambiguation", ",", ")" ]
parse and throw a disambiguation error
[ "parse", "and", "throw", "a", "disambiguation", "error" ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L516-L550
8,103
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage._parse_section_links
python
def _parse_section_links(self, id_tag):
    """ given a section id, parse the links in the unordered list """
    soup = BeautifulSoup(self.html, "html.parser")
    info = soup.find("span", {"id": id_tag})
    all_links = list()

    if info is None:
        return all_links

    for node in soup.find(id=id_tag).parent.next_siblings:
        if not isinstance(node, Tag):
            continue
        elif node.get("role", "") == "navigation":
            continue
        elif "infobox" in node.get("class", []):
            continue

        # this is actually the child node's class...
        is_headline = node.find("span", {"class": "mw-headline"})
        if is_headline is not None:
            break
        elif node.name == "a":
            all_links.append(self.__parse_link_info(node))
        else:
            for link in node.findAll("a"):
                all_links.append(self.__parse_link_info(link))
    return all_links
[ "def", "_parse_section_links", "(", "self", ",", "id_tag", ")", ":", "soup", "=", "BeautifulSoup", "(", "self", ".", "html", ",", "\"html.parser\"", ")", "info", "=", "soup", ".", "find", "(", "\"span\"", ",", "{", "\"id\"", ":", "id_tag", "}", ")", "all_links", "=", "list", "(", ")", "if", "info", "is", "None", ":", "return", "all_links", "for", "node", "in", "soup", ".", "find", "(", "id", "=", "id_tag", ")", ".", "parent", ".", "next_siblings", ":", "if", "not", "isinstance", "(", "node", ",", "Tag", ")", ":", "continue", "elif", "node", ".", "get", "(", "\"role\"", ",", "\"\"", ")", "==", "\"navigation\"", ":", "continue", "elif", "\"infobox\"", "in", "node", ".", "get", "(", "\"class\"", ",", "[", "]", ")", ":", "continue", "# this is actually the child node's class...", "is_headline", "=", "node", ".", "find", "(", "\"span\"", ",", "{", "\"class\"", ":", "\"mw-headline\"", "}", ")", "if", "is_headline", "is", "not", "None", ":", "break", "elif", "node", ".", "name", "==", "\"a\"", ":", "all_links", ".", "append", "(", "self", ".", "__parse_link_info", "(", "node", ")", ")", "else", ":", "for", "link", "in", "node", ".", "findAll", "(", "\"a\"", ")", ":", "all_links", ".", "append", "(", "self", ".", "__parse_link_info", "(", "link", ")", ")", "return", "all_links" ]
given a section id, parse the links in the unordered list
[ "given", "a", "section", "id", "parse", "the", "links", "in", "the", "unordered", "list" ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L613-L639
8,104
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage._parse_sections
python
def _parse_sections(self):
    """ parse sections and TOC """

    def _list_to_dict(_dict, path, sec):
        tmp = _dict
        for elm in path[:-1]:
            tmp = tmp[elm]
        tmp[sec] = OrderedDict()

    self._sections = list()
    section_regexp = r"\n==* .* ==*\n"  # '== {STUFF_NOT_\n} =='
    found_obj = re.findall(section_regexp, self.content)

    res = OrderedDict()
    path = list()
    last_depth = 0
    for obj in found_obj:
        depth = obj.count("=") / 2  # this gets us to the single side...
        depth -= 2  # now, we can calculate depth
        sec = obj.lstrip("\n= ").rstrip(" =\n")
        if depth == 0:
            last_depth = 0
            path = [sec]
            res[sec] = OrderedDict()
        elif depth > last_depth:
            last_depth = depth
            path.append(sec)
            _list_to_dict(res, path, sec)
        elif depth < last_depth:
            # path.pop()
            while last_depth > depth:
                path.pop()
                last_depth -= 1
            path.pop()
            path.append(sec)
            _list_to_dict(res, path, sec)
            last_depth = depth
        else:
            path.pop()
            path.append(sec)
            _list_to_dict(res, path, sec)
            last_depth = depth
        self._sections.append(sec)

    self._table_of_contents = res
[ "def", "_parse_sections", "(", "self", ")", ":", "def", "_list_to_dict", "(", "_dict", ",", "path", ",", "sec", ")", ":", "tmp", "=", "_dict", "for", "elm", "in", "path", "[", ":", "-", "1", "]", ":", "tmp", "=", "tmp", "[", "elm", "]", "tmp", "[", "sec", "]", "=", "OrderedDict", "(", ")", "self", ".", "_sections", "=", "list", "(", ")", "section_regexp", "=", "r\"\\n==* .* ==*\\n\"", "# '== {STUFF_NOT_\\n} =='", "found_obj", "=", "re", ".", "findall", "(", "section_regexp", ",", "self", ".", "content", ")", "res", "=", "OrderedDict", "(", ")", "path", "=", "list", "(", ")", "last_depth", "=", "0", "for", "obj", "in", "found_obj", ":", "depth", "=", "obj", ".", "count", "(", "\"=\"", ")", "/", "2", "# this gets us to the single side...", "depth", "-=", "2", "# now, we can calculate depth", "sec", "=", "obj", ".", "lstrip", "(", "\"\\n= \"", ")", ".", "rstrip", "(", "\" =\\n\"", ")", "if", "depth", "==", "0", ":", "last_depth", "=", "0", "path", "=", "[", "sec", "]", "res", "[", "sec", "]", "=", "OrderedDict", "(", ")", "elif", "depth", ">", "last_depth", ":", "last_depth", "=", "depth", "path", ".", "append", "(", "sec", ")", "_list_to_dict", "(", "res", ",", "path", ",", "sec", ")", "elif", "depth", "<", "last_depth", ":", "# path.pop()", "while", "last_depth", ">", "depth", ":", "path", ".", "pop", "(", ")", "last_depth", "-=", "1", "path", ".", "pop", "(", ")", "path", ".", "append", "(", "sec", ")", "_list_to_dict", "(", "res", ",", "path", ",", "sec", ")", "last_depth", "=", "depth", "else", ":", "path", ".", "pop", "(", ")", "path", ".", "append", "(", "sec", ")", "_list_to_dict", "(", "res", ",", "path", ",", "sec", ")", "last_depth", "=", "depth", "self", ".", "_sections", ".", "append", "(", "sec", ")", "self", ".", "_table_of_contents", "=", "res" ]
parse sections and TOC
[ "parse", "sections", "and", "TOC" ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L654-L699
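The heading depth in _parse_sections above comes from counting '=' characters, halving, and subtracting two. The sketch below runs the same regular expression and arithmetic on a made-up wikitext string; only re is needed and nothing here belongs to the mediawiki package.

import re

SECTION_RE = r"\n==* .* ==*\n"  # same pattern as _parse_sections

content = (
    "Intro text.\n"
    "\n== History ==\n"
    "Some prose.\n"
    "\n=== Early years ===\n"
    "More prose.\n"
    "\n== Usage ==\n"
    "Even more prose.\n"
)

for heading in re.findall(SECTION_RE, content):
    depth = heading.count("=") / 2 - 2          # '== x ==' -> 0, '=== x ===' -> 1
    title = heading.lstrip("\n= ").rstrip(" =\n")
    print(int(depth), title)
# 0 History
# 1 Early years
# 0 Usage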
8,105
barrust/mediawiki
mediawiki/mediawikipage.py
MediaWikiPage.__pull_combined_properties
python
def __pull_combined_properties(self):
    """ something here... """

    query_params = {
        "titles": self.title,
        "prop": "extracts|redirects|links|coordinates|categories|extlinks",
        "continue": dict(),
        # summary
        "explaintext": "",
        "exintro": "",  # full first section for the summary!
        # redirects
        "rdprop": "title",
        "rdlimit": "max",
        # links
        "plnamespace": 0,
        "pllimit": "max",
        # coordinates
        "colimit": "max",
        # categories
        "cllimit": "max",
        "clshow": "!hidden",
        # references
        "ellimit": "max",
    }

    last_cont = dict()
    results = dict()
    idx = 0
    while True:
        params = query_params.copy()
        params.update(last_cont)

        request = self.mediawiki.wiki_request(params)
        idx += 1
        # print(idx)

        # quick exit
        if "query" not in request:
            # print(request)
            break

        keys = [
            "extracts",
            "redirects",
            "links",
            "coordinates",
            "categories",
            "extlinks",
        ]
        new_cont = request.get("continue")
        request = request["query"]["pages"][self.pageid]

        if not results:
            results = request
        else:
            for key in keys:
                if key in request and request.get(key) is not None:
                    val = request.get(key)
                    tmp = results.get(key)
                    if isinstance(tmp, (list, tuple)):
                        results[key] = results.get(key, list) + val

        if new_cont is None or new_cont == last_cont:
            break

        last_cont = new_cont

    # redirects
    tmp = [link["title"] for link in results.get("redirects", list())]
    self._redirects = sorted(tmp)

    # summary
    self._summary = results["extract"]

    # links
    tmp = [link["title"] for link in results.get("links", list())]
    self._links = sorted(tmp)

    # categories
    def _get_cat(val):
        """ parse the category correctly """
        tmp = val["title"]
        if tmp.startswith(self.mediawiki.category_prefix):
            return tmp[len(self.mediawiki.category_prefix) + 1 :]
        return tmp

    tmp = [_get_cat(link) for link in results.get("categories", list())]
    self._categories = sorted(tmp)

    # coordinates
    if "coordinates" in results:
        self._coordinates = (
            Decimal(results["coordinates"][0]["lat"]),
            Decimal(results["coordinates"][0]["lon"]),
        )

    # references
    tmp = [link["*"] for link in results.get("extlinks", list())]
    self._references = sorted(tmp)
[ "def", "__pull_combined_properties", "(", "self", ")", ":", "query_params", "=", "{", "\"titles\"", ":", "self", ".", "title", ",", "\"prop\"", ":", "\"extracts|redirects|links|coordinates|categories|extlinks\"", ",", "\"continue\"", ":", "dict", "(", ")", ",", "# summary", "\"explaintext\"", ":", "\"\"", ",", "\"exintro\"", ":", "\"\"", ",", "# full first section for the summary!", "# redirects", "\"rdprop\"", ":", "\"title\"", ",", "\"rdlimit\"", ":", "\"max\"", ",", "# links", "\"plnamespace\"", ":", "0", ",", "\"pllimit\"", ":", "\"max\"", ",", "# coordinates", "\"colimit\"", ":", "\"max\"", ",", "# categories", "\"cllimit\"", ":", "\"max\"", ",", "\"clshow\"", ":", "\"!hidden\"", ",", "# references", "\"ellimit\"", ":", "\"max\"", ",", "}", "last_cont", "=", "dict", "(", ")", "results", "=", "dict", "(", ")", "idx", "=", "0", "while", "True", ":", "params", "=", "query_params", ".", "copy", "(", ")", "params", ".", "update", "(", "last_cont", ")", "request", "=", "self", ".", "mediawiki", ".", "wiki_request", "(", "params", ")", "idx", "+=", "1", "# print(idx)", "# quick exit", "if", "\"query\"", "not", "in", "request", ":", "# print(request)", "break", "keys", "=", "[", "\"extracts\"", ",", "\"redirects\"", ",", "\"links\"", ",", "\"coordinates\"", ",", "\"categories\"", ",", "\"extlinks\"", ",", "]", "new_cont", "=", "request", ".", "get", "(", "\"continue\"", ")", "request", "=", "request", "[", "\"query\"", "]", "[", "\"pages\"", "]", "[", "self", ".", "pageid", "]", "if", "not", "results", ":", "results", "=", "request", "else", ":", "for", "key", "in", "keys", ":", "if", "key", "in", "request", "and", "request", ".", "get", "(", "key", ")", "is", "not", "None", ":", "val", "=", "request", ".", "get", "(", "key", ")", "tmp", "=", "results", ".", "get", "(", "key", ")", "if", "isinstance", "(", "tmp", ",", "(", "list", ",", "tuple", ")", ")", ":", "results", "[", "key", "]", "=", "results", ".", "get", "(", "key", ",", "list", ")", "+", "val", "if", "new_cont", "is", "None", "or", "new_cont", "==", "last_cont", ":", "break", "last_cont", "=", "new_cont", "# redirects", "tmp", "=", "[", "link", "[", "\"title\"", "]", "for", "link", "in", "results", ".", "get", "(", "\"redirects\"", ",", "list", "(", ")", ")", "]", "self", ".", "_redirects", "=", "sorted", "(", "tmp", ")", "# summary", "self", ".", "_summary", "=", "results", "[", "\"extract\"", "]", "# links", "tmp", "=", "[", "link", "[", "\"title\"", "]", "for", "link", "in", "results", ".", "get", "(", "\"links\"", ",", "list", "(", ")", ")", "]", "self", ".", "_links", "=", "sorted", "(", "tmp", ")", "# categories", "def", "_get_cat", "(", "val", ")", ":", "\"\"\" parse the category correctly \"\"\"", "tmp", "=", "val", "[", "\"title\"", "]", "if", "tmp", ".", "startswith", "(", "self", ".", "mediawiki", ".", "category_prefix", ")", ":", "return", "tmp", "[", "len", "(", "self", ".", "mediawiki", ".", "category_prefix", ")", "+", "1", ":", "]", "return", "tmp", "tmp", "=", "[", "_get_cat", "(", "link", ")", "for", "link", "in", "results", ".", "get", "(", "\"categories\"", ",", "list", "(", ")", ")", "]", "self", ".", "_categories", "=", "sorted", "(", "tmp", ")", "# coordinates", "if", "\"coordinates\"", "in", "results", ":", "self", ".", "_coordinates", "=", "(", "Decimal", "(", "results", "[", "\"coordinates\"", "]", "[", "0", "]", "[", "\"lat\"", "]", ")", ",", "Decimal", "(", "results", "[", "\"coordinates\"", "]", "[", "0", "]", "[", "\"lon\"", "]", ")", ",", ")", "# references", "tmp", "=", "[", "link", "[", "\"*\"", 
"]", "for", "link", "in", "results", ".", "get", "(", "\"extlinks\"", ",", "list", "(", ")", ")", "]", "self", ".", "_references", "=", "sorted", "(", "tmp", ")" ]
something here...
[ "something", "here", "..." ]
292e0be6c752409062dceed325d74839caf16a9b
https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L707-L803
8,106
domwillcode/yale-smart-alarm-client
yalesmartalarmclient/client.py
YaleSmartAlarmClient.is_armed
python
def is_armed(self):
    """Return True or False if the system is armed in any way"""
    alarm_code = self.get_armed_status()

    if alarm_code == YALE_STATE_ARM_FULL:
        return True

    if alarm_code == YALE_STATE_ARM_PARTIAL:
        return True

    return False
[ "def", "is_armed", "(", "self", ")", ":", "alarm_code", "=", "self", ".", "get_armed_status", "(", ")", "if", "alarm_code", "==", "YALE_STATE_ARM_FULL", ":", "return", "True", "if", "alarm_code", "==", "YALE_STATE_ARM_PARTIAL", ":", "return", "True", "return", "False" ]
Return True or False if the system is armed in any way
[ "Return", "True", "or", "False", "if", "the", "system", "is", "armed", "in", "any", "way" ]
a33b6db31440b8611c63081e231597bf0629e098
https://github.com/domwillcode/yale-smart-alarm-client/blob/a33b6db31440b8611c63081e231597bf0629e098/yalesmartalarmclient/client.py#L110-L120
8,107
springload/draftjs_exporter
example.py
linkify
python
def linkify(props):
    """ Wrap plain URLs with link tags. """
    match = props['match']
    protocol = match.group(1)
    url = match.group(2)
    href = protocol + url

    if props['block']['type'] == BLOCK_TYPES.CODE:
        return href

    link_props = {
        'href': href,
    }

    if href.startswith('www'):
        link_props['href'] = 'http://' + href

    return DOM.create_element('a', link_props, href)
[ "def", "linkify", "(", "props", ")", ":", "match", "=", "props", "[", "'match'", "]", "protocol", "=", "match", ".", "group", "(", "1", ")", "url", "=", "match", ".", "group", "(", "2", ")", "href", "=", "protocol", "+", "url", "if", "props", "[", "'block'", "]", "[", "'type'", "]", "==", "BLOCK_TYPES", ".", "CODE", ":", "return", "href", "link_props", "=", "{", "'href'", ":", "href", ",", "}", "if", "href", ".", "startswith", "(", "'www'", ")", ":", "link_props", "[", "'href'", "]", "=", "'http://'", "+", "href", "return", "DOM", ".", "create_element", "(", "'a'", ",", "link_props", ",", "href", ")" ]
Wrap plain URLs with link tags.
[ "Wrap", "plain", "URLs", "with", "link", "tags", "." ]
1e391a46f162740f90511cde1ef615858e8de5cb
https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/example.py#L85-L104
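linkify above returns draftjs_exporter DOM elements. To see just the URL-handling rule (prefix 'http://' when the href starts with 'www'), the sketch below applies the same rule with plain string output; the regular expression and the function name linkify_text are assumptions made for this example, not part of draftjs_exporter.

import re

# Hypothetical pattern: group 1 = protocol (may be absent), group 2 = rest of the URL.
LINKIFY_RE = re.compile(r"(https?://)?((?:www\.)?[\w.-]+\.[a-z]{2,}\S*)")

def linkify_text(text):
    """Wrap plain URLs in <a> tags, mirroring linkify()'s href rule with strings."""
    def repl(match):
        protocol = match.group(1) or ""
        href = protocol + match.group(2)
        if href.startswith("www"):
            href = "http://" + href
        return '<a href="%s">%s</a>' % (href, protocol + match.group(2))
    return LINKIFY_RE.sub(repl, text)

print(linkify_text("See www.example.com and https://example.org/docs"))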
8,108
springload/draftjs_exporter
draftjs_exporter/options.py
Options.for_kind
python
def for_kind(kind_map, type_, fallback_key):
    """ Create an Options object from any mapping. """
    if type_ not in kind_map:
        if fallback_key not in kind_map:
            raise ConfigException('"%s" is not in the config and has no fallback' % type_)

        config = kind_map[fallback_key]
    else:
        config = kind_map[type_]

    if isinstance(config, dict):
        if 'element' not in config:
            raise ConfigException('"%s" does not define an element' % type_)

        opts = Options(type_, **config)
    else:
        opts = Options(type_, config)

    return opts
[ "def", "for_kind", "(", "kind_map", ",", "type_", ",", "fallback_key", ")", ":", "if", "type_", "not", "in", "kind_map", ":", "if", "fallback_key", "not", "in", "kind_map", ":", "raise", "ConfigException", "(", "'\"%s\" is not in the config and has no fallback'", "%", "type_", ")", "config", "=", "kind_map", "[", "fallback_key", "]", "else", ":", "config", "=", "kind_map", "[", "type_", "]", "if", "isinstance", "(", "config", ",", "dict", ")", ":", "if", "'element'", "not", "in", "config", ":", "raise", "ConfigException", "(", "'\"%s\" does not define an element'", "%", "type_", ")", "opts", "=", "Options", "(", "type_", ",", "*", "*", "config", ")", "else", ":", "opts", "=", "Options", "(", "type_", ",", "config", ")", "return", "opts" ]
Create an Options object from any mapping.
[ "Create", "an", "Options", "object", "from", "any", "mapping", "." ]
1e391a46f162740f90511cde1ef615858e8de5cb
https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/options.py#L31-L51
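The lookup order in Options.for_kind is: exact type key, then the fallback key, otherwise a ConfigException. The sketch below isolates that order with plain dict values and a simplified stand-in for ConfigException, and skips the 'element' validation the real method performs; the block_map contents are invented for the example.

class ConfigException(Exception):
    """Stand-in for draftjs_exporter's ConfigException."""
    pass

def resolve(kind_map, type_, fallback_key):
    """Mirror of Options.for_kind's lookup: exact key, then fallback, else error."""
    if type_ not in kind_map:
        if fallback_key not in kind_map:
            raise ConfigException('"%s" is not in the config and has no fallback' % type_)
        return kind_map[fallback_key]
    return kind_map[type_]

block_map = {"unstyled": "div", "fallback": "p"}
print(resolve(block_map, "unstyled", "fallback"))    # -> div (exact key)
print(resolve(block_map, "blockquote", "fallback"))  # -> p (fallback key)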
8,109
springload/draftjs_exporter
draftjs_exporter/html.py
HTML.render
python
def render(self, content_state=None):
    """ Starts the export process on a given piece of content state. """
    if content_state is None:
        content_state = {}

    blocks = content_state.get('blocks', [])
    wrapper_state = WrapperState(self.block_map, blocks)
    document = DOM.create_element()
    entity_map = content_state.get('entityMap', {})
    min_depth = 0

    for block in blocks:
        depth = block['depth']
        elt = self.render_block(block, entity_map, wrapper_state)

        if depth > min_depth:
            min_depth = depth

        # At level 0, append the element to the document.
        if depth == 0:
            DOM.append_child(document, elt)

    # If there is no block at depth 0, we need to add the wrapper that contains the whole tree to the document.
    if min_depth > 0 and wrapper_state.stack.length() != 0:
        DOM.append_child(document, wrapper_state.stack.tail().elt)

    return DOM.render(document)
[ "def", "render", "(", "self", ",", "content_state", "=", "None", ")", ":", "if", "content_state", "is", "None", ":", "content_state", "=", "{", "}", "blocks", "=", "content_state", ".", "get", "(", "'blocks'", ",", "[", "]", ")", "wrapper_state", "=", "WrapperState", "(", "self", ".", "block_map", ",", "blocks", ")", "document", "=", "DOM", ".", "create_element", "(", ")", "entity_map", "=", "content_state", ".", "get", "(", "'entityMap'", ",", "{", "}", ")", "min_depth", "=", "0", "for", "block", "in", "blocks", ":", "depth", "=", "block", "[", "'depth'", "]", "elt", "=", "self", ".", "render_block", "(", "block", ",", "entity_map", ",", "wrapper_state", ")", "if", "depth", ">", "min_depth", ":", "min_depth", "=", "depth", "# At level 0, append the element to the document.", "if", "depth", "==", "0", ":", "DOM", ".", "append_child", "(", "document", ",", "elt", ")", "# If there is no block at depth 0, we need to add the wrapper that contains the whole tree to the document.", "if", "min_depth", ">", "0", "and", "wrapper_state", ".", "stack", ".", "length", "(", ")", "!=", "0", ":", "DOM", ".", "append_child", "(", "document", ",", "wrapper_state", ".", "stack", ".", "tail", "(", ")", ".", "elt", ")", "return", "DOM", ".", "render", "(", "document", ")" ]
Starts the export process on a given piece of content state.
[ "Starts", "the", "export", "process", "on", "a", "given", "piece", "of", "content", "state", "." ]
1e391a46f162740f90511cde1ef615858e8de5cb
https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L31-L59
8,110
springload/draftjs_exporter
draftjs_exporter/html.py
HTML.build_command_groups
python
def build_command_groups(self, block):
    """ Creates block modification commands, grouped by start index, with the text to apply them on. """
    text = block['text']

    commands = sorted(self.build_commands(block))
    grouped = groupby(commands, Command.key)
    listed = list(groupby(commands, Command.key))
    sliced = []

    i = 0
    for start_index, commands in grouped:
        if i < len(listed) - 1:
            stop_index = listed[i + 1][0]
            sliced.append((text[start_index:stop_index], list(commands)))
        else:
            sliced.append((text[start_index:start_index], list(commands)))

        i += 1

    return sliced
[ "def", "build_command_groups", "(", "self", ",", "block", ")", ":", "text", "=", "block", "[", "'text'", "]", "commands", "=", "sorted", "(", "self", ".", "build_commands", "(", "block", ")", ")", "grouped", "=", "groupby", "(", "commands", ",", "Command", ".", "key", ")", "listed", "=", "list", "(", "groupby", "(", "commands", ",", "Command", ".", "key", ")", ")", "sliced", "=", "[", "]", "i", "=", "0", "for", "start_index", ",", "commands", "in", "grouped", ":", "if", "i", "<", "len", "(", "listed", ")", "-", "1", ":", "stop_index", "=", "listed", "[", "i", "+", "1", "]", "[", "0", "]", "sliced", ".", "append", "(", "(", "text", "[", "start_index", ":", "stop_index", "]", ",", "list", "(", "commands", ")", ")", ")", "else", ":", "sliced", ".", "append", "(", "(", "text", "[", "start_index", ":", "start_index", "]", ",", "list", "(", "commands", ")", ")", ")", "i", "+=", "1", "return", "sliced" ]
Creates block modification commands, grouped by start index, with the text to apply them on.
[ "Creates", "block", "modification", "commands", "grouped", "by", "start", "index", "with", "the", "text", "to", "apply", "them", "on", "." ]
1e391a46f162740f90511cde1ef615858e8de5cb
https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L95-L116
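build_command_groups relies on itertools.groupby to bucket commands by start index and then slices the block text between consecutive indices (the final group gets an empty slice). The sketch below reproduces that step with (index, name) tuples standing in for draftjs_exporter's Command objects; the sample text and commands are invented.

from itertools import groupby
from operator import itemgetter

text = "Hello world"
# Hypothetical stand-ins for Command objects: (start_index, name) tuples.
commands = sorted([
    (0, "start_text"), (0, "start_bold"),
    (5, "stop_bold"),
    (11, "stop_text"),
])

grouped = groupby(commands, key=itemgetter(0))
keys = [k for k, _ in groupby(commands, key=itemgetter(0))]

sliced = []
for i, (start_index, cmds) in enumerate(grouped):
    # Slice up to the next group's start index; the last group gets an empty slice,
    # matching build_command_groups above.
    stop_index = keys[i + 1] if i < len(keys) - 1 else start_index
    sliced.append((text[start_index:stop_index], [name for _, name in cmds]))

print(sliced)
# [('Hello', ['start_bold', 'start_text']), (' world', ['stop_bold']), ('', ['stop_text'])]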
8,111
springload/draftjs_exporter
draftjs_exporter/html.py
HTML.build_commands
python
def build_commands(self, block):
    """
    Build all of the manipulation commands for a given block.
    - One pair to set the text.
    - Multiple pairs for styles.
    - Multiple pairs for entities.
    """
    text_commands = Command.start_stop('text', 0, len(block['text']))
    style_commands = self.build_style_commands(block)
    entity_commands = self.build_entity_commands(block)

    return text_commands + style_commands + entity_commands
[ "def", "build_commands", "(", "self", ",", "block", ")", ":", "text_commands", "=", "Command", ".", "start_stop", "(", "'text'", ",", "0", ",", "len", "(", "block", "[", "'text'", "]", ")", ")", "style_commands", "=", "self", ".", "build_style_commands", "(", "block", ")", "entity_commands", "=", "self", ".", "build_entity_commands", "(", "block", ")", "return", "text_commands", "+", "style_commands", "+", "entity_commands" ]
Build all of the manipulation commands for a given block. - One pair to set the text. - Multiple pairs for styles. - Multiple pairs for entities.
[ "Build", "all", "of", "the", "manipulation", "commands", "for", "a", "given", "block", ".", "-", "One", "pair", "to", "set", "the", "text", ".", "-", "Multiple", "pairs", "for", "styles", ".", "-", "Multiple", "pairs", "for", "entities", "." ]
1e391a46f162740f90511cde1ef615858e8de5cb
https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L118-L129
8,112
sloria/doitlive
doitlive/cli.py
run
python
def run(
    commands,
    shell=None,
    prompt_template="default",
    speed=1,
    quiet=False,
    test_mode=False,
    commentecho=False,
):
    """Main function for "magic-running" a list of commands."""
    if not quiet:
        secho("We'll do it live!", fg="red", bold=True)
        secho(
            "STARTING SESSION: Press Ctrl-C at any time to exit.",
            fg="yellow",
            bold=True,
        )
        click.pause()

    click.clear()
    state = SessionState(
        shell=shell,
        prompt_template=prompt_template,
        speed=speed,
        test_mode=test_mode,
        commentecho=commentecho,
    )

    i = 0
    while i < len(commands):
        command = commands[i].strip()
        i += 1
        if not command:
            continue
        is_comment = command.startswith("#")
        if not is_comment:
            command_as_list = shlex.split(ensure_utf8(command))
        else:
            command_as_list = None
        shell_match = SHELL_RE.match(command)
        if is_comment:
            # Parse comment magic
            match = OPTION_RE.match(command)
            if match:
                option, arg = match.group("option"), match.group("arg")
                func = OPTION_MAP[option]
                func(state, arg)
            elif state.commentecho():
                comment = command.lstrip("#")
                secho(comment, fg="yellow", bold=True)
            continue
        # Handle 'export' and 'alias' commands by storing them in SessionState
        elif command_as_list and command_as_list[0] in ["alias", "export"]:
            magictype(
                command, prompt_template=state["prompt_template"], speed=state["speed"]
            )
            # Store the raw commands instead of using add_envvar and add_alias
            # to avoid having to parse the command ourselves
            state.add_command(command)
        # Handle ```python and ```ipython by running "player" consoles
        elif shell_match:
            shell_name = shell_match.groups()[0].strip()
            py_commands = []
            more = True
            while more:
                # slurp up all the python code
                try:
                    py_command = commands[i].rstrip()
                except IndexError:
                    raise SessionError(
                        "Unmatched {0} code block in "
                        "session file.".format(shell_name)
                    )
                i += 1
                if py_command.startswith("```"):
                    i += 1
                    more = False
                else:
                    py_commands.append(py_command)
            # Run the player console
            magictype(
                shell_name,
                prompt_template=state["prompt_template"],
                speed=state["speed"],
            )
            if shell_name == "ipython":
                try:
                    from doitlive.ipython_consoles import start_ipython_player
                except ImportError:
                    raise RuntimeError(
                        "```ipython blocks require IPython to be installed"
                    )
                # dedent all the commands to account for IPython's autoindentation
                ipy_commands = [textwrap.dedent(cmd) for cmd in py_commands]
                start_ipython_player(ipy_commands, speed=state["speed"])
            else:
                start_python_player(py_commands, speed=state["speed"])
        else:
            # goto_stealthmode determines when to switch to stealthmode
            goto_stealthmode = magicrun(command, **state)
            # stealthmode allows user to type live commands outside of automated script
            i -= stealthmode(state, goto_stealthmode)
    echo_prompt(state["prompt_template"])
    wait_for(RETURNS)
    if not quiet:
        secho("FINISHED SESSION", fg="yellow", bold=True)
[ "def", "run", "(", "commands", ",", "shell", "=", "None", ",", "prompt_template", "=", "\"default\"", ",", "speed", "=", "1", ",", "quiet", "=", "False", ",", "test_mode", "=", "False", ",", "commentecho", "=", "False", ",", ")", ":", "if", "not", "quiet", ":", "secho", "(", "\"We'll do it live!\"", ",", "fg", "=", "\"red\"", ",", "bold", "=", "True", ")", "secho", "(", "\"STARTING SESSION: Press Ctrl-C at any time to exit.\"", ",", "fg", "=", "\"yellow\"", ",", "bold", "=", "True", ",", ")", "click", ".", "pause", "(", ")", "click", ".", "clear", "(", ")", "state", "=", "SessionState", "(", "shell", "=", "shell", ",", "prompt_template", "=", "prompt_template", ",", "speed", "=", "speed", ",", "test_mode", "=", "test_mode", ",", "commentecho", "=", "commentecho", ",", ")", "i", "=", "0", "while", "i", "<", "len", "(", "commands", ")", ":", "command", "=", "commands", "[", "i", "]", ".", "strip", "(", ")", "i", "+=", "1", "if", "not", "command", ":", "continue", "is_comment", "=", "command", ".", "startswith", "(", "\"#\"", ")", "if", "not", "is_comment", ":", "command_as_list", "=", "shlex", ".", "split", "(", "ensure_utf8", "(", "command", ")", ")", "else", ":", "command_as_list", "=", "None", "shell_match", "=", "SHELL_RE", ".", "match", "(", "command", ")", "if", "is_comment", ":", "# Parse comment magic", "match", "=", "OPTION_RE", ".", "match", "(", "command", ")", "if", "match", ":", "option", ",", "arg", "=", "match", ".", "group", "(", "\"option\"", ")", ",", "match", ".", "group", "(", "\"arg\"", ")", "func", "=", "OPTION_MAP", "[", "option", "]", "func", "(", "state", ",", "arg", ")", "elif", "state", ".", "commentecho", "(", ")", ":", "comment", "=", "command", ".", "lstrip", "(", "\"#\"", ")", "secho", "(", "comment", ",", "fg", "=", "\"yellow\"", ",", "bold", "=", "True", ")", "continue", "# Handle 'export' and 'alias' commands by storing them in SessionState", "elif", "command_as_list", "and", "command_as_list", "[", "0", "]", "in", "[", "\"alias\"", ",", "\"export\"", "]", ":", "magictype", "(", "command", ",", "prompt_template", "=", "state", "[", "\"prompt_template\"", "]", ",", "speed", "=", "state", "[", "\"speed\"", "]", ")", "# Store the raw commands instead of using add_envvar and add_alias", "# to avoid having to parse the command ourselves", "state", ".", "add_command", "(", "command", ")", "# Handle ```python and ```ipython by running \"player\" consoles", "elif", "shell_match", ":", "shell_name", "=", "shell_match", ".", "groups", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "py_commands", "=", "[", "]", "more", "=", "True", "while", "more", ":", "# slurp up all the python code", "try", ":", "py_command", "=", "commands", "[", "i", "]", ".", "rstrip", "(", ")", "except", "IndexError", ":", "raise", "SessionError", "(", "\"Unmatched {0} code block in \"", "\"session file.\"", ".", "format", "(", "shell_name", ")", ")", "i", "+=", "1", "if", "py_command", ".", "startswith", "(", "\"```\"", ")", ":", "i", "+=", "1", "more", "=", "False", "else", ":", "py_commands", ".", "append", "(", "py_command", ")", "# Run the player console", "magictype", "(", "shell_name", ",", "prompt_template", "=", "state", "[", "\"prompt_template\"", "]", ",", "speed", "=", "state", "[", "\"speed\"", "]", ",", ")", "if", "shell_name", "==", "\"ipython\"", ":", "try", ":", "from", "doitlive", ".", "ipython_consoles", "import", "start_ipython_player", "except", "ImportError", ":", "raise", "RuntimeError", "(", "\"```ipython blocks require IPython to be installed\"", ")", "# dedent all 
the commands to account for IPython's autoindentation", "ipy_commands", "=", "[", "textwrap", ".", "dedent", "(", "cmd", ")", "for", "cmd", "in", "py_commands", "]", "start_ipython_player", "(", "ipy_commands", ",", "speed", "=", "state", "[", "\"speed\"", "]", ")", "else", ":", "start_python_player", "(", "py_commands", ",", "speed", "=", "state", "[", "\"speed\"", "]", ")", "else", ":", "# goto_stealthmode determines when to switch to stealthmode", "goto_stealthmode", "=", "magicrun", "(", "command", ",", "*", "*", "state", ")", "# stealthmode allows user to type live commands outside of automated script", "i", "-=", "stealthmode", "(", "state", ",", "goto_stealthmode", ")", "echo_prompt", "(", "state", "[", "\"prompt_template\"", "]", ")", "wait_for", "(", "RETURNS", ")", "if", "not", "quiet", ":", "secho", "(", "\"FINISHED SESSION\"", ",", "fg", "=", "\"yellow\"", ",", "bold", "=", "True", ")" ]
Main function for "magic-running" a list of commands.
[ "Main", "function", "for", "magic", "-", "running", "a", "list", "of", "commands", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L146-L251
8,113
sloria/doitlive
doitlive/cli.py
play
python
def play(quiet, session_file, shell, speed, prompt, commentecho):
    """Play a session file."""
    run(
        session_file.readlines(),
        shell=shell,
        speed=speed,
        quiet=quiet,
        test_mode=TESTING,
        prompt_template=prompt,
        commentecho=commentecho,
    )
[ "def", "play", "(", "quiet", ",", "session_file", ",", "shell", ",", "speed", ",", "prompt", ",", "commentecho", ")", ":", "run", "(", "session_file", ".", "readlines", "(", ")", ",", "shell", "=", "shell", ",", "speed", "=", "speed", ",", "quiet", "=", "quiet", ",", "test_mode", "=", "TESTING", ",", "prompt_template", "=", "prompt", ",", "commentecho", "=", "commentecho", ",", ")" ]
Play a session file.
[ "Play", "a", "session", "file", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L419-L429
8,114
sloria/doitlive
doitlive/cli.py
demo
python
def demo(quiet, shell, speed, prompt, commentecho):
    """Run a demo doitlive session."""
    run(
        DEMO,
        shell=shell,
        speed=speed,
        test_mode=TESTING,
        prompt_template=prompt,
        quiet=quiet,
        commentecho=commentecho,
    )
[ "def", "demo", "(", "quiet", ",", "shell", ",", "speed", ",", "prompt", ",", "commentecho", ")", ":", "run", "(", "DEMO", ",", "shell", "=", "shell", ",", "speed", "=", "speed", ",", "test_mode", "=", "TESTING", ",", "prompt_template", "=", "prompt", ",", "quiet", "=", "quiet", ",", "commentecho", "=", "commentecho", ",", ")" ]
Run a demo doitlive session.
[ "Run", "a", "demo", "doitlive", "session", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L442-L452
8,115
sloria/doitlive
doitlive/styling.py
echo
python
def echo(
    message=None, file=None, nl=True, err=False, color=None, carriage_return=False
):
    """ Patched click echo function. """
    message = message or ""
    if carriage_return and nl:
        click_echo(message + "\r\n", file, False, err, color)
    elif carriage_return and not nl:
        click_echo(message + "\r", file, False, err, color)
    else:
        click_echo(message, file, nl, err, color)
[ "def", "echo", "(", "message", "=", "None", ",", "file", "=", "None", ",", "nl", "=", "True", ",", "err", "=", "False", ",", "color", "=", "None", ",", "carriage_return", "=", "False", ")", ":", "message", "=", "message", "or", "\"\"", "if", "carriage_return", "and", "nl", ":", "click_echo", "(", "message", "+", "\"\\r\\n\"", ",", "file", ",", "False", ",", "err", ",", "color", ")", "elif", "carriage_return", "and", "not", "nl", ":", "click_echo", "(", "message", "+", "\"\\r\"", ",", "file", ",", "False", ",", "err", ",", "color", ")", "else", ":", "click_echo", "(", "message", ",", "file", ",", "nl", ",", "err", ",", "color", ")" ]
Patched click echo function.
[ "Patched", "click", "echo", "function", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/styling.py#L201-L213
8,116
sloria/doitlive
doitlive/keyboard.py
magictype
python
def magictype(text, prompt_template="default", speed=1):
    """Echo each character in ``text`` as keyboard characters are pressed.

    Characters are echo'd ``speed`` characters at a time.
    """
    echo_prompt(prompt_template)
    cursor_position = 0
    return_to_regular_type = False
    with raw_mode():
        while True:
            char = text[cursor_position : cursor_position + speed]
            in_char = getchar()
            if in_char in {ESC, CTRLC}:
                echo(carriage_return=True)
                raise click.Abort()
            elif in_char == TAB:
                return_to_regular_type = True
                break
            elif in_char == BACKSPACE:
                if cursor_position > 0:
                    echo("\b \b", nl=False)
                    cursor_position -= 1
            elif in_char in RETURNS:
                # Only return at end of command
                if cursor_position >= len(text):
                    echo("\r", nl=True)
                    break
            elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"):
                # Background process
                os.kill(0, signal.SIGTSTP)
                # When doitlive is back in foreground, clear the terminal
                # and resume where we left off
                click.clear()
                echo_prompt(prompt_template)
                echo(text[:cursor_position], nl=False)
            else:
                if cursor_position < len(text):
                    echo(char, nl=False)
                    increment = min([speed, len(text) - cursor_position])
                    cursor_position += increment
    return return_to_regular_type
[ "def", "magictype", "(", "text", ",", "prompt_template", "=", "\"default\"", ",", "speed", "=", "1", ")", ":", "echo_prompt", "(", "prompt_template", ")", "cursor_position", "=", "0", "return_to_regular_type", "=", "False", "with", "raw_mode", "(", ")", ":", "while", "True", ":", "char", "=", "text", "[", "cursor_position", ":", "cursor_position", "+", "speed", "]", "in_char", "=", "getchar", "(", ")", "if", "in_char", "in", "{", "ESC", ",", "CTRLC", "}", ":", "echo", "(", "carriage_return", "=", "True", ")", "raise", "click", ".", "Abort", "(", ")", "elif", "in_char", "==", "TAB", ":", "return_to_regular_type", "=", "True", "break", "elif", "in_char", "==", "BACKSPACE", ":", "if", "cursor_position", ">", "0", ":", "echo", "(", "\"\\b \\b\"", ",", "nl", "=", "False", ")", "cursor_position", "-=", "1", "elif", "in_char", "in", "RETURNS", ":", "# Only return at end of command", "if", "cursor_position", ">=", "len", "(", "text", ")", ":", "echo", "(", "\"\\r\"", ",", "nl", "=", "True", ")", "break", "elif", "in_char", "==", "CTRLZ", "and", "hasattr", "(", "signal", ",", "\"SIGTSTP\"", ")", ":", "# Background process", "os", ".", "kill", "(", "0", ",", "signal", ".", "SIGTSTP", ")", "# When doitlive is back in foreground, clear the terminal", "# and resume where we left off", "click", ".", "clear", "(", ")", "echo_prompt", "(", "prompt_template", ")", "echo", "(", "text", "[", ":", "cursor_position", "]", ",", "nl", "=", "False", ")", "else", ":", "if", "cursor_position", "<", "len", "(", "text", ")", ":", "echo", "(", "char", ",", "nl", "=", "False", ")", "increment", "=", "min", "(", "[", "speed", ",", "len", "(", "text", ")", "-", "cursor_position", "]", ")", "cursor_position", "+=", "increment", "return", "return_to_regular_type" ]
Echo each character in ``text`` as keyboard characters are pressed. Characters are echo'd ``speed`` characters at a time.
[ "Echo", "each", "character", "in", "text", "as", "keyboard", "characters", "are", "pressed", ".", "Characters", "are", "echo", "d", "speed", "characters", "at", "a", "time", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L36-L75
8,117
sloria/doitlive
doitlive/keyboard.py
regulartype
python
def regulartype(prompt_template="default"): """Echo each character typed. Unlike magictype, this echos the characters the user is pressing. Returns: command_string | The command to be passed to the shell to run. This is | typed by the user. """ echo_prompt(prompt_template) command_string = "" cursor_position = 0 with raw_mode(): while True: in_char = getchar() if in_char in {ESC, CTRLC}: echo(carriage_return=True) raise click.Abort() elif in_char == TAB: echo("\r", nl=True) return in_char elif in_char == BACKSPACE: if cursor_position > 0: echo("\b \b", nl=False) command_string = command_string[:-1] cursor_position -= 1 elif in_char in RETURNS: echo("\r", nl=True) return command_string elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"): # Background process os.kill(0, signal.SIGTSTP) # When doitlive is back in foreground, clear the terminal # and resume where we left off click.clear() echo_prompt(prompt_template) else: echo(in_char, nl=False) command_string += in_char cursor_position += 1
[ "def", "regulartype", "(", "prompt_template", "=", "\"default\"", ")", ":", "echo_prompt", "(", "prompt_template", ")", "command_string", "=", "\"\"", "cursor_position", "=", "0", "with", "raw_mode", "(", ")", ":", "while", "True", ":", "in_char", "=", "getchar", "(", ")", "if", "in_char", "in", "{", "ESC", ",", "CTRLC", "}", ":", "echo", "(", "carriage_return", "=", "True", ")", "raise", "click", ".", "Abort", "(", ")", "elif", "in_char", "==", "TAB", ":", "echo", "(", "\"\\r\"", ",", "nl", "=", "True", ")", "return", "in_char", "elif", "in_char", "==", "BACKSPACE", ":", "if", "cursor_position", ">", "0", ":", "echo", "(", "\"\\b \\b\"", ",", "nl", "=", "False", ")", "command_string", "=", "command_string", "[", ":", "-", "1", "]", "cursor_position", "-=", "1", "elif", "in_char", "in", "RETURNS", ":", "echo", "(", "\"\\r\"", ",", "nl", "=", "True", ")", "return", "command_string", "elif", "in_char", "==", "CTRLZ", "and", "hasattr", "(", "signal", ",", "\"SIGTSTP\"", ")", ":", "# Background process", "os", ".", "kill", "(", "0", ",", "signal", ".", "SIGTSTP", ")", "# When doitlive is back in foreground, clear the terminal", "# and resume where we left off", "click", ".", "clear", "(", ")", "echo_prompt", "(", "prompt_template", ")", "else", ":", "echo", "(", "in_char", ",", "nl", "=", "False", ")", "command_string", "+=", "in_char", "cursor_position", "+=", "1" ]
Echo each character typed. Unlike magictype, this echoes the characters the user is pressing. Returns: command_string | The command to be passed to the shell to run. This is | typed by the user.
[ "Echo", "each", "character", "typed", ".", "Unlike", "magictype", "this", "echos", "the", "characters", "the", "user", "is", "pressing", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L134-L171
8,118
sloria/doitlive
doitlive/keyboard.py
regularrun
def regularrun( shell, prompt_template="default", aliases=None, envvars=None, extra_commands=None, speed=1, test_mode=False, commentecho=False, ): """Allow user to run their own live commands until CTRL-Z is pressed again. """ loop_again = True command_string = regulartype(prompt_template) if command_string == TAB: loop_again = False return loop_again run_command( command_string, shell, aliases=aliases, envvars=envvars, extra_commands=extra_commands, test_mode=test_mode, ) return loop_again
python
def regularrun( shell, prompt_template="default", aliases=None, envvars=None, extra_commands=None, speed=1, test_mode=False, commentecho=False, ): """Allow user to run their own live commands until CTRL-Z is pressed again. """ loop_again = True command_string = regulartype(prompt_template) if command_string == TAB: loop_again = False return loop_again run_command( command_string, shell, aliases=aliases, envvars=envvars, extra_commands=extra_commands, test_mode=test_mode, ) return loop_again
[ "def", "regularrun", "(", "shell", ",", "prompt_template", "=", "\"default\"", ",", "aliases", "=", "None", ",", "envvars", "=", "None", ",", "extra_commands", "=", "None", ",", "speed", "=", "1", ",", "test_mode", "=", "False", ",", "commentecho", "=", "False", ",", ")", ":", "loop_again", "=", "True", "command_string", "=", "regulartype", "(", "prompt_template", ")", "if", "command_string", "==", "TAB", ":", "loop_again", "=", "False", "return", "loop_again", "run_command", "(", "command_string", ",", "shell", ",", "aliases", "=", "aliases", ",", "envvars", "=", "envvars", ",", "extra_commands", "=", "extra_commands", ",", "test_mode", "=", "test_mode", ",", ")", "return", "loop_again" ]
Allow user to run their own live commands until CTRL-Z is pressed again.
[ "Allow", "user", "to", "run", "their", "own", "live", "commands", "until", "CTRL", "-", "Z", "is", "pressed", "again", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L174-L199
8,119
sloria/doitlive
doitlive/keyboard.py
magicrun
def magicrun( text, shell, prompt_template="default", aliases=None, envvars=None, extra_commands=None, speed=1, test_mode=False, commentecho=False, ): """Echo out each character in ``text`` as keyboard characters are pressed, wait for a RETURN keypress, then run the ``text`` in a shell context. """ goto_regulartype = magictype(text, prompt_template, speed) if goto_regulartype: return goto_regulartype run_command( text, shell, aliases=aliases, envvars=envvars, extra_commands=extra_commands, test_mode=test_mode, ) return goto_regulartype
python
def magicrun( text, shell, prompt_template="default", aliases=None, envvars=None, extra_commands=None, speed=1, test_mode=False, commentecho=False, ): """Echo out each character in ``text`` as keyboard characters are pressed, wait for a RETURN keypress, then run the ``text`` in a shell context. """ goto_regulartype = magictype(text, prompt_template, speed) if goto_regulartype: return goto_regulartype run_command( text, shell, aliases=aliases, envvars=envvars, extra_commands=extra_commands, test_mode=test_mode, ) return goto_regulartype
[ "def", "magicrun", "(", "text", ",", "shell", ",", "prompt_template", "=", "\"default\"", ",", "aliases", "=", "None", ",", "envvars", "=", "None", ",", "extra_commands", "=", "None", ",", "speed", "=", "1", ",", "test_mode", "=", "False", ",", "commentecho", "=", "False", ",", ")", ":", "goto_regulartype", "=", "magictype", "(", "text", ",", "prompt_template", ",", "speed", ")", "if", "goto_regulartype", ":", "return", "goto_regulartype", "run_command", "(", "text", ",", "shell", ",", "aliases", "=", "aliases", ",", "envvars", "=", "envvars", ",", "extra_commands", "=", "extra_commands", ",", "test_mode", "=", "test_mode", ",", ")", "return", "goto_regulartype" ]
Echo out each character in ``text`` as keyboard characters are pressed, wait for a RETURN keypress, then run the ``text`` in a shell context.
[ "Echo", "out", "each", "character", "in", "text", "as", "keyboard", "characters", "are", "pressed", "wait", "for", "a", "RETURN", "keypress", "then", "run", "the", "text", "in", "a", "shell", "context", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L202-L227
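The four keyboard helpers above (magictype, regulartype, regularrun, magicrun) hand control back and forth: magicrun returns a truthy value when TAB is pressed during the fake typing, and regularrun returns False once TAB is pressed at the real prompt. The sketch below is one plausible presenter loop built on that contract; it assumes doitlive is installed and an interactive POSIX terminal, and the command list and shell path are placeholders, not part of the library.

# Sketch of a presenter loop over the doitlive keyboard helpers documented
# above. Placeholders / assumptions: the COMMANDS list, the "/bin/bash" shell
# path, and running from an interactive terminal.
from doitlive.keyboard import magicrun, regularrun

COMMANDS = ["echo hello", "ls -l"]  # placeholder scripted session
SHELL = "/bin/bash"                 # placeholder shell

for command in COMMANDS:
    # Fake-type `command`; a truthy return means TAB was pressed, i.e. the
    # presenter wants a real prompt instead of the scripted command.
    goto_regular = magicrun(command, SHELL, speed=2)
    while goto_regular:
        # Let the presenter type and run arbitrary commands; regularrun()
        # returns False once TAB is pressed again.
        if not regularrun(SHELL):
            # Back to scripted mode: replay the pending command.
            goto_regular = magicrun(command, SHELL, speed=2)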
8,120
sloria/doitlive
doitlive/python_consoles.py
PythonPlayerConsole.run_commands
def run_commands(self): """Automatically type and execute all commands.""" more = 0 prompt = sys.ps1 for command in self.commands: try: prompt = sys.ps2 if more else sys.ps1 try: magictype(command, prompt_template=prompt, speed=self.speed) except EOFError: self.write("\n") break else: if command.strip() == "exit()": return more = self.push(command) except KeyboardInterrupt: self.write("\nKeyboardInterrupt\n") self.resetbuffer() more = 0 sys.exit(1) echo_prompt(prompt) wait_for(RETURNS)
python
def run_commands(self): """Automatically type and execute all commands.""" more = 0 prompt = sys.ps1 for command in self.commands: try: prompt = sys.ps2 if more else sys.ps1 try: magictype(command, prompt_template=prompt, speed=self.speed) except EOFError: self.write("\n") break else: if command.strip() == "exit()": return more = self.push(command) except KeyboardInterrupt: self.write("\nKeyboardInterrupt\n") self.resetbuffer() more = 0 sys.exit(1) echo_prompt(prompt) wait_for(RETURNS)
[ "def", "run_commands", "(", "self", ")", ":", "more", "=", "0", "prompt", "=", "sys", ".", "ps1", "for", "command", "in", "self", ".", "commands", ":", "try", ":", "prompt", "=", "sys", ".", "ps2", "if", "more", "else", "sys", ".", "ps1", "try", ":", "magictype", "(", "command", ",", "prompt_template", "=", "prompt", ",", "speed", "=", "self", ".", "speed", ")", "except", "EOFError", ":", "self", ".", "write", "(", "\"\\n\"", ")", "break", "else", ":", "if", "command", ".", "strip", "(", ")", "==", "\"exit()\"", ":", "return", "more", "=", "self", ".", "push", "(", "command", ")", "except", "KeyboardInterrupt", ":", "self", ".", "write", "(", "\"\\nKeyboardInterrupt\\n\"", ")", "self", ".", "resetbuffer", "(", ")", "more", "=", "0", "sys", ".", "exit", "(", "1", ")", "echo_prompt", "(", "prompt", ")", "wait_for", "(", "RETURNS", ")" ]
Automatically type and execute all commands.
[ "Automatically", "type", "and", "execute", "all", "commands", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/python_consoles.py#L20-L42
8,121
sloria/doitlive
doitlive/python_consoles.py
PythonPlayerConsole.interact
def interact(self, banner=None): """Run an interactive session.""" try: sys.ps1 except AttributeError: sys.ps1 = ">>>" try: sys.ps2 except AttributeError: sys.ps2 = "... " cprt = ( 'Type "help", "copyright", "credits" or "license" for ' "more information." ) if banner is None: self.write("Python %s on %s\n%s\n" % (sys.version, sys.platform, cprt)) else: self.write("%s\n" % str(banner)) self.run_commands()
python
def interact(self, banner=None): """Run an interactive session.""" try: sys.ps1 except AttributeError: sys.ps1 = ">>>" try: sys.ps2 except AttributeError: sys.ps2 = "... " cprt = ( 'Type "help", "copyright", "credits" or "license" for ' "more information." ) if banner is None: self.write("Python %s on %s\n%s\n" % (sys.version, sys.platform, cprt)) else: self.write("%s\n" % str(banner)) self.run_commands()
[ "def", "interact", "(", "self", ",", "banner", "=", "None", ")", ":", "try", ":", "sys", ".", "ps1", "except", "AttributeError", ":", "sys", ".", "ps1", "=", "\">>>\"", "try", ":", "sys", ".", "ps2", "except", "AttributeError", ":", "sys", ".", "ps2", "=", "\"... \"", "cprt", "=", "(", "'Type \"help\", \"copyright\", \"credits\" or \"license\" for '", "\"more information.\"", ")", "if", "banner", "is", "None", ":", "self", ".", "write", "(", "\"Python %s on %s\\n%s\\n\"", "%", "(", "sys", ".", "version", ",", "sys", ".", "platform", ",", "cprt", ")", ")", "else", ":", "self", ".", "write", "(", "\"%s\\n\"", "%", "str", "(", "banner", ")", ")", "self", ".", "run_commands", "(", ")" ]
Run an interactive session.
[ "Run", "an", "interactive", "session", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/python_consoles.py#L44-L61
8,122
sloria/doitlive
doitlive/ipython_consoles.py
start_ipython_player
def start_ipython_player(commands, speed=1): """Starts a new magic IPython shell.""" PlayerTerminalIPythonApp.commands = commands PlayerTerminalIPythonApp.speed = speed PlayerTerminalIPythonApp.launch_instance()
python
def start_ipython_player(commands, speed=1): """Starts a new magic IPython shell.""" PlayerTerminalIPythonApp.commands = commands PlayerTerminalIPythonApp.speed = speed PlayerTerminalIPythonApp.launch_instance()
[ "def", "start_ipython_player", "(", "commands", ",", "speed", "=", "1", ")", ":", "PlayerTerminalIPythonApp", ".", "commands", "=", "commands", "PlayerTerminalIPythonApp", ".", "speed", "=", "speed", "PlayerTerminalIPythonApp", ".", "launch_instance", "(", ")" ]
Starts a new magic IPython shell.
[ "Starts", "a", "new", "magic", "IPython", "shell", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L168-L172
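A minimal launch sketch for start_ipython_player. It assumes IPython is installed and that commands is a list of source strings (the same shape the PythonPlayerConsole record above iterates over, though this record alone does not confirm it); the demo commands are placeholders.

# Sketch only: hand a scripted command list to the auto-typing IPython shell.
# Assumptions: IPython is available, this runs in an interactive terminal, and
# `commands` is a list of source strings.
from doitlive.ipython_consoles import start_ipython_player

commands = [
    "x = 40 + 2",  # placeholder demo input
    "x",
]

if __name__ == "__main__":
    start_ipython_player(commands, speed=3)  # each keypress echoes 3 characters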
8,123
sloria/doitlive
doitlive/ipython_consoles.py
PlayerTerminalInteractiveShell.on_feed_key
def on_feed_key(self, key_press): """Handles the magictyping when a key is pressed""" if key_press.key in {Keys.Escape, Keys.ControlC}: echo(carriage_return=True) raise Abort() if key_press.key == Keys.Backspace: if self.current_command_pos > 0: self.current_command_pos -= 1 return key_press ret = None if key_press.key != Keys.CPRResponse: if self.current_command_pos < len(self.current_command): current_key = self.current_command_key ret = KeyPress(current_key) increment = min( [self.speed, len(self.current_command) - self.current_command_pos] ) self.current_command_pos += increment else: # Command is finished, wait for Enter if key_press.key != Keys.Enter: return None self.current_command_index += 1 self.current_command_pos = 0 ret = key_press return ret
python
def on_feed_key(self, key_press): """Handles the magictyping when a key is pressed""" if key_press.key in {Keys.Escape, Keys.ControlC}: echo(carriage_return=True) raise Abort() if key_press.key == Keys.Backspace: if self.current_command_pos > 0: self.current_command_pos -= 1 return key_press ret = None if key_press.key != Keys.CPRResponse: if self.current_command_pos < len(self.current_command): current_key = self.current_command_key ret = KeyPress(current_key) increment = min( [self.speed, len(self.current_command) - self.current_command_pos] ) self.current_command_pos += increment else: # Command is finished, wait for Enter if key_press.key != Keys.Enter: return None self.current_command_index += 1 self.current_command_pos = 0 ret = key_press return ret
[ "def", "on_feed_key", "(", "self", ",", "key_press", ")", ":", "if", "key_press", ".", "key", "in", "{", "Keys", ".", "Escape", ",", "Keys", ".", "ControlC", "}", ":", "echo", "(", "carriage_return", "=", "True", ")", "raise", "Abort", "(", ")", "if", "key_press", ".", "key", "==", "Keys", ".", "Backspace", ":", "if", "self", ".", "current_command_pos", ">", "0", ":", "self", ".", "current_command_pos", "-=", "1", "return", "key_press", "ret", "=", "None", "if", "key_press", ".", "key", "!=", "Keys", ".", "CPRResponse", ":", "if", "self", ".", "current_command_pos", "<", "len", "(", "self", ".", "current_command", ")", ":", "current_key", "=", "self", ".", "current_command_key", "ret", "=", "KeyPress", "(", "current_key", ")", "increment", "=", "min", "(", "[", "self", ".", "speed", ",", "len", "(", "self", ".", "current_command", ")", "-", "self", ".", "current_command_pos", "]", ")", "self", ".", "current_command_pos", "+=", "increment", "else", ":", "# Command is finished, wait for Enter", "if", "key_press", ".", "key", "!=", "Keys", ".", "Enter", ":", "return", "None", "self", ".", "current_command_index", "+=", "1", "self", ".", "current_command_pos", "=", "0", "ret", "=", "key_press", "return", "ret" ]
Handles the magictyping when a key is pressed
[ "Handles", "the", "magictyping", "when", "a", "key", "is", "pressed" ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L61-L86
8,124
sloria/doitlive
doitlive/ipython_consoles.py
PlayerTerminalIPythonApp.init_shell
def init_shell(self): """initialize the InteractiveShell instance""" self.shell = PlayerTerminalInteractiveShell.instance( commands=self.commands, speed=self.speed, parent=self, display_banner=False, profile_dir=self.profile_dir, ipython_dir=self.ipython_dir, user_ns=self.user_ns, ) self.shell.configurables.append(self)
python
def init_shell(self): """initialize the InteractiveShell instance""" self.shell = PlayerTerminalInteractiveShell.instance( commands=self.commands, speed=self.speed, parent=self, display_banner=False, profile_dir=self.profile_dir, ipython_dir=self.ipython_dir, user_ns=self.user_ns, ) self.shell.configurables.append(self)
[ "def", "init_shell", "(", "self", ")", ":", "self", ".", "shell", "=", "PlayerTerminalInteractiveShell", ".", "instance", "(", "commands", "=", "self", ".", "commands", ",", "speed", "=", "self", ".", "speed", ",", "parent", "=", "self", ",", "display_banner", "=", "False", ",", "profile_dir", "=", "self", ".", "profile_dir", ",", "ipython_dir", "=", "self", ".", "ipython_dir", ",", "user_ns", "=", "self", ".", "user_ns", ",", ")", "self", ".", "shell", ".", "configurables", ".", "append", "(", "self", ")" ]
initialize the InteractiveShell instance
[ "initialize", "the", "InteractiveShell", "instance" ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L154-L165
8,125
sloria/doitlive
doitlive/termutils.py
raw_mode
def raw_mode(): """ Enables terminal raw mode during the context. Note: Currently noop for Windows systems. Usage: :: with raw_mode(): do_some_stuff() """ if WIN: # No implementation for windows yet. yield # needed for the empty context manager to work else: # imports are placed here because this will fail under Windows import tty import termios if not isatty(sys.stdin): f = open("/dev/tty") fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) tty.setraw(fd) except termios.error: pass try: yield finally: # this block sets the terminal to sane mode again, # also in case an exception occured in the context manager try: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) # sys.stdout.flush() # not needed I think. if f is not None: f.close() except termios.error: pass
python
def raw_mode(): """ Enables terminal raw mode during the context. Note: Currently noop for Windows systems. Usage: :: with raw_mode(): do_some_stuff() """ if WIN: # No implementation for windows yet. yield # needed for the empty context manager to work else: # imports are placed here because this will fail under Windows import tty import termios if not isatty(sys.stdin): f = open("/dev/tty") fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) tty.setraw(fd) except termios.error: pass try: yield finally: # this block sets the terminal to sane mode again, # also in case an exception occured in the context manager try: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) # sys.stdout.flush() # not needed I think. if f is not None: f.close() except termios.error: pass
[ "def", "raw_mode", "(", ")", ":", "if", "WIN", ":", "# No implementation for windows yet.", "yield", "# needed for the empty context manager to work", "else", ":", "# imports are placed here because this will fail under Windows", "import", "tty", "import", "termios", "if", "not", "isatty", "(", "sys", ".", "stdin", ")", ":", "f", "=", "open", "(", "\"/dev/tty\"", ")", "fd", "=", "f", ".", "fileno", "(", ")", "else", ":", "fd", "=", "sys", ".", "stdin", ".", "fileno", "(", ")", "f", "=", "None", "try", ":", "old_settings", "=", "termios", ".", "tcgetattr", "(", "fd", ")", "tty", ".", "setraw", "(", "fd", ")", "except", "termios", ".", "error", ":", "pass", "try", ":", "yield", "finally", ":", "# this block sets the terminal to sane mode again,", "# also in case an exception occured in the context manager", "try", ":", "termios", ".", "tcsetattr", "(", "fd", ",", "termios", ".", "TCSADRAIN", ",", "old_settings", ")", "# sys.stdout.flush() # not needed I think.", "if", "f", "is", "not", "None", ":", "f", ".", "close", "(", ")", "except", "termios", ".", "error", ":", "pass" ]
Enables terminal raw mode during the context. Note: Currently noop for Windows systems. Usage: :: with raw_mode(): do_some_stuff()
[ "Enables", "terminal", "raw", "mode", "during", "the", "context", "." ]
baf43f8ad3f2e4593fe21f6af42aedd34ef1efee
https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/termutils.py#L13-L54
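The docstring above already gives the shape of raw_mode() usage; here is a small concrete sketch that reads a single keypress without waiting for Enter. It needs a real terminal and uses only the standard library besides doitlive.

# Read one keypress without waiting for Enter, using the raw_mode() context
# manager documented above. Requires an interactive POSIX terminal (raw_mode
# is documented as a no-op on Windows).
import sys

from doitlive.termutils import raw_mode

with raw_mode():
    key = sys.stdin.read(1)  # returns right after a single keypress

print("you pressed: %r" % key)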
8,126
skorokithakis/shortuuid
shortuuid/main.py
int_to_string
def int_to_string(number, alphabet, padding=None): """ Convert a number to a string, using the given alphabet. The output has the most significant digit first. """ output = "" alpha_len = len(alphabet) while number: number, digit = divmod(number, alpha_len) output += alphabet[digit] if padding: remainder = max(padding - len(output), 0) output = output + alphabet[0] * remainder return output[::-1]
python
def int_to_string(number, alphabet, padding=None): """ Convert a number to a string, using the given alphabet. The output has the most significant digit first. """ output = "" alpha_len = len(alphabet) while number: number, digit = divmod(number, alpha_len) output += alphabet[digit] if padding: remainder = max(padding - len(output), 0) output = output + alphabet[0] * remainder return output[::-1]
[ "def", "int_to_string", "(", "number", ",", "alphabet", ",", "padding", "=", "None", ")", ":", "output", "=", "\"\"", "alpha_len", "=", "len", "(", "alphabet", ")", "while", "number", ":", "number", ",", "digit", "=", "divmod", "(", "number", ",", "alpha_len", ")", "output", "+=", "alphabet", "[", "digit", "]", "if", "padding", ":", "remainder", "=", "max", "(", "padding", "-", "len", "(", "output", ")", ",", "0", ")", "output", "=", "output", "+", "alphabet", "[", "0", "]", "*", "remainder", "return", "output", "[", ":", ":", "-", "1", "]" ]
Convert a number to a string, using the given alphabet. The output has the most significant digit first.
[ "Convert", "a", "number", "to", "a", "string", "using", "the", "given", "alphabet", ".", "The", "output", "has", "the", "most", "significant", "digit", "first", "." ]
4da632a986c3a43f75c7df64f27a90bbf7ff8039
https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L9-L22
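A quick illustrative run of int_to_string, importing it from the shortuuid/main.py module referenced above; the hexadecimal alphabet is just an example.

# int_to_string() writes the most significant digit first and left-pads with
# alphabet[0] when `padding` is given. The hex alphabet is illustrative only.
from shortuuid.main import int_to_string

HEX = "0123456789abcdef"

print(int_to_string(255, HEX))             # 'ff'
print(int_to_string(255, HEX, padding=4))  # '00ff'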
8,127
skorokithakis/shortuuid
shortuuid/main.py
string_to_int
def string_to_int(string, alphabet): """ Convert a string to a number, using the given alphabet. The input is assumed to have the most significant digit first. """ number = 0 alpha_len = len(alphabet) for char in string: number = number * alpha_len + alphabet.index(char) return number
python
def string_to_int(string, alphabet): """ Convert a string to a number, using the given alphabet. The input is assumed to have the most significant digit first. """ number = 0 alpha_len = len(alphabet) for char in string: number = number * alpha_len + alphabet.index(char) return number
[ "def", "string_to_int", "(", "string", ",", "alphabet", ")", ":", "number", "=", "0", "alpha_len", "=", "len", "(", "alphabet", ")", "for", "char", "in", "string", ":", "number", "=", "number", "*", "alpha_len", "+", "alphabet", ".", "index", "(", "char", ")", "return", "number" ]
Convert a string to a number, using the given alphabet. The input is assumed to have the most significant digit first.
[ "Convert", "a", "string", "to", "a", "number", "using", "the", "given", "alphabet", ".", "The", "input", "is", "assumed", "to", "have", "the", "most", "significant", "digit", "first", "." ]
4da632a986c3a43f75c7df64f27a90bbf7ff8039
https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L25-L34
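And the round trip: string_to_int inverts int_to_string for the same alphabet (same import assumption as the previous sketch).

# Round trip between the two helpers above; leading alphabet[0] symbols do not
# change the decoded value.
from shortuuid.main import int_to_string, string_to_int

ALPHABET = "0123456789abcdef"
n = 48879  # 0xBEEF

encoded = int_to_string(n, ALPHABET)            # 'beef'
assert string_to_int(encoded, ALPHABET) == n
assert string_to_int("00" + encoded, ALPHABET) == n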
8,128
skorokithakis/shortuuid
shortuuid/main.py
ShortUUID.decode
def decode(self, string, legacy=False): """ Decode a string according to the current alphabet into a UUID Raises ValueError when encountering illegal characters or a too-long string. If string too short, fills leftmost (MSB) bits with 0. Pass `legacy=True` if your UUID was encoded with a ShortUUID version prior to 0.6.0. """ if legacy: string = string[::-1] return _uu.UUID(int=string_to_int(string, self._alphabet))
python
def decode(self, string, legacy=False): """ Decode a string according to the current alphabet into a UUID Raises ValueError when encountering illegal characters or a too-long string. If string too short, fills leftmost (MSB) bits with 0. Pass `legacy=True` if your UUID was encoded with a ShortUUID version prior to 0.6.0. """ if legacy: string = string[::-1] return _uu.UUID(int=string_to_int(string, self._alphabet))
[ "def", "decode", "(", "self", ",", "string", ",", "legacy", "=", "False", ")", ":", "if", "legacy", ":", "string", "=", "string", "[", ":", ":", "-", "1", "]", "return", "_uu", ".", "UUID", "(", "int", "=", "string_to_int", "(", "string", ",", "self", ".", "_alphabet", ")", ")" ]
Decode a string according to the current alphabet into a UUID Raises ValueError when encountering illegal characters or a too-long string. If string too short, fills leftmost (MSB) bits with 0. Pass `legacy=True` if your UUID was encoded with a ShortUUID version prior to 0.6.0.
[ "Decode", "a", "string", "according", "to", "the", "current", "alphabet", "into", "a", "UUID", "Raises", "ValueError", "when", "encountering", "illegal", "characters", "or", "a", "too", "-", "long", "string", "." ]
4da632a986c3a43f75c7df64f27a90bbf7ff8039
https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L62-L75
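A decode round trip, assuming ShortUUID also exposes the complementary encode() method (only decode() appears in this record).

# Round-trip sketch for ShortUUID.decode(). Assumption flagged: encode() is
# taken to be the inverse operation; it is not shown in this record.
import uuid

from shortuuid.main import ShortUUID

su = ShortUUID()
original = uuid.uuid4()

short = su.encode(original)        # assumed complementary method
assert su.decode(short) == original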
8,129
skorokithakis/shortuuid
shortuuid/main.py
ShortUUID.set_alphabet
def set_alphabet(self, alphabet): """Set the alphabet to be used for new UUIDs.""" # Turn the alphabet into a set and sort it to prevent duplicates # and ensure reproducibility. new_alphabet = list(sorted(set(alphabet))) if len(new_alphabet) > 1: self._alphabet = new_alphabet self._alpha_len = len(self._alphabet) else: raise ValueError("Alphabet with more than " "one unique symbols required.")
python
def set_alphabet(self, alphabet): """Set the alphabet to be used for new UUIDs.""" # Turn the alphabet into a set and sort it to prevent duplicates # and ensure reproducibility. new_alphabet = list(sorted(set(alphabet))) if len(new_alphabet) > 1: self._alphabet = new_alphabet self._alpha_len = len(self._alphabet) else: raise ValueError("Alphabet with more than " "one unique symbols required.")
[ "def", "set_alphabet", "(", "self", ",", "alphabet", ")", ":", "# Turn the alphabet into a set and sort it to prevent duplicates", "# and ensure reproducibility.", "new_alphabet", "=", "list", "(", "sorted", "(", "set", "(", "alphabet", ")", ")", ")", "if", "len", "(", "new_alphabet", ")", ">", "1", ":", "self", ".", "_alphabet", "=", "new_alphabet", "self", ".", "_alpha_len", "=", "len", "(", "self", ".", "_alphabet", ")", "else", ":", "raise", "ValueError", "(", "\"Alphabet with more than \"", "\"one unique symbols required.\"", ")" ]
Set the alphabet to be used for new UUIDs.
[ "Set", "the", "alphabet", "to", "be", "used", "for", "new", "UUIDs", "." ]
4da632a986c3a43f75c7df64f27a90bbf7ff8039
https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L111-L121
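set_alphabet deduplicates and sorts the symbols and rejects alphabets with fewer than two unique symbols. The sketch below peeks at the internal _alphabet attribute assigned in the code above purely for illustration.

# Duplicates collapse and symbols are sorted; single-symbol alphabets raise.
# _alphabet is the internal attribute from the code above (illustration only).
from shortuuid.main import ShortUUID

su = ShortUUID()
su.set_alphabet("ccbbaa")
assert su._alphabet == ["a", "b", "c"]

try:
    su.set_alphabet("aaaa")
except ValueError as exc:
    print(exc)  # "Alphabet with more than one unique symbols required."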
8,130
skorokithakis/shortuuid
shortuuid/main.py
ShortUUID.encoded_length
def encoded_length(self, num_bytes=16): """ Returns the string length of the shortened UUID. """ factor = math.log(256) / math.log(self._alpha_len) return int(math.ceil(factor * num_bytes))
python
def encoded_length(self, num_bytes=16): """ Returns the string length of the shortened UUID. """ factor = math.log(256) / math.log(self._alpha_len) return int(math.ceil(factor * num_bytes))
[ "def", "encoded_length", "(", "self", ",", "num_bytes", "=", "16", ")", ":", "factor", "=", "math", ".", "log", "(", "256", ")", "/", "math", ".", "log", "(", "self", ".", "_alpha_len", ")", "return", "int", "(", "math", ".", "ceil", "(", "factor", "*", "num_bytes", ")", ")" ]
Returns the string length of the shortened UUID.
[ "Returns", "the", "string", "length", "of", "the", "shortened", "UUID", "." ]
4da632a986c3a43f75c7df64f27a90bbf7ff8039
https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L123-L128
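encoded_length follows directly from the formula in the code above, ceil(num_bytes * log(256) / log(len(alphabet))); with a 16-symbol alphabet the factor is exactly 2, so the expected values below are exact.

# With 16 symbols each symbol carries 4 bits, so 16 bytes need 32 symbols and
# 4 bytes need 8. set_alphabet() is the method from the previous record.
from shortuuid.main import ShortUUID

su = ShortUUID()
su.set_alphabet("0123456789abcdef")

assert su.encoded_length() == 32            # default num_bytes=16
assert su.encoded_length(num_bytes=4) == 8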
8,131
quarkslab/arybo
arybo/lib/exprs_asm.py
asm_module
def asm_module(exprs, dst_reg, sym_to_reg, triple_or_target=None): ''' Generate an LLVM module for a list of expressions Arguments: * See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments Output: * An LLVM module with one function named "__arybo", containing the translated expression. See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example. ''' if not llvmlite_available: raise RuntimeError("llvmlite module unavailable! can't assemble...") target = llvm_get_target(triple_or_target) M = ll.Module() fntype = ll.FunctionType(ll.VoidType(), []) func = ll.Function(M, fntype, name='__arybo') func.attributes.add("naked") func.attributes.add("nounwind") BB = func.append_basic_block() IRB = ll.IRBuilder() IRB.position_at_end(BB) sym_to_value = {sym: IRB.load_reg(IntType(reg[1]), reg[0], reg[0]) for sym,reg in six.iteritems(sym_to_reg)} ret = to_llvm_ir(exprs, sym_to_value, IRB) IRB.store_reg(ret, IntType(dst_reg[1]), dst_reg[0]) # See https://llvm.org/bugs/show_bug.cgi?id=15806 IRB.unreachable() return M
python
def asm_module(exprs, dst_reg, sym_to_reg, triple_or_target=None): ''' Generate an LLVM module for a list of expressions Arguments: * See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments Output: * An LLVM module with one function named "__arybo", containing the translated expression. See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example. ''' if not llvmlite_available: raise RuntimeError("llvmlite module unavailable! can't assemble...") target = llvm_get_target(triple_or_target) M = ll.Module() fntype = ll.FunctionType(ll.VoidType(), []) func = ll.Function(M, fntype, name='__arybo') func.attributes.add("naked") func.attributes.add("nounwind") BB = func.append_basic_block() IRB = ll.IRBuilder() IRB.position_at_end(BB) sym_to_value = {sym: IRB.load_reg(IntType(reg[1]), reg[0], reg[0]) for sym,reg in six.iteritems(sym_to_reg)} ret = to_llvm_ir(exprs, sym_to_value, IRB) IRB.store_reg(ret, IntType(dst_reg[1]), dst_reg[0]) # See https://llvm.org/bugs/show_bug.cgi?id=15806 IRB.unreachable() return M
[ "def", "asm_module", "(", "exprs", ",", "dst_reg", ",", "sym_to_reg", ",", "triple_or_target", "=", "None", ")", ":", "if", "not", "llvmlite_available", ":", "raise", "RuntimeError", "(", "\"llvmlite module unavailable! can't assemble...\"", ")", "target", "=", "llvm_get_target", "(", "triple_or_target", ")", "M", "=", "ll", ".", "Module", "(", ")", "fntype", "=", "ll", ".", "FunctionType", "(", "ll", ".", "VoidType", "(", ")", ",", "[", "]", ")", "func", "=", "ll", ".", "Function", "(", "M", ",", "fntype", ",", "name", "=", "'__arybo'", ")", "func", ".", "attributes", ".", "add", "(", "\"naked\"", ")", "func", ".", "attributes", ".", "add", "(", "\"nounwind\"", ")", "BB", "=", "func", ".", "append_basic_block", "(", ")", "IRB", "=", "ll", ".", "IRBuilder", "(", ")", "IRB", ".", "position_at_end", "(", "BB", ")", "sym_to_value", "=", "{", "sym", ":", "IRB", ".", "load_reg", "(", "IntType", "(", "reg", "[", "1", "]", ")", ",", "reg", "[", "0", "]", ",", "reg", "[", "0", "]", ")", "for", "sym", ",", "reg", "in", "six", ".", "iteritems", "(", "sym_to_reg", ")", "}", "ret", "=", "to_llvm_ir", "(", "exprs", ",", "sym_to_value", ",", "IRB", ")", "IRB", ".", "store_reg", "(", "ret", ",", "IntType", "(", "dst_reg", "[", "1", "]", ")", ",", "dst_reg", "[", "0", "]", ")", "# See https://llvm.org/bugs/show_bug.cgi?id=15806", "IRB", ".", "unreachable", "(", ")", "return", "M" ]
Generate an LLVM module for a list of expressions Arguments: * See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments Output: * An LLVM module with one function named "__arybo", containing the translated expression. See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example.
[ "Generate", "an", "LLVM", "module", "for", "a", "list", "of", "expressions" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/exprs_asm.py#L225-L261
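The next record's asm_binary docstring contains a full end-to-end example; the sketch below stops one step earlier and just prints the LLVM module produced by asm_module, reusing the same illustrative x86_64 register mapping. It assumes arybo with llvmlite available.

# Build and inspect the LLVM IR for "x + y" instead of assembling it to bytes.
# Register names and the target triple are the same illustrative choices used
# in the asm_binary example below; llvmlite must be installed.
from arybo.lib import MBA, mba_exprs
from arybo.lib.exprs_asm import asm_module

mba = MBA(64)
x = mba.var("x")
y = mba.var("y")
e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y)

module = asm_module([e], ("rax", 64),
                    {"x": ("rdi", 64), "y": ("rsi", 64)},
                    "x86_64-unknown-unknown")
print(module)  # textual LLVM IR of the generated __arybo function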
8,132
quarkslab/arybo
arybo/lib/exprs_asm.py
asm_binary
def asm_binary(exprs, dst_reg, sym_to_reg, triple_or_target=None): ''' Compile and assemble an expression for a given architecture. Arguments: * *exprs*: list of expressions to convert. This can represent a graph of expressions. * *dst_reg*: final register on which to store the result of the last expression. This is represented by a tuple ("reg_name", reg_size_bits). Example: ("rax", 64) * *sym_to_reg*: a dictionnary that maps Arybo variable name to registers (described as tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)} * *triple_or_target*: LLVM architecture triple to use. Use by default the host architecture. Example: "x86_64-unknown-unknown" Output: * binary stream of the assembled expression for the given target Here is an example that will compile and assemble "x+y" for x86_64:: from arybo.lib import MBA from arybo.lib import mba_exprs from arybo.lib.exprs_asm import asm_binary mba = MBA(64) x = mba.var("x") y = mba.var("y") e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y) code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown") print(code.hex()) which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``). ''' if not llvmlite_available: raise RuntimeError("llvmlite module unavailable! can't assemble...") target = llvm_get_target(triple_or_target) M = asm_module(exprs, dst_reg, sym_to_reg, target) # Use LLVM to compile the '__arybo' function. As the function is naked and # is the only, we just got to dump the .text section to get the binary # assembly. # No need for keystone or whatever hype stuff. llvmlite does the job. M = llvm.parse_assembly(str(M)) M.verify() target_machine = target.create_target_machine() obj_bin = target_machine.emit_object(M) obj = llvm.ObjectFileRef.from_data(obj_bin) for s in obj.sections(): if s.is_text(): return s.data() raise RuntimeError("unable to get the assembled binary!")
python
def asm_binary(exprs, dst_reg, sym_to_reg, triple_or_target=None): ''' Compile and assemble an expression for a given architecture. Arguments: * *exprs*: list of expressions to convert. This can represent a graph of expressions. * *dst_reg*: final register on which to store the result of the last expression. This is represented by a tuple ("reg_name", reg_size_bits). Example: ("rax", 64) * *sym_to_reg*: a dictionnary that maps Arybo variable name to registers (described as tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)} * *triple_or_target*: LLVM architecture triple to use. Use by default the host architecture. Example: "x86_64-unknown-unknown" Output: * binary stream of the assembled expression for the given target Here is an example that will compile and assemble "x+y" for x86_64:: from arybo.lib import MBA from arybo.lib import mba_exprs from arybo.lib.exprs_asm import asm_binary mba = MBA(64) x = mba.var("x") y = mba.var("y") e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y) code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown") print(code.hex()) which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``). ''' if not llvmlite_available: raise RuntimeError("llvmlite module unavailable! can't assemble...") target = llvm_get_target(triple_or_target) M = asm_module(exprs, dst_reg, sym_to_reg, target) # Use LLVM to compile the '__arybo' function. As the function is naked and # is the only, we just got to dump the .text section to get the binary # assembly. # No need for keystone or whatever hype stuff. llvmlite does the job. M = llvm.parse_assembly(str(M)) M.verify() target_machine = target.create_target_machine() obj_bin = target_machine.emit_object(M) obj = llvm.ObjectFileRef.from_data(obj_bin) for s in obj.sections(): if s.is_text(): return s.data() raise RuntimeError("unable to get the assembled binary!")
[ "def", "asm_binary", "(", "exprs", ",", "dst_reg", ",", "sym_to_reg", ",", "triple_or_target", "=", "None", ")", ":", "if", "not", "llvmlite_available", ":", "raise", "RuntimeError", "(", "\"llvmlite module unavailable! can't assemble...\"", ")", "target", "=", "llvm_get_target", "(", "triple_or_target", ")", "M", "=", "asm_module", "(", "exprs", ",", "dst_reg", ",", "sym_to_reg", ",", "target", ")", "# Use LLVM to compile the '__arybo' function. As the function is naked and", "# is the only, we just got to dump the .text section to get the binary", "# assembly.", "# No need for keystone or whatever hype stuff. llvmlite does the job.", "M", "=", "llvm", ".", "parse_assembly", "(", "str", "(", "M", ")", ")", "M", ".", "verify", "(", ")", "target_machine", "=", "target", ".", "create_target_machine", "(", ")", "obj_bin", "=", "target_machine", ".", "emit_object", "(", "M", ")", "obj", "=", "llvm", ".", "ObjectFileRef", ".", "from_data", "(", "obj_bin", ")", "for", "s", "in", "obj", ".", "sections", "(", ")", ":", "if", "s", ".", "is_text", "(", ")", ":", "return", "s", ".", "data", "(", ")", "raise", "RuntimeError", "(", "\"unable to get the assembled binary!\"", ")" ]
Compile and assemble an expression for a given architecture. Arguments: * *exprs*: list of expressions to convert. This can represent a graph of expressions. * *dst_reg*: final register on which to store the result of the last expression. This is represented by a tuple ("reg_name", reg_size_bits). Example: ("rax", 64) * *sym_to_reg*: a dictionary that maps Arybo variable names to registers (described as a tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)} * *triple_or_target*: LLVM architecture triple to use. Uses the host architecture by default. Example: "x86_64-unknown-unknown" Output: * binary stream of the assembled expression for the given target

Here is an example that will compile and assemble "x+y" for x86_64::

    from arybo.lib import MBA
    from arybo.lib import mba_exprs
    from arybo.lib.exprs_asm import asm_binary
    mba = MBA(64)
    x = mba.var("x")
    y = mba.var("y")
    e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y)
    code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown")
    print(code.hex())

which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``).
[ "Compile", "and", "assemble", "an", "expression", "for", "a", "given", "architecture", "." ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/exprs_asm.py#L263-L314
8,133
quarkslab/arybo
arybo/lib/mba_if.py
expr_contains
def expr_contains(e, o): ''' Returns true if o is in e ''' if o == e: return True if e.has_args(): for a in e.args(): if expr_contains(a, o): return True return False
python
def expr_contains(e, o): ''' Returns true if o is in e ''' if o == e: return True if e.has_args(): for a in e.args(): if expr_contains(a, o): return True return False
[ "def", "expr_contains", "(", "e", ",", "o", ")", ":", "if", "o", "==", "e", ":", "return", "True", "if", "e", ".", "has_args", "(", ")", ":", "for", "a", "in", "e", ".", "args", "(", ")", ":", "if", "expr_contains", "(", "a", ",", "o", ")", ":", "return", "True", "return", "False" ]
Returns true if o is in e
[ "Returns", "true", "if", "o", "is", "in", "e" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L36-L44
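expr_contains relies only on equality plus the has_args()/args() protocol used in the code above, so a tiny stand-in node class (purely illustrative, not part of arybo) is enough to show the recursive walk; the import assumes arybo is installed.

# Stand-in expression nodes that satisfy the has_args()/args() protocol the
# function above traverses. The Node class is illustrative only.
from arybo.lib.mba_if import expr_contains


class Node(object):
    def __init__(self, name, *children):
        self.name = name
        self.children = children

    def has_args(self):
        return bool(self.children)

    def args(self):
        return self.children

    def __eq__(self, other):
        return isinstance(other, Node) and self.name == other.name

    def __hash__(self):
        return hash(self.name)


x, y, z = Node("x"), Node("y"), Node("z")
e = Node("+", x, Node("*", y, x))

assert expr_contains(e, y)
assert not expr_contains(e, z)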
8,134
quarkslab/arybo
arybo/lib/mba_if.py
MBAVariable.zext
def zext(self, n): ''' Zero-extend the variable to n bits. n bits must be stricly larger than the actual number of bits, or a ValueError is thrown ''' if n <= self.nbits: raise ValueError("n must be > %d bits" % self.nbits) mba_ret = self.__new_mba(n) ret = mba_ret.from_cst(0) for i in range(self.nbits): ret.vec[i] = self.vec[i] return mba_ret.from_vec(ret)
python
def zext(self, n): ''' Zero-extend the variable to n bits. n bits must be stricly larger than the actual number of bits, or a ValueError is thrown ''' if n <= self.nbits: raise ValueError("n must be > %d bits" % self.nbits) mba_ret = self.__new_mba(n) ret = mba_ret.from_cst(0) for i in range(self.nbits): ret.vec[i] = self.vec[i] return mba_ret.from_vec(ret)
[ "def", "zext", "(", "self", ",", "n", ")", ":", "if", "n", "<=", "self", ".", "nbits", ":", "raise", "ValueError", "(", "\"n must be > %d bits\"", "%", "self", ".", "nbits", ")", "mba_ret", "=", "self", ".", "__new_mba", "(", "n", ")", "ret", "=", "mba_ret", ".", "from_cst", "(", "0", ")", "for", "i", "in", "range", "(", "self", ".", "nbits", ")", ":", "ret", ".", "vec", "[", "i", "]", "=", "self", ".", "vec", "[", "i", "]", "return", "mba_ret", ".", "from_vec", "(", "ret", ")" ]
Zero-extend the variable to n bits. n must be strictly larger than the actual number of bits, or a ValueError is thrown.
[ "Zero", "-", "extend", "the", "variable", "to", "n", "bits", ".", "n", "bits", "must", "be", "stricly", "larger", "than", "the", "actual", "number", "of", "bits", "or", "a", "ValueError", "is", "thrown" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L255-L269
8,135
quarkslab/arybo
arybo/lib/mba_if.py
MBAVariable.sext
def sext(self, n): ''' Sign-extend the variable to n bits. n bits must be stricly larger than the actual number of bits, or a ValueError is thrown ''' if n <= self.nbits: raise ValueError("n must be > %d bits" % self.nbits) mba_ret = self.__new_mba(n) ret = mba_ret.from_cst(0) for i in range(self.nbits): ret.vec[i] = self.vec[i] last_bit = self.vec[self.nbits-1] for i in range(self.nbits,n): ret.vec[i] = last_bit return mba_ret.from_vec(ret)
python
def sext(self, n): ''' Sign-extend the variable to n bits. n bits must be stricly larger than the actual number of bits, or a ValueError is thrown ''' if n <= self.nbits: raise ValueError("n must be > %d bits" % self.nbits) mba_ret = self.__new_mba(n) ret = mba_ret.from_cst(0) for i in range(self.nbits): ret.vec[i] = self.vec[i] last_bit = self.vec[self.nbits-1] for i in range(self.nbits,n): ret.vec[i] = last_bit return mba_ret.from_vec(ret)
[ "def", "sext", "(", "self", ",", "n", ")", ":", "if", "n", "<=", "self", ".", "nbits", ":", "raise", "ValueError", "(", "\"n must be > %d bits\"", "%", "self", ".", "nbits", ")", "mba_ret", "=", "self", ".", "__new_mba", "(", "n", ")", "ret", "=", "mba_ret", ".", "from_cst", "(", "0", ")", "for", "i", "in", "range", "(", "self", ".", "nbits", ")", ":", "ret", ".", "vec", "[", "i", "]", "=", "self", ".", "vec", "[", "i", "]", "last_bit", "=", "self", ".", "vec", "[", "self", ".", "nbits", "-", "1", "]", "for", "i", "in", "range", "(", "self", ".", "nbits", ",", "n", ")", ":", "ret", ".", "vec", "[", "i", "]", "=", "last_bit", "return", "mba_ret", ".", "from_vec", "(", "ret", ")" ]
Sign-extend the variable to n bits. n must be strictly larger than the actual number of bits, or a ValueError is thrown.
[ "Sign", "-", "extend", "the", "variable", "to", "n", "bits", ".", "n", "bits", "must", "be", "stricly", "larger", "than", "the", "actual", "number", "of", "bits", "or", "a", "ValueError", "is", "thrown" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L271-L288
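The two records above differ only in how the upper bits are filled: zext forces them to 0 while sext replicates the top bit. A small sketch using the same 4-bit MBA setup as the other arybo examples in this file (requires arybo):

# zext() vs sext() on a 4-bit variable. Per the code above, the result is an
# 8-bit value whose upper bits are 0 (zext) or copies of bit 3 (sext).
from arybo.lib import MBA

mba = MBA(4)
x = mba.var("x")

x_zero_ext = x.zext(8)  # bits 4..7 are the constant 0
x_sign_ext = x.sext(8)  # bits 4..7 all equal bit 3 of x

# Widths that are not strictly larger are rejected.
try:
    x.zext(4)
except ValueError as exc:
    print(exc)  # "n must be > 4 bits"

Under the evaluate()/eval() interface in the next record, a 4-bit pattern such as 0b1000 therefore stays 0b00001000 after zext but becomes 0b11111000 after sext.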
8,136
quarkslab/arybo
arybo/lib/mba_if.py
MBAVariable.evaluate
def evaluate(self, values): ''' Evaluates the expression to an integer values is a dictionnary that associates n-bit variables to integer values. Every symbolic variables used in the expression must be represented. For instance, let x and y 4-bit variables, and e = x+y: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x+y To evaluate e with x=4 and y=5, we can do: >>> e.eval({x: 4, y: 5}) 9 If a variable is missing from values, an exception will occur. (x or y in the example above) ''' ret = self.mba.evaluate(self.vec, values) if isinstance(ret, six.integer_types): return ret return self.from_vec(self.mba, ret)
python
def evaluate(self, values): ''' Evaluates the expression to an integer values is a dictionnary that associates n-bit variables to integer values. Every symbolic variables used in the expression must be represented. For instance, let x and y 4-bit variables, and e = x+y: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x+y To evaluate e with x=4 and y=5, we can do: >>> e.eval({x: 4, y: 5}) 9 If a variable is missing from values, an exception will occur. (x or y in the example above) ''' ret = self.mba.evaluate(self.vec, values) if isinstance(ret, six.integer_types): return ret return self.from_vec(self.mba, ret)
[ "def", "evaluate", "(", "self", ",", "values", ")", ":", "ret", "=", "self", ".", "mba", ".", "evaluate", "(", "self", ".", "vec", ",", "values", ")", "if", "isinstance", "(", "ret", ",", "six", ".", "integer_types", ")", ":", "return", "ret", "return", "self", ".", "from_vec", "(", "self", ".", "mba", ",", "ret", ")" ]
Evaluates the expression to an integer. values is a dictionary that associates n-bit variables to integer values. Every symbolic variable used in the expression must be represented. For instance, let x and y be 4-bit variables, and e = x+y: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x+y To evaluate e with x=4 and y=5, we can do: >>> e.eval({x: 4, y: 5}) 9 If a variable is missing from values, an exception will occur. (x or y in the example above)
[ "Evaluates", "the", "expression", "to", "an", "integer" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L290-L315
8,137
quarkslab/arybo
arybo/lib/mba_if.py
MBAVariable.vectorial_decomp
def vectorial_decomp(self, symbols): ''' Compute the vectorial decomposition of the expression according to the given symbols. symbols is a list that represents the input of the resulting application. They are considerated as a flatten vector of bits. Args: symbols: TODO Returns: An :class:`pytanque.App` object Example: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x^y^6 >>> e.vectorial_decomp([x,y]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0, 1, 0, 0, 0] [0, 1, 0, 0, 0, 1, 0, 0] [0, 0, 1, 0, 0, 0, 1, 0] [0, 0, 0, 1, 0, 0, 0, 1] ]) AffApp cst = Vec([ 0, 1, 1, 0 ]) ''' try: symbols = [s.vec for s in symbols] N = sum(map(lambda s: len(s), symbols)) symbols_ = Vector(N) i = 0 for v in symbols: for s in v: symbols_[i] = s i += 1 symbols = symbols_ except TypeError: pass return self.mba.vectorial_decomp(symbols, self.vec)
python
def vectorial_decomp(self, symbols): ''' Compute the vectorial decomposition of the expression according to the given symbols. symbols is a list that represents the input of the resulting application. They are considerated as a flatten vector of bits. Args: symbols: TODO Returns: An :class:`pytanque.App` object Example: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x^y^6 >>> e.vectorial_decomp([x,y]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0, 1, 0, 0, 0] [0, 1, 0, 0, 0, 1, 0, 0] [0, 0, 1, 0, 0, 0, 1, 0] [0, 0, 0, 1, 0, 0, 0, 1] ]) AffApp cst = Vec([ 0, 1, 1, 0 ]) ''' try: symbols = [s.vec for s in symbols] N = sum(map(lambda s: len(s), symbols)) symbols_ = Vector(N) i = 0 for v in symbols: for s in v: symbols_[i] = s i += 1 symbols = symbols_ except TypeError: pass return self.mba.vectorial_decomp(symbols, self.vec)
[ "def", "vectorial_decomp", "(", "self", ",", "symbols", ")", ":", "try", ":", "symbols", "=", "[", "s", ".", "vec", "for", "s", "in", "symbols", "]", "N", "=", "sum", "(", "map", "(", "lambda", "s", ":", "len", "(", "s", ")", ",", "symbols", ")", ")", "symbols_", "=", "Vector", "(", "N", ")", "i", "=", "0", "for", "v", "in", "symbols", ":", "for", "s", "in", "v", ":", "symbols_", "[", "i", "]", "=", "s", "i", "+=", "1", "symbols", "=", "symbols_", "except", "TypeError", ":", "pass", "return", "self", ".", "mba", ".", "vectorial_decomp", "(", "symbols", ",", "self", ".", "vec", ")" ]
Compute the vectorial decomposition of the expression according to the given symbols. symbols is a list that represents the input of the resulting application. They are considered as a flattened vector of bits. Args: symbols: TODO Returns: An :class:`pytanque.App` object Example: >>> mba = MBA(4) >>> x = mba.var('x') >>> y = mba.var('y') >>> e = x^y^6 >>> e.vectorial_decomp([x,y]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0, 1, 0, 0, 0] [0, 1, 0, 0, 0, 1, 0, 0] [0, 0, 1, 0, 0, 0, 1, 0] [0, 0, 0, 1, 0, 0, 0, 1] ]) AffApp cst = Vec([ 0, 1, 1, 0 ])
[ "Compute", "the", "vectorial", "decomposition", "of", "the", "expression", "according", "to", "the", "given", "symbols", "." ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L338-L386
8,138
quarkslab/arybo
arybo/lib/mba_if.py
MBA.var
def var(self, name): ''' Get an n-bit named symbolic variable Returns: An :class:`MBAVariable` object representing a symbolic variable Example: >>> mba.var('x') Vec([ x0, x1, x2, x3 ]) ''' ret = self.from_vec(self.var_symbols(name)) ret.name = name return ret
python
def var(self, name): ''' Get an n-bit named symbolic variable Returns: An :class:`MBAVariable` object representing a symbolic variable Example: >>> mba.var('x') Vec([ x0, x1, x2, x3 ]) ''' ret = self.from_vec(self.var_symbols(name)) ret.name = name return ret
[ "def", "var", "(", "self", ",", "name", ")", ":", "ret", "=", "self", ".", "from_vec", "(", "self", ".", "var_symbols", "(", "name", ")", ")", "ret", ".", "name", "=", "name", "return", "ret" ]
Get an n-bit named symbolic variable Returns: An :class:`MBAVariable` object representing a symbolic variable Example: >>> mba.var('x') Vec([ x0, x1, x2, x3 ])
[ "Get", "an", "n", "-", "bit", "named", "symbolic", "variable" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L408-L426
8,139
quarkslab/arybo
arybo/lib/mba_if.py
MBA.permut2expr
def permut2expr(self, P): ''' Convert a substitution table into an arybo application Args: P: list of integers. The list must not contain more than 2**nbits elements. Returns: A tuple containing an :class:`MBAVariable` object with the result and the symbolic input variable used in this object. A typical use case is to feed these into vectorial_decomp. Example: >>> mba = MBA(4) >>> P = [i^7 for i in range(16)] >>> E,X = mba.permut2expr(P) >>> E.vectorial_decomp([X]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0] [0, 1, 0, 0] [0, 0, 1, 0] [0, 0, 0, 1] ]) AffApp cst = Vec([ 1, 1, 1, 0 ]) ''' if len(P) > (1<<self.nbits): raise ValueError("P must not contain more than %d elements" % (1<<self.nbits)) X = self.var('X') ret = super(MBA, self).permut2expr(P, X.vec) return self.from_vec(ret), X
python
def permut2expr(self, P): ''' Convert a substitution table into an arybo application Args: P: list of integers. The list must not contain more than 2**nbits elements. Returns: A tuple containing an :class:`MBAVariable` object with the result and the symbolic input variable used in this object. A typical use case is to feed these into vectorial_decomp. Example: >>> mba = MBA(4) >>> P = [i^7 for i in range(16)] >>> E,X = mba.permut2expr(P) >>> E.vectorial_decomp([X]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0] [0, 1, 0, 0] [0, 0, 1, 0] [0, 0, 0, 1] ]) AffApp cst = Vec([ 1, 1, 1, 0 ]) ''' if len(P) > (1<<self.nbits): raise ValueError("P must not contain more than %d elements" % (1<<self.nbits)) X = self.var('X') ret = super(MBA, self).permut2expr(P, X.vec) return self.from_vec(ret), X
[ "def", "permut2expr", "(", "self", ",", "P", ")", ":", "if", "len", "(", "P", ")", ">", "(", "1", "<<", "self", ".", "nbits", ")", ":", "raise", "ValueError", "(", "\"P must not contain more than %d elements\"", "%", "(", "1", "<<", "self", ".", "nbits", ")", ")", "X", "=", "self", ".", "var", "(", "'X'", ")", "ret", "=", "super", "(", "MBA", ",", "self", ")", ".", "permut2expr", "(", "P", ",", "X", ".", "vec", ")", "return", "self", ".", "from_vec", "(", "ret", ")", ",", "X" ]
Convert a substitution table into an arybo application Args: P: list of integers. The list must not contain more than 2**nbits elements. Returns: A tuple containing an :class:`MBAVariable` object with the result and the symbolic input variable used in this object. A typical use case is to feed these into vectorial_decomp. Example: >>> mba = MBA(4) >>> P = [i^7 for i in range(16)] >>> E,X = mba.permut2expr(P) >>> E.vectorial_decomp([X]) App NL = Vec([ 0, 0, 0, 0 ]) AffApp matrix = Mat([ [1, 0, 0, 0] [0, 1, 0, 0] [0, 0, 1, 0] [0, 0, 0, 1] ]) AffApp cst = Vec([ 1, 1, 1, 0 ])
[ "Convert", "a", "substitution", "table", "into", "an", "arybo", "application" ]
04fad817090b3b9f2328a5e984457aba6024e971
https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L444-L483
8,140
requests/requests-ntlm
requests_ntlm/requests_ntlm.py
HttpNtlmAuth.response_hook
def response_hook(self, r, **kwargs): """The actual hook handler.""" if r.status_code == 401: # Handle server auth. www_authenticate = r.headers.get('www-authenticate', '').lower() auth_type = _auth_type_from_header(www_authenticate) if auth_type is not None: return self.retry_using_http_NTLM_auth( 'www-authenticate', 'Authorization', r, auth_type, kwargs ) elif r.status_code == 407: # If we didn't have server auth, do proxy auth. proxy_authenticate = r.headers.get( 'proxy-authenticate', '' ).lower() auth_type = _auth_type_from_header(proxy_authenticate) if auth_type is not None: return self.retry_using_http_NTLM_auth( 'proxy-authenticate', 'Proxy-authorization', r, auth_type, kwargs ) return r
python
def response_hook(self, r, **kwargs): """The actual hook handler.""" if r.status_code == 401: # Handle server auth. www_authenticate = r.headers.get('www-authenticate', '').lower() auth_type = _auth_type_from_header(www_authenticate) if auth_type is not None: return self.retry_using_http_NTLM_auth( 'www-authenticate', 'Authorization', r, auth_type, kwargs ) elif r.status_code == 407: # If we didn't have server auth, do proxy auth. proxy_authenticate = r.headers.get( 'proxy-authenticate', '' ).lower() auth_type = _auth_type_from_header(proxy_authenticate) if auth_type is not None: return self.retry_using_http_NTLM_auth( 'proxy-authenticate', 'Proxy-authorization', r, auth_type, kwargs ) return r
[ "def", "response_hook", "(", "self", ",", "r", ",", "*", "*", "kwargs", ")", ":", "if", "r", ".", "status_code", "==", "401", ":", "# Handle server auth.", "www_authenticate", "=", "r", ".", "headers", ".", "get", "(", "'www-authenticate'", ",", "''", ")", ".", "lower", "(", ")", "auth_type", "=", "_auth_type_from_header", "(", "www_authenticate", ")", "if", "auth_type", "is", "not", "None", ":", "return", "self", ".", "retry_using_http_NTLM_auth", "(", "'www-authenticate'", ",", "'Authorization'", ",", "r", ",", "auth_type", ",", "kwargs", ")", "elif", "r", ".", "status_code", "==", "407", ":", "# If we didn't have server auth, do proxy auth.", "proxy_authenticate", "=", "r", ".", "headers", ".", "get", "(", "'proxy-authenticate'", ",", "''", ")", ".", "lower", "(", ")", "auth_type", "=", "_auth_type_from_header", "(", "proxy_authenticate", ")", "if", "auth_type", "is", "not", "None", ":", "return", "self", ".", "retry_using_http_NTLM_auth", "(", "'proxy-authenticate'", ",", "'Proxy-authorization'", ",", "r", ",", "auth_type", ",", "kwargs", ")", "return", "r" ]
The actual hook handler.
[ "The", "actual", "hook", "handler", "." ]
f71fee60aa64c17941114d4eae40aed670a77afd
https://github.com/requests/requests-ntlm/blob/f71fee60aa64c17941114d4eae40aed670a77afd/requests_ntlm/requests_ntlm.py#L138-L168
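Typical client-side usage that exercises the hook above: attach HttpNtlmAuth to a requests session and let the 401/407 handshake run transparently. The URL, domain, account name and password are placeholders.

# NTLM-authenticated request; the response_hook above retries the request
# when the server answers 401 (or a proxy answers 407). Credentials and URL
# are placeholders.
import requests

from requests_ntlm import HttpNtlmAuth

session = requests.Session()
session.auth = HttpNtlmAuth("DOMAIN\\alice", "s3cret")

response = session.get("https://intranet.example.com/protected")
print(response.status_code)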
8,141
esafak/mca
src/mca.py
dummy
def dummy(DF, cols=None): """Dummy code select columns of a DataFrame.""" dummies = (get_dummies(DF[col]) for col in (DF.columns if cols is None else cols)) return concat(dummies, axis=1, keys=DF.columns)
python
def dummy(DF, cols=None): """Dummy code select columns of a DataFrame.""" dummies = (get_dummies(DF[col]) for col in (DF.columns if cols is None else cols)) return concat(dummies, axis=1, keys=DF.columns)
[ "def", "dummy", "(", "DF", ",", "cols", "=", "None", ")", ":", "dummies", "=", "(", "get_dummies", "(", "DF", "[", "col", "]", ")", "for", "col", "in", "(", "DF", ".", "columns", "if", "cols", "is", "None", "else", "cols", ")", ")", "return", "concat", "(", "dummies", ",", "axis", "=", "1", ",", "keys", "=", "DF", ".", "columns", ")" ]
Dummy-code selected columns of a DataFrame.
[ "Dummy", "code", "select", "columns", "of", "a", "DataFrame", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L30-L34
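A small pandas example of dummy(): each categorical column is expanded into indicator columns keyed by the original column name. It assumes the module above is importable as mca.

# dummy() expands each column with pandas.get_dummies and concatenates the
# pieces under a two-level column index keyed by the source column.
import pandas as pd

import mca  # assumes the module shown above is installed as `mca`

df = pd.DataFrame({
    "color": ["red", "blue", "red"],
    "size": ["S", "M", "S"],
})

indicator = mca.dummy(df)
print(indicator.columns.tolist())
# [('color', 'blue'), ('color', 'red'), ('size', 'M'), ('size', 'S')]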
8,142
esafak/mca
src/mca.py
MCA.cos_r
def cos_r(self, N=None): # percent=0.9 """Return the squared cosines for each row.""" if not hasattr(self, 'F') or self.F.shape[1] < self.rank: self.fs_r(N=self.rank) # generate F self.dr = norm(self.F, axis=1)**2 # cheaper than diag(self.F.dot(self.F.T))? return apply_along_axis(lambda _: _/self.dr, 0, self.F[:, :N]**2)
python
def cos_r(self, N=None): # percent=0.9 """Return the squared cosines for each row.""" if not hasattr(self, 'F') or self.F.shape[1] < self.rank: self.fs_r(N=self.rank) # generate F self.dr = norm(self.F, axis=1)**2 # cheaper than diag(self.F.dot(self.F.T))? return apply_along_axis(lambda _: _/self.dr, 0, self.F[:, :N]**2)
[ "def", "cos_r", "(", "self", ",", "N", "=", "None", ")", ":", "# percent=0.9", "if", "not", "hasattr", "(", "self", ",", "'F'", ")", "or", "self", ".", "F", ".", "shape", "[", "1", "]", "<", "self", ".", "rank", ":", "self", ".", "fs_r", "(", "N", "=", "self", ".", "rank", ")", "# generate F", "self", ".", "dr", "=", "norm", "(", "self", ".", "F", ",", "axis", "=", "1", ")", "**", "2", "# cheaper than diag(self.F.dot(self.F.T))?", "return", "apply_along_axis", "(", "lambda", "_", ":", "_", "/", "self", ".", "dr", ",", "0", ",", "self", ".", "F", "[", ":", ",", ":", "N", "]", "**", "2", ")" ]
Return the squared cosines for each row.
[ "Return", "the", "squared", "cosines", "for", "each", "row", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L149-L157
8,143
esafak/mca
src/mca.py
MCA.cos_c
def cos_c(self, N=None): # percent=0.9, """Return the squared cosines for each column.""" if not hasattr(self, 'G') or self.G.shape[1] < self.rank: self.fs_c(N=self.rank) # generate self.dc = norm(self.G, axis=1)**2 # cheaper than diag(self.G.dot(self.G.T))? return apply_along_axis(lambda _: _/self.dc, 0, self.G[:, :N]**2)
python
def cos_c(self, N=None): # percent=0.9, """Return the squared cosines for each column.""" if not hasattr(self, 'G') or self.G.shape[1] < self.rank: self.fs_c(N=self.rank) # generate self.dc = norm(self.G, axis=1)**2 # cheaper than diag(self.G.dot(self.G.T))? return apply_along_axis(lambda _: _/self.dc, 0, self.G[:, :N]**2)
[ "def", "cos_c", "(", "self", ",", "N", "=", "None", ")", ":", "# percent=0.9,", "if", "not", "hasattr", "(", "self", ",", "'G'", ")", "or", "self", ".", "G", ".", "shape", "[", "1", "]", "<", "self", ".", "rank", ":", "self", ".", "fs_c", "(", "N", "=", "self", ".", "rank", ")", "# generate", "self", ".", "dc", "=", "norm", "(", "self", ".", "G", ",", "axis", "=", "1", ")", "**", "2", "# cheaper than diag(self.G.dot(self.G.T))?", "return", "apply_along_axis", "(", "lambda", "_", ":", "_", "/", "self", ".", "dc", ",", "0", ",", "self", ".", "G", "[", ":", ",", ":", "N", "]", "**", "2", ")" ]
Return the squared cosines for each column.
[ "Return", "the", "squared", "cosines", "for", "each", "column", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L159-L167
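A small NumPy illustration of the quantity cos_r / cos_c compute: the squared cosine of observation i on axis k is F[i, k]**2 divided by the squared norm of row i, so across all retained axes each row of squared cosines sums to 1. The random scores below are purely for demonstration.

import numpy as np

rng = np.random.default_rng(0)
F = rng.normal(size=(5, 3))           # pretend factor scores: 5 observations, 3 axes

row_sq_norm = (F ** 2).sum(axis=1)    # plays the role of self.dr / self.dc above
sq_cos = F ** 2 / row_sq_norm[:, None]

print(sq_cos.sum(axis=1))             # -> array of ones: cosines partition each row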
8,144
esafak/mca
src/mca.py
MCA.cont_r
def cont_r(self, percent=0.9, N=None): """Return the contribution of each row.""" if not hasattr(self, 'F'): self.fs_r(N=self.rank) # generate F return apply_along_axis(lambda _: _/self.L[:N], 1, apply_along_axis(lambda _: _*self.r, 0, self.F[:, :N]**2))
python
def cont_r(self, percent=0.9, N=None): """Return the contribution of each row.""" if not hasattr(self, 'F'): self.fs_r(N=self.rank) # generate F return apply_along_axis(lambda _: _/self.L[:N], 1, apply_along_axis(lambda _: _*self.r, 0, self.F[:, :N]**2))
[ "def", "cont_r", "(", "self", ",", "percent", "=", "0.9", ",", "N", "=", "None", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'F'", ")", ":", "self", ".", "fs_r", "(", "N", "=", "self", ".", "rank", ")", "# generate F", "return", "apply_along_axis", "(", "lambda", "_", ":", "_", "/", "self", ".", "L", "[", ":", "N", "]", ",", "1", ",", "apply_along_axis", "(", "lambda", "_", ":", "_", "*", "self", ".", "r", ",", "0", ",", "self", ".", "F", "[", ":", ",", ":", "N", "]", "**", "2", ")", ")" ]
Return the contribution of each row.
[ "Return", "the", "contribution", "of", "each", "row", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L169-L175
8,145
esafak/mca
src/mca.py
MCA.cont_c
def cont_c(self, percent=0.9, N=None): # bug? check axis number 0 vs 1 here """Return the contribution of each column.""" if not hasattr(self, 'G'): self.fs_c(N=self.rank) # generate G return apply_along_axis(lambda _: _/self.L[:N], 1, apply_along_axis(lambda _: _*self.c, 0, self.G[:, :N]**2))
python
def cont_c(self, percent=0.9, N=None): # bug? check axis number 0 vs 1 here """Return the contribution of each column.""" if not hasattr(self, 'G'): self.fs_c(N=self.rank) # generate G return apply_along_axis(lambda _: _/self.L[:N], 1, apply_along_axis(lambda _: _*self.c, 0, self.G[:, :N]**2))
[ "def", "cont_c", "(", "self", ",", "percent", "=", "0.9", ",", "N", "=", "None", ")", ":", "# bug? check axis number 0 vs 1 here", "if", "not", "hasattr", "(", "self", ",", "'G'", ")", ":", "self", ".", "fs_c", "(", "N", "=", "self", ".", "rank", ")", "# generate G", "return", "apply_along_axis", "(", "lambda", "_", ":", "_", "/", "self", ".", "L", "[", ":", "N", "]", ",", "1", ",", "apply_along_axis", "(", "lambda", "_", ":", "_", "*", "self", ".", "c", ",", "0", ",", "self", ".", "G", "[", ":", ",", ":", "N", "]", "**", "2", ")", ")" ]
Return the contribution of each column.
[ "Return", "the", "contribution", "of", "each", "column", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L177-L183
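Likewise, the contributions computed by cont_r / cont_c can be sketched directly: the contribution of row i to axis k is mass_i * F[i, k]**2 / eigenvalue_k. In correspondence analysis the eigenvalue of an axis equals the mass-weighted sum of squared scores on it, which is why each column of contributions sums to 1. The masses and scores below are synthetic and only show the bookkeeping, not the library's internals.

import numpy as np

rng = np.random.default_rng(1)
F = rng.normal(size=(4, 2))                   # toy factor scores
mass = np.full(4, 0.25)                       # row masses r (sum to 1)
eig = (mass[:, None] * F ** 2).sum(axis=0)    # eigenvalue per axis (the L in the code above)

contrib = mass[:, None] * F ** 2 / eig
print(contrib.sum(axis=0))                    # -> array of ones: contributions partition each axis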
8,146
esafak/mca
src/mca.py
MCA.fs_r_sup
def fs_r_sup(self, DF, N=None): """Find the supplementary row factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference. """ if not hasattr(self, 'G'): self.fs_c(N=self.rank) # generate G if N and (not isinstance(N, int) or N <= 0): raise ValueError("ncols should be a positive integer.") s = -sqrt(self.E) if self.cor else self.s N = min(N, self.rank) if N else self.rank S_inv = diagsvd(-1/s[:N], len(self.G.T), N) # S = diagsvd(s[:N], len(self.tau), N) return _mul(DF.div(DF.sum(axis=1), axis=0), self.G, S_inv)[:, :N]
python
def fs_r_sup(self, DF, N=None): """Find the supplementary row factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference. """ if not hasattr(self, 'G'): self.fs_c(N=self.rank) # generate G if N and (not isinstance(N, int) or N <= 0): raise ValueError("ncols should be a positive integer.") s = -sqrt(self.E) if self.cor else self.s N = min(N, self.rank) if N else self.rank S_inv = diagsvd(-1/s[:N], len(self.G.T), N) # S = diagsvd(s[:N], len(self.tau), N) return _mul(DF.div(DF.sum(axis=1), axis=0), self.G, S_inv)[:, :N]
[ "def", "fs_r_sup", "(", "self", ",", "DF", ",", "N", "=", "None", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'G'", ")", ":", "self", ".", "fs_c", "(", "N", "=", "self", ".", "rank", ")", "# generate G", "if", "N", "and", "(", "not", "isinstance", "(", "N", ",", "int", ")", "or", "N", "<=", "0", ")", ":", "raise", "ValueError", "(", "\"ncols should be a positive integer.\"", ")", "s", "=", "-", "sqrt", "(", "self", ".", "E", ")", "if", "self", ".", "cor", "else", "self", ".", "s", "N", "=", "min", "(", "N", ",", "self", ".", "rank", ")", "if", "N", "else", "self", ".", "rank", "S_inv", "=", "diagsvd", "(", "-", "1", "/", "s", "[", ":", "N", "]", ",", "len", "(", "self", ".", "G", ".", "T", ")", ",", "N", ")", "# S = diagsvd(s[:N], len(self.tau), N)", "return", "_mul", "(", "DF", ".", "div", "(", "DF", ".", "sum", "(", "axis", "=", "1", ")", ",", "axis", "=", "0", ")", ",", "self", ".", "G", ",", "S_inv", ")", "[", ":", ",", ":", "N", "]" ]
Find the supplementary row factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference.
[ "Find", "the", "supplementary", "row", "factor", "scores", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L199-L214
8,147
esafak/mca
src/mca.py
MCA.fs_c_sup
def fs_c_sup(self, DF, N=None): """Find the supplementary column factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference. """ if not hasattr(self, 'F'): self.fs_r(N=self.rank) # generate F if N and (not isinstance(N, int) or N <= 0): raise ValueError("ncols should be a positive integer.") s = -sqrt(self.E) if self.cor else self.s N = min(N, self.rank) if N else self.rank S_inv = diagsvd(-1/s[:N], len(self.F.T), N) # S = diagsvd(s[:N], len(self.tau), N) return _mul((DF/DF.sum()).T, self.F, S_inv)[:, :N]
python
def fs_c_sup(self, DF, N=None): """Find the supplementary column factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference. """ if not hasattr(self, 'F'): self.fs_r(N=self.rank) # generate F if N and (not isinstance(N, int) or N <= 0): raise ValueError("ncols should be a positive integer.") s = -sqrt(self.E) if self.cor else self.s N = min(N, self.rank) if N else self.rank S_inv = diagsvd(-1/s[:N], len(self.F.T), N) # S = diagsvd(s[:N], len(self.tau), N) return _mul((DF/DF.sum()).T, self.F, S_inv)[:, :N]
[ "def", "fs_c_sup", "(", "self", ",", "DF", ",", "N", "=", "None", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'F'", ")", ":", "self", ".", "fs_r", "(", "N", "=", "self", ".", "rank", ")", "# generate F", "if", "N", "and", "(", "not", "isinstance", "(", "N", ",", "int", ")", "or", "N", "<=", "0", ")", ":", "raise", "ValueError", "(", "\"ncols should be a positive integer.\"", ")", "s", "=", "-", "sqrt", "(", "self", ".", "E", ")", "if", "self", ".", "cor", "else", "self", ".", "s", "N", "=", "min", "(", "N", ",", "self", ".", "rank", ")", "if", "N", "else", "self", ".", "rank", "S_inv", "=", "diagsvd", "(", "-", "1", "/", "s", "[", ":", "N", "]", ",", "len", "(", "self", ".", "F", ".", "T", ")", ",", "N", ")", "# S = diagsvd(s[:N], len(self.tau), N)", "return", "_mul", "(", "(", "DF", "/", "DF", ".", "sum", "(", ")", ")", ".", "T", ",", "self", ".", "F", ",", "S_inv", ")", "[", ":", ",", ":", "N", "]" ]
Find the supplementary column factor scores. ncols: The number of singular vectors to retain. If both are passed, cols is given preference.
[ "Find", "the", "supplementary", "column", "factor", "scores", "." ]
f2b79ecbf37629902ccdbad2e1a556977c53d370
https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L216-L231
8,148
primetang/qrtools
src/qrtools.py
QR.data_recognise
def data_recognise(self, data=None): """Returns an unicode string indicating the data type of the data paramater""" data = data or self.data data_lower = data.lower() if data_lower.startswith(u"http://") or data_lower.startswith(u"https://"): return u'url' elif data_lower.startswith(u"mailto:"): return u'email' elif data_lower.startswith(u"matmsg:to:"): return u'emailmessage' elif data_lower.startswith(u"tel:"): return u'telephone' elif data_lower.startswith(u"smsto:"): return u'sms' elif data_lower.startswith(u"mmsto:"): return u'mms' elif data_lower.startswith(u"geo:"): return u'geo' elif data_lower.startswith(u"mebkm:title:"): return u'bookmark' elif data_lower.startswith(u"mecard:"): return u'phonebook' else: return u'text'
python
def data_recognise(self, data=None): """Returns an unicode string indicating the data type of the data paramater""" data = data or self.data data_lower = data.lower() if data_lower.startswith(u"http://") or data_lower.startswith(u"https://"): return u'url' elif data_lower.startswith(u"mailto:"): return u'email' elif data_lower.startswith(u"matmsg:to:"): return u'emailmessage' elif data_lower.startswith(u"tel:"): return u'telephone' elif data_lower.startswith(u"smsto:"): return u'sms' elif data_lower.startswith(u"mmsto:"): return u'mms' elif data_lower.startswith(u"geo:"): return u'geo' elif data_lower.startswith(u"mebkm:title:"): return u'bookmark' elif data_lower.startswith(u"mecard:"): return u'phonebook' else: return u'text'
[ "def", "data_recognise", "(", "self", ",", "data", "=", "None", ")", ":", "data", "=", "data", "or", "self", ".", "data", "data_lower", "=", "data", ".", "lower", "(", ")", "if", "data_lower", ".", "startswith", "(", "u\"http://\"", ")", "or", "data_lower", ".", "startswith", "(", "u\"https://\"", ")", ":", "return", "u'url'", "elif", "data_lower", ".", "startswith", "(", "u\"mailto:\"", ")", ":", "return", "u'email'", "elif", "data_lower", ".", "startswith", "(", "u\"matmsg:to:\"", ")", ":", "return", "u'emailmessage'", "elif", "data_lower", ".", "startswith", "(", "u\"tel:\"", ")", ":", "return", "u'telephone'", "elif", "data_lower", ".", "startswith", "(", "u\"smsto:\"", ")", ":", "return", "u'sms'", "elif", "data_lower", ".", "startswith", "(", "u\"mmsto:\"", ")", ":", "return", "u'mms'", "elif", "data_lower", ".", "startswith", "(", "u\"geo:\"", ")", ":", "return", "u'geo'", "elif", "data_lower", ".", "startswith", "(", "u\"mebkm:title:\"", ")", ":", "return", "u'bookmark'", "elif", "data_lower", ".", "startswith", "(", "u\"mecard:\"", ")", ":", "return", "u'phonebook'", "else", ":", "return", "u'text'" ]
Returns an unicode string indicating the data type of the data paramater
[ "Returns", "an", "unicode", "string", "indicating", "the", "data", "type", "of", "the", "data", "paramater" ]
3263c6136f54f0499b9945bfad593537d436c7a1
https://github.com/primetang/qrtools/blob/3263c6136f54f0499b9945bfad593537d436c7a1/src/qrtools.py#L84-L107
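The prefix dispatch in data_recognise can be written as a table-driven loop, which keeps the scheme-to-type mapping in one place; this is a sketch of the same idea, not qrtools' API.

PREFIX_TYPES = [
    (("http://", "https://"), "url"),
    (("mailto:",),            "email"),
    (("matmsg:to:",),         "emailmessage"),
    (("tel:",),               "telephone"),
    (("smsto:",),             "sms"),
    (("mmsto:",),             "mms"),
    (("geo:",),               "geo"),
    (("mebkm:title:",),       "bookmark"),
    (("mecard:",),            "phonebook"),
]

def recognise(data):
    """Return the QR payload type implied by its leading scheme, defaulting to 'text'."""
    lowered = data.lower()
    for prefixes, kind in PREFIX_TYPES:
        if lowered.startswith(prefixes):   # str.startswith accepts a tuple of prefixes
            return kind
    return "text"

print(recognise("TEL:+1-555-0100"))        # -> telephone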
8,149
primetang/qrtools
src/qrtools.py
QR.data_to_string
def data_to_string(self): """Returns a UTF8 string with the QR Code's data""" # FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode # correctly; but if we add it, mobile apps don't.- # Apparently is a zbar bug. if self.data_type == 'text': return BOM_UTF8 + self.__class__.data_encode[self.data_type](self.data).encode('utf-8') else: return self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
python
def data_to_string(self): """Returns a UTF8 string with the QR Code's data""" # FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode # correctly; but if we add it, mobile apps don't.- # Apparently is a zbar bug. if self.data_type == 'text': return BOM_UTF8 + self.__class__.data_encode[self.data_type](self.data).encode('utf-8') else: return self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
[ "def", "data_to_string", "(", "self", ")", ":", "# FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode", "# correctly; but if we add it, mobile apps don't.-", "# Apparently is a zbar bug.", "if", "self", ".", "data_type", "==", "'text'", ":", "return", "BOM_UTF8", "+", "self", ".", "__class__", ".", "data_encode", "[", "self", ".", "data_type", "]", "(", "self", ".", "data", ")", ".", "encode", "(", "'utf-8'", ")", "else", ":", "return", "self", ".", "__class__", ".", "data_encode", "[", "self", ".", "data_type", "]", "(", "self", ".", "data", ")", ".", "encode", "(", "'utf-8'", ")" ]
Returns a UTF8 string with the QR Code's data
[ "Returns", "a", "UTF8", "string", "with", "the", "QR", "Code", "s", "data" ]
3263c6136f54f0499b9945bfad593537d436c7a1
https://github.com/primetang/qrtools/blob/3263c6136f54f0499b9945bfad593537d436c7a1/src/qrtools.py#L125-L133
8,150
python-visualization/branca
branca/utilities.py
split_six
def split_six(series=None): """ Given a Pandas Series, get a domain of values from zero to the 90% quantile rounded to the nearest order-of-magnitude integer. For example, 2100 is rounded to 2000, 2790 to 3000. Parameters ---------- series: Pandas series, default None Returns ------- list """ if pd is None: raise ImportError('The Pandas package is required' ' for this functionality') if np is None: raise ImportError('The NumPy package is required' ' for this functionality') def base(x): if x > 0: base = pow(10, math.floor(math.log10(x))) return round(x/base)*base else: return 0 quants = [0, 50, 75, 85, 90] # Some weirdness in series quantiles a la 0.13. arr = series.values return [base(np.percentile(arr, x)) for x in quants]
python
def split_six(series=None): """ Given a Pandas Series, get a domain of values from zero to the 90% quantile rounded to the nearest order-of-magnitude integer. For example, 2100 is rounded to 2000, 2790 to 3000. Parameters ---------- series: Pandas series, default None Returns ------- list """ if pd is None: raise ImportError('The Pandas package is required' ' for this functionality') if np is None: raise ImportError('The NumPy package is required' ' for this functionality') def base(x): if x > 0: base = pow(10, math.floor(math.log10(x))) return round(x/base)*base else: return 0 quants = [0, 50, 75, 85, 90] # Some weirdness in series quantiles a la 0.13. arr = series.values return [base(np.percentile(arr, x)) for x in quants]
[ "def", "split_six", "(", "series", "=", "None", ")", ":", "if", "pd", "is", "None", ":", "raise", "ImportError", "(", "'The Pandas package is required'", "' for this functionality'", ")", "if", "np", "is", "None", ":", "raise", "ImportError", "(", "'The NumPy package is required'", "' for this functionality'", ")", "def", "base", "(", "x", ")", ":", "if", "x", ">", "0", ":", "base", "=", "pow", "(", "10", ",", "math", ".", "floor", "(", "math", ".", "log10", "(", "x", ")", ")", ")", "return", "round", "(", "x", "/", "base", ")", "*", "base", "else", ":", "return", "0", "quants", "=", "[", "0", ",", "50", ",", "75", ",", "85", ",", "90", "]", "# Some weirdness in series quantiles a la 0.13.", "arr", "=", "series", ".", "values", "return", "[", "base", "(", "np", ".", "percentile", "(", "arr", ",", "x", ")", ")", "for", "x", "in", "quants", "]" ]
Given a Pandas Series, get a domain of values from zero to the 90% quantile rounded to the nearest order-of-magnitude integer. For example, 2100 is rounded to 2000, 2790 to 3000. Parameters ---------- series: Pandas series, default None Returns ------- list
[ "Given", "a", "Pandas", "Series", "get", "a", "domain", "of", "values", "from", "zero", "to", "the", "90%", "quantile", "rounded", "to", "the", "nearest", "order", "-", "of", "-", "magnitude", "integer", ".", "For", "example", "2100", "is", "rounded", "to", "2000", "2790", "to", "3000", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/utilities.py#L183-L215
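A condensed sketch of what split_six does to a numeric series: take a handful of percentiles and round each to its order of magnitude (2100 -> 2000, 2790 -> 3000). The sample values are invented.

import math
import numpy as np

def round_to_magnitude(x):
    """Round x to the nearest multiple of its power of ten (non-positive values map to 0)."""
    if x <= 0:
        return 0
    base = 10 ** math.floor(math.log10(x))
    return round(x / base) * base

values = np.array([120, 480, 950, 2100, 2790, 8800])
quants = [0, 50, 75, 85, 90]
domain = [round_to_magnitude(np.percentile(values, q)) for q in quants]
print(domain)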
8,151
python-visualization/branca
branca/colormap.py
StepColormap.to_linear
def to_linear(self, index=None): """ Transforms the StepColormap into a LinearColormap. Parameters ---------- index : list of floats, default None The values corresponding to each color in the output colormap. It has to be sorted. If None, a regular grid between `vmin` and `vmax` is created. """ if index is None: n = len(self.index)-1 index = [self.index[i]*(1.-i/(n-1.))+self.index[i+1]*i/(n-1.) for i in range(n)] colors = [self.rgba_floats_tuple(x) for x in index] return LinearColormap(colors, index=index, vmin=self.vmin, vmax=self.vmax)
python
def to_linear(self, index=None): """ Transforms the StepColormap into a LinearColormap. Parameters ---------- index : list of floats, default None The values corresponding to each color in the output colormap. It has to be sorted. If None, a regular grid between `vmin` and `vmax` is created. """ if index is None: n = len(self.index)-1 index = [self.index[i]*(1.-i/(n-1.))+self.index[i+1]*i/(n-1.) for i in range(n)] colors = [self.rgba_floats_tuple(x) for x in index] return LinearColormap(colors, index=index, vmin=self.vmin, vmax=self.vmax)
[ "def", "to_linear", "(", "self", ",", "index", "=", "None", ")", ":", "if", "index", "is", "None", ":", "n", "=", "len", "(", "self", ".", "index", ")", "-", "1", "index", "=", "[", "self", ".", "index", "[", "i", "]", "*", "(", "1.", "-", "i", "/", "(", "n", "-", "1.", ")", ")", "+", "self", ".", "index", "[", "i", "+", "1", "]", "*", "i", "/", "(", "n", "-", "1.", ")", "for", "i", "in", "range", "(", "n", ")", "]", "colors", "=", "[", "self", ".", "rgba_floats_tuple", "(", "x", ")", "for", "x", "in", "index", "]", "return", "LinearColormap", "(", "colors", ",", "index", "=", "index", ",", "vmin", "=", "self", ".", "vmin", ",", "vmax", "=", "self", ".", "vmax", ")" ]
Transforms the StepColormap into a LinearColormap. Parameters ---------- index : list of floats, default None The values corresponding to each color in the output colormap. It has to be sorted. If None, a regular grid between `vmin` and `vmax` is created.
[ "Transforms", "the", "StepColormap", "into", "a", "LinearColormap", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/colormap.py#L390-L409
8,152
python-visualization/branca
branca/element.py
Element.add_to
def add_to(self, parent, name=None, index=None): """Add element to a parent.""" parent.add_child(self, name=name, index=index) return self
python
def add_to(self, parent, name=None, index=None): """Add element to a parent.""" parent.add_child(self, name=name, index=index) return self
[ "def", "add_to", "(", "self", ",", "parent", ",", "name", "=", "None", ",", "index", "=", "None", ")", ":", "parent", ".", "add_child", "(", "self", ",", "name", "=", "name", ",", "index", "=", "index", ")", "return", "self" ]
Add element to a parent.
[ "Add", "element", "to", "a", "parent", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L119-L122
8,153
python-visualization/branca
branca/element.py
Element.to_json
def to_json(self, depth=-1, **kwargs): """Returns a JSON representation of the object.""" return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs)
python
def to_json(self, depth=-1, **kwargs): """Returns a JSON representation of the object.""" return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs)
[ "def", "to_json", "(", "self", ",", "depth", "=", "-", "1", ",", "*", "*", "kwargs", ")", ":", "return", "json", ".", "dumps", "(", "self", ".", "to_dict", "(", "depth", "=", "depth", ",", "ordered", "=", "True", ")", ",", "*", "*", "kwargs", ")" ]
Returns a JSON representation of the object.
[ "Returns", "a", "JSON", "representation", "of", "the", "object", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L138-L140
8,154
python-visualization/branca
branca/element.py
Element.save
def save(self, outfile, close_file=True, **kwargs): """Saves an Element into a file. Parameters ---------- outfile : str or file object The file (or filename) where you want to output the html. close_file : bool, default True Whether the file has to be closed after write. """ if isinstance(outfile, text_type) or isinstance(outfile, binary_type): fid = open(outfile, 'wb') else: fid = outfile root = self.get_root() html = root.render(**kwargs) fid.write(html.encode('utf8')) if close_file: fid.close()
python
def save(self, outfile, close_file=True, **kwargs): """Saves an Element into a file. Parameters ---------- outfile : str or file object The file (or filename) where you want to output the html. close_file : bool, default True Whether the file has to be closed after write. """ if isinstance(outfile, text_type) or isinstance(outfile, binary_type): fid = open(outfile, 'wb') else: fid = outfile root = self.get_root() html = root.render(**kwargs) fid.write(html.encode('utf8')) if close_file: fid.close()
[ "def", "save", "(", "self", ",", "outfile", ",", "close_file", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "outfile", ",", "text_type", ")", "or", "isinstance", "(", "outfile", ",", "binary_type", ")", ":", "fid", "=", "open", "(", "outfile", ",", "'wb'", ")", "else", ":", "fid", "=", "outfile", "root", "=", "self", ".", "get_root", "(", ")", "html", "=", "root", ".", "render", "(", "*", "*", "kwargs", ")", "fid", ".", "write", "(", "html", ".", "encode", "(", "'utf8'", ")", ")", "if", "close_file", ":", "fid", ".", "close", "(", ")" ]
Saves an Element into a file. Parameters ---------- outfile : str or file object The file (or filename) where you want to output the html. close_file : bool, default True Whether the file has to be closed after write.
[ "Saves", "an", "Element", "into", "a", "file", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L153-L172
8,155
python-visualization/branca
branca/element.py
Link.get_code
def get_code(self): """Opens the link and returns the response's content.""" if self.code is None: self.code = urlopen(self.url).read() return self.code
python
def get_code(self): """Opens the link and returns the response's content.""" if self.code is None: self.code = urlopen(self.url).read() return self.code
[ "def", "get_code", "(", "self", ")", ":", "if", "self", ".", "code", "is", "None", ":", "self", ".", "code", "=", "urlopen", "(", "self", ".", "url", ")", ".", "read", "(", ")", "return", "self", ".", "code" ]
Opens the link and returns the response's content.
[ "Opens", "the", "link", "and", "returns", "the", "response", "s", "content", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L177-L181
8,156
python-visualization/branca
branca/element.py
Figure._repr_html_
def _repr_html_(self, **kwargs): """Displays the Figure in a Jupyter notebook. """ html = self.render(**kwargs) html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') # noqa if self.height is None: iframe = ( '<div style="width:{width};">' '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">' # noqa '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;' # noqa 'border:none !important;" ' 'allowfullscreen webkitallowfullscreen mozallowfullscreen>' '</iframe>' '</div></div>').format iframe = iframe(html=html, width=self.width, ratio=self.ratio) else: iframe = ('<iframe src="{html}" width="{width}" height="{height}"' 'style="border:none !important;" ' '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">' # noqa '</iframe>').format iframe = iframe(html=html, width=self.width, height=self.height) return iframe
python
def _repr_html_(self, **kwargs): """Displays the Figure in a Jupyter notebook. """ html = self.render(**kwargs) html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') # noqa if self.height is None: iframe = ( '<div style="width:{width};">' '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">' # noqa '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;' # noqa 'border:none !important;" ' 'allowfullscreen webkitallowfullscreen mozallowfullscreen>' '</iframe>' '</div></div>').format iframe = iframe(html=html, width=self.width, ratio=self.ratio) else: iframe = ('<iframe src="{html}" width="{width}" height="{height}"' 'style="border:none !important;" ' '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">' # noqa '</iframe>').format iframe = iframe(html=html, width=self.width, height=self.height) return iframe
[ "def", "_repr_html_", "(", "self", ",", "*", "*", "kwargs", ")", ":", "html", "=", "self", ".", "render", "(", "*", "*", "kwargs", ")", "html", "=", "\"data:text/html;charset=utf-8;base64,\"", "+", "base64", ".", "b64encode", "(", "html", ".", "encode", "(", "'utf8'", ")", ")", ".", "decode", "(", "'utf8'", ")", "# noqa", "if", "self", ".", "height", "is", "None", ":", "iframe", "=", "(", "'<div style=\"width:{width};\">'", "'<div style=\"position:relative;width:100%;height:0;padding-bottom:{ratio};\">'", "# noqa", "'<iframe src=\"{html}\" style=\"position:absolute;width:100%;height:100%;left:0;top:0;'", "# noqa", "'border:none !important;\" '", "'allowfullscreen webkitallowfullscreen mozallowfullscreen>'", "'</iframe>'", "'</div></div>'", ")", ".", "format", "iframe", "=", "iframe", "(", "html", "=", "html", ",", "width", "=", "self", ".", "width", ",", "ratio", "=", "self", ".", "ratio", ")", "else", ":", "iframe", "=", "(", "'<iframe src=\"{html}\" width=\"{width}\" height=\"{height}\"'", "'style=\"border:none !important;\" '", "'\"allowfullscreen\" \"webkitallowfullscreen\" \"mozallowfullscreen\">'", "# noqa", "'</iframe>'", ")", ".", "format", "iframe", "=", "iframe", "(", "html", "=", "html", ",", "width", "=", "self", ".", "width", ",", "height", "=", "self", ".", "height", ")", "return", "iframe" ]
Displays the Figure in a Jupyter notebook.
[ "Displays", "the", "Figure", "in", "a", "Jupyter", "notebook", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L324-L349
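The notebook embedding trick above boils down to a base64 data: URI wrapped in an iframe. A minimal standalone sketch, with made-up HTML and a fixed size:

import base64

html = "<h1>hello from an embedded document</h1>"
payload = base64.b64encode(html.encode("utf8")).decode("utf8")
src = "data:text/html;charset=utf-8;base64," + payload

iframe = ('<iframe src="{src}" width="100%" height="120" '
          'style="border:none !important;"></iframe>').format(src=src)
print(iframe)   # paste into any HTML page, or display in a notebook via IPython.display.HTML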
8,157
python-visualization/branca
branca/element.py
Figure.add_subplot
def add_subplot(self, x, y, n, margin=0.05): """Creates a div child subplot in a matplotlib.figure.add_subplot style. Parameters ---------- x : int The number of rows in the grid. y : int The number of columns in the grid. n : int The cell number in the grid, counted from 1 to x*y. Example: >>> fig.add_subplot(3,2,5) # Create a div in the 5th cell of a 3rows x 2columns grid(bottom-left corner). """ width = 1./y height = 1./x left = ((n-1) % y)*width top = ((n-1)//y)*height left = left+width*margin top = top+height*margin width = width*(1-2.*margin) height = height*(1-2.*margin) div = Div(position='absolute', width='{}%'.format(100.*width), height='{}%'.format(100.*height), left='{}%'.format(100.*left), top='{}%'.format(100.*top), ) self.add_child(div) return div
python
def add_subplot(self, x, y, n, margin=0.05): """Creates a div child subplot in a matplotlib.figure.add_subplot style. Parameters ---------- x : int The number of rows in the grid. y : int The number of columns in the grid. n : int The cell number in the grid, counted from 1 to x*y. Example: >>> fig.add_subplot(3,2,5) # Create a div in the 5th cell of a 3rows x 2columns grid(bottom-left corner). """ width = 1./y height = 1./x left = ((n-1) % y)*width top = ((n-1)//y)*height left = left+width*margin top = top+height*margin width = width*(1-2.*margin) height = height*(1-2.*margin) div = Div(position='absolute', width='{}%'.format(100.*width), height='{}%'.format(100.*height), left='{}%'.format(100.*left), top='{}%'.format(100.*top), ) self.add_child(div) return div
[ "def", "add_subplot", "(", "self", ",", "x", ",", "y", ",", "n", ",", "margin", "=", "0.05", ")", ":", "width", "=", "1.", "/", "y", "height", "=", "1.", "/", "x", "left", "=", "(", "(", "n", "-", "1", ")", "%", "y", ")", "*", "width", "top", "=", "(", "(", "n", "-", "1", ")", "//", "y", ")", "*", "height", "left", "=", "left", "+", "width", "*", "margin", "top", "=", "top", "+", "height", "*", "margin", "width", "=", "width", "*", "(", "1", "-", "2.", "*", "margin", ")", "height", "=", "height", "*", "(", "1", "-", "2.", "*", "margin", ")", "div", "=", "Div", "(", "position", "=", "'absolute'", ",", "width", "=", "'{}%'", ".", "format", "(", "100.", "*", "width", ")", ",", "height", "=", "'{}%'", ".", "format", "(", "100.", "*", "height", ")", ",", "left", "=", "'{}%'", ".", "format", "(", "100.", "*", "left", ")", ",", "top", "=", "'{}%'", ".", "format", "(", "100.", "*", "top", ")", ",", ")", "self", ".", "add_child", "(", "div", ")", "return", "div" ]
Creates a div child subplot in a matplotlib.figure.add_subplot style. Parameters ---------- x : int The number of rows in the grid. y : int The number of columns in the grid. n : int The cell number in the grid, counted from 1 to x*y. Example: >>> fig.add_subplot(3,2,5) # Create a div in the 5th cell of a 3rows x 2columns grid(bottom-left corner).
[ "Creates", "a", "div", "child", "subplot", "in", "a", "matplotlib", ".", "figure", ".", "add_subplot", "style", "." ]
4e89e88a5a7ff3586f0852249c2c125f72316da8
https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L351-L385
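The geometry in add_subplot is plain grid arithmetic: cell n (counted row-major from 1) in an x-by-y grid gets fractional left/top/width/height values, shrunk by a margin on every side. A standalone sketch of that calculation, reusing the same 5% default margin:

def cell_geometry(rows, cols, n, margin=0.05):
    """Return (left, top, width, height) as figure fractions for cell n (1-based, row-major)."""
    width, height = 1.0 / cols, 1.0 / rows
    left = ((n - 1) % cols) * width
    top = ((n - 1) // cols) * height
    # shrink the cell by the margin fraction of its own size on each side
    left += width * margin
    top += height * margin
    width *= 1 - 2 * margin
    height *= 1 - 2 * margin
    return left, top, width, height

print(cell_geometry(3, 2, 5))   # the bottom-left cell of a 3-rows x 2-columns grid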
8,158
rasbt/pyprind
pyprind/prog_class.py
Prog._elapsed
def _elapsed(self): """ Returns elapsed time at update. """ self.last_time = time.time() return self.last_time - self.start
python
def _elapsed(self): """ Returns elapsed time at update. """ self.last_time = time.time() return self.last_time - self.start
[ "def", "_elapsed", "(", "self", ")", ":", "self", ".", "last_time", "=", "time", ".", "time", "(", ")", "return", "self", ".", "last_time", "-", "self", ".", "start" ]
Returns elapsed time at update.
[ "Returns", "elapsed", "time", "at", "update", "." ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L120-L123
8,159
rasbt/pyprind
pyprind/prog_class.py
Prog._calc_eta
def _calc_eta(self): """ Calculates estimated time left until completion. """ elapsed = self._elapsed() if self.cnt == 0 or elapsed < 0.001: return None rate = float(self.cnt) / elapsed self.eta = (float(self.max_iter) - float(self.cnt)) / rate
python
def _calc_eta(self): """ Calculates estimated time left until completion. """ elapsed = self._elapsed() if self.cnt == 0 or elapsed < 0.001: return None rate = float(self.cnt) / elapsed self.eta = (float(self.max_iter) - float(self.cnt)) / rate
[ "def", "_calc_eta", "(", "self", ")", ":", "elapsed", "=", "self", ".", "_elapsed", "(", ")", "if", "self", ".", "cnt", "==", "0", "or", "elapsed", "<", "0.001", ":", "return", "None", "rate", "=", "float", "(", "self", ".", "cnt", ")", "/", "elapsed", "self", ".", "eta", "=", "(", "float", "(", "self", ".", "max_iter", ")", "-", "float", "(", "self", ".", "cnt", ")", ")", "/", "rate" ]
Calculates estimated time left until completion.
[ "Calculates", "estimated", "time", "left", "until", "completion", "." ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L125-L131
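The ETA arithmetic above is simply remaining work divided by the observed rate; a tiny self-contained sketch (loop count and sleep duration are arbitrary):

import time

max_iter = 50
start = time.time()

for done in range(1, max_iter + 1):
    time.sleep(0.01)                     # stand-in for real work
    elapsed = time.time() - start
    rate = done / elapsed                # iterations completed per second so far
    eta = (max_iter - done) / rate       # seconds left if that rate holds

print("finished, last ETA estimate was %.2fs" % eta)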
8,160
rasbt/pyprind
pyprind/prog_class.py
Prog._print_title
def _print_title(self): """ Prints tracking title at initialization. """ if self.title: self._stream_out('{}\n'.format(self.title)) self._stream_flush()
python
def _print_title(self): """ Prints tracking title at initialization. """ if self.title: self._stream_out('{}\n'.format(self.title)) self._stream_flush()
[ "def", "_print_title", "(", "self", ")", ":", "if", "self", ".", "title", ":", "self", ".", "_stream_out", "(", "'{}\\n'", ".", "format", "(", "self", ".", "title", ")", ")", "self", ".", "_stream_flush", "(", ")" ]
Prints tracking title at initialization.
[ "Prints", "tracking", "title", "at", "initialization", "." ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L162-L166
8,161
rasbt/pyprind
pyprind/prog_class.py
Prog._cache_eta
def _cache_eta(self): """ Prints the estimated time left.""" self._calc_eta() self._cached_output += ' | ETA: ' + self._get_time(self.eta)
python
def _cache_eta(self): """ Prints the estimated time left.""" self._calc_eta() self._cached_output += ' | ETA: ' + self._get_time(self.eta)
[ "def", "_cache_eta", "(", "self", ")", ":", "self", ".", "_calc_eta", "(", ")", "self", ".", "_cached_output", "+=", "' | ETA: '", "+", "self", ".", "_get_time", "(", "self", ".", "eta", ")" ]
Prints the estimated time left.
[ "Prints", "the", "estimated", "time", "left", "." ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L168-L171
8,162
rasbt/pyprind
pyprind/progbar.py
ProgBar._adjust_width
def _adjust_width(self): """Shrinks bar if number of iterations is less than the bar width""" if self.bar_width > self.max_iter: self.bar_width = int(self.max_iter)
python
def _adjust_width(self): """Shrinks bar if number of iterations is less than the bar width""" if self.bar_width > self.max_iter: self.bar_width = int(self.max_iter)
[ "def", "_adjust_width", "(", "self", ")", ":", "if", "self", ".", "bar_width", ">", "self", ".", "max_iter", ":", "self", ".", "bar_width", "=", "int", "(", "self", ".", "max_iter", ")" ]
Shrinks bar if number of iterations is less than the bar width
[ "Shrinks", "bar", "if", "number", "of", "iterations", "is", "less", "than", "the", "bar", "width" ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/progbar.py#L64-L67
8,163
rasbt/pyprind
pyprind/progpercent.py
ProgPercent._print
def _print(self, force_flush=False): """ Prints formatted percentage and tracked time to the screen.""" self._stream_flush() next_perc = self._calc_percent() if self.update_interval: do_update = time.time() - self.last_time >= self.update_interval elif force_flush: do_update = True else: do_update = next_perc > self.last_progress if do_update and self.active: self.last_progress = next_perc self._cache_percent_indicator(self.last_progress) if self.track: self._cached_output += ' Time elapsed: ' + \ self._get_time(self._elapsed()) self._cache_eta() if self.item_id: self._cache_item_id() self._stream_out('\r%s' % self._cached_output) self._stream_flush() self._cached_output = ''
python
def _print(self, force_flush=False): """ Prints formatted percentage and tracked time to the screen.""" self._stream_flush() next_perc = self._calc_percent() if self.update_interval: do_update = time.time() - self.last_time >= self.update_interval elif force_flush: do_update = True else: do_update = next_perc > self.last_progress if do_update and self.active: self.last_progress = next_perc self._cache_percent_indicator(self.last_progress) if self.track: self._cached_output += ' Time elapsed: ' + \ self._get_time(self._elapsed()) self._cache_eta() if self.item_id: self._cache_item_id() self._stream_out('\r%s' % self._cached_output) self._stream_flush() self._cached_output = ''
[ "def", "_print", "(", "self", ",", "force_flush", "=", "False", ")", ":", "self", ".", "_stream_flush", "(", ")", "next_perc", "=", "self", ".", "_calc_percent", "(", ")", "if", "self", ".", "update_interval", ":", "do_update", "=", "time", ".", "time", "(", ")", "-", "self", ".", "last_time", ">=", "self", ".", "update_interval", "elif", "force_flush", ":", "do_update", "=", "True", "else", ":", "do_update", "=", "next_perc", ">", "self", ".", "last_progress", "if", "do_update", "and", "self", ".", "active", ":", "self", ".", "last_progress", "=", "next_perc", "self", ".", "_cache_percent_indicator", "(", "self", ".", "last_progress", ")", "if", "self", ".", "track", ":", "self", ".", "_cached_output", "+=", "' Time elapsed: '", "+", "self", ".", "_get_time", "(", "self", ".", "_elapsed", "(", ")", ")", "self", ".", "_cache_eta", "(", ")", "if", "self", ".", "item_id", ":", "self", ".", "_cache_item_id", "(", ")", "self", ".", "_stream_out", "(", "'\\r%s'", "%", "self", ".", "_cached_output", ")", "self", ".", "_stream_flush", "(", ")", "self", ".", "_cached_output", "=", "''" ]
Prints formatted percentage and tracked time to the screen.
[ "Prints", "formatted", "percentage", "and", "tracked", "time", "to", "the", "screen", "." ]
57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a
https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/progpercent.py#L58-L80
8,164
bgreenlee/pygtail
pygtail/core.py
Pygtail.next
def next(self): """ Return the next line in the file, updating the offset. """ try: line = self._get_next_line() except StopIteration: # we've reached the end of the file; if we're processing the # rotated log file or the file has been renamed, we can continue with the actual file; otherwise # update the offset file if self._is_new_file(): self._rotated_logfile = None self._fh.close() self._offset = 0 # open up current logfile and continue try: line = self._get_next_line() except StopIteration: # oops, empty file self._update_offset_file() raise else: self._update_offset_file() raise if self.paranoid: self._update_offset_file() elif self.every_n and self.every_n <= self._since_update: self._update_offset_file() return line
python
def next(self): """ Return the next line in the file, updating the offset. """ try: line = self._get_next_line() except StopIteration: # we've reached the end of the file; if we're processing the # rotated log file or the file has been renamed, we can continue with the actual file; otherwise # update the offset file if self._is_new_file(): self._rotated_logfile = None self._fh.close() self._offset = 0 # open up current logfile and continue try: line = self._get_next_line() except StopIteration: # oops, empty file self._update_offset_file() raise else: self._update_offset_file() raise if self.paranoid: self._update_offset_file() elif self.every_n and self.every_n <= self._since_update: self._update_offset_file() return line
[ "def", "next", "(", "self", ")", ":", "try", ":", "line", "=", "self", ".", "_get_next_line", "(", ")", "except", "StopIteration", ":", "# we've reached the end of the file; if we're processing the", "# rotated log file or the file has been renamed, we can continue with the actual file; otherwise", "# update the offset file", "if", "self", ".", "_is_new_file", "(", ")", ":", "self", ".", "_rotated_logfile", "=", "None", "self", ".", "_fh", ".", "close", "(", ")", "self", ".", "_offset", "=", "0", "# open up current logfile and continue", "try", ":", "line", "=", "self", ".", "_get_next_line", "(", ")", "except", "StopIteration", ":", "# oops, empty file", "self", ".", "_update_offset_file", "(", ")", "raise", "else", ":", "self", ".", "_update_offset_file", "(", ")", "raise", "if", "self", ".", "paranoid", ":", "self", ".", "_update_offset_file", "(", ")", "elif", "self", ".", "every_n", "and", "self", ".", "every_n", "<=", "self", ".", "_since_update", ":", "self", ".", "_update_offset_file", "(", ")", "return", "line" ]
Return the next line in the file, updating the offset.
[ "Return", "the", "next", "line", "in", "the", "file", "updating", "the", "offset", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L101-L130
8,165
bgreenlee/pygtail
pygtail/core.py
Pygtail.read
def read(self): """ Read in all unread lines and return them as a single string. """ lines = self.readlines() if lines: try: return ''.join(lines) except TypeError: return ''.join(force_text(line) for line in lines) else: return None
python
def read(self): """ Read in all unread lines and return them as a single string. """ lines = self.readlines() if lines: try: return ''.join(lines) except TypeError: return ''.join(force_text(line) for line in lines) else: return None
[ "def", "read", "(", "self", ")", ":", "lines", "=", "self", ".", "readlines", "(", ")", "if", "lines", ":", "try", ":", "return", "''", ".", "join", "(", "lines", ")", "except", "TypeError", ":", "return", "''", ".", "join", "(", "force_text", "(", "line", ")", "for", "line", "in", "lines", ")", "else", ":", "return", "None" ]
Read in all unread lines and return them as a single string.
[ "Read", "in", "all", "unread", "lines", "and", "return", "them", "as", "a", "single", "string", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L142-L153
8,166
bgreenlee/pygtail
pygtail/core.py
Pygtail._filehandle
def _filehandle(self): """ Return a filehandle to the file being tailed, with the position set to the current offset. """ if not self._fh or self._is_closed(): filename = self._rotated_logfile or self.filename if filename.endswith('.gz'): self._fh = gzip.open(filename, 'r') else: self._fh = open(filename, "r", 1) if self.read_from_end and not exists(self._offset_file): self._fh.seek(0, os.SEEK_END) else: self._fh.seek(self._offset) return self._fh
python
def _filehandle(self): """ Return a filehandle to the file being tailed, with the position set to the current offset. """ if not self._fh or self._is_closed(): filename = self._rotated_logfile or self.filename if filename.endswith('.gz'): self._fh = gzip.open(filename, 'r') else: self._fh = open(filename, "r", 1) if self.read_from_end and not exists(self._offset_file): self._fh.seek(0, os.SEEK_END) else: self._fh.seek(self._offset) return self._fh
[ "def", "_filehandle", "(", "self", ")", ":", "if", "not", "self", ".", "_fh", "or", "self", ".", "_is_closed", "(", ")", ":", "filename", "=", "self", ".", "_rotated_logfile", "or", "self", ".", "filename", "if", "filename", ".", "endswith", "(", "'.gz'", ")", ":", "self", ".", "_fh", "=", "gzip", ".", "open", "(", "filename", ",", "'r'", ")", "else", ":", "self", ".", "_fh", "=", "open", "(", "filename", ",", "\"r\"", ",", "1", ")", "if", "self", ".", "read_from_end", "and", "not", "exists", "(", "self", ".", "_offset_file", ")", ":", "self", ".", "_fh", ".", "seek", "(", "0", ",", "os", ".", "SEEK_END", ")", "else", ":", "self", ".", "_fh", ".", "seek", "(", "self", ".", "_offset", ")", "return", "self", ".", "_fh" ]
Return a filehandle to the file being tailed, with the position set to the current offset.
[ "Return", "a", "filehandle", "to", "the", "file", "being", "tailed", "with", "the", "position", "set", "to", "the", "current", "offset", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L167-L183
8,167
bgreenlee/pygtail
pygtail/core.py
Pygtail._update_offset_file
def _update_offset_file(self): """ Update the offset file with the current inode and offset. """ if self.on_update: self.on_update() offset = self._filehandle().tell() inode = stat(self.filename).st_ino fh = open(self._offset_file, "w") fh.write("%s\n%s\n" % (inode, offset)) fh.close() self._since_update = 0
python
def _update_offset_file(self): """ Update the offset file with the current inode and offset. """ if self.on_update: self.on_update() offset = self._filehandle().tell() inode = stat(self.filename).st_ino fh = open(self._offset_file, "w") fh.write("%s\n%s\n" % (inode, offset)) fh.close() self._since_update = 0
[ "def", "_update_offset_file", "(", "self", ")", ":", "if", "self", ".", "on_update", ":", "self", ".", "on_update", "(", ")", "offset", "=", "self", ".", "_filehandle", "(", ")", ".", "tell", "(", ")", "inode", "=", "stat", "(", "self", ".", "filename", ")", ".", "st_ino", "fh", "=", "open", "(", "self", ".", "_offset_file", ",", "\"w\"", ")", "fh", ".", "write", "(", "\"%s\\n%s\\n\"", "%", "(", "inode", ",", "offset", ")", ")", "fh", ".", "close", "(", ")", "self", ".", "_since_update", "=", "0" ]
Update the offset file with the current inode and offset.
[ "Update", "the", "offset", "file", "with", "the", "current", "inode", "and", "offset", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L185-L196
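The offset file pygtail maintains is just two lines, the inode then the byte offset; a sketch of writing and reading that format (the file names in the commented usage are illustrative):

import os

def write_offset(offset_path, logfile, offset):
    """Record the log file's inode and the byte offset read so far."""
    inode = os.stat(logfile).st_ino
    with open(offset_path, "w") as fh:
        fh.write("%s\n%s\n" % (inode, offset))

def read_offset(offset_path):
    """Return (inode, offset), or (None, 0) if no checkpoint exists yet."""
    if not os.path.exists(offset_path):
        return None, 0
    with open(offset_path) as fh:
        inode, offset = (int(line.strip()) for line in fh)
    return inode, offset

# write_offset("app.log.offset", "app.log", 4096); print(read_offset("app.log.offset"))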
8,168
bgreenlee/pygtail
pygtail/core.py
Pygtail._determine_rotated_logfile
def _determine_rotated_logfile(self): """ We suspect the logfile has been rotated, so try to guess what the rotated filename is, and return it. """ rotated_filename = self._check_rotated_filename_candidates() if rotated_filename and exists(rotated_filename): if stat(rotated_filename).st_ino == self._offset_file_inode: return rotated_filename # if the inode hasn't changed, then the file shrank; this is expected with copytruncate, # otherwise print a warning if stat(self.filename).st_ino == self._offset_file_inode: if self.copytruncate: return rotated_filename else: sys.stderr.write( "[pygtail] [WARN] file size of %s shrank, and copytruncate support is " "disabled (expected at least %d bytes, was %d bytes).\n" % (self.filename, self._offset, stat(self.filename).st_size)) return None
python
def _determine_rotated_logfile(self): """ We suspect the logfile has been rotated, so try to guess what the rotated filename is, and return it. """ rotated_filename = self._check_rotated_filename_candidates() if rotated_filename and exists(rotated_filename): if stat(rotated_filename).st_ino == self._offset_file_inode: return rotated_filename # if the inode hasn't changed, then the file shrank; this is expected with copytruncate, # otherwise print a warning if stat(self.filename).st_ino == self._offset_file_inode: if self.copytruncate: return rotated_filename else: sys.stderr.write( "[pygtail] [WARN] file size of %s shrank, and copytruncate support is " "disabled (expected at least %d bytes, was %d bytes).\n" % (self.filename, self._offset, stat(self.filename).st_size)) return None
[ "def", "_determine_rotated_logfile", "(", "self", ")", ":", "rotated_filename", "=", "self", ".", "_check_rotated_filename_candidates", "(", ")", "if", "rotated_filename", "and", "exists", "(", "rotated_filename", ")", ":", "if", "stat", "(", "rotated_filename", ")", ".", "st_ino", "==", "self", ".", "_offset_file_inode", ":", "return", "rotated_filename", "# if the inode hasn't changed, then the file shrank; this is expected with copytruncate,", "# otherwise print a warning", "if", "stat", "(", "self", ".", "filename", ")", ".", "st_ino", "==", "self", ".", "_offset_file_inode", ":", "if", "self", ".", "copytruncate", ":", "return", "rotated_filename", "else", ":", "sys", ".", "stderr", ".", "write", "(", "\"[pygtail] [WARN] file size of %s shrank, and copytruncate support is \"", "\"disabled (expected at least %d bytes, was %d bytes).\\n\"", "%", "(", "self", ".", "filename", ",", "self", ".", "_offset", ",", "stat", "(", "self", ".", "filename", ")", ".", "st_size", ")", ")", "return", "None" ]
We suspect the logfile has been rotated, so try to guess what the rotated filename is, and return it.
[ "We", "suspect", "the", "logfile", "has", "been", "rotated", "so", "try", "to", "guess", "what", "the", "rotated", "filename", "is", "and", "return", "it", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L198-L219
8,169
bgreenlee/pygtail
pygtail/core.py
Pygtail._check_rotated_filename_candidates
def _check_rotated_filename_candidates(self): """ Check for various rotated logfile filename patterns and return the first match we find. """ # savelog(8) candidate = "%s.0" % self.filename if (exists(candidate) and exists("%s.1.gz" % self.filename) and (stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)): return candidate # logrotate(8) # with delaycompress candidate = "%s.1" % self.filename if exists(candidate): return candidate # without delaycompress candidate = "%s.1.gz" % self.filename if exists(candidate): return candidate rotated_filename_patterns = [ # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz", # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz", # for TimedRotatingFileHandler "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]", ] if self.log_patterns: rotated_filename_patterns.extend(self.log_patterns) # break into directory and filename components to support cases where the # the file is prepended as part of rotation file_dir, rel_filename = os.path.split(self.filename) for rotated_filename_pattern in rotated_filename_patterns: candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename)) if candidates: candidates.sort() return candidates[-1] # return most recent # no match return None
python
def _check_rotated_filename_candidates(self): """ Check for various rotated logfile filename patterns and return the first match we find. """ # savelog(8) candidate = "%s.0" % self.filename if (exists(candidate) and exists("%s.1.gz" % self.filename) and (stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)): return candidate # logrotate(8) # with delaycompress candidate = "%s.1" % self.filename if exists(candidate): return candidate # without delaycompress candidate = "%s.1.gz" % self.filename if exists(candidate): return candidate rotated_filename_patterns = [ # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz", # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress` "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz", # for TimedRotatingFileHandler "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]", ] if self.log_patterns: rotated_filename_patterns.extend(self.log_patterns) # break into directory and filename components to support cases where the # the file is prepended as part of rotation file_dir, rel_filename = os.path.split(self.filename) for rotated_filename_pattern in rotated_filename_patterns: candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename)) if candidates: candidates.sort() return candidates[-1] # return most recent # no match return None
[ "def", "_check_rotated_filename_candidates", "(", "self", ")", ":", "# savelog(8)", "candidate", "=", "\"%s.0\"", "%", "self", ".", "filename", "if", "(", "exists", "(", "candidate", ")", "and", "exists", "(", "\"%s.1.gz\"", "%", "self", ".", "filename", ")", "and", "(", "stat", "(", "candidate", ")", ".", "st_mtime", ">", "stat", "(", "\"%s.1.gz\"", "%", "self", ".", "filename", ")", ".", "st_mtime", ")", ")", ":", "return", "candidate", "# logrotate(8)", "# with delaycompress", "candidate", "=", "\"%s.1\"", "%", "self", ".", "filename", "if", "exists", "(", "candidate", ")", ":", "return", "candidate", "# without delaycompress", "candidate", "=", "\"%s.1.gz\"", "%", "self", ".", "filename", "if", "exists", "(", "candidate", ")", ":", "return", "candidate", "rotated_filename_patterns", "=", "[", "# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`", "\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\"", ",", "# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`", "\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\"", ",", "# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`", "\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\"", ",", "# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`", "\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\"", ",", "# for TimedRotatingFileHandler", "\"%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\"", ",", "]", "if", "self", ".", "log_patterns", ":", "rotated_filename_patterns", ".", "extend", "(", "self", ".", "log_patterns", ")", "# break into directory and filename components to support cases where the", "# the file is prepended as part of rotation", "file_dir", ",", "rel_filename", "=", "os", ".", "path", ".", "split", "(", "self", ".", "filename", ")", "for", "rotated_filename_pattern", "in", "rotated_filename_patterns", ":", "candidates", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "file_dir", ",", "rotated_filename_pattern", "%", "rel_filename", ")", ")", "if", "candidates", ":", "candidates", ".", "sort", "(", ")", "return", "candidates", "[", "-", "1", "]", "# return most recent", "# no match", "return", "None" ]
Check for various rotated logfile filename patterns and return the first match we find.
[ "Check", "for", "various", "rotated", "logfile", "filename", "patterns", "and", "return", "the", "first", "match", "we", "find", "." ]
d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890
https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L221-L268
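The candidate search above is mostly glob patterns; a trimmed sketch that looks for the most recent rotated sibling of a (hypothetical) app.log, using a subset of the same patterns:

import glob
import os

def latest_rotation(logfile):
    """Return the newest-looking rotated sibling of logfile, or None if nothing matches."""
    file_dir, name = os.path.split(logfile)
    patterns = [
        "%s.1" % name,                                            # logrotate with delaycompress
        "%s.1.gz" % name,                                         # logrotate, compressed
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" % name,     # dateext -%Y%m%d
        "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]" % name,   # TimedRotatingFileHandler
    ]
    for pattern in patterns:
        matches = sorted(glob.glob(os.path.join(file_dir, pattern)))
        if matches:
            return matches[-1]    # lexicographically newest candidate
    return None

print(latest_rotation("/var/log/app.log"))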
8,170
quiltdata/quilt
compiler/quilt/tools/data_transfer.py
create_s3_session
def create_s3_session(): """ Creates a session with automatic retries on 5xx errors. """ sess = requests.Session() retries = Retry(total=3, backoff_factor=.5, status_forcelist=[500, 502, 503, 504]) sess.mount('https://', HTTPAdapter(max_retries=retries)) return sess
python
def create_s3_session(): """ Creates a session with automatic retries on 5xx errors. """ sess = requests.Session() retries = Retry(total=3, backoff_factor=.5, status_forcelist=[500, 502, 503, 504]) sess.mount('https://', HTTPAdapter(max_retries=retries)) return sess
[ "def", "create_s3_session", "(", ")", ":", "sess", "=", "requests", ".", "Session", "(", ")", "retries", "=", "Retry", "(", "total", "=", "3", ",", "backoff_factor", "=", ".5", ",", "status_forcelist", "=", "[", "500", ",", "502", ",", "503", ",", "504", "]", ")", "sess", ".", "mount", "(", "'https://'", ",", "HTTPAdapter", "(", "max_retries", "=", "retries", ")", ")", "return", "sess" ]
Creates a session with automatic retries on 5xx errors.
[ "Creates", "a", "session", "with", "automatic", "retries", "on", "5xx", "errors", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/data_transfer.py#L48-L57
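A hedged usage sketch of the retry-session pattern in the record above, with the imports the snippet relies on spelled out (Retry comes from urllib3, HTTPAdapter from requests.adapters); the URL in the comment is hypothetical.

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
sess = requests.Session()
sess.mount('https://', HTTPAdapter(max_retries=retries))  # exponential backoff on 5xx

# resp = sess.get('https://example.com/object')  # hypothetical URL; retried up to 3 times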
8,171
quiltdata/quilt
compiler/quilt/imports.py
FakeLoader.load_module
def load_module(self, fullname): """ Returns an empty module. """ mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__file__ = self._path mod.__loader__ = self mod.__path__ = [] mod.__package__ = fullname return mod
python
def load_module(self, fullname): """ Returns an empty module. """ mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__file__ = self._path mod.__loader__ = self mod.__path__ = [] mod.__package__ = fullname return mod
[ "def", "load_module", "(", "self", ",", "fullname", ")", ":", "mod", "=", "sys", ".", "modules", ".", "setdefault", "(", "fullname", ",", "imp", ".", "new_module", "(", "fullname", ")", ")", "mod", ".", "__file__", "=", "self", ".", "_path", "mod", ".", "__loader__", "=", "self", "mod", ".", "__path__", "=", "[", "]", "mod", ".", "__package__", "=", "fullname", "return", "mod" ]
Returns an empty module.
[ "Returns", "an", "empty", "module", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L31-L40
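imp.new_module is deprecated on modern Python; a self-contained sketch of the same placeholder-module trick using types.ModuleType (an assumption for illustration, not quilt's code):

import sys
import types

def make_empty_module(fullname, path):
    # Register (or reuse) an empty module object under the dotted name.
    mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))
    mod.__file__ = path        # where the fake module claims to live
    mod.__path__ = []          # marks it as a package so submodule imports keep working
    mod.__package__ = fullname
    return mod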
8,172
quiltdata/quilt
compiler/quilt/imports.py
PackageLoader.load_module
def load_module(self, fullname): """ Returns an object that lazily looks up tables and groups. """ mod = sys.modules.get(fullname) if mod is not None: return mod # We're creating an object rather than a module. It's a hack, but it's approved by Guido: # https://mail.python.org/pipermail/python-ideas/2012-May/014969.html mod = _from_core_node(self._store, self._root) sys.modules[fullname] = mod return mod
python
def load_module(self, fullname): """ Returns an object that lazily looks up tables and groups. """ mod = sys.modules.get(fullname) if mod is not None: return mod # We're creating an object rather than a module. It's a hack, but it's approved by Guido: # https://mail.python.org/pipermail/python-ideas/2012-May/014969.html mod = _from_core_node(self._store, self._root) sys.modules[fullname] = mod return mod
[ "def", "load_module", "(", "self", ",", "fullname", ")", ":", "mod", "=", "sys", ".", "modules", ".", "get", "(", "fullname", ")", "if", "mod", "is", "not", "None", ":", "return", "mod", "# We're creating an object rather than a module. It's a hack, but it's approved by Guido:", "# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html", "mod", "=", "_from_core_node", "(", "self", ".", "_store", ",", "self", ".", "_root", ")", "sys", ".", "modules", "[", "fullname", "]", "=", "mod", "return", "mod" ]
Returns an object that lazily looks up tables and groups.
[ "Returns", "an", "object", "that", "lazily", "looks", "up", "tables", "and", "groups", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L81-L94
8,173
quiltdata/quilt
compiler/quilt/imports.py
ModuleFinder.find_module
def find_module(self, fullname, path=None): """ Looks up the table based on the module path. """ if not fullname.startswith(self._module_name + '.'): # Not a quilt submodule. return None submodule = fullname[len(self._module_name) + 1:] parts = submodule.split('.') # Pop the team prefix if this is a team import. if self._teams: team = parts.pop(0) else: team = None # Handle full paths first. if len(parts) == 2: store, pkg = PackageStore.find_package(team, parts[0], parts[1]) if pkg is not None: return PackageLoader(store, pkg) else: return None # Return fake loaders for partial paths. for store_dir in PackageStore.find_store_dirs(): store = PackageStore(store_dir) if len(parts) == 0: assert self._teams path = store.team_path(team) elif len(parts) == 1: path = store.user_path(team, parts[0]) if os.path.isdir(path): return FakeLoader(path) # Nothing is found. return None
python
def find_module(self, fullname, path=None): """ Looks up the table based on the module path. """ if not fullname.startswith(self._module_name + '.'): # Not a quilt submodule. return None submodule = fullname[len(self._module_name) + 1:] parts = submodule.split('.') # Pop the team prefix if this is a team import. if self._teams: team = parts.pop(0) else: team = None # Handle full paths first. if len(parts) == 2: store, pkg = PackageStore.find_package(team, parts[0], parts[1]) if pkg is not None: return PackageLoader(store, pkg) else: return None # Return fake loaders for partial paths. for store_dir in PackageStore.find_store_dirs(): store = PackageStore(store_dir) if len(parts) == 0: assert self._teams path = store.team_path(team) elif len(parts) == 1: path = store.user_path(team, parts[0]) if os.path.isdir(path): return FakeLoader(path) # Nothing is found. return None
[ "def", "find_module", "(", "self", ",", "fullname", ",", "path", "=", "None", ")", ":", "if", "not", "fullname", ".", "startswith", "(", "self", ".", "_module_name", "+", "'.'", ")", ":", "# Not a quilt submodule.", "return", "None", "submodule", "=", "fullname", "[", "len", "(", "self", ".", "_module_name", ")", "+", "1", ":", "]", "parts", "=", "submodule", ".", "split", "(", "'.'", ")", "# Pop the team prefix if this is a team import.", "if", "self", ".", "_teams", ":", "team", "=", "parts", ".", "pop", "(", "0", ")", "else", ":", "team", "=", "None", "# Handle full paths first.", "if", "len", "(", "parts", ")", "==", "2", ":", "store", ",", "pkg", "=", "PackageStore", ".", "find_package", "(", "team", ",", "parts", "[", "0", "]", ",", "parts", "[", "1", "]", ")", "if", "pkg", "is", "not", "None", ":", "return", "PackageLoader", "(", "store", ",", "pkg", ")", "else", ":", "return", "None", "# Return fake loaders for partial paths.", "for", "store_dir", "in", "PackageStore", ".", "find_store_dirs", "(", ")", ":", "store", "=", "PackageStore", "(", "store_dir", ")", "if", "len", "(", "parts", ")", "==", "0", ":", "assert", "self", ".", "_teams", "path", "=", "store", ".", "team_path", "(", "team", ")", "elif", "len", "(", "parts", ")", "==", "1", ":", "path", "=", "store", ".", "user_path", "(", "team", ",", "parts", "[", "0", "]", ")", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "FakeLoader", "(", "path", ")", "# Nothing is found.", "return", "None" ]
Looks up the table based on the module path.
[ "Looks", "up", "the", "table", "based", "on", "the", "module", "path", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L105-L144
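For context, a finder like this only takes effect once it is placed on sys.meta_path; a toy sketch with a hypothetical module prefix (quilt's real registration code lives elsewhere):

import sys

class NullFinder(object):
    """Toy finder: claims a hypothetical 'quilt_demo.' namespace, defers everything else."""
    def find_module(self, fullname, path=None):
        if not fullname.startswith('quilt_demo.'):
            return None  # not ours; the next finder on sys.meta_path gets a chance
        return None      # a real finder would return a loader object here

sys.meta_path.append(NullFinder())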
8,174
quiltdata/quilt
compiler/quilt/tools/build.py
_have_pyspark
def _have_pyspark(): """ Check if we're running Pyspark """ if _have_pyspark.flag is None: try: if PackageStore.get_parquet_lib() is ParquetLib.SPARK: import pyspark # pylint:disable=W0612 _have_pyspark.flag = True else: _have_pyspark.flag = False except ImportError: _have_pyspark.flag = False return _have_pyspark.flag
python
def _have_pyspark(): """ Check if we're running Pyspark """ if _have_pyspark.flag is None: try: if PackageStore.get_parquet_lib() is ParquetLib.SPARK: import pyspark # pylint:disable=W0612 _have_pyspark.flag = True else: _have_pyspark.flag = False except ImportError: _have_pyspark.flag = False return _have_pyspark.flag
[ "def", "_have_pyspark", "(", ")", ":", "if", "_have_pyspark", ".", "flag", "is", "None", ":", "try", ":", "if", "PackageStore", ".", "get_parquet_lib", "(", ")", "is", "ParquetLib", ".", "SPARK", ":", "import", "pyspark", "# pylint:disable=W0612", "_have_pyspark", ".", "flag", "=", "True", "else", ":", "_have_pyspark", ".", "flag", "=", "False", "except", "ImportError", ":", "_have_pyspark", ".", "flag", "=", "False", "return", "_have_pyspark", ".", "flag" ]
Check if we're running Pyspark
[ "Check", "if", "we", "re", "running", "Pyspark" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L37-L50
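The flag attribute is initialized outside the function in the original module; a self-contained sketch of the same memoize-on-a-function-attribute pattern, with numpy standing in as the optional import:

def have_numpy():
    if have_numpy.flag is None:
        try:
            import numpy  # noqa: F401 -- only probing availability
            have_numpy.flag = True
        except ImportError:
            have_numpy.flag = False
    return have_numpy.flag

have_numpy.flag = None  # cached result, computed on first call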
8,175
quiltdata/quilt
compiler/quilt/tools/build.py
_path_hash
def _path_hash(path, transform, kwargs): """ Generate a hash of source file path + transform + args """ sortedargs = ["%s:%r:%s" % (key, value, type(value)) for key, value in sorted(iteritems(kwargs))] srcinfo = "{path}:{transform}:{{{kwargs}}}".format(path=os.path.abspath(path), transform=transform, kwargs=",".join(sortedargs)) return digest_string(srcinfo)
python
def _path_hash(path, transform, kwargs): """ Generate a hash of source file path + transform + args """ sortedargs = ["%s:%r:%s" % (key, value, type(value)) for key, value in sorted(iteritems(kwargs))] srcinfo = "{path}:{transform}:{{{kwargs}}}".format(path=os.path.abspath(path), transform=transform, kwargs=",".join(sortedargs)) return digest_string(srcinfo)
[ "def", "_path_hash", "(", "path", ",", "transform", ",", "kwargs", ")", ":", "sortedargs", "=", "[", "\"%s:%r:%s\"", "%", "(", "key", ",", "value", ",", "type", "(", "value", ")", ")", "for", "key", ",", "value", "in", "sorted", "(", "iteritems", "(", "kwargs", ")", ")", "]", "srcinfo", "=", "\"{path}:{transform}:{{{kwargs}}}\"", ".", "format", "(", "path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", ",", "transform", "=", "transform", ",", "kwargs", "=", "\",\"", ".", "join", "(", "sortedargs", ")", ")", "return", "digest_string", "(", "srcinfo", ")" ]
Generate a hash of source file path + transform + args
[ "Generate", "a", "hash", "of", "source", "file", "path", "+", "transform", "+", "args" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L53-L62
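digest_string is a quilt-internal helper; a sketch of the same cache-key construction under the assumption that it is roughly a SHA-256 over the string:

import hashlib
import os

def path_hash(path, transform, kwargs):
    # Sort kwargs so the key is stable regardless of argument order.
    sortedargs = ["%s:%r:%s" % (k, v, type(v)) for k, v in sorted(kwargs.items())]
    srcinfo = "{path}:{transform}:{{{kwargs}}}".format(
        path=os.path.abspath(path), transform=transform, kwargs=",".join(sortedargs))
    return hashlib.sha256(srcinfo.encode()).hexdigest()

# path_hash('data.csv', 'csv', {'sep': ','}) changes whenever the path,
# transform, or keyword arguments change, so it works as a build-cache key.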
8,176
quiltdata/quilt
compiler/quilt/tools/build.py
_gen_glob_data
def _gen_glob_data(dir, pattern, child_table): """Generates node data by globbing a directory for a pattern""" dir = pathlib.Path(dir) matched = False used_names = set() # Used by to_nodename to prevent duplicate names # sorted so that renames (if any) are consistently ordered for filepath in sorted(dir.glob(pattern)): if filepath.is_dir(): continue else: matched = True # create node info node_table = {} if child_table is None else child_table.copy() filepath = filepath.relative_to(dir) node_table[RESERVED['file']] = str(filepath) node_name = to_nodename(filepath.stem, invalid=used_names) used_names.add(node_name) print("Matched with {!r}: {!r} from {!r}".format(pattern, node_name, str(filepath))) yield node_name, node_table if not matched: print("Warning: {!r} matched no files.".format(pattern)) return
python
def _gen_glob_data(dir, pattern, child_table): """Generates node data by globbing a directory for a pattern""" dir = pathlib.Path(dir) matched = False used_names = set() # Used by to_nodename to prevent duplicate names # sorted so that renames (if any) are consistently ordered for filepath in sorted(dir.glob(pattern)): if filepath.is_dir(): continue else: matched = True # create node info node_table = {} if child_table is None else child_table.copy() filepath = filepath.relative_to(dir) node_table[RESERVED['file']] = str(filepath) node_name = to_nodename(filepath.stem, invalid=used_names) used_names.add(node_name) print("Matched with {!r}: {!r} from {!r}".format(pattern, node_name, str(filepath))) yield node_name, node_table if not matched: print("Warning: {!r} matched no files.".format(pattern)) return
[ "def", "_gen_glob_data", "(", "dir", ",", "pattern", ",", "child_table", ")", ":", "dir", "=", "pathlib", ".", "Path", "(", "dir", ")", "matched", "=", "False", "used_names", "=", "set", "(", ")", "# Used by to_nodename to prevent duplicate names", "# sorted so that renames (if any) are consistently ordered", "for", "filepath", "in", "sorted", "(", "dir", ".", "glob", "(", "pattern", ")", ")", ":", "if", "filepath", ".", "is_dir", "(", ")", ":", "continue", "else", ":", "matched", "=", "True", "# create node info", "node_table", "=", "{", "}", "if", "child_table", "is", "None", "else", "child_table", ".", "copy", "(", ")", "filepath", "=", "filepath", ".", "relative_to", "(", "dir", ")", "node_table", "[", "RESERVED", "[", "'file'", "]", "]", "=", "str", "(", "filepath", ")", "node_name", "=", "to_nodename", "(", "filepath", ".", "stem", ",", "invalid", "=", "used_names", ")", "used_names", ".", "add", "(", "node_name", ")", "print", "(", "\"Matched with {!r}: {!r} from {!r}\"", ".", "format", "(", "pattern", ",", "node_name", ",", "str", "(", "filepath", ")", ")", ")", "yield", "node_name", ",", "node_table", "if", "not", "matched", ":", "print", "(", "\"Warning: {!r} matched no files.\"", ".", "format", "(", "pattern", ")", ")", "return" ]
Generates node data by globbing a directory for a pattern
[ "Generates", "node", "data", "by", "globbing", "a", "directory", "for", "a", "pattern" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L95-L119
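to_nodename and RESERVED are quilt internals; a simplified sketch of the globbing itself — stable ordering, directories skipped, paths made relative (name de-duplication omitted):

import pathlib

def glob_nodes(directory, pattern):
    directory = pathlib.Path(directory)
    for filepath in sorted(directory.glob(pattern)):  # sorted => deterministic order
        if filepath.is_dir():
            continue
        rel = filepath.relative_to(directory)
        yield rel.stem, str(rel)   # e.g. ('sales_2020', 'csv/sales_2020.csv')

# list(glob_nodes('data', '**/*.csv'))  # hypothetical directory layout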
8,177
quiltdata/quilt
compiler/quilt/tools/build.py
_remove_keywords
def _remove_keywords(d): """ copy the dict, filter_keywords Parameters ---------- d : dict """ return { k:v for k, v in iteritems(d) if k not in RESERVED }
python
def _remove_keywords(d): """ copy the dict, filter_keywords Parameters ---------- d : dict """ return { k:v for k, v in iteritems(d) if k not in RESERVED }
[ "def", "_remove_keywords", "(", "d", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "iteritems", "(", "d", ")", "if", "k", "not", "in", "RESERVED", "}" ]
Copy the dict, filtering out reserved keywords. Parameters ---------- d : dict
[ "copy", "the", "dict", "filter_keywords" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L372-L380
8,178
quiltdata/quilt
compiler/quilt/tools/build.py
build_package
def build_package(team, username, package, subpath, yaml_path, checks_path=None, dry_run=False, env='default'): """ Builds a package from a given Yaml file and installs it locally. Returns the name of the package. """ def find(key, value): """ find matching nodes recursively; only descend iterables that aren't strings """ if isinstance(value, Iterable) and not isinstance(value, string_types): for k, v in iteritems(value): if k == key: yield v elif isinstance(v, dict): for result in find(key, v): yield result elif isinstance(v, list): for item in v: for result in find(key, item): yield result build_data = load_yaml(yaml_path) # default to 'checks.yml' if build.yml contents: contains checks, but # there's no inlined checks: defined by build.yml if (checks_path is None and list(find('checks', build_data['contents'])) and 'checks' not in build_data): checks_path = 'checks.yml' checks_contents = load_yaml(checks_path, optional=True) elif checks_path is not None: checks_contents = load_yaml(checks_path) else: checks_contents = None build_package_from_contents(team, username, package, subpath, os.path.dirname(yaml_path), build_data, checks_contents=checks_contents, dry_run=dry_run, env=env)
python
def build_package(team, username, package, subpath, yaml_path, checks_path=None, dry_run=False, env='default'): """ Builds a package from a given Yaml file and installs it locally. Returns the name of the package. """ def find(key, value): """ find matching nodes recursively; only descend iterables that aren't strings """ if isinstance(value, Iterable) and not isinstance(value, string_types): for k, v in iteritems(value): if k == key: yield v elif isinstance(v, dict): for result in find(key, v): yield result elif isinstance(v, list): for item in v: for result in find(key, item): yield result build_data = load_yaml(yaml_path) # default to 'checks.yml' if build.yml contents: contains checks, but # there's no inlined checks: defined by build.yml if (checks_path is None and list(find('checks', build_data['contents'])) and 'checks' not in build_data): checks_path = 'checks.yml' checks_contents = load_yaml(checks_path, optional=True) elif checks_path is not None: checks_contents = load_yaml(checks_path) else: checks_contents = None build_package_from_contents(team, username, package, subpath, os.path.dirname(yaml_path), build_data, checks_contents=checks_contents, dry_run=dry_run, env=env)
[ "def", "build_package", "(", "team", ",", "username", ",", "package", ",", "subpath", ",", "yaml_path", ",", "checks_path", "=", "None", ",", "dry_run", "=", "False", ",", "env", "=", "'default'", ")", ":", "def", "find", "(", "key", ",", "value", ")", ":", "\"\"\"\n find matching nodes recursively;\n only descend iterables that aren't strings\n \"\"\"", "if", "isinstance", "(", "value", ",", "Iterable", ")", "and", "not", "isinstance", "(", "value", ",", "string_types", ")", ":", "for", "k", ",", "v", "in", "iteritems", "(", "value", ")", ":", "if", "k", "==", "key", ":", "yield", "v", "elif", "isinstance", "(", "v", ",", "dict", ")", ":", "for", "result", "in", "find", "(", "key", ",", "v", ")", ":", "yield", "result", "elif", "isinstance", "(", "v", ",", "list", ")", ":", "for", "item", "in", "v", ":", "for", "result", "in", "find", "(", "key", ",", "item", ")", ":", "yield", "result", "build_data", "=", "load_yaml", "(", "yaml_path", ")", "# default to 'checks.yml' if build.yml contents: contains checks, but", "# there's no inlined checks: defined by build.yml", "if", "(", "checks_path", "is", "None", "and", "list", "(", "find", "(", "'checks'", ",", "build_data", "[", "'contents'", "]", ")", ")", "and", "'checks'", "not", "in", "build_data", ")", ":", "checks_path", "=", "'checks.yml'", "checks_contents", "=", "load_yaml", "(", "checks_path", ",", "optional", "=", "True", ")", "elif", "checks_path", "is", "not", "None", ":", "checks_contents", "=", "load_yaml", "(", "checks_path", ")", "else", ":", "checks_contents", "=", "None", "build_package_from_contents", "(", "team", ",", "username", ",", "package", ",", "subpath", ",", "os", ".", "path", ".", "dirname", "(", "yaml_path", ")", ",", "build_data", ",", "checks_contents", "=", "checks_contents", ",", "dry_run", "=", "dry_run", ",", "env", "=", "env", ")" ]
Builds a package from a given Yaml file and installs it locally. Returns the name of the package.
[ "Builds", "a", "package", "from", "a", "given", "Yaml", "file", "and", "installs", "it", "locally", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L454-L490
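The inner find generator is the interesting part; a standalone sketch of the same recursive key search over nested dicts and lists (strings are never descended into, since only dicts and lists are recursed):

def find_key(key, value):
    if isinstance(value, dict):
        for k, v in value.items():
            if k == key:
                yield v
            elif isinstance(v, (dict, list)):
                yield from find_key(key, v)
    elif isinstance(value, list):
        for item in value:
            yield from find_key(key, item)

# list(find_key('checks', {'contents': {'a': {'checks': ['not_null']}}})) == [['not_null']]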
8,179
quiltdata/quilt
registry/quilt_server/mail.py
send_comment_email
def send_comment_email(email, package_owner, package_name, commenter): """Send email to owner of package regarding new comment""" link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format( CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name) subject = "New comment on {package_owner}/{package_name}".format( package_owner=package_owner, package_name=package_name) html = render_template('comment_email.html', commenter=commenter, link=link) body = render_template('comment_email.txt', commenter=commenter, link=link) send_email(recipients=[email], sender=DEFAULT_SENDER, subject=subject, html=html, body=body)
python
def send_comment_email(email, package_owner, package_name, commenter): """Send email to owner of package regarding new comment""" link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format( CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name) subject = "New comment on {package_owner}/{package_name}".format( package_owner=package_owner, package_name=package_name) html = render_template('comment_email.html', commenter=commenter, link=link) body = render_template('comment_email.txt', commenter=commenter, link=link) send_email(recipients=[email], sender=DEFAULT_SENDER, subject=subject, html=html, body=body)
[ "def", "send_comment_email", "(", "email", ",", "package_owner", ",", "package_name", ",", "commenter", ")", ":", "link", "=", "'{CATALOG_URL}/package/{owner}/{pkg}/comments'", ".", "format", "(", "CATALOG_URL", "=", "CATALOG_URL", ",", "owner", "=", "package_owner", ",", "pkg", "=", "package_name", ")", "subject", "=", "\"New comment on {package_owner}/{package_name}\"", ".", "format", "(", "package_owner", "=", "package_owner", ",", "package_name", "=", "package_name", ")", "html", "=", "render_template", "(", "'comment_email.html'", ",", "commenter", "=", "commenter", ",", "link", "=", "link", ")", "body", "=", "render_template", "(", "'comment_email.txt'", ",", "commenter", "=", "commenter", ",", "link", "=", "link", ")", "send_email", "(", "recipients", "=", "[", "email", "]", ",", "sender", "=", "DEFAULT_SENDER", ",", "subject", "=", "subject", ",", "html", "=", "html", ",", "body", "=", "body", ")" ]
Send email to owner of package regarding new comment
[ "Send", "email", "to", "owner", "of", "package", "regarding", "new", "comment" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/mail.py#L75-L84
8,180
quiltdata/quilt
compiler/quilt/tools/core.py
hash_contents
def hash_contents(contents): """ Creates a hash of key names and hashes in a package dictionary. "contents" must be a GroupNode. """ assert isinstance(contents, GroupNode) result = hashlib.sha256() def _hash_int(value): result.update(struct.pack(">L", value)) def _hash_str(string): assert isinstance(string, string_types) _hash_int(len(string)) result.update(string.encode()) def _hash_object(obj): _hash_str(obj.json_type) if isinstance(obj, (TableNode, FileNode)): hashes = obj.hashes _hash_int(len(hashes)) for hval in hashes: _hash_str(hval) elif isinstance(obj, GroupNode): children = obj.children _hash_int(len(children)) for key, child in sorted(iteritems(children)): _hash_str(key) _hash_object(child) else: assert False, "Unexpected object: %r" % obj # Backward compatibility: only hash metadata_hash if it's present. if obj.metadata_hash is not None: _hash_str(obj.metadata_hash) _hash_object(contents) return result.hexdigest()
python
def hash_contents(contents): """ Creates a hash of key names and hashes in a package dictionary. "contents" must be a GroupNode. """ assert isinstance(contents, GroupNode) result = hashlib.sha256() def _hash_int(value): result.update(struct.pack(">L", value)) def _hash_str(string): assert isinstance(string, string_types) _hash_int(len(string)) result.update(string.encode()) def _hash_object(obj): _hash_str(obj.json_type) if isinstance(obj, (TableNode, FileNode)): hashes = obj.hashes _hash_int(len(hashes)) for hval in hashes: _hash_str(hval) elif isinstance(obj, GroupNode): children = obj.children _hash_int(len(children)) for key, child in sorted(iteritems(children)): _hash_str(key) _hash_object(child) else: assert False, "Unexpected object: %r" % obj # Backward compatibility: only hash metadata_hash if it's present. if obj.metadata_hash is not None: _hash_str(obj.metadata_hash) _hash_object(contents) return result.hexdigest()
[ "def", "hash_contents", "(", "contents", ")", ":", "assert", "isinstance", "(", "contents", ",", "GroupNode", ")", "result", "=", "hashlib", ".", "sha256", "(", ")", "def", "_hash_int", "(", "value", ")", ":", "result", ".", "update", "(", "struct", ".", "pack", "(", "\">L\"", ",", "value", ")", ")", "def", "_hash_str", "(", "string", ")", ":", "assert", "isinstance", "(", "string", ",", "string_types", ")", "_hash_int", "(", "len", "(", "string", ")", ")", "result", ".", "update", "(", "string", ".", "encode", "(", ")", ")", "def", "_hash_object", "(", "obj", ")", ":", "_hash_str", "(", "obj", ".", "json_type", ")", "if", "isinstance", "(", "obj", ",", "(", "TableNode", ",", "FileNode", ")", ")", ":", "hashes", "=", "obj", ".", "hashes", "_hash_int", "(", "len", "(", "hashes", ")", ")", "for", "hval", "in", "hashes", ":", "_hash_str", "(", "hval", ")", "elif", "isinstance", "(", "obj", ",", "GroupNode", ")", ":", "children", "=", "obj", ".", "children", "_hash_int", "(", "len", "(", "children", ")", ")", "for", "key", ",", "child", "in", "sorted", "(", "iteritems", "(", "children", ")", ")", ":", "_hash_str", "(", "key", ")", "_hash_object", "(", "child", ")", "else", ":", "assert", "False", ",", "\"Unexpected object: %r\"", "%", "obj", "# Backward compatibility: only hash metadata_hash if it's present.", "if", "obj", ".", "metadata_hash", "is", "not", "None", ":", "_hash_str", "(", "obj", ".", "metadata_hash", ")", "_hash_object", "(", "contents", ")", "return", "result", ".", "hexdigest", "(", ")" ]
Creates a hash of key names and hashes in a package dictionary. "contents" must be a GroupNode.
[ "Creates", "a", "hash", "of", "key", "names", "and", "hashes", "in", "a", "package", "dictionary", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/core.py#L144-L184
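The length prefix written by _hash_int before every string is what keeps the digest unambiguous; a minimal illustration with the same building blocks (hashlib + struct):

import hashlib
import struct

def digest(strings):
    h = hashlib.sha256()
    for s in strings:
        h.update(struct.pack(">L", len(s)))  # 4-byte big-endian length prefix
        h.update(s.encode())
    return h.hexdigest()

# Without the prefix, ("ab", "c") and ("a", "bc") would feed identical bytes.
assert digest(["ab", "c"]) != digest(["a", "bc"])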
8,181
quiltdata/quilt
compiler/quilt/tools/core.py
find_object_hashes
def find_object_hashes(root, meta_only=False): """ Iterator that returns hashes of all of the file and table nodes. :param root: starting node """ stack = [root] while stack: obj = stack.pop() if not meta_only and isinstance(obj, (TableNode, FileNode)): for objhash in obj.hashes: yield objhash stack.extend(itervalues(obj.get_children())) if obj.metadata_hash is not None: yield obj.metadata_hash
python
def find_object_hashes(root, meta_only=False): """ Iterator that returns hashes of all of the file and table nodes. :param root: starting node """ stack = [root] while stack: obj = stack.pop() if not meta_only and isinstance(obj, (TableNode, FileNode)): for objhash in obj.hashes: yield objhash stack.extend(itervalues(obj.get_children())) if obj.metadata_hash is not None: yield obj.metadata_hash
[ "def", "find_object_hashes", "(", "root", ",", "meta_only", "=", "False", ")", ":", "stack", "=", "[", "root", "]", "while", "stack", ":", "obj", "=", "stack", ".", "pop", "(", ")", "if", "not", "meta_only", "and", "isinstance", "(", "obj", ",", "(", "TableNode", ",", "FileNode", ")", ")", ":", "for", "objhash", "in", "obj", ".", "hashes", ":", "yield", "objhash", "stack", ".", "extend", "(", "itervalues", "(", "obj", ".", "get_children", "(", ")", ")", ")", "if", "obj", ".", "metadata_hash", "is", "not", "None", ":", "yield", "obj", ".", "metadata_hash" ]
Iterator that returns hashes of all of the file and table nodes. :param root: starting node
[ "Iterator", "that", "returns", "hashes", "of", "all", "of", "the", "file", "and", "table", "nodes", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/core.py#L186-L200
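A generic sketch of the same stack-based traversal, with plain dicts standing in for quilt's node classes; using an explicit stack instead of recursion means deep trees never hit Python's recursion limit:

def iter_values(root):
    stack = [root]
    while stack:
        node = stack.pop()
        yield node.get("value")
        stack.extend(node.get("children", []))  # children are visited later, LIFO order

# tree = {"value": 1, "children": [{"value": 2, "children": []}]}
# list(iter_values(tree)) == [1, 2]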
8,182
quiltdata/quilt
registry/quilt_server/analytics.py
_send_event_task
def _send_event_task(args): """ Actually sends the MixPanel event. Runs in a uwsgi worker process. """ endpoint = args['endpoint'] json_message = args['json_message'] _consumer_impl.send(endpoint, json_message)
python
def _send_event_task(args): """ Actually sends the MixPanel event. Runs in a uwsgi worker process. """ endpoint = args['endpoint'] json_message = args['json_message'] _consumer_impl.send(endpoint, json_message)
[ "def", "_send_event_task", "(", "args", ")", ":", "endpoint", "=", "args", "[", "'endpoint'", "]", "json_message", "=", "args", "[", "'json_message'", "]", "_consumer_impl", ".", "send", "(", "endpoint", ",", "json_message", ")" ]
Actually sends the MixPanel event. Runs in a uwsgi worker process.
[ "Actually", "sends", "the", "MixPanel", "event", ".", "Runs", "in", "a", "uwsgi", "worker", "process", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/analytics.py#L28-L34
8,183
quiltdata/quilt
registry/quilt_server/analytics.py
AsyncConsumer.send
def send(self, endpoint, json_message): """ Queues the message to be sent. """ _send_event_task.spool(endpoint=endpoint, json_message=json_message)
python
def send(self, endpoint, json_message): """ Queues the message to be sent. """ _send_event_task.spool(endpoint=endpoint, json_message=json_message)
[ "def", "send", "(", "self", ",", "endpoint", ",", "json_message", ")", ":", "_send_event_task", ".", "spool", "(", "endpoint", "=", "endpoint", ",", "json_message", "=", "json_message", ")" ]
Queues the message to be sent.
[ "Queues", "the", "message", "to", "be", "sent", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/analytics.py#L41-L45
8,184
quiltdata/quilt
compiler/quilt/tools/main.py
main
def main(args=None): """Build and run parser :param args: cli args from tests """ parser = argument_parser() args = parser.parse_args(args) # If 'func' isn't present, something is misconfigured above or no (positional) arg was given. if not hasattr(args, 'func'): args = parser.parse_args(['help']) # show help # Convert argparse.Namespace into dict and clean it up. # We can then pass it directly to the helper function. kwargs = vars(args) # handle the '--dev' option if kwargs.pop('dev') or os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true': # Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for quilt._DEV_MODE = True else: # Disables CLI ctrl-c tracebacks, etc. quilt._DEV_MODE = False func = kwargs.pop('func') try: func(**kwargs) return 0 except QuiltException as ex: print(ex.message, file=sys.stderr) return 1 except requests.exceptions.ConnectionError as ex: print("Failed to connect: %s" % ex, file=sys.stderr) return 1
python
def main(args=None): """Build and run parser :param args: cli args from tests """ parser = argument_parser() args = parser.parse_args(args) # If 'func' isn't present, something is misconfigured above or no (positional) arg was given. if not hasattr(args, 'func'): args = parser.parse_args(['help']) # show help # Convert argparse.Namespace into dict and clean it up. # We can then pass it directly to the helper function. kwargs = vars(args) # handle the '--dev' option if kwargs.pop('dev') or os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true': # Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for quilt._DEV_MODE = True else: # Disables CLI ctrl-c tracebacks, etc. quilt._DEV_MODE = False func = kwargs.pop('func') try: func(**kwargs) return 0 except QuiltException as ex: print(ex.message, file=sys.stderr) return 1 except requests.exceptions.ConnectionError as ex: print("Failed to connect: %s" % ex, file=sys.stderr) return 1
[ "def", "main", "(", "args", "=", "None", ")", ":", "parser", "=", "argument_parser", "(", ")", "args", "=", "parser", ".", "parse_args", "(", "args", ")", "# If 'func' isn't present, something is misconfigured above or no (positional) arg was given.", "if", "not", "hasattr", "(", "args", ",", "'func'", ")", ":", "args", "=", "parser", ".", "parse_args", "(", "[", "'help'", "]", ")", "# show help", "# Convert argparse.Namespace into dict and clean it up.", "# We can then pass it directly to the helper function.", "kwargs", "=", "vars", "(", "args", ")", "# handle the '--dev' option", "if", "kwargs", ".", "pop", "(", "'dev'", ")", "or", "os", ".", "environ", ".", "get", "(", "'QUILT_DEV_MODE'", ",", "''", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "==", "'true'", ":", "# Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for", "quilt", ".", "_DEV_MODE", "=", "True", "else", ":", "# Disables CLI ctrl-c tracebacks, etc.", "quilt", ".", "_DEV_MODE", "=", "False", "func", "=", "kwargs", ".", "pop", "(", "'func'", ")", "try", ":", "func", "(", "*", "*", "kwargs", ")", "return", "0", "except", "QuiltException", "as", "ex", ":", "print", "(", "ex", ".", "message", ",", "file", "=", "sys", ".", "stderr", ")", "return", "1", "except", "requests", ".", "exceptions", ".", "ConnectionError", "as", "ex", ":", "print", "(", "\"Failed to connect: %s\"", "%", "ex", ",", "file", "=", "sys", ".", "stderr", ")", "return", "1" ]
Build and run parser :param args: cli args from tests
[ "Build", "and", "run", "parser" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/main.py#L338-L372
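A sketch of the dispatch pattern main relies on: subcommands register a callable via set_defaults(func=...), and main pops 'func' out of vars(args) and calls it with the remaining parsed options as keyword arguments (the 'greet' command here is hypothetical):

import argparse

def cmd_greet(name):
    print("hello,", name)

parser = argparse.ArgumentParser(prog="demo")
sub = parser.add_subparsers()
greet = sub.add_parser("greet")
greet.add_argument("name")
greet.set_defaults(func=cmd_greet)   # the handler travels inside the Namespace

args = parser.parse_args(["greet", "quilt"])
kwargs = vars(args)                  # Namespace -> dict
func = kwargs.pop("func")
func(**kwargs)                       # prints: hello, quilt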
8,185
quiltdata/quilt
compiler/quilt/tools/util.py
is_identifier
def is_identifier(string): """Check if string could be a valid python identifier :param string: string to be tested :returns: True if string can be a python identifier, False otherwise :rtype: bool """ matched = PYTHON_IDENTIFIER_RE.match(string) return bool(matched) and not keyword.iskeyword(string)
python
def is_identifier(string): """Check if string could be a valid python identifier :param string: string to be tested :returns: True if string can be a python identifier, False otherwise :rtype: bool """ matched = PYTHON_IDENTIFIER_RE.match(string) return bool(matched) and not keyword.iskeyword(string)
[ "def", "is_identifier", "(", "string", ")", ":", "matched", "=", "PYTHON_IDENTIFIER_RE", ".", "match", "(", "string", ")", "return", "bool", "(", "matched", ")", "and", "not", "keyword", ".", "iskeyword", "(", "string", ")" ]
Check if string could be a valid python identifier :param string: string to be tested :returns: True if string can be a python identifier, False otherwise :rtype: bool
[ "Check", "if", "string", "could", "be", "a", "valid", "python", "identifier" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L160-L168
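PYTHON_IDENTIFIER_RE is defined elsewhere in the module; a self-contained equivalent under the assumption that it matches the usual ASCII identifier pattern:

import keyword
import re

PYTHON_IDENTIFIER_RE = re.compile(r'^[a-zA-Z_]\w*$', re.ASCII)

def is_identifier(string):
    return bool(PYTHON_IDENTIFIER_RE.match(string)) and not keyword.iskeyword(string)

assert is_identifier("my_table")
assert not is_identifier("class")   # keyword
assert not is_identifier("2cool")   # starts with a digit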
8,186
quiltdata/quilt
compiler/quilt/tools/util.py
fs_link
def fs_link(path, linkpath, linktype='soft'): """Create a hard or soft link of `path` at `linkpath` Works on Linux/OSX/Windows (Vista+). :param src: File or directory to be linked :param dest: Path of link to create :param linktype: 'soft' or 'hard' """ global WIN_SOFTLINK global WIN_HARDLINK WIN_NO_ERROR = 22 assert linktype in ('soft', 'hard') path, linkpath = pathlib.Path(path), pathlib.Path(linkpath) # Checks if not path.exists(): # particularly important on Windows to prevent false success raise QuiltException("Path to link to does not exist: {}".format(path)) if linkpath.exists(): raise QuiltException("Link path already exists: {}".format(linkpath)) # Windows if os.name == 'nt': # clear out any pre-existing, un-checked errors ctypes.WinError() # Check Windows version (reasonably) supports symlinks if not sys.getwindowsversion()[0] >= 6: raise QuiltException("Unsupported operation: This version of Windows does not support linking.") # Acquire the windows CreateXLinkW() function if linktype == 'soft': if WIN_SOFTLINK is None: WIN_SOFTLINK = ctypes.windll.kernel32.CreateSymbolicLinkW WIN_SOFTLINK.restype = ctypes.c_bool create_link = lambda l, p: WIN_SOFTLINK(str(l), str(p), p.is_dir()) elif linktype == 'hard': if WIN_HARDLINK is None: WIN_HARDLINK = ctypes.windll.kernel32.CreateHardLinkW WIN_HARDLINK.restype = ctypes.c_bool create_link = WIN_HARDLINK # Call and check results create_link(linkpath, path) # Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a # (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10 # We have user results with similar effects (success reported, but not actual) error = ctypes.WinError() if error.winerror: raise QuiltException("Linking failed: " + str(error), original_error=error) # Handle the case wehere linking failed and windows gave no error: if not linkpath.exists() and linkpath.is_symlink(): raise QuiltException("Linking failed: Expected symlink at: {}".format(linkpath)) # Linux, OSX else: try: if linktype == 'soft': linkpath.symlink_to(path) elif linktype == 'hard': os.link(str(path), str(linkpath)) except OSError as error: raise QuiltException("Linking failed: " + str(error), original_error=error)
python
def fs_link(path, linkpath, linktype='soft'): """Create a hard or soft link of `path` at `linkpath` Works on Linux/OSX/Windows (Vista+). :param src: File or directory to be linked :param dest: Path of link to create :param linktype: 'soft' or 'hard' """ global WIN_SOFTLINK global WIN_HARDLINK WIN_NO_ERROR = 22 assert linktype in ('soft', 'hard') path, linkpath = pathlib.Path(path), pathlib.Path(linkpath) # Checks if not path.exists(): # particularly important on Windows to prevent false success raise QuiltException("Path to link to does not exist: {}".format(path)) if linkpath.exists(): raise QuiltException("Link path already exists: {}".format(linkpath)) # Windows if os.name == 'nt': # clear out any pre-existing, un-checked errors ctypes.WinError() # Check Windows version (reasonably) supports symlinks if not sys.getwindowsversion()[0] >= 6: raise QuiltException("Unsupported operation: This version of Windows does not support linking.") # Acquire the windows CreateXLinkW() function if linktype == 'soft': if WIN_SOFTLINK is None: WIN_SOFTLINK = ctypes.windll.kernel32.CreateSymbolicLinkW WIN_SOFTLINK.restype = ctypes.c_bool create_link = lambda l, p: WIN_SOFTLINK(str(l), str(p), p.is_dir()) elif linktype == 'hard': if WIN_HARDLINK is None: WIN_HARDLINK = ctypes.windll.kernel32.CreateHardLinkW WIN_HARDLINK.restype = ctypes.c_bool create_link = WIN_HARDLINK # Call and check results create_link(linkpath, path) # Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a # (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10 # We have user results with similar effects (success reported, but not actual) error = ctypes.WinError() if error.winerror: raise QuiltException("Linking failed: " + str(error), original_error=error) # Handle the case wehere linking failed and windows gave no error: if not linkpath.exists() and linkpath.is_symlink(): raise QuiltException("Linking failed: Expected symlink at: {}".format(linkpath)) # Linux, OSX else: try: if linktype == 'soft': linkpath.symlink_to(path) elif linktype == 'hard': os.link(str(path), str(linkpath)) except OSError as error: raise QuiltException("Linking failed: " + str(error), original_error=error)
[ "def", "fs_link", "(", "path", ",", "linkpath", ",", "linktype", "=", "'soft'", ")", ":", "global", "WIN_SOFTLINK", "global", "WIN_HARDLINK", "WIN_NO_ERROR", "=", "22", "assert", "linktype", "in", "(", "'soft'", ",", "'hard'", ")", "path", ",", "linkpath", "=", "pathlib", ".", "Path", "(", "path", ")", ",", "pathlib", ".", "Path", "(", "linkpath", ")", "# Checks", "if", "not", "path", ".", "exists", "(", ")", ":", "# particularly important on Windows to prevent false success", "raise", "QuiltException", "(", "\"Path to link to does not exist: {}\"", ".", "format", "(", "path", ")", ")", "if", "linkpath", ".", "exists", "(", ")", ":", "raise", "QuiltException", "(", "\"Link path already exists: {}\"", ".", "format", "(", "linkpath", ")", ")", "# Windows", "if", "os", ".", "name", "==", "'nt'", ":", "# clear out any pre-existing, un-checked errors", "ctypes", ".", "WinError", "(", ")", "# Check Windows version (reasonably) supports symlinks", "if", "not", "sys", ".", "getwindowsversion", "(", ")", "[", "0", "]", ">=", "6", ":", "raise", "QuiltException", "(", "\"Unsupported operation: This version of Windows does not support linking.\"", ")", "# Acquire the windows CreateXLinkW() function", "if", "linktype", "==", "'soft'", ":", "if", "WIN_SOFTLINK", "is", "None", ":", "WIN_SOFTLINK", "=", "ctypes", ".", "windll", ".", "kernel32", ".", "CreateSymbolicLinkW", "WIN_SOFTLINK", ".", "restype", "=", "ctypes", ".", "c_bool", "create_link", "=", "lambda", "l", ",", "p", ":", "WIN_SOFTLINK", "(", "str", "(", "l", ")", ",", "str", "(", "p", ")", ",", "p", ".", "is_dir", "(", ")", ")", "elif", "linktype", "==", "'hard'", ":", "if", "WIN_HARDLINK", "is", "None", ":", "WIN_HARDLINK", "=", "ctypes", ".", "windll", ".", "kernel32", ".", "CreateHardLinkW", "WIN_HARDLINK", ".", "restype", "=", "ctypes", ".", "c_bool", "create_link", "=", "WIN_HARDLINK", "# Call and check results", "create_link", "(", "linkpath", ",", "path", ")", "# Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a", "# (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10", "# We have user results with similar effects (success reported, but not actual)", "error", "=", "ctypes", ".", "WinError", "(", ")", "if", "error", ".", "winerror", ":", "raise", "QuiltException", "(", "\"Linking failed: \"", "+", "str", "(", "error", ")", ",", "original_error", "=", "error", ")", "# Handle the case wehere linking failed and windows gave no error:", "if", "not", "linkpath", ".", "exists", "(", ")", "and", "linkpath", ".", "is_symlink", "(", ")", ":", "raise", "QuiltException", "(", "\"Linking failed: Expected symlink at: {}\"", ".", "format", "(", "linkpath", ")", ")", "# Linux, OSX", "else", ":", "try", ":", "if", "linktype", "==", "'soft'", ":", "linkpath", ".", "symlink_to", "(", "path", ")", "elif", "linktype", "==", "'hard'", ":", "os", ".", "link", "(", "str", "(", "path", ")", ",", "str", "(", "linkpath", ")", ")", "except", "OSError", "as", "error", ":", "raise", "QuiltException", "(", "\"Linking failed: \"", "+", "str", "(", "error", ")", ",", "original_error", "=", "error", ")" ]
Create a hard or soft link of `path` at `linkpath` Works on Linux/OSX/Windows (Vista+). :param path: File or directory to be linked :param linkpath: Path of link to create :param linktype: 'soft' or 'hard'
[ "Create", "a", "hard", "or", "soft", "link", "of", "path", "at", "linkpath" ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L289-L352
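The Windows branch depends on ctypes calls into kernel32; a minimal sketch of just the POSIX branch, with the same soft/hard distinction and the same pre-checks:

import os
import pathlib

def posix_link(path, linkpath, linktype="soft"):
    path, linkpath = pathlib.Path(path), pathlib.Path(linkpath)
    if not path.exists():
        raise FileNotFoundError(str(path))
    if linkpath.exists():
        raise FileExistsError(str(linkpath))
    if linktype == "soft":
        linkpath.symlink_to(path)            # symbolic link
    else:
        os.link(str(path), str(linkpath))    # hard link (regular files only)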
8,187
quiltdata/quilt
compiler/quilt/tools/util.py
FileWithReadProgress.read
def read(self, size=-1): """Read bytes and update the progress bar.""" buf = self._fd.read(size) self._progress_cb(len(buf)) return buf
python
def read(self, size=-1): """Read bytes and update the progress bar.""" buf = self._fd.read(size) self._progress_cb(len(buf)) return buf
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "buf", "=", "self", ".", "_fd", ".", "read", "(", "size", ")", "self", ".", "_progress_cb", "(", "len", "(", "buf", ")", ")", "return", "buf" ]
Read bytes and update the progress bar.
[ "Read", "bytes", "and", "update", "the", "progress", "bar", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L81-L85
8,188
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.create_dirs
def create_dirs(self): """ Creates the store directory and its subdirectories. """ if not os.path.isdir(self._path): os.makedirs(self._path) for dir_name in [self.OBJ_DIR, self.TMP_OBJ_DIR, self.PKG_DIR, self.CACHE_DIR]: path = os.path.join(self._path, dir_name) if not os.path.isdir(path): os.mkdir(path) if not os.path.exists(self._version_path()): self._write_format_version()
python
def create_dirs(self): """ Creates the store directory and its subdirectories. """ if not os.path.isdir(self._path): os.makedirs(self._path) for dir_name in [self.OBJ_DIR, self.TMP_OBJ_DIR, self.PKG_DIR, self.CACHE_DIR]: path = os.path.join(self._path, dir_name) if not os.path.isdir(path): os.mkdir(path) if not os.path.exists(self._version_path()): self._write_format_version()
[ "def", "create_dirs", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "_path", ")", ":", "os", ".", "makedirs", "(", "self", ".", "_path", ")", "for", "dir_name", "in", "[", "self", ".", "OBJ_DIR", ",", "self", ".", "TMP_OBJ_DIR", ",", "self", ".", "PKG_DIR", ",", "self", ".", "CACHE_DIR", "]", ":", "path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "dir_name", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "os", ".", "mkdir", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_version_path", "(", ")", ")", ":", "self", ".", "_write_format_version", "(", ")" ]
Creates the store directory and its subdirectories.
[ "Creates", "the", "store", "directory", "and", "its", "subdirectories", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L121-L132
8,189
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.find_store_dirs
def find_store_dirs(cls): """ Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS. """ store_dirs = [default_store_location()] extra_dirs_str = os.getenv('QUILT_PACKAGE_DIRS') if extra_dirs_str: store_dirs.extend(extra_dirs_str.split(':')) return store_dirs
python
def find_store_dirs(cls): """ Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS. """ store_dirs = [default_store_location()] extra_dirs_str = os.getenv('QUILT_PACKAGE_DIRS') if extra_dirs_str: store_dirs.extend(extra_dirs_str.split(':')) return store_dirs
[ "def", "find_store_dirs", "(", "cls", ")", ":", "store_dirs", "=", "[", "default_store_location", "(", ")", "]", "extra_dirs_str", "=", "os", ".", "getenv", "(", "'QUILT_PACKAGE_DIRS'", ")", "if", "extra_dirs_str", ":", "store_dirs", ".", "extend", "(", "extra_dirs_str", ".", "split", "(", "':'", ")", ")", "return", "store_dirs" ]
Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS.
[ "Returns", "the", "primary", "package", "directory", "and", "any", "additional", "ones", "from", "QUILT_PACKAGE_DIRS", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L135-L143
8,190
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.find_package
def find_package(cls, team, user, package, pkghash=None, store_dir=None): """ Finds an existing package in one of the package directories. """ cls.check_name(team, user, package) dirs = cls.find_store_dirs() for store_dir in dirs: store = PackageStore(store_dir) pkg = store.get_package(team, user, package, pkghash=pkghash) if pkg is not None: return store, pkg return None, None
python
def find_package(cls, team, user, package, pkghash=None, store_dir=None): """ Finds an existing package in one of the package directories. """ cls.check_name(team, user, package) dirs = cls.find_store_dirs() for store_dir in dirs: store = PackageStore(store_dir) pkg = store.get_package(team, user, package, pkghash=pkghash) if pkg is not None: return store, pkg return None, None
[ "def", "find_package", "(", "cls", ",", "team", ",", "user", ",", "package", ",", "pkghash", "=", "None", ",", "store_dir", "=", "None", ")", ":", "cls", ".", "check_name", "(", "team", ",", "user", ",", "package", ")", "dirs", "=", "cls", ".", "find_store_dirs", "(", ")", "for", "store_dir", "in", "dirs", ":", "store", "=", "PackageStore", "(", "store_dir", ")", "pkg", "=", "store", ".", "get_package", "(", "team", ",", "user", ",", "package", ",", "pkghash", "=", "pkghash", ")", "if", "pkg", "is", "not", "None", ":", "return", "store", ",", "pkg", "return", "None", ",", "None" ]
Finds an existing package in one of the package directories.
[ "Finds", "an", "existing", "package", "in", "one", "of", "the", "package", "directories", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L146-L157
8,191
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.get_package
def get_package(self, team, user, package, pkghash=None): """ Gets a package from this store. """ self.check_name(team, user, package) path = self.package_path(team, user, package) if not os.path.isdir(path): return None if pkghash is None: latest_tag = os.path.join(path, self.TAGS_DIR, self.LATEST) if not os.path.exists(latest_tag): return None with open (latest_tag, 'r') as tagfile: pkghash = tagfile.read() assert pkghash is not None contents_path = os.path.join(path, self.CONTENTS_DIR, pkghash) if not os.path.isfile(contents_path): return None with open(contents_path, 'r') as contents_file: try: return json.load(contents_file, object_hook=decode_node) except AssertionError as err: if str(err).startswith("Bad package format"): name = "{}{}/{}, {}".format( team + ':' if team else '', user, package, pkghash ) raise StoreException("Error in {}: {}".format(name, str(err))) else: raise
python
def get_package(self, team, user, package, pkghash=None): """ Gets a package from this store. """ self.check_name(team, user, package) path = self.package_path(team, user, package) if not os.path.isdir(path): return None if pkghash is None: latest_tag = os.path.join(path, self.TAGS_DIR, self.LATEST) if not os.path.exists(latest_tag): return None with open (latest_tag, 'r') as tagfile: pkghash = tagfile.read() assert pkghash is not None contents_path = os.path.join(path, self.CONTENTS_DIR, pkghash) if not os.path.isfile(contents_path): return None with open(contents_path, 'r') as contents_file: try: return json.load(contents_file, object_hook=decode_node) except AssertionError as err: if str(err).startswith("Bad package format"): name = "{}{}/{}, {}".format( team + ':' if team else '', user, package, pkghash ) raise StoreException("Error in {}: {}".format(name, str(err))) else: raise
[ "def", "get_package", "(", "self", ",", "team", ",", "user", ",", "package", ",", "pkghash", "=", "None", ")", ":", "self", ".", "check_name", "(", "team", ",", "user", ",", "package", ")", "path", "=", "self", ".", "package_path", "(", "team", ",", "user", ",", "package", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "None", "if", "pkghash", "is", "None", ":", "latest_tag", "=", "os", ".", "path", ".", "join", "(", "path", ",", "self", ".", "TAGS_DIR", ",", "self", ".", "LATEST", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "latest_tag", ")", ":", "return", "None", "with", "open", "(", "latest_tag", ",", "'r'", ")", "as", "tagfile", ":", "pkghash", "=", "tagfile", ".", "read", "(", ")", "assert", "pkghash", "is", "not", "None", "contents_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "self", ".", "CONTENTS_DIR", ",", "pkghash", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "contents_path", ")", ":", "return", "None", "with", "open", "(", "contents_path", ",", "'r'", ")", "as", "contents_file", ":", "try", ":", "return", "json", ".", "load", "(", "contents_file", ",", "object_hook", "=", "decode_node", ")", "except", "AssertionError", "as", "err", ":", "if", "str", "(", "err", ")", ".", "startswith", "(", "\"Bad package format\"", ")", ":", "name", "=", "\"{}{}/{}, {}\"", ".", "format", "(", "team", "+", "':'", "if", "team", "else", "''", ",", "user", ",", "package", ",", "pkghash", ")", "raise", "StoreException", "(", "\"Error in {}: {}\"", ".", "format", "(", "name", ",", "str", "(", "err", ")", ")", ")", "else", ":", "raise" ]
Gets a package from this store.
[ "Gets", "a", "package", "from", "this", "store", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L189-L224
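decode_node is quilt-internal; a sketch of the general object_hook mechanism it plugs into — the json decoder hands every decoded JSON object to the hook, innermost first, so plain dicts can be replaced with richer objects on the way in (the toy hook and "GROUP" tag below are made up):

import json

def toy_hook(obj):
    if obj.get("type") == "GROUP":
        return ("group", obj.get("children", {}))
    return obj

data = json.loads('{"type": "GROUP", "children": {}}', object_hook=toy_hook)
# data == ("group", {})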
8,192
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.install_package
def install_package(self, team, user, package, contents): """ Creates a new package in the default package store and allocates a per-user directory if needed. """ self.check_name(team, user, package) assert contents is not None self.create_dirs() path = self.package_path(team, user, package) # Delete any existing data. try: os.remove(path) except OSError: pass
python
def install_package(self, team, user, package, contents): """ Creates a new package in the default package store and allocates a per-user directory if needed. """ self.check_name(team, user, package) assert contents is not None self.create_dirs() path = self.package_path(team, user, package) # Delete any existing data. try: os.remove(path) except OSError: pass
[ "def", "install_package", "(", "self", ",", "team", ",", "user", ",", "package", ",", "contents", ")", ":", "self", ".", "check_name", "(", "team", ",", "user", ",", "package", ")", "assert", "contents", "is", "not", "None", "self", ".", "create_dirs", "(", ")", "path", "=", "self", ".", "package_path", "(", "team", ",", "user", ",", "package", ")", "# Delete any existing data.", "try", ":", "os", ".", "remove", "(", "path", ")", "except", "OSError", ":", "pass" ]
Creates a new package in the default package store and allocates a per-user directory if needed.
[ "Creates", "a", "new", "package", "in", "the", "default", "package", "store", "and", "allocates", "a", "per", "-", "user", "directory", "if", "needed", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L226-L241
8,193
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.create_package_node
def create_package_node(self, team, user, package, dry_run=False): """ Creates a new package and initializes its contents. See `install_package`. """ contents = RootNode(dict()) if dry_run: return contents self.check_name(team, user, package) assert contents is not None self.create_dirs() # Delete any existing data. path = self.package_path(team, user, package) try: os.remove(path) except OSError: pass return contents
python
def create_package_node(self, team, user, package, dry_run=False): """ Creates a new package and initializes its contents. See `install_package`. """ contents = RootNode(dict()) if dry_run: return contents self.check_name(team, user, package) assert contents is not None self.create_dirs() # Delete any existing data. path = self.package_path(team, user, package) try: os.remove(path) except OSError: pass return contents
[ "def", "create_package_node", "(", "self", ",", "team", ",", "user", ",", "package", ",", "dry_run", "=", "False", ")", ":", "contents", "=", "RootNode", "(", "dict", "(", ")", ")", "if", "dry_run", ":", "return", "contents", "self", ".", "check_name", "(", "team", ",", "user", ",", "package", ")", "assert", "contents", "is", "not", "None", "self", ".", "create_dirs", "(", ")", "# Delete any existing data.", "path", "=", "self", ".", "package_path", "(", "team", ",", "user", ",", "package", ")", "try", ":", "os", ".", "remove", "(", "path", ")", "except", "OSError", ":", "pass", "return", "contents" ]
Creates a new package and initializes its contents. See `install_package`.
[ "Creates", "a", "new", "package", "and", "initializes", "its", "contents", ".", "See", "install_package", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L243-L262
8,194
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.iterpackages
def iterpackages(self): """ Return an iterator over all the packages in the PackageStore. """ pkgdir = os.path.join(self._path, self.PKG_DIR) if not os.path.isdir(pkgdir): return for team in sub_dirs(pkgdir): for user in sub_dirs(self.team_path(team)): for pkg in sub_dirs(self.user_path(team, user)): pkgpath = self.package_path(team, user, pkg) for hsh in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR)): yield self.get_package(team, user, pkg, pkghash=hsh)
python
def iterpackages(self): """ Return an iterator over all the packages in the PackageStore. """ pkgdir = os.path.join(self._path, self.PKG_DIR) if not os.path.isdir(pkgdir): return for team in sub_dirs(pkgdir): for user in sub_dirs(self.team_path(team)): for pkg in sub_dirs(self.user_path(team, user)): pkgpath = self.package_path(team, user, pkg) for hsh in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR)): yield self.get_package(team, user, pkg, pkghash=hsh)
[ "def", "iterpackages", "(", "self", ")", ":", "pkgdir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "self", ".", "PKG_DIR", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "pkgdir", ")", ":", "return", "for", "team", "in", "sub_dirs", "(", "pkgdir", ")", ":", "for", "user", "in", "sub_dirs", "(", "self", ".", "team_path", "(", "team", ")", ")", ":", "for", "pkg", "in", "sub_dirs", "(", "self", ".", "user_path", "(", "team", ",", "user", ")", ")", ":", "pkgpath", "=", "self", ".", "package_path", "(", "team", ",", "user", ",", "pkg", ")", "for", "hsh", "in", "sub_files", "(", "os", ".", "path", ".", "join", "(", "pkgpath", ",", "PackageStore", ".", "CONTENTS_DIR", ")", ")", ":", "yield", "self", ".", "get_package", "(", "team", ",", "user", ",", "pkg", ",", "pkghash", "=", "hsh", ")" ]
Return an iterator over all the packages in the PackageStore.
[ "Return", "an", "iterator", "over", "all", "the", "packages", "in", "the", "PackageStore", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L284-L296
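A short sketch of how the iterator might be consumed (the store location is whatever PackageStore resolves by default; the print call is illustrative):

from quilt.tools.store import PackageStore

store = PackageStore()
# Walks <store>/<PKG_DIR>/<team>/<user>/<package>/<CONTENTS_DIR>/<hash> and
# yields one loaded package per stored content hash.
for pkg in store.iterpackages():
    print(pkg)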
8,195
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.ls_packages
def ls_packages(self):
        """
        List packages in this store.
        """
        packages = []
        pkgdir = os.path.join(self._path, self.PKG_DIR)
        if not os.path.isdir(pkgdir):
            return []
        for team in sub_dirs(pkgdir):
            for user in sub_dirs(self.team_path(team)):
                for pkg in sub_dirs(self.user_path(team, user)):
                    pkgpath = self.package_path(team, user, pkg)
                    pkgmap = {h : [] for h in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR))}
                    for tag in sub_files(os.path.join(pkgpath, PackageStore.TAGS_DIR)):
                        with open(os.path.join(pkgpath, PackageStore.TAGS_DIR, tag), 'r') as tagfile:
                            pkghash = tagfile.read()
                            pkgmap[pkghash].append(tag)
                    for pkghash, tags in pkgmap.items():
                        # add teams here if any other than DEFAULT_TEAM should be hidden.
                        team_token = '' if team in (DEFAULT_TEAM,) else team + ':'
                        fullpkg = "{team}{owner}/{pkg}".format(team=team_token, owner=user, pkg=pkg)
                        # Add an empty string tag for untagged hashes
                        displaytags = tags if tags else [""]
                        # Display a separate full line per tag like Docker
                        for tag in displaytags:
                            packages.append((fullpkg, str(tag), pkghash))
        return packages
python
def ls_packages(self):
        """
        List packages in this store.
        """
        packages = []
        pkgdir = os.path.join(self._path, self.PKG_DIR)
        if not os.path.isdir(pkgdir):
            return []
        for team in sub_dirs(pkgdir):
            for user in sub_dirs(self.team_path(team)):
                for pkg in sub_dirs(self.user_path(team, user)):
                    pkgpath = self.package_path(team, user, pkg)
                    pkgmap = {h : [] for h in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR))}
                    for tag in sub_files(os.path.join(pkgpath, PackageStore.TAGS_DIR)):
                        with open(os.path.join(pkgpath, PackageStore.TAGS_DIR, tag), 'r') as tagfile:
                            pkghash = tagfile.read()
                            pkgmap[pkghash].append(tag)
                    for pkghash, tags in pkgmap.items():
                        # add teams here if any other than DEFAULT_TEAM should be hidden.
                        team_token = '' if team in (DEFAULT_TEAM,) else team + ':'
                        fullpkg = "{team}{owner}/{pkg}".format(team=team_token, owner=user, pkg=pkg)
                        # Add an empty string tag for untagged hashes
                        displaytags = tags if tags else [""]
                        # Display a separate full line per tag like Docker
                        for tag in displaytags:
                            packages.append((fullpkg, str(tag), pkghash))
        return packages
[ "def", "ls_packages", "(", "self", ")", ":", "packages", "=", "[", "]", "pkgdir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "self", ".", "PKG_DIR", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "pkgdir", ")", ":", "return", "[", "]", "for", "team", "in", "sub_dirs", "(", "pkgdir", ")", ":", "for", "user", "in", "sub_dirs", "(", "self", ".", "team_path", "(", "team", ")", ")", ":", "for", "pkg", "in", "sub_dirs", "(", "self", ".", "user_path", "(", "team", ",", "user", ")", ")", ":", "pkgpath", "=", "self", ".", "package_path", "(", "team", ",", "user", ",", "pkg", ")", "pkgmap", "=", "{", "h", ":", "[", "]", "for", "h", "in", "sub_files", "(", "os", ".", "path", ".", "join", "(", "pkgpath", ",", "PackageStore", ".", "CONTENTS_DIR", ")", ")", "}", "for", "tag", "in", "sub_files", "(", "os", ".", "path", ".", "join", "(", "pkgpath", ",", "PackageStore", ".", "TAGS_DIR", ")", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "pkgpath", ",", "PackageStore", ".", "TAGS_DIR", ",", "tag", ")", ",", "'r'", ")", "as", "tagfile", ":", "pkghash", "=", "tagfile", ".", "read", "(", ")", "pkgmap", "[", "pkghash", "]", ".", "append", "(", "tag", ")", "for", "pkghash", ",", "tags", "in", "pkgmap", ".", "items", "(", ")", ":", "# add teams here if any other than DEFAULT_TEAM should be hidden.", "team_token", "=", "''", "if", "team", "in", "(", "DEFAULT_TEAM", ",", ")", "else", "team", "+", "':'", "fullpkg", "=", "\"{team}{owner}/{pkg}\"", ".", "format", "(", "team", "=", "team_token", ",", "owner", "=", "user", ",", "pkg", "=", "pkg", ")", "# Add an empty string tag for untagged hashes", "displaytags", "=", "tags", "if", "tags", "else", "[", "\"\"", "]", "# Display a separate full line per tag like Docker", "for", "tag", "in", "displaytags", ":", "packages", ".", "append", "(", "(", "fullpkg", ",", "str", "(", "tag", ")", ",", "pkghash", ")", ")", "return", "packages" ]
List packages in this store.
[ "List", "packages", "in", "this", "store", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L298-L325
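Because ls_packages returns plain (package, tag, hash) triples, one per tag, a docker-images-style listing is straightforward. A hedged sketch (column widths are arbitrary):

from quilt.tools.store import PackageStore

store = PackageStore()
# Untagged hashes appear with an empty tag string; non-default teams are
# rendered as "team:owner/pkg", default-team packages as "owner/pkg".
for fullname, tag, pkghash in store.ls_packages():
    print("{:<40} {:<12} {}".format(fullname, tag, pkghash))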
8,196
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.team_path
def team_path(self, team=None):
        """
        Returns the path to directory with the team's users' package repositories.
        """
        if team is None:
            team = DEFAULT_TEAM
        return os.path.join(self._path, self.PKG_DIR, team)
python
def team_path(self, team=None):
        """
        Returns the path to directory with the team's users' package repositories.
        """
        if team is None:
            team = DEFAULT_TEAM
        return os.path.join(self._path, self.PKG_DIR, team)
[ "def", "team_path", "(", "self", ",", "team", "=", "None", ")", ":", "if", "team", "is", "None", ":", "team", "=", "DEFAULT_TEAM", "return", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "self", ".", "PKG_DIR", ",", "team", ")" ]
Returns the path to directory with the team's users' package repositories.
[ "Returns", "the", "path", "to", "directory", "with", "the", "team", "s", "users", "package", "repositories", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L327-L333
8,197
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.user_path
def user_path(self, team, user):
        """
        Returns the path to directory with the user's package repositories.
        """
        return os.path.join(self.team_path(team), user)
python
def user_path(self, team, user):
        """
        Returns the path to directory with the user's package repositories.
        """
        return os.path.join(self.team_path(team), user)
[ "def", "user_path", "(", "self", ",", "team", ",", "user", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "team_path", "(", "team", ")", ",", "user", ")" ]
Returns the path to directory with the user's package repositories.
[ "Returns", "the", "path", "to", "directory", "with", "the", "user", "s", "package", "repositories", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L335-L339
8,198
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.package_path
def package_path(self, team, user, package):
        """
        Returns the path to a package repository.
        """
        return os.path.join(self.user_path(team, user), package)
python
def package_path(self, team, user, package):
        """
        Returns the path to a package repository.
        """
        return os.path.join(self.user_path(team, user), package)
[ "def", "package_path", "(", "self", ",", "team", ",", "user", ",", "package", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "user_path", "(", "team", ",", "user", ")", ",", "package", ")" ]
Returns the path to a package repository.
[ "Returns", "the", "path", "to", "a", "package", "repository", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L341-L345
8,199
quiltdata/quilt
compiler/quilt/tools/store.py
PackageStore.object_path
def object_path(self, objhash):
        """
        Returns the path to an object file based on its hash.
        """
        return os.path.join(self._path, self.OBJ_DIR, objhash)
python
def object_path(self, objhash):
        """
        Returns the path to an object file based on its hash.
        """
        return os.path.join(self._path, self.OBJ_DIR, objhash)
[ "def", "object_path", "(", "self", ",", "objhash", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "self", ".", "OBJ_DIR", ",", "objhash", ")" ]
Returns the path to an object file based on its hash.
[ "Returns", "the", "path", "to", "an", "object", "file", "based", "on", "its", "hash", "." ]
651853e7e89a8af86e0ff26167e752efa5878c12
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L347-L351
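Taken together, team_path, user_path, package_path, and object_path imply a two-branch on-disk layout: package metadata under a per-team, per-user tree, and content-addressed objects in a flat directory keyed by hash. A sketch of the resulting paths (the store root and the PKG_DIR/OBJ_DIR values are assumptions; the real constants are defined elsewhere in store.py):

import os

root = os.path.expanduser("~/quilt_packages")  # hypothetical store root (self._path)
PKG_DIR, OBJ_DIR = "pkgs", "objs"              # assumed values of the class constants

team_dir = os.path.join(root, PKG_DIR, "myteam")       # team_path("myteam")
user_dir = os.path.join(team_dir, "jane")              # user_path("myteam", "jane")
package_dir = os.path.join(user_dir, "demo")           # package_path("myteam", "jane", "demo")
object_file = os.path.join(root, OBJ_DIR, "4f2ab1c9")  # object_path("4f2ab1c9"), truncated example hash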