Column                      Type           Min      Max
repository_name             stringlengths  7        55
func_path_in_repository     stringlengths  4        223
func_name                   stringlengths  1        134
whole_func_string           stringlengths  75       104k
language                    stringclasses  1 value
func_code_string            stringlengths  75       104k
func_code_tokens            listlengths    19       28.4k
func_documentation_string   stringlengths  1        46.9k
func_documentation_tokens   listlengths    1        1.97k
split_name                  stringclasses  1 value
func_code_url               stringlengths  87       315
asweigart/PyMsgBox
pymsgbox/__init__.py
_buttonbox
def _buttonbox(msg, title, choices, root=None, timeout=None):
    """
    Display a msg, a title, and a set of buttons.
    The buttons are defined by the members of the choices list.
    Return the text of the button that the user selected.

    @arg msg: the msg to be displayed.
    @arg title: the window title
    @arg choices: a list or tuple of the choices to be displayed
    """
    global boxRoot, __replyButtonText, __widgetTexts, buttonsFrame

    # Initialize __replyButtonText to the first choice.
    # This is what will be used if the window is closed by the close button.
    __replyButtonText = choices[0]

    if root:
        root.withdraw()
        boxRoot = tk.Toplevel(master=root)
        boxRoot.withdraw()
    else:
        boxRoot = tk.Tk()
        boxRoot.withdraw()

    boxRoot.title(title)
    boxRoot.iconname('Dialog')
    boxRoot.geometry(rootWindowPosition)
    boxRoot.minsize(400, 100)

    # ------------- define the messageFrame ---------------------------------
    messageFrame = tk.Frame(master=boxRoot)
    messageFrame.pack(side=tk.TOP, fill=tk.BOTH)

    # ------------- define the buttonsFrame ---------------------------------
    buttonsFrame = tk.Frame(master=boxRoot)
    buttonsFrame.pack(side=tk.TOP, fill=tk.BOTH)

    # -------------------- place the widgets in the frames -----------------------
    messageWidget = tk.Message(messageFrame, text=msg, width=400)
    messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY, PROPORTIONAL_FONT_SIZE))
    messageWidget.pack(side=tk.TOP, expand=tk.YES, fill=tk.X, padx='3m', pady='3m')

    __put_buttons_in_buttonframe(choices)

    # -------------- the action begins -----------
    # put the focus on the first button
    __firstWidget.focus_force()

    boxRoot.deiconify()
    if timeout is not None:
        boxRoot.after(timeout, timeoutBoxRoot)
    boxRoot.mainloop()
    try:
        boxRoot.destroy()
    except tk.TclError:
        if __replyButtonText != TIMEOUT_TEXT:
            __replyButtonText = None

    if root:
        root.deiconify()
    return __replyButtonText
python
[ "def", "_buttonbox", "(", "msg", ",", "title", ",", "choices", ",", "root", "=", "None", ",", "timeout", "=", "None", ")", ":", "global", "boxRoot", ",", "__replyButtonText", ",", "__widgetTexts", ",", "buttonsFrame", "# Initialize __replyButtonText to the first choice.", "# This is what will be used if the window is closed by the close button.", "__replyButtonText", "=", "choices", "[", "0", "]", "if", "root", ":", "root", ".", "withdraw", "(", ")", "boxRoot", "=", "tk", ".", "Toplevel", "(", "master", "=", "root", ")", "boxRoot", ".", "withdraw", "(", ")", "else", ":", "boxRoot", "=", "tk", ".", "Tk", "(", ")", "boxRoot", ".", "withdraw", "(", ")", "boxRoot", ".", "title", "(", "title", ")", "boxRoot", ".", "iconname", "(", "'Dialog'", ")", "boxRoot", ".", "geometry", "(", "rootWindowPosition", ")", "boxRoot", ".", "minsize", "(", "400", ",", "100", ")", "# ------------- define the messageFrame ---------------------------------", "messageFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "messageFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "# ------------- define the buttonsFrame ---------------------------------", "buttonsFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "buttonsFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "# -------------------- place the widgets in the frames -----------------------", "messageWidget", "=", "tk", ".", "Message", "(", "messageFrame", ",", "text", "=", "msg", ",", "width", "=", "400", ")", "messageWidget", ".", "configure", "(", "font", "=", "(", "PROPORTIONAL_FONT_FAMILY", ",", "PROPORTIONAL_FONT_SIZE", ")", ")", "messageWidget", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "expand", "=", "tk", ".", "YES", ",", "fill", "=", "tk", ".", "X", ",", "padx", "=", "'3m'", ",", "pady", "=", "'3m'", ")", "__put_buttons_in_buttonframe", "(", "choices", ")", "# -------------- the action begins -----------", "# put the focus on the first button", "__firstWidget", ".", "focus_force", "(", ")", "boxRoot", ".", "deiconify", "(", ")", "if", "timeout", "is", "not", "None", ":", "boxRoot", ".", "after", "(", "timeout", ",", "timeoutBoxRoot", ")", "boxRoot", ".", "mainloop", "(", ")", "try", ":", "boxRoot", ".", "destroy", "(", ")", "except", "tk", ".", "TclError", ":", "if", "__replyButtonText", "!=", "TIMEOUT_TEXT", ":", "__replyButtonText", "=", "None", "if", "root", ":", "root", ".", "deiconify", "(", ")", "return", "__replyButtonText" ]
Display a msg, a title, and a set of buttons. The buttons are defined by the members of the choices list. Return the text of the button that the user selected. @arg msg: the msg to be displayed. @arg title: the window title @arg choices: a list or tuple of the choices to be displayed
[ "Display", "a", "msg", "a", "title", "and", "a", "set", "of", "buttons", ".", "The", "buttons", "are", "defined", "by", "the", "members", "of", "the", "choices", "list", ".", "Return", "the", "text", "of", "the", "button", "that", "the", "user", "selected", "." ]
train
https://github.com/asweigart/PyMsgBox/blob/c94325d21c08690dd89ebf9ebf1cf1b6ed54a1da/pymsgbox/__init__.py#L134-L194
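A minimal usage sketch for the record above: _buttonbox is the private worker behind PyMsgBox's public dialog helpers, so the hedged example below drives it through pymsgbox.confirm. It assumes pymsgbox is installed and a Tk-capable display is available.

# Sketch only: pymsgbox.confirm() drives the private _buttonbox() shown above.
import pymsgbox

clicked = pymsgbox.confirm(
    text='Overwrite the existing file?',
    title='Confirm',
    buttons=['Yes', 'No', 'Cancel'],  # becomes the `choices` argument internally
)
print(clicked)  # text of the pressed button, e.g. 'Yes', or None on close/timeout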
asweigart/PyMsgBox
pymsgbox/__init__.py
__put_buttons_in_buttonframe
def __put_buttons_in_buttonframe(choices):
    """Put the buttons in the buttons frame"""
    global __widgetTexts, __firstWidget, buttonsFrame

    __firstWidget = None
    __widgetTexts = {}

    i = 0

    for buttonText in choices:
        tempButton = tk.Button(buttonsFrame, takefocus=1, text=buttonText)
        _bindArrows(tempButton)
        tempButton.pack(expand=tk.YES, side=tk.LEFT, padx='1m', pady='1m', ipadx='2m', ipady='1m')

        # remember the text associated with this widget
        __widgetTexts[tempButton] = buttonText

        # remember the first widget, so we can put the focus there
        if i == 0:
            __firstWidget = tempButton
            i = 1

        # for the commandButton, bind activation events to the activation event handler
        commandButton = tempButton
        handler = __buttonEvent
        for selectionEvent in STANDARD_SELECTION_EVENTS:
            commandButton.bind('<%s>' % selectionEvent, handler)

        if CANCEL_TEXT in choices:
            commandButton.bind('<Escape>', __cancelButtonEvent)
python
[ "def", "__put_buttons_in_buttonframe", "(", "choices", ")", ":", "global", "__widgetTexts", ",", "__firstWidget", ",", "buttonsFrame", "__firstWidget", "=", "None", "__widgetTexts", "=", "{", "}", "i", "=", "0", "for", "buttonText", "in", "choices", ":", "tempButton", "=", "tk", ".", "Button", "(", "buttonsFrame", ",", "takefocus", "=", "1", ",", "text", "=", "buttonText", ")", "_bindArrows", "(", "tempButton", ")", "tempButton", ".", "pack", "(", "expand", "=", "tk", ".", "YES", ",", "side", "=", "tk", ".", "LEFT", ",", "padx", "=", "'1m'", ",", "pady", "=", "'1m'", ",", "ipadx", "=", "'2m'", ",", "ipady", "=", "'1m'", ")", "# remember the text associated with this widget", "__widgetTexts", "[", "tempButton", "]", "=", "buttonText", "# remember the first widget, so we can put the focus there", "if", "i", "==", "0", ":", "__firstWidget", "=", "tempButton", "i", "=", "1", "# for the commandButton, bind activation events to the activation event handler", "commandButton", "=", "tempButton", "handler", "=", "__buttonEvent", "for", "selectionEvent", "in", "STANDARD_SELECTION_EVENTS", ":", "commandButton", ".", "bind", "(", "'<%s>'", "%", "selectionEvent", ",", "handler", ")", "if", "CANCEL_TEXT", "in", "choices", ":", "commandButton", ".", "bind", "(", "'<Escape>'", ",", "__cancelButtonEvent", ")" ]
Put the buttons in the buttons frame
[ "Put", "the", "buttons", "in", "the", "buttons", "frame" ]
train
https://github.com/asweigart/PyMsgBox/blob/c94325d21c08690dd89ebf9ebf1cf1b6ed54a1da/pymsgbox/__init__.py#L197-L226
asweigart/PyMsgBox
pymsgbox/__init__.py
__fillablebox
def __fillablebox(msg, title='', default='', mask=None, root=None, timeout=None):
    """
    Show a box in which a user can enter some text.
    You may optionally specify some default text, which will appear in the
    enterbox when it is displayed.
    Returns the text that the user entered, or None if he cancels the operation.
    """
    global boxRoot, __enterboxText, __enterboxDefaultText
    global cancelButton, entryWidget, okButton

    if title == None:
        title == ''
    if default == None:
        default = ''
    __enterboxDefaultText = default
    __enterboxText = __enterboxDefaultText

    if root:
        root.withdraw()
        boxRoot = tk.Toplevel(master=root)
        boxRoot.withdraw()
    else:
        boxRoot = tk.Tk()
        boxRoot.withdraw()

    boxRoot.title(title)
    boxRoot.iconname('Dialog')
    boxRoot.geometry(rootWindowPosition)
    boxRoot.bind('<Escape>', __enterboxCancel)

    # ------------- define the messageFrame ---------------------------------
    messageFrame = tk.Frame(master=boxRoot)
    messageFrame.pack(side=tk.TOP, fill=tk.BOTH)

    # ------------- define the buttonsFrame ---------------------------------
    buttonsFrame = tk.Frame(master=boxRoot)
    buttonsFrame.pack(side=tk.TOP, fill=tk.BOTH)

    # ------------- define the entryFrame ---------------------------------
    entryFrame = tk.Frame(master=boxRoot)
    entryFrame.pack(side=tk.TOP, fill=tk.BOTH)

    # ------------- define the buttonsFrame ---------------------------------
    buttonsFrame = tk.Frame(master=boxRoot)
    buttonsFrame.pack(side=tk.TOP, fill=tk.BOTH)

    #-------------------- the msg widget ----------------------------
    messageWidget = tk.Message(messageFrame, width='4.5i', text=msg)
    messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY, PROPORTIONAL_FONT_SIZE))
    messageWidget.pack(side=tk.RIGHT, expand=1, fill=tk.BOTH, padx='3m', pady='3m')

    # --------- entryWidget ----------------------------------------------
    entryWidget = tk.Entry(entryFrame, width=40)
    _bindArrows(entryWidget, skipArrowKeys=True)
    entryWidget.configure(font=(PROPORTIONAL_FONT_FAMILY, TEXT_ENTRY_FONT_SIZE))
    if mask:
        entryWidget.configure(show=mask)
    entryWidget.pack(side=tk.LEFT, padx='3m')
    entryWidget.bind('<Return>', __enterboxGetText)
    entryWidget.bind('<Escape>', __enterboxCancel)

    # put text into the entryWidget and have it pre-highlighted
    if __enterboxDefaultText != '':
        entryWidget.insert(0, __enterboxDefaultText)
        entryWidget.select_range(0, tk.END)

    # ------------------ ok button -------------------------------
    okButton = tk.Button(buttonsFrame, takefocus=1, text=OK_TEXT)
    _bindArrows(okButton)
    okButton.pack(expand=1, side=tk.LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m')

    # for the commandButton, bind activation events to the activation event handler
    commandButton = okButton
    handler = __enterboxGetText
    for selectionEvent in STANDARD_SELECTION_EVENTS:
        commandButton.bind('<%s>' % selectionEvent, handler)

    # ------------------ cancel button -------------------------------
    cancelButton = tk.Button(buttonsFrame, takefocus=1, text=CANCEL_TEXT)
    _bindArrows(cancelButton)
    cancelButton.pack(expand=1, side=tk.RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m')

    # for the commandButton, bind activation events to the activation event handler
    commandButton = cancelButton
    handler = __enterboxCancel
    for selectionEvent in STANDARD_SELECTION_EVENTS:
        commandButton.bind('<%s>' % selectionEvent, handler)

    # ------------------- time for action! -----------------
    entryWidget.focus_force()  # put the focus on the entryWidget
    boxRoot.deiconify()
    if timeout is not None:
        boxRoot.after(timeout, timeoutBoxRoot)
    boxRoot.mainloop()  # run it!

    # -------- after the run has completed ----------------------------------
    if root:
        root.deiconify()
    try:
        boxRoot.destroy()  # button_click didn't destroy boxRoot, so we do it now
    except tk.TclError:
        if __enterboxText != TIMEOUT_TEXT:
            return None

    return __enterboxText
python
[ "def", "__fillablebox", "(", "msg", ",", "title", "=", "''", ",", "default", "=", "''", ",", "mask", "=", "None", ",", "root", "=", "None", ",", "timeout", "=", "None", ")", ":", "global", "boxRoot", ",", "__enterboxText", ",", "__enterboxDefaultText", "global", "cancelButton", ",", "entryWidget", ",", "okButton", "if", "title", "==", "None", ":", "title", "==", "''", "if", "default", "==", "None", ":", "default", "=", "''", "__enterboxDefaultText", "=", "default", "__enterboxText", "=", "__enterboxDefaultText", "if", "root", ":", "root", ".", "withdraw", "(", ")", "boxRoot", "=", "tk", ".", "Toplevel", "(", "master", "=", "root", ")", "boxRoot", ".", "withdraw", "(", ")", "else", ":", "boxRoot", "=", "tk", ".", "Tk", "(", ")", "boxRoot", ".", "withdraw", "(", ")", "boxRoot", ".", "title", "(", "title", ")", "boxRoot", ".", "iconname", "(", "'Dialog'", ")", "boxRoot", ".", "geometry", "(", "rootWindowPosition", ")", "boxRoot", ".", "bind", "(", "'<Escape>'", ",", "__enterboxCancel", ")", "# ------------- define the messageFrame ---------------------------------", "messageFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "messageFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "# ------------- define the buttonsFrame ---------------------------------", "buttonsFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "buttonsFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "# ------------- define the entryFrame ---------------------------------", "entryFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "entryFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "# ------------- define the buttonsFrame ---------------------------------", "buttonsFrame", "=", "tk", ".", "Frame", "(", "master", "=", "boxRoot", ")", "buttonsFrame", ".", "pack", "(", "side", "=", "tk", ".", "TOP", ",", "fill", "=", "tk", ".", "BOTH", ")", "#-------------------- the msg widget ----------------------------", "messageWidget", "=", "tk", ".", "Message", "(", "messageFrame", ",", "width", "=", "'4.5i'", ",", "text", "=", "msg", ")", "messageWidget", ".", "configure", "(", "font", "=", "(", "PROPORTIONAL_FONT_FAMILY", ",", "PROPORTIONAL_FONT_SIZE", ")", ")", "messageWidget", ".", "pack", "(", "side", "=", "tk", ".", "RIGHT", ",", "expand", "=", "1", ",", "fill", "=", "tk", ".", "BOTH", ",", "padx", "=", "'3m'", ",", "pady", "=", "'3m'", ")", "# --------- entryWidget ----------------------------------------------", "entryWidget", "=", "tk", ".", "Entry", "(", "entryFrame", ",", "width", "=", "40", ")", "_bindArrows", "(", "entryWidget", ",", "skipArrowKeys", "=", "True", ")", "entryWidget", ".", "configure", "(", "font", "=", "(", "PROPORTIONAL_FONT_FAMILY", ",", "TEXT_ENTRY_FONT_SIZE", ")", ")", "if", "mask", ":", "entryWidget", ".", "configure", "(", "show", "=", "mask", ")", "entryWidget", ".", "pack", "(", "side", "=", "tk", ".", "LEFT", ",", "padx", "=", "'3m'", ")", "entryWidget", ".", "bind", "(", "'<Return>'", ",", "__enterboxGetText", ")", "entryWidget", ".", "bind", "(", "'<Escape>'", ",", "__enterboxCancel", ")", "# put text into the entryWidget and have it pre-highlighted", "if", "__enterboxDefaultText", "!=", "''", ":", "entryWidget", ".", "insert", "(", "0", ",", "__enterboxDefaultText", ")", "entryWidget", ".", "select_range", "(", "0", ",", "tk", ".", "END", ")", "# ------------------ ok button 
-------------------------------", "okButton", "=", "tk", ".", "Button", "(", "buttonsFrame", ",", "takefocus", "=", "1", ",", "text", "=", "OK_TEXT", ")", "_bindArrows", "(", "okButton", ")", "okButton", ".", "pack", "(", "expand", "=", "1", ",", "side", "=", "tk", ".", "LEFT", ",", "padx", "=", "'3m'", ",", "pady", "=", "'3m'", ",", "ipadx", "=", "'2m'", ",", "ipady", "=", "'1m'", ")", "# for the commandButton, bind activation events to the activation event handler", "commandButton", "=", "okButton", "handler", "=", "__enterboxGetText", "for", "selectionEvent", "in", "STANDARD_SELECTION_EVENTS", ":", "commandButton", ".", "bind", "(", "'<%s>'", "%", "selectionEvent", ",", "handler", ")", "# ------------------ cancel button -------------------------------", "cancelButton", "=", "tk", ".", "Button", "(", "buttonsFrame", ",", "takefocus", "=", "1", ",", "text", "=", "CANCEL_TEXT", ")", "_bindArrows", "(", "cancelButton", ")", "cancelButton", ".", "pack", "(", "expand", "=", "1", ",", "side", "=", "tk", ".", "RIGHT", ",", "padx", "=", "'3m'", ",", "pady", "=", "'3m'", ",", "ipadx", "=", "'2m'", ",", "ipady", "=", "'1m'", ")", "# for the commandButton, bind activation events to the activation event handler", "commandButton", "=", "cancelButton", "handler", "=", "__enterboxCancel", "for", "selectionEvent", "in", "STANDARD_SELECTION_EVENTS", ":", "commandButton", ".", "bind", "(", "'<%s>'", "%", "selectionEvent", ",", "handler", ")", "# ------------------- time for action! -----------------", "entryWidget", ".", "focus_force", "(", ")", "# put the focus on the entryWidget", "boxRoot", ".", "deiconify", "(", ")", "if", "timeout", "is", "not", "None", ":", "boxRoot", ".", "after", "(", "timeout", ",", "timeoutBoxRoot", ")", "boxRoot", ".", "mainloop", "(", ")", "# run it!", "# -------- after the run has completed ----------------------------------", "if", "root", ":", "root", ".", "deiconify", "(", ")", "try", ":", "boxRoot", ".", "destroy", "(", ")", "# button_click didn't destroy boxRoot, so we do it now", "except", "tk", ".", "TclError", ":", "if", "__enterboxText", "!=", "TIMEOUT_TEXT", ":", "return", "None", "return", "__enterboxText" ]
Show a box in which a user can enter some text. You may optionally specify some default text, which will appear in the enterbox when it is displayed. Returns the text that the user entered, or None if he cancels the operation.
[ "Show", "a", "box", "in", "which", "a", "user", "can", "enter", "some", "text", ".", "You", "may", "optionally", "specify", "some", "default", "text", "which", "will", "appear", "in", "the", "enterbox", "when", "it", "is", "displayed", ".", "Returns", "the", "text", "that", "the", "user", "entered", "or", "None", "if", "he", "cancels", "the", "operation", "." ]
train
https://github.com/asweigart/PyMsgBox/blob/c94325d21c08690dd89ebf9ebf1cf1b6ed54a1da/pymsgbox/__init__.py#L259-L365
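A hedged usage sketch for __fillablebox above: it backs PyMsgBox's public prompt() and password() helpers, so the example below goes through those. It assumes pymsgbox is installed and a Tk-capable display is available.

# Sketch only: prompt()/password() wrap the private __fillablebox() shown above.
import pymsgbox

name = pymsgbox.prompt(text='What is your name?', title='Input', default='guest')
secret = pymsgbox.password(text='Enter your passphrase:', mask='*')
print(name)                   # the entered text, or None if the user cancelled
print(secret is not None)     # password() masks input but returns the same way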
refindlyllc/rets
rets/parsers/login.py
OneXLogin.parse
def parse(self, response):
    """
    Parse the login xml response
    :param response: the login response from the RETS server
    :return: None
    """
    self.headers = response.headers

    if 'xml' in self.headers.get('Content-Type'):
        # Got an XML response, likely an error code.
        xml = xmltodict.parse(response.text)
        self.analyze_reply_code(xml_response_dict=xml)

    lines = response.text.split('\r\n')
    if len(lines) < 3:
        lines = response.text.split('\n')

    for line in lines:
        line = line.strip()
        name, value = self.read_line(line)

        if name:
            if name in self.valid_transactions or re.match(pattern='/^X\-/', string=name):
                self.capabilities[name] = value
            else:
                self.details[name] = value
python
[ "def", "parse", "(", "self", ",", "response", ")", ":", "self", ".", "headers", "=", "response", ".", "headers", "if", "'xml'", "in", "self", ".", "headers", ".", "get", "(", "'Content-Type'", ")", ":", "# Got an XML response, likely an error code.", "xml", "=", "xmltodict", ".", "parse", "(", "response", ".", "text", ")", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "lines", "=", "response", ".", "text", ".", "split", "(", "'\\r\\n'", ")", "if", "len", "(", "lines", ")", "<", "3", ":", "lines", "=", "response", ".", "text", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "line", "=", "line", ".", "strip", "(", ")", "name", ",", "value", "=", "self", ".", "read_line", "(", "line", ")", "if", "name", ":", "if", "name", "in", "self", ".", "valid_transactions", "or", "re", ".", "match", "(", "pattern", "=", "'/^X\\-/'", ",", "string", "=", "name", ")", ":", "self", ".", "capabilities", "[", "name", "]", "=", "value", "else", ":", "self", ".", "details", "[", "name", "]", "=", "value" ]
Parse the login xml response :param response: the login response from the RETS server :return: None
[ "Parse", "the", "login", "xml", "response", ":", "param", "response", ":", "the", "login", "response", "from", "the", "RETS", "server", ":", "return", ":", "None" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/login.py#L20-L45
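The sketch below restates the key=value classification that OneXLogin.parse performs, as a standalone illustration. The sample body and the VALID_TRANSACTIONS set are made up for the example, and the header pattern is written as a plain Python anchor rather than the PHP-style '/^X\-/' literal used in the library code above.

# Standalone sketch of the capability/detail split done by OneXLogin.parse().
import re

VALID_TRANSACTIONS = {'Login', 'Logout', 'Search', 'GetMetadata', 'GetObject', 'Action'}
sample_body = "MemberName=Jane Agent\r\nSearch=/rets/search\r\nGetMetadata=/rets/metadata\r\nTimeoutSeconds=1800"

capabilities, details = {}, {}
for line in sample_body.split('\r\n'):
    if '=' not in line:
        continue
    name, value = (part.strip() for part in line.split('=', 1))
    if name in VALID_TRANSACTIONS or re.match(r'^X-', name):
        capabilities[name] = value   # transaction URIs and X- extensions
    else:
        details[name] = value        # everything else is informational

print(capabilities)  # {'Search': '/rets/search', 'GetMetadata': '/rets/metadata'}
print(details)       # {'MemberName': 'Jane Agent', 'TimeoutSeconds': '1800'}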
refindlyllc/rets
rets/parsers/login.py
OneXLogin.read_line
def read_line(line):
    """Reads lines of XML and delimits, strips, and returns."""
    name, value = '', ''

    if '=' in line:
        name, value = line.split('=', 1)

    return [name.strip(), value.strip()]
python
[ "def", "read_line", "(", "line", ")", ":", "name", ",", "value", "=", "''", ",", "''", "if", "'='", "in", "line", ":", "name", ",", "value", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "return", "[", "name", ".", "strip", "(", ")", ",", "value", ".", "strip", "(", ")", "]" ]
Reads lines of XML and delimits, strips, and returns.
[ "Reads", "lines", "of", "XML", "and", "delimits", "strips", "and", "returns", "." ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/login.py#L48-L55
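A quick standalone check of the read_line contract above; the function body is copied here as a free function so the snippet runs without the rets package installed.

# Behavior check for the read_line() record above.
def read_line(line):
    name, value = '', ''
    if '=' in line:
        name, value = line.split('=', 1)
    return [name.strip(), value.strip()]

assert read_line('Search = /rets/search') == ['Search', '/rets/search']
assert read_line('no delimiter here') == ['', '']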
refindlyllc/rets
rets/parsers/search.py
OneXSearchCursor.generator
def generator(self, response):
    """
    Takes a response socket connection and iteratively parses and yields the results as python dictionaries.
    :param response: a Requests response object with stream=True
    :return:
    """
    delim = '\t'  # Default to tab delimited
    columns = []
    response.raw.decode_content = True
    events = ET.iterparse(BytesIO(response.content))

    results = []
    for event, elem in events:
        # Analyze search record data
        if "DATA" == elem.tag:
            data_dict = {column: data for column, data in zip(columns, elem.text.split(delim)) if column != ''}
            self.parsed_rows += 1  # Rows parsed with all requests
            results.append(data_dict)

        # Handle reply code
        elif "RETS" == elem.tag:
            reply_code = elem.get('ReplyCode')
            reply_text = elem.get('ReplyText')

            if reply_code == '20201':
                # RETS Response 20201 - No Records Found
                # Generator should continue and return nothing
                continue
            elif reply_code != '0':
                msg = "RETS Error {0!s}: {1!s}".format(reply_code, reply_text)
                raise RETSException(msg)

        # Analyze delimiter
        elif "DELIMITER" == elem.tag:
            val = elem.get("value")
            delim = chr(int(val))

        # Analyze columns
        elif "COLUMNS" == elem.tag:
            columns = elem.text.split(delim)

        # handle max rows
        elif "MAXROWS" == elem.tag:
            logger.debug("MAXROWS Tag reached in XML")
            logger.debug("Received {0!s} results from this search".format(self.parsed_rows))
            raise MaxrowException(results)

        else:
            # This is a tag we don't process (like COUNT)
            continue

        elem.clear()

    return results
python
[ "def", "generator", "(", "self", ",", "response", ")", ":", "delim", "=", "'\\t'", "# Default to tab delimited", "columns", "=", "[", "]", "response", ".", "raw", ".", "decode_content", "=", "True", "events", "=", "ET", ".", "iterparse", "(", "BytesIO", "(", "response", ".", "content", ")", ")", "results", "=", "[", "]", "for", "event", ",", "elem", "in", "events", ":", "# Analyze search record data", "if", "\"DATA\"", "==", "elem", ".", "tag", ":", "data_dict", "=", "{", "column", ":", "data", "for", "column", ",", "data", "in", "zip", "(", "columns", ",", "elem", ".", "text", ".", "split", "(", "delim", ")", ")", "if", "column", "!=", "''", "}", "self", ".", "parsed_rows", "+=", "1", "# Rows parsed with all requests", "results", ".", "append", "(", "data_dict", ")", "# Handle reply code", "elif", "\"RETS\"", "==", "elem", ".", "tag", ":", "reply_code", "=", "elem", ".", "get", "(", "'ReplyCode'", ")", "reply_text", "=", "elem", ".", "get", "(", "'ReplyText'", ")", "if", "reply_code", "==", "'20201'", ":", "# RETS Response 20201 - No Records Found", "# Generator should continue and return nothing", "continue", "elif", "reply_code", "!=", "'0'", ":", "msg", "=", "\"RETS Error {0!s}: {1!s}\"", ".", "format", "(", "reply_code", ",", "reply_text", ")", "raise", "RETSException", "(", "msg", ")", "# Analyze delimiter", "elif", "\"DELIMITER\"", "==", "elem", ".", "tag", ":", "val", "=", "elem", ".", "get", "(", "\"value\"", ")", "delim", "=", "chr", "(", "int", "(", "val", ")", ")", "# Analyze columns", "elif", "\"COLUMNS\"", "==", "elem", ".", "tag", ":", "columns", "=", "elem", ".", "text", ".", "split", "(", "delim", ")", "# handle max rows", "elif", "\"MAXROWS\"", "==", "elem", ".", "tag", ":", "logger", ".", "debug", "(", "\"MAXROWS Tag reached in XML\"", ")", "logger", ".", "debug", "(", "\"Received {0!s} results from this search\"", ".", "format", "(", "self", ".", "parsed_rows", ")", ")", "raise", "MaxrowException", "(", "results", ")", "else", ":", "# This is a tag we don't process (like COUNT)", "continue", "elem", ".", "clear", "(", ")", "return", "results" ]
Takes a response socket connection and iteratively parses and yields the results as python dictionaries. :param response: a Requests response object with stream=True :return:
[ "Takes", "a", "response", "socket", "connection", "and", "iteratively", "parses", "and", "yields", "the", "results", "as", "python", "dictionaries", ".", ":", "param", "response", ":", "a", "Requests", "response", "object", "with", "stream", "=", "True", ":", "return", ":" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/search.py#L18-L72
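A standalone sketch of the COMPACT-format parsing flow used by generator() above: the DELIMITER tag sets the column separator, COLUMNS names the fields, and each DATA row becomes a dict. The XML snippet is illustrative, not a real RETS server response.

# Standalone sketch of the COLUMNS/DELIMITER/DATA handling in generator().
import xml.etree.ElementTree as ET
from io import BytesIO

sample = b"""<RETS ReplyCode="0" ReplyText="Operation Successful">
<DELIMITER value="09"/>
<COLUMNS>\tListingID\tPrice\t</COLUMNS>
<DATA>\t123\t450000\t</DATA>
<DATA>\t124\t525000\t</DATA>
</RETS>"""

delim, columns, results = '\t', [], []
for _, elem in ET.iterparse(BytesIO(sample)):
    if elem.tag == 'DELIMITER':
        delim = chr(int(elem.get('value')))          # 09 -> tab
    elif elem.tag == 'COLUMNS':
        columns = elem.text.split(delim)
    elif elem.tag == 'DATA':
        results.append({c: d for c, d in zip(columns, elem.text.split(delim)) if c != ''})

print(results)  # [{'ListingID': '123', 'Price': '450000'}, {'ListingID': '124', 'Price': '525000'}]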
ashim888/awis
myawis/__init__.py
flatten_urlinfo
def flatten_urlinfo(urlinfo, shorter_keys=True):
    """ Takes a urlinfo object and returns a flat dictionary."""
    def flatten(value, prefix=""):
        if is_string(value):
            _result[prefix[1:]] = value
            return
        try:
            len(value)
        except (AttributeError, TypeError):  # a leaf
            _result[prefix[1:]] = value
            return

        try:
            items = value.items()
        except AttributeError:  # an iterable, but not a dict
            last_prefix = prefix.split(".")[-1]
            if shorter_keys:
                prefix = "." + last_prefix

            if last_prefix == "Country":
                for v in value:
                    country = v.pop("@Code")
                    flatten(v, ".".join([prefix, country]))
            elif last_prefix in ["RelatedLink", "CategoryData"]:
                for i, v in enumerate(value):
                    flatten(v, ".".join([prefix, str(i)]))
            elif value[0].get("TimeRange"):
                for v in value:
                    time_range = ".".join(tuple(v.pop("TimeRange").items())[0])
                    # python 3 odict_items don't support indexing
                    if v.get("DataUrl"):
                        time_range = ".".join([v.pop("DataUrl"), time_range])
                    flatten(v, ".".join([prefix, time_range]))
            else:
                msg = prefix + " contains a list we don't know how to flatten."
                raise NotImplementedError(msg)
        else:  # a dict, go one level deeper
            for k, v in items:
                flatten(v, ".".join([prefix, k]))

    _result = {}
    info = xmltodict.parse(str(urlinfo))
    flatten(info["aws:UrlInfoResponse"]["Response"]["UrlInfoResult"]["Alexa"])
    _result["OutputTimestamp"] = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
    return _result
python
[ "def", "flatten_urlinfo", "(", "urlinfo", ",", "shorter_keys", "=", "True", ")", ":", "def", "flatten", "(", "value", ",", "prefix", "=", "\"\"", ")", ":", "if", "is_string", "(", "value", ")", ":", "_result", "[", "prefix", "[", "1", ":", "]", "]", "=", "value", "return", "try", ":", "len", "(", "value", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "# a leaf", "_result", "[", "prefix", "[", "1", ":", "]", "]", "=", "value", "return", "try", ":", "items", "=", "value", ".", "items", "(", ")", "except", "AttributeError", ":", "# an iterable, but not a dict", "last_prefix", "=", "prefix", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "if", "shorter_keys", ":", "prefix", "=", "\".\"", "+", "last_prefix", "if", "last_prefix", "==", "\"Country\"", ":", "for", "v", "in", "value", ":", "country", "=", "v", ".", "pop", "(", "\"@Code\"", ")", "flatten", "(", "v", ",", "\".\"", ".", "join", "(", "[", "prefix", ",", "country", "]", ")", ")", "elif", "last_prefix", "in", "[", "\"RelatedLink\"", ",", "\"CategoryData\"", "]", ":", "for", "i", ",", "v", "in", "enumerate", "(", "value", ")", ":", "flatten", "(", "v", ",", "\".\"", ".", "join", "(", "[", "prefix", ",", "str", "(", "i", ")", "]", ")", ")", "elif", "value", "[", "0", "]", ".", "get", "(", "\"TimeRange\"", ")", ":", "for", "v", "in", "value", ":", "time_range", "=", "\".\"", ".", "join", "(", "tuple", "(", "v", ".", "pop", "(", "\"TimeRange\"", ")", ".", "items", "(", ")", ")", "[", "0", "]", ")", "# python 3 odict_items don't support indexing", "if", "v", ".", "get", "(", "\"DataUrl\"", ")", ":", "time_range", "=", "\".\"", ".", "join", "(", "[", "v", ".", "pop", "(", "\"DataUrl\"", ")", ",", "time_range", "]", ")", "flatten", "(", "v", ",", "\".\"", ".", "join", "(", "[", "prefix", ",", "time_range", "]", ")", ")", "else", ":", "msg", "=", "prefix", "+", "\" contains a list we don't know how to flatten.\"", "raise", "NotImplementedError", "(", "msg", ")", "else", ":", "# a dict, go one level deeper", "for", "k", ",", "v", "in", "items", ":", "flatten", "(", "v", ",", "\".\"", ".", "join", "(", "[", "prefix", ",", "k", "]", ")", ")", "_result", "=", "{", "}", "info", "=", "xmltodict", ".", "parse", "(", "str", "(", "urlinfo", ")", ")", "flatten", "(", "info", "[", "\"aws:UrlInfoResponse\"", "]", "[", "\"Response\"", "]", "[", "\"UrlInfoResult\"", "]", "[", "\"Alexa\"", "]", ")", "_result", "[", "\"OutputTimestamp\"", "]", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "'%Y%m%dT%H%M%SZ'", ")", "return", "_result" ]
Takes a urlinfo object and returns a flat dictionary.
[ "Takes", "a", "urlinfo", "object", "and", "returns", "a", "flat", "dictionary", "." ]
train
https://github.com/ashim888/awis/blob/b8ed3437dedd7a9646c748474bfabcf2d6e2700b/myawis/__init__.py#L163-L207
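A loose standalone sketch of the dotted-key flattening idea behind flatten_urlinfo above. The nested dict only imitates a parsed UrlInfo response, and the helper is deliberately simplified (dicts only, no list handling), unlike the real function, which walks the xmltodict output of an aws:UrlInfoResponse document.

# Simplified illustration of dotted-key flattening.
def flatten(value, prefix=''):
    flat = {}
    if isinstance(value, dict):
        for k, v in value.items():
            flat.update(flatten(v, prefix + '.' + k if prefix else k))
    else:
        flat[prefix] = value  # leaf value keyed by its dotted path
    return flat

nested = {'TrafficData': {'Rank': '1234', 'DataUrl': 'example.com'}}
print(flatten(nested))
# {'TrafficData.Rank': '1234', 'TrafficData.DataUrl': 'example.com'}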
ashim888/awis
myawis/__init__.py
CallAwis.create_v4_signature
def create_v4_signature(self, request_params):
    '''
    Create URI and signature headers based on AWS V4 signing process.
    Refer to https://docs.aws.amazon.com/AlexaWebInfoService/latest/ApiReferenceArticle.html for request params.
    :param request_params: dictionary of request parameters
    :return: URL and header to be passed to requests.get
    '''
    method = 'GET'
    service = 'awis'
    host = 'awis.us-west-1.amazonaws.com'
    region = 'us-west-1'
    endpoint = 'https://awis.amazonaws.com/api'
    request_parameters = urlencode([(key, request_params[key]) for key in sorted(request_params.keys())])

    # Key derivation functions. See:
    # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
    def sign(key, msg):
        return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()

    def getSignatureKey(key, dateStamp, regionName, serviceName):
        kDate = sign(('AWS4' + key).encode('utf-8'), dateStamp)
        kRegion = sign(kDate, regionName)
        kService = sign(kRegion, serviceName)
        kSigning = sign(kService, 'aws4_request')
        return kSigning

    # Create a date for headers and the credential string
    t = datetime.datetime.utcnow()
    amzdate = t.strftime('%Y%m%dT%H%M%SZ')
    datestamp = t.strftime('%Y%m%d')  # Date w/o time, used in credential scope

    # Create canonical request
    canonical_uri = '/api'
    canonical_querystring = request_parameters
    canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amzdate + '\n'
    signed_headers = 'host;x-amz-date'
    payload_hash = hashlib.sha256(''.encode('utf8')).hexdigest()
    canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash

    # Create string to sign
    algorithm = 'AWS4-HMAC-SHA256'
    credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'
    string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256(canonical_request.encode('utf8')).hexdigest()

    # Calculate signature
    signing_key = getSignatureKey(self.secret_access_key, datestamp, region, service)

    # Sign the string_to_sign using the signing_key
    signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()

    # Add signing information to the request
    authorization_header = algorithm + ' ' + 'Credential=' + self.access_id + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature
    headers = {'X-Amz-Date': amzdate, 'Authorization': authorization_header, 'Content-Type': 'application/xml', 'Accept': 'application/xml'}

    # Create request url
    request_url = endpoint + '?' + canonical_querystring

    return request_url, headers
python
[ "def", "create_v4_signature", "(", "self", ",", "request_params", ")", ":", "method", "=", "'GET'", "service", "=", "'awis'", "host", "=", "'awis.us-west-1.amazonaws.com'", "region", "=", "'us-west-1'", "endpoint", "=", "'https://awis.amazonaws.com/api'", "request_parameters", "=", "urlencode", "(", "[", "(", "key", ",", "request_params", "[", "key", "]", ")", "for", "key", "in", "sorted", "(", "request_params", ".", "keys", "(", ")", ")", "]", ")", "# Key derivation functions. See:", "# http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python", "def", "sign", "(", "key", ",", "msg", ")", ":", "return", "hmac", ".", "new", "(", "key", ",", "msg", ".", "encode", "(", "'utf-8'", ")", ",", "hashlib", ".", "sha256", ")", ".", "digest", "(", ")", "def", "getSignatureKey", "(", "key", ",", "dateStamp", ",", "regionName", ",", "serviceName", ")", ":", "kDate", "=", "sign", "(", "(", "'AWS4'", "+", "key", ")", ".", "encode", "(", "'utf-8'", ")", ",", "dateStamp", ")", "kRegion", "=", "sign", "(", "kDate", ",", "regionName", ")", "kService", "=", "sign", "(", "kRegion", ",", "serviceName", ")", "kSigning", "=", "sign", "(", "kService", ",", "'aws4_request'", ")", "return", "kSigning", "# Create a date for headers and the credential string", "t", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "amzdate", "=", "t", ".", "strftime", "(", "'%Y%m%dT%H%M%SZ'", ")", "datestamp", "=", "t", ".", "strftime", "(", "'%Y%m%d'", ")", "# Date w/o time, used in credential scope", "# Create canonical request", "canonical_uri", "=", "'/api'", "canonical_querystring", "=", "request_parameters", "canonical_headers", "=", "'host:'", "+", "host", "+", "'\\n'", "+", "'x-amz-date:'", "+", "amzdate", "+", "'\\n'", "signed_headers", "=", "'host;x-amz-date'", "payload_hash", "=", "hashlib", ".", "sha256", "(", "''", ".", "encode", "(", "'utf8'", ")", ")", ".", "hexdigest", "(", ")", "canonical_request", "=", "method", "+", "'\\n'", "+", "canonical_uri", "+", "'\\n'", "+", "canonical_querystring", "+", "'\\n'", "+", "canonical_headers", "+", "'\\n'", "+", "signed_headers", "+", "'\\n'", "+", "payload_hash", "# Create string to sign", "algorithm", "=", "'AWS4-HMAC-SHA256'", "credential_scope", "=", "datestamp", "+", "'/'", "+", "region", "+", "'/'", "+", "service", "+", "'/'", "+", "'aws4_request'", "string_to_sign", "=", "algorithm", "+", "'\\n'", "+", "amzdate", "+", "'\\n'", "+", "credential_scope", "+", "'\\n'", "+", "hashlib", ".", "sha256", "(", "canonical_request", ".", "encode", "(", "'utf8'", ")", ")", ".", "hexdigest", "(", ")", "# Calculate signature", "signing_key", "=", "getSignatureKey", "(", "self", ".", "secret_access_key", ",", "datestamp", ",", "region", ",", "service", ")", "# Sign the string_to_sign using the signing_key", "signature", "=", "hmac", ".", "new", "(", "signing_key", ",", "(", "string_to_sign", ")", ".", "encode", "(", "'utf-8'", ")", ",", "hashlib", ".", "sha256", ")", ".", "hexdigest", "(", ")", "# Add signing information to the request", "authorization_header", "=", "algorithm", "+", "' '", "+", "'Credential='", "+", "self", ".", "access_id", "+", "'/'", "+", "credential_scope", "+", "', '", "+", "'SignedHeaders='", "+", "signed_headers", "+", "', '", "+", "'Signature='", "+", "signature", "headers", "=", "{", "'X-Amz-Date'", ":", "amzdate", ",", "'Authorization'", ":", "authorization_header", ",", "'Content-Type'", ":", "'application/xml'", ",", "'Accept'", ":", "'application/xml'", "}", "# Create request url", "request_url", "=", 
"endpoint", "+", "'?'", "+", "canonical_querystring", "return", "request_url", ",", "headers" ]
Create URI and signature headers based on AWS V4 signing process. Refer to https://docs.aws.amazon.com/AlexaWebInfoService/latest/ApiReferenceArticle.html for request params. :param request_params: dictionary of request parameters :return: URL and header to be passed to requests.get
[ "Create", "URI", "and", "signature", "headers", "based", "on", "AWS", "V4", "signing", "process", ".", "Refer", "to", "https", ":", "//", "docs", ".", "aws", ".", "amazon", ".", "com", "/", "AlexaWebInfoService", "/", "latest", "/", "ApiReferenceArticle", ".", "html", "for", "request", "params", ".", ":", "param", "request_params", ":", "dictionary", "of", "request", "parameters", ":", "return", ":", "URL", "and", "header", "to", "be", "passed", "to", "requests", ".", "get" ]
train
https://github.com/ashim888/awis/blob/b8ed3437dedd7a9646c748474bfabcf2d6e2700b/myawis/__init__.py#L34-L92
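A standalone sketch of the SigV4 signing-key derivation used inside create_v4_signature above: HMAC-SHA256 chained over the date, region, service, and the literal 'aws4_request'. The secret key and date below are placeholders, not real credentials.

# Signing-key derivation chain, as in getSignatureKey() above.
import hashlib
import hmac

def sign(key, msg):
    return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()

secret = 'EXAMPLE-SECRET-KEY'  # placeholder, not a real AWS secret
k_date = sign(('AWS4' + secret).encode('utf-8'), '20240101')
k_region = sign(k_date, 'us-west-1')
k_service = sign(k_region, 'awis')
signing_key = sign(k_service, 'aws4_request')
print(signing_key.hex())  # deterministic 32-byte key for this date/region/service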
ashim888/awis
myawis/__init__.py
CallAwis.urlinfo
def urlinfo(self, domain, response_group = URLINFO_RESPONSE_GROUPS):
    '''
    Provide information about supplied domain as specified by the response group
    :param domain: Any valid URL
    :param response_group: Any valid urlinfo response group
    :return: Traffic and/or content data of the domain in XML format
    '''
    params = {
        'Action': "UrlInfo",
        'Url': domain,
        'ResponseGroup': response_group
    }
    url, headers = self.create_v4_signature(params)
    return self.return_output(url, headers)
python
[ "def", "urlinfo", "(", "self", ",", "domain", ",", "response_group", "=", "URLINFO_RESPONSE_GROUPS", ")", ":", "params", "=", "{", "'Action'", ":", "\"UrlInfo\"", ",", "'Url'", ":", "domain", ",", "'ResponseGroup'", ":", "response_group", "}", "url", ",", "headers", "=", "self", ".", "create_v4_signature", "(", "params", ")", "return", "self", ".", "return_output", "(", "url", ",", "headers", ")" ]
Provide information about supplied domain as specified by the response group :param domain: Any valid URL :param response_group: Any valid urlinfo response group :return: Traffic and/or content data of the domain in XML format
[ "Provide", "information", "about", "supplied", "domain", "as", "specified", "by", "the", "response", "group", ":", "param", "domain", ":", "Any", "valid", "URL", ":", "param", "response_group", ":", "Any", "valid", "urlinfo", "response", "group", ":", "return", ":", "Traffic", "and", "/", "or", "content", "data", "of", "the", "domain", "in", "XML", "format" ]
train
https://github.com/ashim888/awis/blob/b8ed3437dedd7a9646c748474bfabcf2d6e2700b/myawis/__init__.py#L94-L108
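A hedged usage sketch for urlinfo above. It assumes CallAwis(access_id, secret_access_key) is the constructor, as the self.access_id and self.secret_access_key attributes used in create_v4_signature suggest, and that real AWIS credentials replace the placeholders.

# Usage sketch only; constructor signature and credentials are assumptions.
from myawis import CallAwis

awis = CallAwis('YOUR_ACCESS_ID', 'YOUR_SECRET_ACCESS_KEY')   # placeholders
xml_response = awis.urlinfo('example.com')   # default URLINFO_RESPONSE_GROUPS
print(xml_response)                          # traffic/content data in XML form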
ashim888/awis
myawis/__init__.py
CallAwis.traffichistory
def traffichistory(self, domain, response_group=TRAFFICINFO_RESPONSE_GROUPS, myrange=31, start=20070801):
    '''
    Provide traffic history of supplied domain
    :param domain: Any valid URL
    :param response_group: Any valid traffic history response group
    :return: Traffic and/or content data of the domain in XML format
    '''
    params = {
        'Action': "TrafficHistory",
        'Url': domain,
        'ResponseGroup': response_group,
        'Range': myrange,
        'Start': start,
    }
    url, headers = self.create_v4_signature(params)
    return self.return_output(url, headers)
python
[ "def", "traffichistory", "(", "self", ",", "domain", ",", "response_group", "=", "TRAFFICINFO_RESPONSE_GROUPS", ",", "myrange", "=", "31", ",", "start", "=", "20070801", ")", ":", "params", "=", "{", "'Action'", ":", "\"TrafficHistory\"", ",", "'Url'", ":", "domain", ",", "'ResponseGroup'", ":", "response_group", ",", "'Range'", ":", "myrange", ",", "'Start'", ":", "start", ",", "}", "url", ",", "headers", "=", "self", ".", "create_v4_signature", "(", "params", ")", "return", "self", ".", "return_output", "(", "url", ",", "headers", ")" ]
Provide traffic history of supplied domain :param domain: Any valid URL :param response_group: Any valid traffic history response group :return: Traffic and/or content data of the domain in XML format
[ "Provide", "traffic", "history", "of", "supplied", "domain", ":", "param", "domain", ":", "Any", "valid", "URL", ":", "param", "response_group", ":", "Any", "valid", "traffic", "history", "response", "group", ":", "return", ":", "Traffic", "and", "/", "or", "content", "data", "of", "the", "domain", "in", "XML", "format" ]
train
https://github.com/ashim888/awis/blob/b8ed3437dedd7a9646c748474bfabcf2d6e2700b/myawis/__init__.py#L110-L126
ashim888/awis
myawis/__init__.py
CallAwis.cat_browse
def cat_browse(self, domain, path, response_group=CATEGORYBROWSE_RESPONSE_GROUPS, descriptions='True'):
    '''
    Provide category browse information of specified domain
    :param domain: Any valid URL
    :param path: Valid category path
    :param response_group: Any valid traffic history response group
    :return: Traffic and/or content data of the domain in XML format
    '''
    params = {
        'Action': "CategoryListings",
        'ResponseGroup': 'Listings',
        'Path': quote(path),
        'Descriptions': descriptions
    }
    url, headers = self.create_v4_signature(params)
    return self.return_output(url, headers)
python
[ "def", "cat_browse", "(", "self", ",", "domain", ",", "path", ",", "response_group", "=", "CATEGORYBROWSE_RESPONSE_GROUPS", ",", "descriptions", "=", "'True'", ")", ":", "params", "=", "{", "'Action'", ":", "\"CategoryListings\"", ",", "'ResponseGroup'", ":", "'Listings'", ",", "'Path'", ":", "quote", "(", "path", ")", ",", "'Descriptions'", ":", "descriptions", "}", "url", ",", "headers", "=", "self", ".", "create_v4_signature", "(", "params", ")", "return", "self", ".", "return_output", "(", "url", ",", "headers", ")" ]
Provide category browse information of specified domain :param domain: Any valid URL :param path: Valid category path :param response_group: Any valid traffic history response group :return: Traffic and/or content data of the domain in XML format
[ "Provide", "category", "browse", "information", "of", "specified", "domain", ":", "param", "domain", ":", "Any", "valid", "URL", ":", "param", "path", ":", "Valid", "category", "path", ":", "param", "response_group", ":", "Any", "valid", "traffic", "history", "response", "group", ":", "return", ":", "Traffic", "and", "/", "or", "content", "data", "of", "the", "domain", "in", "XML", "format" ]
train
https://github.com/ashim888/awis/blob/b8ed3437dedd7a9646c748474bfabcf2d6e2700b/myawis/__init__.py#L139-L155
refindlyllc/rets
rets/session.py
Session.add_capability
def add_capability(self, name, uri):
    """
    Add a capability of the RETS board
    :param name: The name of the capability
    :param uri: The capability URI given by the RETS board
    :return: None
    """
    parse_results = urlparse(uri)
    if parse_results.hostname is None:
        # relative URL given, so build this into an absolute URL
        login_url = self.capabilities.get('Login')
        if not login_url:
            logger.error("There is no login URL stored, so additional capabilities cannot be added.")
            raise ValueError("Cannot automatically determine absolute path for {0!s} given.".format(uri))

        parts = urlparse(login_url)
        port = ':{}'.format(parts.port) if parts.port else ''
        uri = parts.scheme + '://' + parts.hostname + port + '/' + uri.lstrip('/')

    self.capabilities[name] = uri
python
def add_capability(self, name, uri): """ Add a capability of the RETS board :param name: The name of the capability :param uri: The capability URI given by the RETS board :return: None """ parse_results = urlparse(uri) if parse_results.hostname is None: # relative URL given, so build this into an absolute URL login_url = self.capabilities.get('Login') if not login_url: logger.error("There is no login URL stored, so additional capabilities cannot be added.") raise ValueError("Cannot automatically determine absolute path for {0!s} given.".format(uri)) parts = urlparse(login_url) port = ':{}'.format(parts.port) if parts.port else '' uri = parts.scheme + '://' + parts.hostname + port + '/' + uri.lstrip('/') self.capabilities[name] = uri
[ "def", "add_capability", "(", "self", ",", "name", ",", "uri", ")", ":", "parse_results", "=", "urlparse", "(", "uri", ")", "if", "parse_results", ".", "hostname", "is", "None", ":", "# relative URL given, so build this into an absolute URL", "login_url", "=", "self", ".", "capabilities", ".", "get", "(", "'Login'", ")", "if", "not", "login_url", ":", "logger", ".", "error", "(", "\"There is no login URL stored, so additional capabilities cannot be added.\"", ")", "raise", "ValueError", "(", "\"Cannot automatically determine absolute path for {0!s} given.\"", ".", "format", "(", "uri", ")", ")", "parts", "=", "urlparse", "(", "login_url", ")", "port", "=", "':{}'", ".", "format", "(", "parts", ".", "port", ")", "if", "parts", ".", "port", "else", "''", "uri", "=", "parts", ".", "scheme", "+", "'://'", "+", "parts", ".", "hostname", "+", "port", "+", "'/'", "+", "uri", ".", "lstrip", "(", "'/'", ")", "self", ".", "capabilities", "[", "name", "]", "=", "uri" ]
Add a capability of the RETS board :param name: The name of the capability :param uri: The capability URI given by the RETS board :return: None
[ "Add", "a", "capability", "of", "the", "RETS", "board", ":", "param", "name", ":", "The", "name", "of", "the", "capability", ":", "param", "uri", ":", "The", "capability", "URI", "given", "by", "the", "RETS", "board", ":", "return", ":", "None" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L90-L110
refindlyllc/rets
rets/session.py
Session.login
def login(self): """ Login to the RETS board and return an instance of Bulletin :return: Bulletin instance """ response = self._request('Login') parser = OneXLogin() parser.parse(response) self.session_id = response.cookies.get(self.session_id_cookie_name, '') if parser.headers.get('RETS-Version') is not None: self.version = str(parser.headers.get('RETS-Version')) self.client.headers['RETS-Version'] = self.version for k, v in parser.capabilities.items(): self.add_capability(k, v) if self.capabilities.get('Action'): self._request('Action') return True
python
def login(self): """ Login to the RETS board and return an instance of Bulletin :return: Bulletin instance """ response = self._request('Login') parser = OneXLogin() parser.parse(response) self.session_id = response.cookies.get(self.session_id_cookie_name, '') if parser.headers.get('RETS-Version') is not None: self.version = str(parser.headers.get('RETS-Version')) self.client.headers['RETS-Version'] = self.version for k, v in parser.capabilities.items(): self.add_capability(k, v) if self.capabilities.get('Action'): self._request('Action') return True
[ "def", "login", "(", "self", ")", ":", "response", "=", "self", ".", "_request", "(", "'Login'", ")", "parser", "=", "OneXLogin", "(", ")", "parser", ".", "parse", "(", "response", ")", "self", ".", "session_id", "=", "response", ".", "cookies", ".", "get", "(", "self", ".", "session_id_cookie_name", ",", "''", ")", "if", "parser", ".", "headers", ".", "get", "(", "'RETS-Version'", ")", "is", "not", "None", ":", "self", ".", "version", "=", "str", "(", "parser", ".", "headers", ".", "get", "(", "'RETS-Version'", ")", ")", "self", ".", "client", ".", "headers", "[", "'RETS-Version'", "]", "=", "self", ".", "version", "for", "k", ",", "v", "in", "parser", ".", "capabilities", ".", "items", "(", ")", ":", "self", ".", "add_capability", "(", "k", ",", "v", ")", "if", "self", ".", "capabilities", ".", "get", "(", "'Action'", ")", ":", "self", ".", "_request", "(", "'Action'", ")", "return", "True" ]
Login to the RETS board and return an instance of Bulletin :return: Bulletin instance
[ "Login", "to", "the", "RETS", "board", "and", "return", "an", "instance", "of", "Bulletin", ":", "return", ":", "Bulletin", "instance" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L112-L132
refindlyllc/rets
rets/session.py
Session.get_resource_metadata
def get_resource_metadata(self, resource=None): """ Get resource metadata :param resource: The name of the resource to get metadata for :return: list """ result = self._make_metadata_request(meta_id=0, metadata_type='METADATA-RESOURCE') if resource: result = next((item for item in result if item['ResourceID'] == resource), None) return result
python
def get_resource_metadata(self, resource=None): """ Get resource metadata :param resource: The name of the resource to get metadata for :return: list """ result = self._make_metadata_request(meta_id=0, metadata_type='METADATA-RESOURCE') if resource: result = next((item for item in result if item['ResourceID'] == resource), None) return result
[ "def", "get_resource_metadata", "(", "self", ",", "resource", "=", "None", ")", ":", "result", "=", "self", ".", "_make_metadata_request", "(", "meta_id", "=", "0", ",", "metadata_type", "=", "'METADATA-RESOURCE'", ")", "if", "resource", ":", "result", "=", "next", "(", "(", "item", "for", "item", "in", "result", "if", "item", "[", "'ResourceID'", "]", "==", "resource", ")", ",", "None", ")", "return", "result" ]
Get resource metadata :param resource: The name of the resource to get metadata for :return: list
[ "Get", "resource", "metadata", ":", "param", "resource", ":", "The", "name", "of", "the", "resource", "to", "get", "metadata", "for", ":", "return", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L152-L161
refindlyllc/rets
rets/session.py
Session.get_table_metadata
def get_table_metadata(self, resource, resource_class): """ Get metadata for a given resource: class :param resource: The name of the resource :param resource_class: The name of the class to get metadata from :return: list """ return self._make_metadata_request(meta_id=resource + ':' + resource_class, metadata_type='METADATA-TABLE')
python
def get_table_metadata(self, resource, resource_class): """ Get metadata for a given resource: class :param resource: The name of the resource :param resource_class: The name of the class to get metadata from :return: list """ return self._make_metadata_request(meta_id=resource + ':' + resource_class, metadata_type='METADATA-TABLE')
[ "def", "get_table_metadata", "(", "self", ",", "resource", ",", "resource_class", ")", ":", "return", "self", ".", "_make_metadata_request", "(", "meta_id", "=", "resource", "+", "':'", "+", "resource_class", ",", "metadata_type", "=", "'METADATA-TABLE'", ")" ]
Get metadata for a given resource: class :param resource: The name of the resource :param resource_class: The name of the class to get metadata from :return: list
[ "Get", "metadata", "for", "a", "given", "resource", ":", "class", ":", "param", "resource", ":", "The", "name", "of", "the", "resource", ":", "param", "resource_class", ":", "The", "name", "of", "the", "class", "to", "get", "metadata", "from", ":", "return", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L171-L178
refindlyllc/rets
rets/session.py
Session.get_lookup_values
def get_lookup_values(self, resource, lookup_name): """ Get possible lookup values for a given field :param resource: The name of the resource :param lookup_name: The name of the the field to get lookup values for :return: list """ return self._make_metadata_request(meta_id=resource + ':' + lookup_name, metadata_type='METADATA-LOOKUP_TYPE')
python
def get_lookup_values(self, resource, lookup_name): """ Get possible lookup values for a given field :param resource: The name of the resource :param lookup_name: The name of the the field to get lookup values for :return: list """ return self._make_metadata_request(meta_id=resource + ':' + lookup_name, metadata_type='METADATA-LOOKUP_TYPE')
[ "def", "get_lookup_values", "(", "self", ",", "resource", ",", "lookup_name", ")", ":", "return", "self", ".", "_make_metadata_request", "(", "meta_id", "=", "resource", "+", "':'", "+", "lookup_name", ",", "metadata_type", "=", "'METADATA-LOOKUP_TYPE'", ")" ]
Get possible lookup values for a given field :param resource: The name of the resource :param lookup_name: The name of the the field to get lookup values for :return: list
[ "Get", "possible", "lookup", "values", "for", "a", "given", "field", ":", "param", "resource", ":", "The", "name", "of", "the", "resource", ":", "param", "lookup_name", ":", "The", "name", "of", "the", "the", "field", "to", "get", "lookup", "values", "for", ":", "return", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L188-L195
refindlyllc/rets
rets/session.py
Session._make_metadata_request
def _make_metadata_request(self, meta_id, metadata_type=None): """ Get the Metadata. The Session initializes with 'COMPACT-DECODED' as the format type. If that returns a DTD error then we change to the 'STANDARD-XML' format and try again. :param meta_id: The name of the resource, class, or lookup to get metadata for :param metadata_type: The RETS metadata type :return: list """ # If this metadata _request has already happened, returned the saved result. key = '{0!s}:{1!s}'.format(metadata_type, meta_id) if key in self.metadata_responses and self.cache_metadata: response = self.metadata_responses[key] else: response = self._request( capability='GetMetadata', options={ 'query': { 'Type': metadata_type, 'ID': meta_id, 'Format': self.metadata_format } } ) self.metadata_responses[key] = response if self.metadata_format == 'COMPACT-DECODED': parser = CompactMetadata() else: parser = StandardXMLetadata() try: return parser.parse(response=response, metadata_type=metadata_type) except RETSException as e: # Remove response from cache self.metadata_responses.pop(key, None) # If the server responds with an invalid parameter for COMPACT-DECODED, try STANDARD-XML if self.metadata_format != 'STANDARD-XML' and e.reply_code in ['20513', '20514']: self.metadata_responses.pop(key, None) self.metadata_format = 'STANDARD-XML' return self._make_metadata_request(meta_id=meta_id, metadata_type=metadata_type) raise RETSException(e.reply_text, e.reply_code)
python
def _make_metadata_request(self, meta_id, metadata_type=None): """ Get the Metadata. The Session initializes with 'COMPACT-DECODED' as the format type. If that returns a DTD error then we change to the 'STANDARD-XML' format and try again. :param meta_id: The name of the resource, class, or lookup to get metadata for :param metadata_type: The RETS metadata type :return: list """ # If this metadata _request has already happened, returned the saved result. key = '{0!s}:{1!s}'.format(metadata_type, meta_id) if key in self.metadata_responses and self.cache_metadata: response = self.metadata_responses[key] else: response = self._request( capability='GetMetadata', options={ 'query': { 'Type': metadata_type, 'ID': meta_id, 'Format': self.metadata_format } } ) self.metadata_responses[key] = response if self.metadata_format == 'COMPACT-DECODED': parser = CompactMetadata() else: parser = StandardXMLetadata() try: return parser.parse(response=response, metadata_type=metadata_type) except RETSException as e: # Remove response from cache self.metadata_responses.pop(key, None) # If the server responds with an invalid parameter for COMPACT-DECODED, try STANDARD-XML if self.metadata_format != 'STANDARD-XML' and e.reply_code in ['20513', '20514']: self.metadata_responses.pop(key, None) self.metadata_format = 'STANDARD-XML' return self._make_metadata_request(meta_id=meta_id, metadata_type=metadata_type) raise RETSException(e.reply_text, e.reply_code)
[ "def", "_make_metadata_request", "(", "self", ",", "meta_id", ",", "metadata_type", "=", "None", ")", ":", "# If this metadata _request has already happened, returned the saved result.", "key", "=", "'{0!s}:{1!s}'", ".", "format", "(", "metadata_type", ",", "meta_id", ")", "if", "key", "in", "self", ".", "metadata_responses", "and", "self", ".", "cache_metadata", ":", "response", "=", "self", ".", "metadata_responses", "[", "key", "]", "else", ":", "response", "=", "self", ".", "_request", "(", "capability", "=", "'GetMetadata'", ",", "options", "=", "{", "'query'", ":", "{", "'Type'", ":", "metadata_type", ",", "'ID'", ":", "meta_id", ",", "'Format'", ":", "self", ".", "metadata_format", "}", "}", ")", "self", ".", "metadata_responses", "[", "key", "]", "=", "response", "if", "self", ".", "metadata_format", "==", "'COMPACT-DECODED'", ":", "parser", "=", "CompactMetadata", "(", ")", "else", ":", "parser", "=", "StandardXMLetadata", "(", ")", "try", ":", "return", "parser", ".", "parse", "(", "response", "=", "response", ",", "metadata_type", "=", "metadata_type", ")", "except", "RETSException", "as", "e", ":", "# Remove response from cache", "self", ".", "metadata_responses", ".", "pop", "(", "key", ",", "None", ")", "# If the server responds with an invalid parameter for COMPACT-DECODED, try STANDARD-XML", "if", "self", ".", "metadata_format", "!=", "'STANDARD-XML'", "and", "e", ".", "reply_code", "in", "[", "'20513'", ",", "'20514'", "]", ":", "self", ".", "metadata_responses", ".", "pop", "(", "key", ",", "None", ")", "self", ".", "metadata_format", "=", "'STANDARD-XML'", "return", "self", ".", "_make_metadata_request", "(", "meta_id", "=", "meta_id", ",", "metadata_type", "=", "metadata_type", ")", "raise", "RETSException", "(", "e", ".", "reply_text", ",", "e", ".", "reply_code", ")" ]
Get the Metadata. The Session initializes with 'COMPACT-DECODED' as the format type. If that returns a DTD error then we change to the 'STANDARD-XML' format and try again. :param meta_id: The name of the resource, class, or lookup to get metadata for :param metadata_type: The RETS metadata type :return: list
[ "Get", "the", "Metadata", ".", "The", "Session", "initializes", "with", "COMPACT", "-", "DECODED", "as", "the", "format", "type", ".", "If", "that", "returns", "a", "DTD", "error", "then", "we", "change", "to", "the", "STANDARD", "-", "XML", "format", "and", "try", "again", ".", ":", "param", "meta_id", ":", "The", "name", "of", "the", "resource", "class", "or", "lookup", "to", "get", "metadata", "for", ":", "param", "metadata_type", ":", "The", "RETS", "metadata", "type", ":", "return", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L197-L238
refindlyllc/rets
rets/session.py
Session.get_preferred_object
def get_preferred_object(self, resource, object_type, content_id, location=0): """ Get the first object from a Resource :param resource: The name of the resource :param object_type: The type of object to fetch :param content_id: The unique id of the item to get objects for :param location: The path to get Objects from :return: Object """ collection = self.get_object(resource=resource, object_type=object_type, content_ids=content_id, object_ids='0', location=location) return collection[0]
python
def get_preferred_object(self, resource, object_type, content_id, location=0): """ Get the first object from a Resource :param resource: The name of the resource :param object_type: The type of object to fetch :param content_id: The unique id of the item to get objects for :param location: The path to get Objects from :return: Object """ collection = self.get_object(resource=resource, object_type=object_type, content_ids=content_id, object_ids='0', location=location) return collection[0]
[ "def", "get_preferred_object", "(", "self", ",", "resource", ",", "object_type", ",", "content_id", ",", "location", "=", "0", ")", ":", "collection", "=", "self", ".", "get_object", "(", "resource", "=", "resource", ",", "object_type", "=", "object_type", ",", "content_ids", "=", "content_id", ",", "object_ids", "=", "'0'", ",", "location", "=", "location", ")", "return", "collection", "[", "0", "]" ]
Get the first object from a Resource :param resource: The name of the resource :param object_type: The type of object to fetch :param content_id: The unique id of the item to get objects for :param location: The path to get Objects from :return: Object
[ "Get", "the", "first", "object", "from", "a", "Resource", ":", "param", "resource", ":", "The", "name", "of", "the", "resource", ":", "param", "object_type", ":", "The", "type", "of", "object", "to", "fetch", ":", "param", "content_id", ":", "The", "unique", "id", "of", "the", "item", "to", "get", "objects", "for", ":", "param", "location", ":", "The", "path", "to", "get", "Objects", "from", ":", "return", ":", "Object" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L240-L251
refindlyllc/rets
rets/session.py
Session.get_object
def get_object(self, resource, object_type, content_ids, object_ids='*', location=0): """ Get a list of Objects from a resource :param resource: The resource to get objects from :param object_type: The type of object to fetch :param content_ids: The unique id of the item to get objects for :param object_ids: ids of the objects to download :param location: The path to get Objects from :return: list """ object_helper = GetObject() request_ids = object_helper.ids(content_ids=content_ids, object_ids=object_ids) response = self._request( capability='GetObject', options={ 'query': { "Resource": resource, "Type": object_type, "ID": ','.join(request_ids), "Location": location } } ) if 'multipart' in response.headers.get('Content-Type'): parser = MultipleObjectParser() collection = parser.parse_image_response(response) else: parser = SingleObjectParser() collection = [parser.parse_image_response(response)] return collection
python
def get_object(self, resource, object_type, content_ids, object_ids='*', location=0): """ Get a list of Objects from a resource :param resource: The resource to get objects from :param object_type: The type of object to fetch :param content_ids: The unique id of the item to get objects for :param object_ids: ids of the objects to download :param location: The path to get Objects from :return: list """ object_helper = GetObject() request_ids = object_helper.ids(content_ids=content_ids, object_ids=object_ids) response = self._request( capability='GetObject', options={ 'query': { "Resource": resource, "Type": object_type, "ID": ','.join(request_ids), "Location": location } } ) if 'multipart' in response.headers.get('Content-Type'): parser = MultipleObjectParser() collection = parser.parse_image_response(response) else: parser = SingleObjectParser() collection = [parser.parse_image_response(response)] return collection
[ "def", "get_object", "(", "self", ",", "resource", ",", "object_type", ",", "content_ids", ",", "object_ids", "=", "'*'", ",", "location", "=", "0", ")", ":", "object_helper", "=", "GetObject", "(", ")", "request_ids", "=", "object_helper", ".", "ids", "(", "content_ids", "=", "content_ids", ",", "object_ids", "=", "object_ids", ")", "response", "=", "self", ".", "_request", "(", "capability", "=", "'GetObject'", ",", "options", "=", "{", "'query'", ":", "{", "\"Resource\"", ":", "resource", ",", "\"Type\"", ":", "object_type", ",", "\"ID\"", ":", "','", ".", "join", "(", "request_ids", ")", ",", "\"Location\"", ":", "location", "}", "}", ")", "if", "'multipart'", "in", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ")", ":", "parser", "=", "MultipleObjectParser", "(", ")", "collection", "=", "parser", ".", "parse_image_response", "(", "response", ")", "else", ":", "parser", "=", "SingleObjectParser", "(", ")", "collection", "=", "[", "parser", ".", "parse_image_response", "(", "response", ")", "]", "return", "collection" ]
Get a list of Objects from a resource :param resource: The resource to get objects from :param object_type: The type of object to fetch :param content_ids: The unique id of the item to get objects for :param object_ids: ids of the objects to download :param location: The path to get Objects from :return: list
[ "Get", "a", "list", "of", "Objects", "from", "a", "resource", ":", "param", "resource", ":", "The", "resource", "to", "get", "objects", "from", ":", "param", "object_type", ":", "The", "type", "of", "object", "to", "fetch", ":", "param", "content_ids", ":", "The", "unique", "id", "of", "the", "item", "to", "get", "objects", "for", ":", "param", "object_ids", ":", "ids", "of", "the", "objects", "to", "download", ":", "param", "location", ":", "The", "path", "to", "get", "Objects", "from", ":", "return", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L253-L286
refindlyllc/rets
rets/session.py
Session.search
def search(self, resource, resource_class, search_filter=None, dmql_query=None, limit=9999999, offset=0, optional_parameters=None, auto_offset=True, query_type='DMQL2', standard_names=0, response_format='COMPACT-DECODED'): """ Preform a search on the RETS board :param resource: The resource that contains the class to search :param resource_class: The class to search :param search_filter: The query as a dict :param dmql_query: The query in dmql format :param limit: Limit search values count :param offset: Offset for RETS request. Useful when RETS limits number of results or transactions :param optional_parameters: Values for option paramters :param auto_offset: Should the search be allowed to trigger subsequent searches. :param query_type: DMQL or DMQL2 depending on the rets server. :param standard_names: 1 to use standard names, 0 to use system names :param response_format: COMPACT-DECODED, COMPACT, or STANDARD-XML :return: dict """ if (search_filter and dmql_query) or (not search_filter and not dmql_query): raise ValueError("You may specify either a search_filter or dmql_query") search_helper = DMQLHelper() if dmql_query: dmql_query = search_helper.dmql(query=dmql_query) else: dmql_query = search_helper.filter_to_dmql(filter_dict=search_filter) parameters = { 'SearchType': resource, 'Class': resource_class, 'Query': dmql_query, 'QueryType': query_type, 'Count': 1, 'Format': response_format, 'StandardNames': standard_names, } if not optional_parameters: optional_parameters = {} parameters.update(optional_parameters) # if the Select parameter given is an array, format it as it needs to be if 'Select' in parameters and isinstance(parameters.get('Select'), list): parameters['Select'] = ','.join(parameters['Select']) if limit: parameters['Limit'] = limit if offset: parameters['Offset'] = offset search_cursor = OneXSearchCursor() response = self._request( capability='Search', options={ 'query': parameters, }, stream=True ) try: return search_cursor.generator(response=response) except MaxrowException as max_exception: # Recursive searching if automatically performing offsets for the client if auto_offset and limit > len(max_exception.rows_returned): new_limit = limit - len(max_exception.rows_returned) # have not returned results to the desired limit new_offset = offset + len(max_exception.rows_returned) # adjust offset results = self.search(resource=resource, resource_class=resource_class, search_filter=None, dmql_query=dmql_query, offset=new_offset, limit=new_limit, optional_parameters=optional_parameters, auto_offset=auto_offset) previous_results = max_exception.rows_returned return previous_results + results return max_exception.rows_returned
python
def search(self, resource, resource_class, search_filter=None, dmql_query=None, limit=9999999, offset=0, optional_parameters=None, auto_offset=True, query_type='DMQL2', standard_names=0, response_format='COMPACT-DECODED'): """ Preform a search on the RETS board :param resource: The resource that contains the class to search :param resource_class: The class to search :param search_filter: The query as a dict :param dmql_query: The query in dmql format :param limit: Limit search values count :param offset: Offset for RETS request. Useful when RETS limits number of results or transactions :param optional_parameters: Values for option paramters :param auto_offset: Should the search be allowed to trigger subsequent searches. :param query_type: DMQL or DMQL2 depending on the rets server. :param standard_names: 1 to use standard names, 0 to use system names :param response_format: COMPACT-DECODED, COMPACT, or STANDARD-XML :return: dict """ if (search_filter and dmql_query) or (not search_filter and not dmql_query): raise ValueError("You may specify either a search_filter or dmql_query") search_helper = DMQLHelper() if dmql_query: dmql_query = search_helper.dmql(query=dmql_query) else: dmql_query = search_helper.filter_to_dmql(filter_dict=search_filter) parameters = { 'SearchType': resource, 'Class': resource_class, 'Query': dmql_query, 'QueryType': query_type, 'Count': 1, 'Format': response_format, 'StandardNames': standard_names, } if not optional_parameters: optional_parameters = {} parameters.update(optional_parameters) # if the Select parameter given is an array, format it as it needs to be if 'Select' in parameters and isinstance(parameters.get('Select'), list): parameters['Select'] = ','.join(parameters['Select']) if limit: parameters['Limit'] = limit if offset: parameters['Offset'] = offset search_cursor = OneXSearchCursor() response = self._request( capability='Search', options={ 'query': parameters, }, stream=True ) try: return search_cursor.generator(response=response) except MaxrowException as max_exception: # Recursive searching if automatically performing offsets for the client if auto_offset and limit > len(max_exception.rows_returned): new_limit = limit - len(max_exception.rows_returned) # have not returned results to the desired limit new_offset = offset + len(max_exception.rows_returned) # adjust offset results = self.search(resource=resource, resource_class=resource_class, search_filter=None, dmql_query=dmql_query, offset=new_offset, limit=new_limit, optional_parameters=optional_parameters, auto_offset=auto_offset) previous_results = max_exception.rows_returned return previous_results + results return max_exception.rows_returned
[ "def", "search", "(", "self", ",", "resource", ",", "resource_class", ",", "search_filter", "=", "None", ",", "dmql_query", "=", "None", ",", "limit", "=", "9999999", ",", "offset", "=", "0", ",", "optional_parameters", "=", "None", ",", "auto_offset", "=", "True", ",", "query_type", "=", "'DMQL2'", ",", "standard_names", "=", "0", ",", "response_format", "=", "'COMPACT-DECODED'", ")", ":", "if", "(", "search_filter", "and", "dmql_query", ")", "or", "(", "not", "search_filter", "and", "not", "dmql_query", ")", ":", "raise", "ValueError", "(", "\"You may specify either a search_filter or dmql_query\"", ")", "search_helper", "=", "DMQLHelper", "(", ")", "if", "dmql_query", ":", "dmql_query", "=", "search_helper", ".", "dmql", "(", "query", "=", "dmql_query", ")", "else", ":", "dmql_query", "=", "search_helper", ".", "filter_to_dmql", "(", "filter_dict", "=", "search_filter", ")", "parameters", "=", "{", "'SearchType'", ":", "resource", ",", "'Class'", ":", "resource_class", ",", "'Query'", ":", "dmql_query", ",", "'QueryType'", ":", "query_type", ",", "'Count'", ":", "1", ",", "'Format'", ":", "response_format", ",", "'StandardNames'", ":", "standard_names", ",", "}", "if", "not", "optional_parameters", ":", "optional_parameters", "=", "{", "}", "parameters", ".", "update", "(", "optional_parameters", ")", "# if the Select parameter given is an array, format it as it needs to be", "if", "'Select'", "in", "parameters", "and", "isinstance", "(", "parameters", ".", "get", "(", "'Select'", ")", ",", "list", ")", ":", "parameters", "[", "'Select'", "]", "=", "','", ".", "join", "(", "parameters", "[", "'Select'", "]", ")", "if", "limit", ":", "parameters", "[", "'Limit'", "]", "=", "limit", "if", "offset", ":", "parameters", "[", "'Offset'", "]", "=", "offset", "search_cursor", "=", "OneXSearchCursor", "(", ")", "response", "=", "self", ".", "_request", "(", "capability", "=", "'Search'", ",", "options", "=", "{", "'query'", ":", "parameters", ",", "}", ",", "stream", "=", "True", ")", "try", ":", "return", "search_cursor", ".", "generator", "(", "response", "=", "response", ")", "except", "MaxrowException", "as", "max_exception", ":", "# Recursive searching if automatically performing offsets for the client", "if", "auto_offset", "and", "limit", ">", "len", "(", "max_exception", ".", "rows_returned", ")", ":", "new_limit", "=", "limit", "-", "len", "(", "max_exception", ".", "rows_returned", ")", "# have not returned results to the desired limit", "new_offset", "=", "offset", "+", "len", "(", "max_exception", ".", "rows_returned", ")", "# adjust offset", "results", "=", "self", ".", "search", "(", "resource", "=", "resource", ",", "resource_class", "=", "resource_class", ",", "search_filter", "=", "None", ",", "dmql_query", "=", "dmql_query", ",", "offset", "=", "new_offset", ",", "limit", "=", "new_limit", ",", "optional_parameters", "=", "optional_parameters", ",", "auto_offset", "=", "auto_offset", ")", "previous_results", "=", "max_exception", ".", "rows_returned", "return", "previous_results", "+", "results", "return", "max_exception", ".", "rows_returned" ]
Preform a search on the RETS board :param resource: The resource that contains the class to search :param resource_class: The class to search :param search_filter: The query as a dict :param dmql_query: The query in dmql format :param limit: Limit search values count :param offset: Offset for RETS request. Useful when RETS limits number of results or transactions :param optional_parameters: Values for option paramters :param auto_offset: Should the search be allowed to trigger subsequent searches. :param query_type: DMQL or DMQL2 depending on the rets server. :param standard_names: 1 to use standard names, 0 to use system names :param response_format: COMPACT-DECODED, COMPACT, or STANDARD-XML :return: dict
[ "Preform", "a", "search", "on", "the", "RETS", "board", ":", "param", "resource", ":", "The", "resource", "that", "contains", "the", "class", "to", "search", ":", "param", "resource_class", ":", "The", "class", "to", "search", ":", "param", "search_filter", ":", "The", "query", "as", "a", "dict", ":", "param", "dmql_query", ":", "The", "query", "in", "dmql", "format", ":", "param", "limit", ":", "Limit", "search", "values", "count", ":", "param", "offset", ":", "Offset", "for", "RETS", "request", ".", "Useful", "when", "RETS", "limits", "number", "of", "results", "or", "transactions", ":", "param", "optional_parameters", ":", "Values", "for", "option", "paramters", ":", "param", "auto_offset", ":", "Should", "the", "search", "be", "allowed", "to", "trigger", "subsequent", "searches", ".", ":", "param", "query_type", ":", "DMQL", "or", "DMQL2", "depending", "on", "the", "rets", "server", ".", ":", "param", "standard_names", ":", "1", "to", "use", "standard", "names", "0", "to", "use", "system", "names", ":", "param", "response_format", ":", "COMPACT", "-", "DECODED", "COMPACT", "or", "STANDARD", "-", "XML", ":", "return", ":", "dict" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L288-L363
refindlyllc/rets
rets/session.py
Session._request
def _request(self, capability, options=None, stream=False): """ Make a _request to the RETS server :param capability: The name of the capability to use to get the URI :param options: Options to put into the _request :return: Response """ if options is None: options = {} options.update({ 'headers': self.client.headers.copy() }) url = self.capabilities.get(capability) if not url: msg = "{0!s} tried but no valid endpoints was found. Did you forget to Login?".format(capability) raise NotLoggedIn(msg) if self.user_agent_password: ua_digest = self._user_agent_digest_hash() options['headers']['RETS-UA-Authorization'] = 'Digest {0!s}'.format(ua_digest) if self.use_post_method and capability != 'Action': # Action Requests should always be GET query = options.get('query') response = self.client.post(url, data=query, headers=options['headers'], stream=stream) else: if 'query' in options: url += '?' + '&'.join('{0!s}={1!s}'.format(k, quote(str(v))) for k, v in options['query'].items()) response = self.client.get(url, headers=options['headers'], stream=stream) if response.status_code in [400, 401]: if capability == 'Login': m = "Could not log into the RETS server with the provided credentials." else: m = "The RETS server returned a 401 status code. You must be logged in to make this request." raise NotLoggedIn(m) elif response.status_code == 404 and self.use_post_method: raise HTTPException("Got a 404 when making a POST request. Try setting use_post_method=False when " "initializing the Session.") return response
python
def _request(self, capability, options=None, stream=False): """ Make a _request to the RETS server :param capability: The name of the capability to use to get the URI :param options: Options to put into the _request :return: Response """ if options is None: options = {} options.update({ 'headers': self.client.headers.copy() }) url = self.capabilities.get(capability) if not url: msg = "{0!s} tried but no valid endpoints was found. Did you forget to Login?".format(capability) raise NotLoggedIn(msg) if self.user_agent_password: ua_digest = self._user_agent_digest_hash() options['headers']['RETS-UA-Authorization'] = 'Digest {0!s}'.format(ua_digest) if self.use_post_method and capability != 'Action': # Action Requests should always be GET query = options.get('query') response = self.client.post(url, data=query, headers=options['headers'], stream=stream) else: if 'query' in options: url += '?' + '&'.join('{0!s}={1!s}'.format(k, quote(str(v))) for k, v in options['query'].items()) response = self.client.get(url, headers=options['headers'], stream=stream) if response.status_code in [400, 401]: if capability == 'Login': m = "Could not log into the RETS server with the provided credentials." else: m = "The RETS server returned a 401 status code. You must be logged in to make this request." raise NotLoggedIn(m) elif response.status_code == 404 and self.use_post_method: raise HTTPException("Got a 404 when making a POST request. Try setting use_post_method=False when " "initializing the Session.") return response
[ "def", "_request", "(", "self", ",", "capability", ",", "options", "=", "None", ",", "stream", "=", "False", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "options", ".", "update", "(", "{", "'headers'", ":", "self", ".", "client", ".", "headers", ".", "copy", "(", ")", "}", ")", "url", "=", "self", ".", "capabilities", ".", "get", "(", "capability", ")", "if", "not", "url", ":", "msg", "=", "\"{0!s} tried but no valid endpoints was found. Did you forget to Login?\"", ".", "format", "(", "capability", ")", "raise", "NotLoggedIn", "(", "msg", ")", "if", "self", ".", "user_agent_password", ":", "ua_digest", "=", "self", ".", "_user_agent_digest_hash", "(", ")", "options", "[", "'headers'", "]", "[", "'RETS-UA-Authorization'", "]", "=", "'Digest {0!s}'", ".", "format", "(", "ua_digest", ")", "if", "self", ".", "use_post_method", "and", "capability", "!=", "'Action'", ":", "# Action Requests should always be GET", "query", "=", "options", ".", "get", "(", "'query'", ")", "response", "=", "self", ".", "client", ".", "post", "(", "url", ",", "data", "=", "query", ",", "headers", "=", "options", "[", "'headers'", "]", ",", "stream", "=", "stream", ")", "else", ":", "if", "'query'", "in", "options", ":", "url", "+=", "'?'", "+", "'&'", ".", "join", "(", "'{0!s}={1!s}'", ".", "format", "(", "k", ",", "quote", "(", "str", "(", "v", ")", ")", ")", "for", "k", ",", "v", "in", "options", "[", "'query'", "]", ".", "items", "(", ")", ")", "response", "=", "self", ".", "client", ".", "get", "(", "url", ",", "headers", "=", "options", "[", "'headers'", "]", ",", "stream", "=", "stream", ")", "if", "response", ".", "status_code", "in", "[", "400", ",", "401", "]", ":", "if", "capability", "==", "'Login'", ":", "m", "=", "\"Could not log into the RETS server with the provided credentials.\"", "else", ":", "m", "=", "\"The RETS server returned a 401 status code. You must be logged in to make this request.\"", "raise", "NotLoggedIn", "(", "m", ")", "elif", "response", ".", "status_code", "==", "404", "and", "self", ".", "use_post_method", ":", "raise", "HTTPException", "(", "\"Got a 404 when making a POST request. Try setting use_post_method=False when \"", "\"initializing the Session.\"", ")", "return", "response" ]
Make a _request to the RETS server :param capability: The name of the capability to use to get the URI :param options: Options to put into the _request :return: Response
[ "Make", "a", "_request", "to", "the", "RETS", "server", ":", "param", "capability", ":", "The", "name", "of", "the", "capability", "to", "use", "to", "get", "the", "URI", ":", "param", "options", ":", "Options", "to", "put", "into", "the", "_request", ":", "return", ":", "Response" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L365-L409
refindlyllc/rets
rets/session.py
Session._user_agent_digest_hash
def _user_agent_digest_hash(self): """ Hash the user agent and user agent password Section 3.10 of https://www.nar.realtor/retsorg.nsf/retsproto1.7d6.pdf :return: md5 """ if not self.version: raise MissingVersion("A version is required for user agent auth. The RETS server should set this" "automatically but it has not. Please instantiate the session with a version argument" "to provide the version.") version_number = self.version.strip('RETS/') user_str = '{0!s}:{1!s}'.format(self.user_agent, self.user_agent_password).encode('utf-8') a1 = hashlib.md5(user_str).hexdigest() session_id = self.session_id if self.session_id is not None else '' digest_str = '{0!s}::{1!s}:{2!s}'.format(a1, session_id, version_number).encode('utf-8') digest = hashlib.md5(digest_str).hexdigest() return digest
python
def _user_agent_digest_hash(self): """ Hash the user agent and user agent password Section 3.10 of https://www.nar.realtor/retsorg.nsf/retsproto1.7d6.pdf :return: md5 """ if not self.version: raise MissingVersion("A version is required for user agent auth. The RETS server should set this" "automatically but it has not. Please instantiate the session with a version argument" "to provide the version.") version_number = self.version.strip('RETS/') user_str = '{0!s}:{1!s}'.format(self.user_agent, self.user_agent_password).encode('utf-8') a1 = hashlib.md5(user_str).hexdigest() session_id = self.session_id if self.session_id is not None else '' digest_str = '{0!s}::{1!s}:{2!s}'.format(a1, session_id, version_number).encode('utf-8') digest = hashlib.md5(digest_str).hexdigest() return digest
[ "def", "_user_agent_digest_hash", "(", "self", ")", ":", "if", "not", "self", ".", "version", ":", "raise", "MissingVersion", "(", "\"A version is required for user agent auth. The RETS server should set this\"", "\"automatically but it has not. Please instantiate the session with a version argument\"", "\"to provide the version.\"", ")", "version_number", "=", "self", ".", "version", ".", "strip", "(", "'RETS/'", ")", "user_str", "=", "'{0!s}:{1!s}'", ".", "format", "(", "self", ".", "user_agent", ",", "self", ".", "user_agent_password", ")", ".", "encode", "(", "'utf-8'", ")", "a1", "=", "hashlib", ".", "md5", "(", "user_str", ")", ".", "hexdigest", "(", ")", "session_id", "=", "self", ".", "session_id", "if", "self", ".", "session_id", "is", "not", "None", "else", "''", "digest_str", "=", "'{0!s}::{1!s}:{2!s}'", ".", "format", "(", "a1", ",", "session_id", ",", "version_number", ")", ".", "encode", "(", "'utf-8'", ")", "digest", "=", "hashlib", ".", "md5", "(", "digest_str", ")", ".", "hexdigest", "(", ")", "return", "digest" ]
Hash the user agent and user agent password Section 3.10 of https://www.nar.realtor/retsorg.nsf/retsproto1.7d6.pdf :return: md5
[ "Hash", "the", "user", "agent", "and", "user", "agent", "password", "Section", "3", ".", "10", "of", "https", ":", "//", "www", ".", "nar", ".", "realtor", "/", "retsorg", ".", "nsf", "/", "retsproto1", ".", "7d6", ".", "pdf", ":", "return", ":", "md5" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/session.py#L411-L427
hamidfzm/Flask-HTMLmin
flask_htmlmin/__init__.py
HTMLMIN.response_minify
def response_minify(self, response): """ minify response html to decrease traffic """ if response.content_type == u'text/html; charset=utf-8': endpoint = request.endpoint or '' view_func = current_app.view_functions.get(endpoint, None) name = ( '%s.%s' % (view_func.__module__, view_func.__name__) if view_func else '' ) if name in self._exempt_routes: return response response.direct_passthrough = False response.set_data( self._html_minify.minify(response.get_data(as_text=True)) ) return response return response
python
def response_minify(self, response): """ minify response html to decrease traffic """ if response.content_type == u'text/html; charset=utf-8': endpoint = request.endpoint or '' view_func = current_app.view_functions.get(endpoint, None) name = ( '%s.%s' % (view_func.__module__, view_func.__name__) if view_func else '' ) if name in self._exempt_routes: return response response.direct_passthrough = False response.set_data( self._html_minify.minify(response.get_data(as_text=True)) ) return response return response
[ "def", "response_minify", "(", "self", ",", "response", ")", ":", "if", "response", ".", "content_type", "==", "u'text/html; charset=utf-8'", ":", "endpoint", "=", "request", ".", "endpoint", "or", "''", "view_func", "=", "current_app", ".", "view_functions", ".", "get", "(", "endpoint", ",", "None", ")", "name", "=", "(", "'%s.%s'", "%", "(", "view_func", ".", "__module__", ",", "view_func", ".", "__name__", ")", "if", "view_func", "else", "''", ")", "if", "name", "in", "self", ".", "_exempt_routes", ":", "return", "response", "response", ".", "direct_passthrough", "=", "False", "response", ".", "set_data", "(", "self", ".", "_html_minify", ".", "minify", "(", "response", ".", "get_data", "(", "as_text", "=", "True", ")", ")", ")", "return", "response", "return", "response" ]
minify response html to decrease traffic
[ "minify", "response", "html", "to", "decrease", "traffic" ]
train
https://github.com/hamidfzm/Flask-HTMLmin/blob/03de23347ac021da4011af36b57903a235268429/flask_htmlmin/__init__.py#L29-L50
hamidfzm/Flask-HTMLmin
flask_htmlmin/__init__.py
HTMLMIN.exempt
def exempt(self, obj): """ decorator to mark a view as exempt from htmlmin. """ name = '%s.%s' % (obj.__module__, obj.__name__) @wraps(obj) def __inner(*a, **k): return obj(*a, **k) self._exempt_routes.add(name) return __inner
python
def exempt(self, obj): """ decorator to mark a view as exempt from htmlmin. """ name = '%s.%s' % (obj.__module__, obj.__name__) @wraps(obj) def __inner(*a, **k): return obj(*a, **k) self._exempt_routes.add(name) return __inner
[ "def", "exempt", "(", "self", ",", "obj", ")", ":", "name", "=", "'%s.%s'", "%", "(", "obj", ".", "__module__", ",", "obj", ".", "__name__", ")", "@", "wraps", "(", "obj", ")", "def", "__inner", "(", "*", "a", ",", "*", "*", "k", ")", ":", "return", "obj", "(", "*", "a", ",", "*", "*", "k", ")", "self", ".", "_exempt_routes", ".", "add", "(", "name", ")", "return", "__inner" ]
decorator to mark a view as exempt from htmlmin.
[ "decorator", "to", "mark", "a", "view", "as", "exempt", "from", "htmlmin", "." ]
train
https://github.com/hamidfzm/Flask-HTMLmin/blob/03de23347ac021da4011af36b57903a235268429/flask_htmlmin/__init__.py#L52-L63
refindlyllc/rets
rets/utils/search.py
DMQLHelper.dmql
def dmql(query): """Client supplied raw DMQL, ensure quote wrap.""" if isinstance(query, dict): raise ValueError("You supplied a dictionary to the dmql_query parameter, but a string is required." " Did you mean to pass this to the search_filter parameter? ") # automatically surround the given query with parentheses if it doesn't have them already if len(query) > 0 and query != "*" and query[0] != '(' and query[-1] != ')': query = '({})'.format(query) return query
python
def dmql(query): """Client supplied raw DMQL, ensure quote wrap.""" if isinstance(query, dict): raise ValueError("You supplied a dictionary to the dmql_query parameter, but a string is required." " Did you mean to pass this to the search_filter parameter? ") # automatically surround the given query with parentheses if it doesn't have them already if len(query) > 0 and query != "*" and query[0] != '(' and query[-1] != ')': query = '({})'.format(query) return query
[ "def", "dmql", "(", "query", ")", ":", "if", "isinstance", "(", "query", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"You supplied a dictionary to the dmql_query parameter, but a string is required.\"", "\" Did you mean to pass this to the search_filter parameter? \"", ")", "# automatically surround the given query with parentheses if it doesn't have them already", "if", "len", "(", "query", ")", ">", "0", "and", "query", "!=", "\"*\"", "and", "query", "[", "0", "]", "!=", "'('", "and", "query", "[", "-", "1", "]", "!=", "')'", ":", "query", "=", "'({})'", ".", "format", "(", "query", ")", "return", "query" ]
Client supplied raw DMQL, ensure quote wrap.
[ "Client", "supplied", "raw", "DMQL", "ensure", "quote", "wrap", "." ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/utils/search.py#L14-L23
refindlyllc/rets
rets/utils/search.py
DMQLHelper.filter_to_dmql
def filter_to_dmql(filter_dict): """Converts the filter dictionary into DMQL""" if not isinstance(filter_dict, (dict, collections.OrderedDict)): raise TypeError('Expected a dictionary type buy got {} instead.'.format(type(filter_dict))) def is_date_time_type(val): """Returns True if the value is a datetime""" return isinstance(val, (datetime.datetime, datetime.date, datetime.time)) def evaluate_datetime(val): """Converts the datetime object into the RETS expected format""" date_format = '%Y-%m-%d' time_format = '%H:%M:%S' datetime_format = '{}T{}'.format(date_format, time_format) if isinstance(val, datetime.datetime): evaluated = val.strftime(datetime_format) elif isinstance(val, datetime.date): evaluated = val.strftime(date_format) elif isinstance(val, datetime.time): evaluated = val.strftime(time_format) else: evaluated = val return evaluated def evaluate_operators(key_dict): """Turns the custom filter operators into the expected RETS query""" allowed_operators = ['$gte', '$lte', '$contains', '$begins', '$ends', '$in', '$nin', '$neq'] # If key not in allowed_operators, assume it is a field name with the and operation. if not all(op in allowed_operators for op in key_dict.keys()): raise ValueError("You have supplied an invalid operator. " "Please provide one of the following {}".format(allowed_operators)) # We can have a single operator key, or the combination of gte/lte keys = key_dict.keys() string = '' # Search between two numbers or two dates if len(keys) == 2 and all(k in ['$gte', '$lte'] for k in keys): if all(is_date_time_type(key_dict[v]) for v in keys): # comparing dates string = '{}-{}'.format(evaluate_datetime(key_dict['$gte']), evaluate_datetime(key_dict['$lte'])) else: # comparing numbers try: float(key_dict['$gte']) float(key_dict['$lte']) except ValueError: raise ValueError("$gte and $lte expect numeric or datetime values") string = '{:.2f}-{:.2f}'.format(key_dict['$gte'], key_dict['$lte']) # Using a single operator key elif len(keys) == 1: if '$gte' in key_dict: if is_date_time_type(key_dict['$gte']): string = '{}+'.format(evaluate_datetime(key_dict['$gte'])) else: try: float(key_dict['$gte']) except ValueError: raise ValueError("$gte expects a numeric value or a datetime object") string = '{:.2f}+'.format(key_dict['$gte']) elif '$lte' in key_dict: if is_date_time_type(key_dict['$lte']): string = '{}-'.format(evaluate_datetime(key_dict['$lte'])) else: try: float(key_dict['$lte']) except ValueError: raise ValueError("$lte expects a numeric value or a datetime object") string = '{:.2f}-'.format(key_dict['$lte']) elif '$in' in key_dict: if not isinstance(key_dict['$in'], list): raise ValueError("$in expects a list of strings") key_dict['$in'] = [evaluate_datetime(v) for v in key_dict['$in']] if not all(isinstance(v, string_types) for v in key_dict['$in']): raise ValueError("$in expects a list of strings") options = ','.join(key_dict['$in']) string = '{}'.format(options) elif '$nin' in key_dict: if not isinstance(key_dict['$nin'], list): raise ValueError("$nin expects a list of strings") key_dict['$nin'] = [evaluate_datetime(v) for v in key_dict['$nin']] if not all(isinstance(v, string_types) for v in key_dict['$nin']): raise ValueError("$nin expects a list of strings") options = ','.join(key_dict['$nin']) string = '~{}'.format(options) elif '$contains' in key_dict: if not isinstance(key_dict['$contains'], string_types): raise ValueError("$contains expects a string.") string = '*{}*'.format(key_dict['$contains']) elif '$begins' in key_dict: if not 
isinstance(key_dict['$begins'], string_types): raise ValueError("$begins expects a string.") string = '{}*'.format(key_dict['$begins']) elif '$ends' in key_dict: if not isinstance(key_dict['$ends'], string_types): raise ValueError("$ends expects a string.") string = '*{}'.format(key_dict['$ends']) elif '$neq' in key_dict: string = '~{}'.format(key_dict['$neq']) else: # Provided too many or too few operators raise ValueError("Please supply $gte and $lte for getting values between numbers or 1 of {}".format( allowed_operators)) return string dmql_search_filters = [] for filt, value in filter_dict.items(): dmql_string = '({}='.format(filt) if isinstance(value, dict): # Applying an operator. This will need to be recursive because of the or possibility dmql_string += evaluate_operators(key_dict=value) else: # Simle equals statement dmql_string += '{}'.format(evaluate_datetime(value)) dmql_string += ')' dmql_search_filters.append(dmql_string) search_string = ','.join(dmql_search_filters) # Converts the filter dictionary to dmqp string logger.debug("Filter returned the following DMQL: {}".format(search_string)) return search_string
python
def filter_to_dmql(filter_dict): """Converts the filter dictionary into DMQL""" if not isinstance(filter_dict, (dict, collections.OrderedDict)): raise TypeError('Expected a dictionary type buy got {} instead.'.format(type(filter_dict))) def is_date_time_type(val): """Returns True if the value is a datetime""" return isinstance(val, (datetime.datetime, datetime.date, datetime.time)) def evaluate_datetime(val): """Converts the datetime object into the RETS expected format""" date_format = '%Y-%m-%d' time_format = '%H:%M:%S' datetime_format = '{}T{}'.format(date_format, time_format) if isinstance(val, datetime.datetime): evaluated = val.strftime(datetime_format) elif isinstance(val, datetime.date): evaluated = val.strftime(date_format) elif isinstance(val, datetime.time): evaluated = val.strftime(time_format) else: evaluated = val return evaluated def evaluate_operators(key_dict): """Turns the custom filter operators into the expected RETS query""" allowed_operators = ['$gte', '$lte', '$contains', '$begins', '$ends', '$in', '$nin', '$neq'] # If key not in allowed_operators, assume it is a field name with the and operation. if not all(op in allowed_operators for op in key_dict.keys()): raise ValueError("You have supplied an invalid operator. " "Please provide one of the following {}".format(allowed_operators)) # We can have a single operator key, or the combination of gte/lte keys = key_dict.keys() string = '' # Search between two numbers or two dates if len(keys) == 2 and all(k in ['$gte', '$lte'] for k in keys): if all(is_date_time_type(key_dict[v]) for v in keys): # comparing dates string = '{}-{}'.format(evaluate_datetime(key_dict['$gte']), evaluate_datetime(key_dict['$lte'])) else: # comparing numbers try: float(key_dict['$gte']) float(key_dict['$lte']) except ValueError: raise ValueError("$gte and $lte expect numeric or datetime values") string = '{:.2f}-{:.2f}'.format(key_dict['$gte'], key_dict['$lte']) # Using a single operator key elif len(keys) == 1: if '$gte' in key_dict: if is_date_time_type(key_dict['$gte']): string = '{}+'.format(evaluate_datetime(key_dict['$gte'])) else: try: float(key_dict['$gte']) except ValueError: raise ValueError("$gte expects a numeric value or a datetime object") string = '{:.2f}+'.format(key_dict['$gte']) elif '$lte' in key_dict: if is_date_time_type(key_dict['$lte']): string = '{}-'.format(evaluate_datetime(key_dict['$lte'])) else: try: float(key_dict['$lte']) except ValueError: raise ValueError("$lte expects a numeric value or a datetime object") string = '{:.2f}-'.format(key_dict['$lte']) elif '$in' in key_dict: if not isinstance(key_dict['$in'], list): raise ValueError("$in expects a list of strings") key_dict['$in'] = [evaluate_datetime(v) for v in key_dict['$in']] if not all(isinstance(v, string_types) for v in key_dict['$in']): raise ValueError("$in expects a list of strings") options = ','.join(key_dict['$in']) string = '{}'.format(options) elif '$nin' in key_dict: if not isinstance(key_dict['$nin'], list): raise ValueError("$nin expects a list of strings") key_dict['$nin'] = [evaluate_datetime(v) for v in key_dict['$nin']] if not all(isinstance(v, string_types) for v in key_dict['$nin']): raise ValueError("$nin expects a list of strings") options = ','.join(key_dict['$nin']) string = '~{}'.format(options) elif '$contains' in key_dict: if not isinstance(key_dict['$contains'], string_types): raise ValueError("$contains expects a string.") string = '*{}*'.format(key_dict['$contains']) elif '$begins' in key_dict: if not 
isinstance(key_dict['$begins'], string_types): raise ValueError("$begins expects a string.") string = '{}*'.format(key_dict['$begins']) elif '$ends' in key_dict: if not isinstance(key_dict['$ends'], string_types): raise ValueError("$ends expects a string.") string = '*{}'.format(key_dict['$ends']) elif '$neq' in key_dict: string = '~{}'.format(key_dict['$neq']) else: # Provided too many or too few operators raise ValueError("Please supply $gte and $lte for getting values between numbers or 1 of {}".format( allowed_operators)) return string dmql_search_filters = [] for filt, value in filter_dict.items(): dmql_string = '({}='.format(filt) if isinstance(value, dict): # Applying an operator. This will need to be recursive because of the or possibility dmql_string += evaluate_operators(key_dict=value) else: # Simle equals statement dmql_string += '{}'.format(evaluate_datetime(value)) dmql_string += ')' dmql_search_filters.append(dmql_string) search_string = ','.join(dmql_search_filters) # Converts the filter dictionary to dmqp string logger.debug("Filter returned the following DMQL: {}".format(search_string)) return search_string
[ "def", "filter_to_dmql", "(", "filter_dict", ")", ":", "if", "not", "isinstance", "(", "filter_dict", ",", "(", "dict", ",", "collections", ".", "OrderedDict", ")", ")", ":", "raise", "TypeError", "(", "'Expected a dictionary type buy got {} instead.'", ".", "format", "(", "type", "(", "filter_dict", ")", ")", ")", "def", "is_date_time_type", "(", "val", ")", ":", "\"\"\"Returns True if the value is a datetime\"\"\"", "return", "isinstance", "(", "val", ",", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ",", "datetime", ".", "time", ")", ")", "def", "evaluate_datetime", "(", "val", ")", ":", "\"\"\"Converts the datetime object into the RETS expected format\"\"\"", "date_format", "=", "'%Y-%m-%d'", "time_format", "=", "'%H:%M:%S'", "datetime_format", "=", "'{}T{}'", ".", "format", "(", "date_format", ",", "time_format", ")", "if", "isinstance", "(", "val", ",", "datetime", ".", "datetime", ")", ":", "evaluated", "=", "val", ".", "strftime", "(", "datetime_format", ")", "elif", "isinstance", "(", "val", ",", "datetime", ".", "date", ")", ":", "evaluated", "=", "val", ".", "strftime", "(", "date_format", ")", "elif", "isinstance", "(", "val", ",", "datetime", ".", "time", ")", ":", "evaluated", "=", "val", ".", "strftime", "(", "time_format", ")", "else", ":", "evaluated", "=", "val", "return", "evaluated", "def", "evaluate_operators", "(", "key_dict", ")", ":", "\"\"\"Turns the custom filter operators into the expected RETS query\"\"\"", "allowed_operators", "=", "[", "'$gte'", ",", "'$lte'", ",", "'$contains'", ",", "'$begins'", ",", "'$ends'", ",", "'$in'", ",", "'$nin'", ",", "'$neq'", "]", "# If key not in allowed_operators, assume it is a field name with the and operation.", "if", "not", "all", "(", "op", "in", "allowed_operators", "for", "op", "in", "key_dict", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "\"You have supplied an invalid operator. 
\"", "\"Please provide one of the following {}\"", ".", "format", "(", "allowed_operators", ")", ")", "# We can have a single operator key, or the combination of gte/lte", "keys", "=", "key_dict", ".", "keys", "(", ")", "string", "=", "''", "# Search between two numbers or two dates", "if", "len", "(", "keys", ")", "==", "2", "and", "all", "(", "k", "in", "[", "'$gte'", ",", "'$lte'", "]", "for", "k", "in", "keys", ")", ":", "if", "all", "(", "is_date_time_type", "(", "key_dict", "[", "v", "]", ")", "for", "v", "in", "keys", ")", ":", "# comparing dates", "string", "=", "'{}-{}'", ".", "format", "(", "evaluate_datetime", "(", "key_dict", "[", "'$gte'", "]", ")", ",", "evaluate_datetime", "(", "key_dict", "[", "'$lte'", "]", ")", ")", "else", ":", "# comparing numbers", "try", ":", "float", "(", "key_dict", "[", "'$gte'", "]", ")", "float", "(", "key_dict", "[", "'$lte'", "]", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"$gte and $lte expect numeric or datetime values\"", ")", "string", "=", "'{:.2f}-{:.2f}'", ".", "format", "(", "key_dict", "[", "'$gte'", "]", ",", "key_dict", "[", "'$lte'", "]", ")", "# Using a single operator key", "elif", "len", "(", "keys", ")", "==", "1", ":", "if", "'$gte'", "in", "key_dict", ":", "if", "is_date_time_type", "(", "key_dict", "[", "'$gte'", "]", ")", ":", "string", "=", "'{}+'", ".", "format", "(", "evaluate_datetime", "(", "key_dict", "[", "'$gte'", "]", ")", ")", "else", ":", "try", ":", "float", "(", "key_dict", "[", "'$gte'", "]", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"$gte expects a numeric value or a datetime object\"", ")", "string", "=", "'{:.2f}+'", ".", "format", "(", "key_dict", "[", "'$gte'", "]", ")", "elif", "'$lte'", "in", "key_dict", ":", "if", "is_date_time_type", "(", "key_dict", "[", "'$lte'", "]", ")", ":", "string", "=", "'{}-'", ".", "format", "(", "evaluate_datetime", "(", "key_dict", "[", "'$lte'", "]", ")", ")", "else", ":", "try", ":", "float", "(", "key_dict", "[", "'$lte'", "]", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"$lte expects a numeric value or a datetime object\"", ")", "string", "=", "'{:.2f}-'", ".", "format", "(", "key_dict", "[", "'$lte'", "]", ")", "elif", "'$in'", "in", "key_dict", ":", "if", "not", "isinstance", "(", "key_dict", "[", "'$in'", "]", ",", "list", ")", ":", "raise", "ValueError", "(", "\"$in expects a list of strings\"", ")", "key_dict", "[", "'$in'", "]", "=", "[", "evaluate_datetime", "(", "v", ")", "for", "v", "in", "key_dict", "[", "'$in'", "]", "]", "if", "not", "all", "(", "isinstance", "(", "v", ",", "string_types", ")", "for", "v", "in", "key_dict", "[", "'$in'", "]", ")", ":", "raise", "ValueError", "(", "\"$in expects a list of strings\"", ")", "options", "=", "','", ".", "join", "(", "key_dict", "[", "'$in'", "]", ")", "string", "=", "'{}'", ".", "format", "(", "options", ")", "elif", "'$nin'", "in", "key_dict", ":", "if", "not", "isinstance", "(", "key_dict", "[", "'$nin'", "]", ",", "list", ")", ":", "raise", "ValueError", "(", "\"$nin expects a list of strings\"", ")", "key_dict", "[", "'$nin'", "]", "=", "[", "evaluate_datetime", "(", "v", ")", "for", "v", "in", "key_dict", "[", "'$nin'", "]", "]", "if", "not", "all", "(", "isinstance", "(", "v", ",", "string_types", ")", "for", "v", "in", "key_dict", "[", "'$nin'", "]", ")", ":", "raise", "ValueError", "(", "\"$nin expects a list of strings\"", ")", "options", "=", "','", ".", "join", "(", "key_dict", "[", "'$nin'", "]", ")", "string", "=", "'~{}'", ".", "format", 
"(", "options", ")", "elif", "'$contains'", "in", "key_dict", ":", "if", "not", "isinstance", "(", "key_dict", "[", "'$contains'", "]", ",", "string_types", ")", ":", "raise", "ValueError", "(", "\"$contains expects a string.\"", ")", "string", "=", "'*{}*'", ".", "format", "(", "key_dict", "[", "'$contains'", "]", ")", "elif", "'$begins'", "in", "key_dict", ":", "if", "not", "isinstance", "(", "key_dict", "[", "'$begins'", "]", ",", "string_types", ")", ":", "raise", "ValueError", "(", "\"$begins expects a string.\"", ")", "string", "=", "'{}*'", ".", "format", "(", "key_dict", "[", "'$begins'", "]", ")", "elif", "'$ends'", "in", "key_dict", ":", "if", "not", "isinstance", "(", "key_dict", "[", "'$ends'", "]", ",", "string_types", ")", ":", "raise", "ValueError", "(", "\"$ends expects a string.\"", ")", "string", "=", "'*{}'", ".", "format", "(", "key_dict", "[", "'$ends'", "]", ")", "elif", "'$neq'", "in", "key_dict", ":", "string", "=", "'~{}'", ".", "format", "(", "key_dict", "[", "'$neq'", "]", ")", "else", ":", "# Provided too many or too few operators", "raise", "ValueError", "(", "\"Please supply $gte and $lte for getting values between numbers or 1 of {}\"", ".", "format", "(", "allowed_operators", ")", ")", "return", "string", "dmql_search_filters", "=", "[", "]", "for", "filt", ",", "value", "in", "filter_dict", ".", "items", "(", ")", ":", "dmql_string", "=", "'({}='", ".", "format", "(", "filt", ")", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "# Applying an operator. This will need to be recursive because of the or possibility", "dmql_string", "+=", "evaluate_operators", "(", "key_dict", "=", "value", ")", "else", ":", "# Simle equals statement", "dmql_string", "+=", "'{}'", ".", "format", "(", "evaluate_datetime", "(", "value", ")", ")", "dmql_string", "+=", "')'", "dmql_search_filters", ".", "append", "(", "dmql_string", ")", "search_string", "=", "','", ".", "join", "(", "dmql_search_filters", ")", "# Converts the filter dictionary to dmqp string", "logger", ".", "debug", "(", "\"Filter returned the following DMQL: {}\"", ".", "format", "(", "search_string", ")", ")", "return", "search_string" ]
Converts the filter dictionary into DMQL
[ "Converts", "the", "filter", "dictionary", "into", "DMQL" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/utils/search.py#L26-L161
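A hedged usage sketch (not from the source): given the operator handling above, a filter like the one below would produce the DMQL shown in the comments. The import path is assumed from the file location, and the order of the joined clauses follows the dict's iteration order.

import datetime
from rets.utils.search import filter_to_dmql   # assumed import path

query = {
    'ListPrice': {'$gte': 150000, '$lte': 300000},                      # -> (ListPrice=150000.00-300000.00)
    'Status': {'$in': ['Active', 'Pending']},                           # -> (Status=Active,Pending)
    'ModificationTimestamp': {'$gte': datetime.datetime(2019, 1, 1)},   # -> (ModificationTimestamp=2019-01-01T00:00:00+)
}
print(filter_to_dmql(query))
# e.g. (ListPrice=150000.00-300000.00),(Status=Active,Pending),(ModificationTimestamp=2019-01-01T00:00:00+)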
refindlyllc/rets
rets/parsers/base.py
Base.get_attributes
def get_attributes(input_dict): """ Get attributes of xml tags in input_dict and creates a dictionary with the attribute name as the key and the attribute value as the value :param input_dict: The xml tag with the attributes and values :return: dict """ return {k.lstrip("@"): v for k, v in input_dict.items() if k[0] == "@"}
python
def get_attributes(input_dict): """ Get attributes of xml tags in input_dict and creates a dictionary with the attribute name as the key and the attribute value as the value :param input_dict: The xml tag with the attributes and values :return: dict """ return {k.lstrip("@"): v for k, v in input_dict.items() if k[0] == "@"}
[ "def", "get_attributes", "(", "input_dict", ")", ":", "return", "{", "k", ".", "lstrip", "(", "\"@\"", ")", ":", "v", "for", "k", ",", "v", "in", "input_dict", ".", "items", "(", ")", "if", "k", "[", "0", "]", "==", "\"@\"", "}" ]
Get attributes of xml tags in input_dict and creates a dictionary with the attribute name as the key and the attribute value as the value :param input_dict: The xml tag with the attributes and values :return: dict
[ "Get", "attributes", "of", "xml", "tags", "in", "input_dict", "and", "creates", "a", "dictionary", "with", "the", "attribute", "name", "as", "the", "key", "and", "the", "attribute", "value", "as", "the", "value", ":", "param", "input_dict", ":", "The", "xml", "tag", "with", "the", "attributes", "and", "values", ":", "return", ":", "dict" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/base.py#L12-L19
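A standalone sketch of the comprehension above, using a made-up xmltodict-style node: keys that begin with '@' are XML attributes and come back with the '@' stripped, while everything else is ignored.

node = {'@ReplyCode': '0', '@ReplyText': 'Operation Successful', 'COLUMNS': 'ignored'}
attributes = {k.lstrip('@'): v for k, v in node.items() if k[0] == '@'}
print(attributes)   # {'ReplyCode': '0', 'ReplyText': 'Operation Successful'}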
refindlyllc/rets
rets/parsers/base.py
Base.data_columns_to_dict
def data_columns_to_dict(columns_string, dict_string, delimiter=None): """ Turns column names in a single string into a dictionary with the key being the column name and the value being the value in that column for each row :param columns_string: A string of column names :param dict_string: A string of values :param delimiter: The delimiter to use to split the column and values :return: dict """ if delimiter: return {k: v for k, v in zip(columns_string.split(delimiter), dict_string.split(delimiter))} else: return {k: v for k, v in zip(columns_string.split(), dict_string.split())}
python
def data_columns_to_dict(columns_string, dict_string, delimiter=None): """ Turns column names in a single string into a dictionary with the key being the column name and the value being the value in that column for each row :param columns_string: A string of column names :param dict_string: A string of values :param delimiter: The delimiter to use to split the column and values :return: dict """ if delimiter: return {k: v for k, v in zip(columns_string.split(delimiter), dict_string.split(delimiter))} else: return {k: v for k, v in zip(columns_string.split(), dict_string.split())}
[ "def", "data_columns_to_dict", "(", "columns_string", ",", "dict_string", ",", "delimiter", "=", "None", ")", ":", "if", "delimiter", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "zip", "(", "columns_string", ".", "split", "(", "delimiter", ")", ",", "dict_string", ".", "split", "(", "delimiter", ")", ")", "}", "else", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "zip", "(", "columns_string", ".", "split", "(", ")", ",", "dict_string", ".", "split", "(", ")", ")", "}" ]
Turns column names in a single string into a dictionary with the key being the column name and the value being the value in that column for each row :param columns_string: A string of column names :param dict_string: A string of values :param delimiter: The delimiter to use to split the column and values :return: dict
[ "Turns", "column", "names", "in", "a", "single", "string", "into", "a", "dictionary", "with", "the", "key", "being", "the", "column", "name", "and", "the", "value", "being", "the", "value", "in", "that", "column", "for", "each", "row", ":", "param", "columns_string", ":", "A", "string", "of", "column", "names", ":", "param", "dict_string", ":", "A", "string", "of", "values", ":", "param", "delimiter", ":", "The", "delimiter", "to", "use", "to", "split", "the", "column", "and", "values", ":", "return", ":", "dict" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/base.py#L22-L34
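A standalone sketch of the zip performed above, with a tab delimiter of the kind typically used in RETS COMPACT payloads; the field names and values are invented for illustration.

columns = 'ListingKey\tListPrice\tStatus'
row = '12345\t250000\tActive'
delimiter = '\t'
record = {k: v for k, v in zip(columns.split(delimiter), row.split(delimiter))}
print(record)   # {'ListingKey': '12345', 'ListPrice': '250000', 'Status': 'Active'}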
refindlyllc/rets
rets/parsers/base.py
Base.analyze_reply_code
def analyze_reply_code(self, xml_response_dict): """ Checks the RETS Response Code and handles non-zero answers. :param xml_response_dict: :return: None """ if 'RETS-STATUS' in xml_response_dict: attributes = self.get_attributes(xml_response_dict['RETS-STATUS']) reply_code = attributes['ReplyCode'] reply_text = attributes.get('ReplyText', 'RETS did not supply a Reply Text.') logger.debug("Received ReplyCode of {0!s} from the RETS Server: {0!s}".format(reply_code, reply_text)) if reply_code != '0': raise RETSException(reply_text, reply_code) elif 'RETS' not in xml_response_dict: # pragma: no cover raise RETSException("The <RETS> tag was expected in the response XML but it was not found.") attributes = self.get_attributes(input_dict=xml_response_dict['RETS']) if 'ReplyCode' not in attributes: # pragma: no cover # The RETS server did not return a response code. return True reply_code = attributes['ReplyCode'] reply_text = attributes.get('ReplyText', 'RETS did not supply a Reply Text.') logger.debug("Received ReplyCode of {0!s} from the RETS Server: {0!s}".format(reply_code, reply_text)) if reply_code != '0': raise RETSException(reply_text, reply_code)
python
def analyze_reply_code(self, xml_response_dict): """ Checks the RETS Response Code and handles non-zero answers. :param xml_response_dict: :return: None """ if 'RETS-STATUS' in xml_response_dict: attributes = self.get_attributes(xml_response_dict['RETS-STATUS']) reply_code = attributes['ReplyCode'] reply_text = attributes.get('ReplyText', 'RETS did not supply a Reply Text.') logger.debug("Received ReplyCode of {0!s} from the RETS Server: {0!s}".format(reply_code, reply_text)) if reply_code != '0': raise RETSException(reply_text, reply_code) elif 'RETS' not in xml_response_dict: # pragma: no cover raise RETSException("The <RETS> tag was expected in the response XML but it was not found.") attributes = self.get_attributes(input_dict=xml_response_dict['RETS']) if 'ReplyCode' not in attributes: # pragma: no cover # The RETS server did not return a response code. return True reply_code = attributes['ReplyCode'] reply_text = attributes.get('ReplyText', 'RETS did not supply a Reply Text.') logger.debug("Received ReplyCode of {0!s} from the RETS Server: {0!s}".format(reply_code, reply_text)) if reply_code != '0': raise RETSException(reply_text, reply_code)
[ "def", "analyze_reply_code", "(", "self", ",", "xml_response_dict", ")", ":", "if", "'RETS-STATUS'", "in", "xml_response_dict", ":", "attributes", "=", "self", ".", "get_attributes", "(", "xml_response_dict", "[", "'RETS-STATUS'", "]", ")", "reply_code", "=", "attributes", "[", "'ReplyCode'", "]", "reply_text", "=", "attributes", ".", "get", "(", "'ReplyText'", ",", "'RETS did not supply a Reply Text.'", ")", "logger", ".", "debug", "(", "\"Received ReplyCode of {0!s} from the RETS Server: {0!s}\"", ".", "format", "(", "reply_code", ",", "reply_text", ")", ")", "if", "reply_code", "!=", "'0'", ":", "raise", "RETSException", "(", "reply_text", ",", "reply_code", ")", "elif", "'RETS'", "not", "in", "xml_response_dict", ":", "# pragma: no cover", "raise", "RETSException", "(", "\"The <RETS> tag was expected in the response XML but it was not found.\"", ")", "attributes", "=", "self", ".", "get_attributes", "(", "input_dict", "=", "xml_response_dict", "[", "'RETS'", "]", ")", "if", "'ReplyCode'", "not", "in", "attributes", ":", "# pragma: no cover", "# The RETS server did not return a response code.", "return", "True", "reply_code", "=", "attributes", "[", "'ReplyCode'", "]", "reply_text", "=", "attributes", ".", "get", "(", "'ReplyText'", ",", "'RETS did not supply a Reply Text.'", ")", "logger", ".", "debug", "(", "\"Received ReplyCode of {0!s} from the RETS Server: {0!s}\"", ".", "format", "(", "reply_code", ",", "reply_text", ")", ")", "if", "reply_code", "!=", "'0'", ":", "raise", "RETSException", "(", "reply_text", ",", "reply_code", ")" ]
Checks the RETS Response Code and handles non-zero answers. :param xml_response_dict: :return: None
[ "Checks", "the", "RETS", "Response", "Code", "and", "handles", "non", "-", "zero", "answers", ".", ":", "param", "xml_response_dict", ":", ":", "return", ":", "None" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/base.py#L36-L64
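A minimal sketch of the two reply shapes this check sees after xmltodict parsing; a non-zero ReplyCode raises RETSException with the reply text and code. Note in passing that the debug call above uses {0!s} for both placeholders, so reply_text is never actually interpolated into the log message.

import xmltodict

ok = xmltodict.parse('<RETS ReplyCode="0" ReplyText="Operation Successful"/>')
err = xmltodict.parse('<RETS ReplyCode="20203" ReplyText="Misc Search Error"/>')
print(ok['RETS']['@ReplyCode'], err['RETS']['@ReplyCode'])   # 0 20203
# analyze_reply_code(ok) returns quietly; analyze_reply_code(err) would raise
# RETSException('Misc Search Error', '20203').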
refindlyllc/rets
rets/utils/get_object.py
GetObject.ids
def ids(self, content_ids, object_ids): """Appends the content and object ids how RETS expects them""" result = [] content_ids = self.split(content_ids, False) object_ids = self.split(object_ids) for cid in content_ids: result.append('{}:{}'.format(cid, ':'.join(object_ids))) return result
python
def ids(self, content_ids, object_ids): """Appends the content and object ids how RETS expects them""" result = [] content_ids = self.split(content_ids, False) object_ids = self.split(object_ids) for cid in content_ids: result.append('{}:{}'.format(cid, ':'.join(object_ids))) return result
[ "def", "ids", "(", "self", ",", "content_ids", ",", "object_ids", ")", ":", "result", "=", "[", "]", "content_ids", "=", "self", ".", "split", "(", "content_ids", ",", "False", ")", "object_ids", "=", "self", ".", "split", "(", "object_ids", ")", "for", "cid", "in", "content_ids", ":", "result", ".", "append", "(", "'{}:{}'", ".", "format", "(", "cid", ",", "':'", ".", "join", "(", "object_ids", ")", ")", ")", "return", "result" ]
Appends the content and object ids how RETS expects them
[ "Appends", "the", "content", "and", "object", "ids", "how", "RETS", "expects", "them" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/utils/get_object.py#L7-L17
refindlyllc/rets
rets/utils/get_object.py
GetObject.split
def split(value, dash_ranges=True): """Splits """ if isinstance(value, list): value = [str(v) for v in value] else: str_value = str(value) dash_matches = re.match(pattern='(\d+)\-(\d+)', string=str_value) if ':' in str_value or ',' in str_value: value = [v.strip() for v in str_value.replace(',', ':').split(':')] elif dash_ranges and dash_matches: start_range = int(dash_matches.group(1)) end_range = int(dash_matches.group(2)) + 1 rng = range(start_range, end_range) value = [str(r) for r in rng] else: value = [str_value] return value
python
def split(value, dash_ranges=True): """Splits """ if isinstance(value, list): value = [str(v) for v in value] else: str_value = str(value) dash_matches = re.match(pattern='(\d+)\-(\d+)', string=str_value) if ':' in str_value or ',' in str_value: value = [v.strip() for v in str_value.replace(',', ':').split(':')] elif dash_ranges and dash_matches: start_range = int(dash_matches.group(1)) end_range = int(dash_matches.group(2)) + 1 rng = range(start_range, end_range) value = [str(r) for r in rng] else: value = [str_value] return value
[ "def", "split", "(", "value", ",", "dash_ranges", "=", "True", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "value", "=", "[", "str", "(", "v", ")", "for", "v", "in", "value", "]", "else", ":", "str_value", "=", "str", "(", "value", ")", "dash_matches", "=", "re", ".", "match", "(", "pattern", "=", "'(\\d+)\\-(\\d+)'", ",", "string", "=", "str_value", ")", "if", "':'", "in", "str_value", "or", "','", "in", "str_value", ":", "value", "=", "[", "v", ".", "strip", "(", ")", "for", "v", "in", "str_value", ".", "replace", "(", "','", ",", "':'", ")", ".", "split", "(", "':'", ")", "]", "elif", "dash_ranges", "and", "dash_matches", ":", "start_range", "=", "int", "(", "dash_matches", ".", "group", "(", "1", ")", ")", "end_range", "=", "int", "(", "dash_matches", ".", "group", "(", "2", ")", ")", "+", "1", "rng", "=", "range", "(", "start_range", ",", "end_range", ")", "value", "=", "[", "str", "(", "r", ")", "for", "r", "in", "rng", "]", "else", ":", "value", "=", "[", "str_value", "]", "return", "value" ]
Splits
[ "Splits" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/utils/get_object.py#L20-L38
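Assuming split is exposed as a static method (the signature above takes no self), these are the results each accepted input shape should produce; the import path is inferred from the file location.

from rets.utils.get_object import GetObject   # assumed import path

assert GetObject.split('1,2,3') == ['1', '2', '3']            # comma/colon separated string
assert GetObject.split('3-5') == ['3', '4', '5']              # dash expands to an inclusive range
assert GetObject.split('3-5', dash_ranges=False) == ['3-5']   # range expansion disabled
assert GetObject.split([1, 2]) == ['1', '2']                  # list items are stringified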
roanuz/py-cricket
src/pycricket_storagehandler.py
RcaFileStorageHandler.set_value
def set_value(self, key, value): """ Set key value to the file. The function will make the key and value into dictionary format. If the key already exists then its value will be updated in the file. Arg: key : cache key value : cache value """ file_cache = self.read_file() if file_cache: file_cache[key] = value else: file_cache = {} file_cache[key] = value self.update_file(file_cache)
python
def set_value(self, key, value): """ Set key value to the file. The function will make the key and value into dictionary format. If the key already exists then its value will be updated in the file. Arg: key : cache key value : cache value """ file_cache = self.read_file() if file_cache: file_cache[key] = value else: file_cache = {} file_cache[key] = value self.update_file(file_cache)
[ "def", "set_value", "(", "self", ",", "key", ",", "value", ")", ":", "file_cache", "=", "self", ".", "read_file", "(", ")", "if", "file_cache", ":", "file_cache", "[", "key", "]", "=", "value", "else", ":", "file_cache", "=", "{", "}", "file_cache", "[", "key", "]", "=", "value", "self", ".", "update_file", "(", "file_cache", ")" ]
Set key value to the file. The function will make the key and value into dictionary format. If the key already exists then its value will be updated in the file. Arg: key : cache key value : cache value
[ "Set", "key", "value", "to", "the", "file", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket_storagehandler.py#L56-L73
roanuz/py-cricket
src/pycricket_storagehandler.py
RcaFileStorageHandler.delete_value
def delete_value(self, key): """ Delete the key if the token is expired. Arg: key : cache key """ response = {} response['status'] = False response['msg'] = "key does not exist" file_cache = self.read_file() if key in file_cache: del file_cache[key] self.update_file(file_cache) response['status'] = True response['msg'] = "success" return response
python
def delete_value(self, key): """ Delete the key if the token is expired. Arg: key : cache key """ response = {} response['status'] = False response['msg'] = "key does not exist" file_cache = self.read_file() if key in file_cache: del file_cache[key] self.update_file(file_cache) response['status'] = True response['msg'] = "success" return response
[ "def", "delete_value", "(", "self", ",", "key", ")", ":", "response", "=", "{", "}", "response", "[", "'status'", "]", "=", "False", "response", "[", "'msg'", "]", "=", "\"key does not exist\"", "file_cache", "=", "self", ".", "read_file", "(", ")", "if", "key", "in", "file_cache", ":", "del", "file_cache", "[", "key", "]", "self", ".", "update_file", "(", "file_cache", ")", "response", "[", "'status'", "]", "=", "True", "response", "[", "'msg'", "]", "=", "\"success\"", "return", "response" ]
Delete the key if the token is expired. Arg: key : cache key
[ "Delete", "the", "key", "if", "the", "token", "is", "expired", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket_storagehandler.py#L101-L118
roanuz/py-cricket
src/pycricket_storagehandler.py
RcaFileStorageHandler.read_file
def read_file(self): """ Open the file with read permission and return the content in json format. Return : json data """ file_obj = open(self.file, 'r') content = file_obj.read() file_obj.close() if content: content = json.loads(content) return content else: return {}
python
def read_file(self): """ Open the file with read permission and return the content in json format. Return : json data """ file_obj = open(self.file, 'r') content = file_obj.read() file_obj.close() if content: content = json.loads(content) return content else: return {}
[ "def", "read_file", "(", "self", ")", ":", "file_obj", "=", "open", "(", "self", ".", "file", ",", "'r'", ")", "content", "=", "file_obj", ".", "read", "(", ")", "file_obj", ".", "close", "(", ")", "if", "content", ":", "content", "=", "json", ".", "loads", "(", "content", ")", "return", "content", "else", ":", "return", "{", "}" ]
Open the file with read permission and return the content in json format. Return : json data
[ "Open", "the", "file", "with", "read", "permission", "and", "return", "the", "content", "in", "json", "format", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket_storagehandler.py#L128-L142
roanuz/py-cricket
src/pycricket_storagehandler.py
RcaFileStorageHandler.update_file
def update_file(self, content): """ It will convert json content to json string and update into file. Return: Boolean True/False """ updated_content = json.dumps(content) file_obj = open(self.file, 'r+') file_obj.write(str(updated_content)) file_obj.close() return True
python
def update_file(self, content): """ It will convert json content to json string and update into file. Return: Boolean True/False """ updated_content = json.dumps(content) file_obj = open(self.file, 'r+') file_obj.write(str(updated_content)) file_obj.close() return True
[ "def", "update_file", "(", "self", ",", "content", ")", ":", "updated_content", "=", "json", ".", "dumps", "(", "content", ")", "file_obj", "=", "open", "(", "self", ".", "file", ",", "'r+'", ")", "file_obj", ".", "write", "(", "str", "(", "updated_content", ")", ")", "file_obj", ".", "close", "(", ")", "return", "True" ]
It will convert json content to json string and update into file. Return: Boolean True/False
[ "It", "will", "convert", "json", "content", "to", "json", "string", "and", "update", "into", "file", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket_storagehandler.py#L144-L155
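One caveat worth noting: update_file opens the file in 'r+' mode, which does not truncate, so writing JSON shorter than the previous contents can leave stale trailing bytes behind. Below is a minimal standalone sketch of the same round trip (not the library's API) that truncates on write; the cache path is hypothetical.

import json

CACHE_FILE = 'rca_cache.json'   # hypothetical path

def write_cache(content):
    # 'w' truncates before writing, avoiding leftover bytes from a longer previous payload
    with open(CACHE_FILE, 'w') as fh:
        fh.write(json.dumps(content))

def read_cache():
    try:
        with open(CACHE_FILE, 'r') as fh:
            raw = fh.read()
    except IOError:
        return {}
    return json.loads(raw) if raw else {}

write_cache({'access_token': 'abc123', 'expires': '1735689600'})
cache = read_cache()
cache.pop('access_token', None)   # mirrors delete_value
write_cache(cache)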
refindlyllc/rets
rets/parsers/metadata.py
CompactMetadata.parse
def parse(self, response, metadata_type): """ Parses RETS metadata using the COMPACT-DECODED format :param response: :param metadata_type: :return: """ xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) base = xml.get('RETS', {}).get(metadata_type, {}) attributes = self.get_attributes(base) parsed = [] if base.get('System') or base.get('SYSTEM'): system_obj = {} if base.get('SYSTEM', {}).get('@SystemDescription'): system_obj['system_id'] = str(base['SYSTEM']['@SystemID']) if base.get('SYSTEM', {}).get('@SystemDescription'): system_obj['system_description'] = str(base['SYSTEM']['@SystemDescription']) if base.get('SYSTEM', {}).get('@TimeZoneOffset'): system_obj['timezone_offset'] = str(base['SYSTEM']['@TimeZoneOffset']) if base.get('SYSTEM', {}).get('Comments'): system_obj['comments'] = base['SYSTEM']['Comments'] if base.get('@Version'): system_obj['version'] = base['@Version'] parsed.append(system_obj) elif 'DATA' in base: if not isinstance(base['DATA'], list): # xmltodict could take single entry XML lists and turn them into str base['DATA'] = [base['DATA']] for data in base['DATA']: data_dict = self.data_columns_to_dict(columns_string=base.get('COLUMNS', ''), dict_string=data) data_dict.update(attributes) parsed.append(data_dict) return parsed
python
def parse(self, response, metadata_type): """ Parses RETS metadata using the COMPACT-DECODED format :param response: :param metadata_type: :return: """ xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) base = xml.get('RETS', {}).get(metadata_type, {}) attributes = self.get_attributes(base) parsed = [] if base.get('System') or base.get('SYSTEM'): system_obj = {} if base.get('SYSTEM', {}).get('@SystemDescription'): system_obj['system_id'] = str(base['SYSTEM']['@SystemID']) if base.get('SYSTEM', {}).get('@SystemDescription'): system_obj['system_description'] = str(base['SYSTEM']['@SystemDescription']) if base.get('SYSTEM', {}).get('@TimeZoneOffset'): system_obj['timezone_offset'] = str(base['SYSTEM']['@TimeZoneOffset']) if base.get('SYSTEM', {}).get('Comments'): system_obj['comments'] = base['SYSTEM']['Comments'] if base.get('@Version'): system_obj['version'] = base['@Version'] parsed.append(system_obj) elif 'DATA' in base: if not isinstance(base['DATA'], list): # xmltodict could take single entry XML lists and turn them into str base['DATA'] = [base['DATA']] for data in base['DATA']: data_dict = self.data_columns_to_dict(columns_string=base.get('COLUMNS', ''), dict_string=data) data_dict.update(attributes) parsed.append(data_dict) return parsed
[ "def", "parse", "(", "self", ",", "response", ",", "metadata_type", ")", ":", "xml", "=", "xmltodict", ".", "parse", "(", "response", ".", "text", ")", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "base", "=", "xml", ".", "get", "(", "'RETS'", ",", "{", "}", ")", ".", "get", "(", "metadata_type", ",", "{", "}", ")", "attributes", "=", "self", ".", "get_attributes", "(", "base", ")", "parsed", "=", "[", "]", "if", "base", ".", "get", "(", "'System'", ")", "or", "base", ".", "get", "(", "'SYSTEM'", ")", ":", "system_obj", "=", "{", "}", "if", "base", ".", "get", "(", "'SYSTEM'", ",", "{", "}", ")", ".", "get", "(", "'@SystemDescription'", ")", ":", "system_obj", "[", "'system_id'", "]", "=", "str", "(", "base", "[", "'SYSTEM'", "]", "[", "'@SystemID'", "]", ")", "if", "base", ".", "get", "(", "'SYSTEM'", ",", "{", "}", ")", ".", "get", "(", "'@SystemDescription'", ")", ":", "system_obj", "[", "'system_description'", "]", "=", "str", "(", "base", "[", "'SYSTEM'", "]", "[", "'@SystemDescription'", "]", ")", "if", "base", ".", "get", "(", "'SYSTEM'", ",", "{", "}", ")", ".", "get", "(", "'@TimeZoneOffset'", ")", ":", "system_obj", "[", "'timezone_offset'", "]", "=", "str", "(", "base", "[", "'SYSTEM'", "]", "[", "'@TimeZoneOffset'", "]", ")", "if", "base", ".", "get", "(", "'SYSTEM'", ",", "{", "}", ")", ".", "get", "(", "'Comments'", ")", ":", "system_obj", "[", "'comments'", "]", "=", "base", "[", "'SYSTEM'", "]", "[", "'Comments'", "]", "if", "base", ".", "get", "(", "'@Version'", ")", ":", "system_obj", "[", "'version'", "]", "=", "base", "[", "'@Version'", "]", "parsed", ".", "append", "(", "system_obj", ")", "elif", "'DATA'", "in", "base", ":", "if", "not", "isinstance", "(", "base", "[", "'DATA'", "]", ",", "list", ")", ":", "# xmltodict could take single entry XML lists and turn them into str", "base", "[", "'DATA'", "]", "=", "[", "base", "[", "'DATA'", "]", "]", "for", "data", "in", "base", "[", "'DATA'", "]", ":", "data_dict", "=", "self", ".", "data_columns_to_dict", "(", "columns_string", "=", "base", ".", "get", "(", "'COLUMNS'", ",", "''", ")", ",", "dict_string", "=", "data", ")", "data_dict", ".", "update", "(", "attributes", ")", "parsed", ".", "append", "(", "data_dict", ")", "return", "parsed" ]
Parses RETS metadata using the COMPACT-DECODED format :param response: :param metadata_type: :return:
[ "Parses", "RETS", "metadata", "using", "the", "COMPACT", "-", "DECODED", "format", ":", "param", "response", ":", ":", "param", "metadata_type", ":", ":", "return", ":" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/metadata.py#L14-L56
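A hedged end-to-end sketch of the COMPACT handling above, reduced to plain xmltodict calls so it runs standalone; the metadata body is invented but shaped like a typical METADATA-RESOURCE reply.

import xmltodict

body = (
    '<RETS ReplyCode="0" ReplyText="Operation Successful">'
    '<METADATA-RESOURCE Version="1.11.76004" Date="2019-01-01T00:00:00Z">'
    '<COLUMNS>\tResourceID\tStandardName\t</COLUMNS>'
    '<DATA>\tProperty\tProperty\t</DATA>'
    '</METADATA-RESOURCE>'
    '</RETS>'
)
base = xmltodict.parse(body)['RETS']['METADATA-RESOURCE']
attributes = {k.lstrip('@'): v for k, v in base.items() if k[0] == '@'}
row = {k: v for k, v in zip(base['COLUMNS'].split(), base['DATA'].split())}
row.update(attributes)
print(row)
# e.g. {'ResourceID': 'Property', 'StandardName': 'Property',
#       'Version': '1.11.76004', 'Date': '2019-01-01T00:00:00Z'}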
refindlyllc/rets
rets/parsers/metadata.py
StandardXMLetadata.parse
def parse(self, response, metadata_type): """ Parses RETS metadata using the STANDARD-XML format :param response: requests Response object :param metadata_type: string :return parsed: list """ xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) base = xml.get('RETS', {}).get('METADATA', {}).get(metadata_type, {}) if metadata_type == 'METADATA-SYSTEM': syst = base.get('System', base.get('SYSTEM')) if not syst: raise ParseError("Could not get the System key from a METADATA-SYSTEM request.") system_obj = {} if syst.get('SystemID'): system_obj['system_id'] = str(syst['SystemID']) if syst.get('SystemDescription'): system_obj['system_description'] = str(syst['SystemDescription']) if syst.get('Comments'): system_obj['comments'] = syst['Comments'] if base.get('@Version'): system_obj['version'] = base['@Version'] return [system_obj] elif metadata_type == 'METADATA-CLASS': key = 'class' elif metadata_type == 'METADATA-RESOURCE': key = 'resource' elif metadata_type == 'METADATA-LOOKUP_TYPE': key = 'lookuptype' elif metadata_type == 'METADATA-OBJECT': key = 'object' elif metadata_type == 'METADATA-TABLE': key = 'field' else: msg = "Got an unknown metadata type of {0!s}".format(metadata_type) raise ParseError(msg) # Get the version with the right capitalization from the dictionary key_cap = None for k in base.keys(): if k.lower() == key: key_cap = k if not key_cap: msg = 'Could not find {0!s} in the response XML'.format(key) raise ParseError(msg) if isinstance(base[key_cap], list): return base[key_cap] else: return [base[key_cap]]
python
def parse(self, response, metadata_type): """ Parses RETS metadata using the STANDARD-XML format :param response: requests Response object :param metadata_type: string :return parsed: list """ xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) base = xml.get('RETS', {}).get('METADATA', {}).get(metadata_type, {}) if metadata_type == 'METADATA-SYSTEM': syst = base.get('System', base.get('SYSTEM')) if not syst: raise ParseError("Could not get the System key from a METADATA-SYSTEM request.") system_obj = {} if syst.get('SystemID'): system_obj['system_id'] = str(syst['SystemID']) if syst.get('SystemDescription'): system_obj['system_description'] = str(syst['SystemDescription']) if syst.get('Comments'): system_obj['comments'] = syst['Comments'] if base.get('@Version'): system_obj['version'] = base['@Version'] return [system_obj] elif metadata_type == 'METADATA-CLASS': key = 'class' elif metadata_type == 'METADATA-RESOURCE': key = 'resource' elif metadata_type == 'METADATA-LOOKUP_TYPE': key = 'lookuptype' elif metadata_type == 'METADATA-OBJECT': key = 'object' elif metadata_type == 'METADATA-TABLE': key = 'field' else: msg = "Got an unknown metadata type of {0!s}".format(metadata_type) raise ParseError(msg) # Get the version with the right capitalization from the dictionary key_cap = None for k in base.keys(): if k.lower() == key: key_cap = k if not key_cap: msg = 'Could not find {0!s} in the response XML'.format(key) raise ParseError(msg) if isinstance(base[key_cap], list): return base[key_cap] else: return [base[key_cap]]
[ "def", "parse", "(", "self", ",", "response", ",", "metadata_type", ")", ":", "xml", "=", "xmltodict", ".", "parse", "(", "response", ".", "text", ")", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "base", "=", "xml", ".", "get", "(", "'RETS'", ",", "{", "}", ")", ".", "get", "(", "'METADATA'", ",", "{", "}", ")", ".", "get", "(", "metadata_type", ",", "{", "}", ")", "if", "metadata_type", "==", "'METADATA-SYSTEM'", ":", "syst", "=", "base", ".", "get", "(", "'System'", ",", "base", ".", "get", "(", "'SYSTEM'", ")", ")", "if", "not", "syst", ":", "raise", "ParseError", "(", "\"Could not get the System key from a METADATA-SYSTEM request.\"", ")", "system_obj", "=", "{", "}", "if", "syst", ".", "get", "(", "'SystemID'", ")", ":", "system_obj", "[", "'system_id'", "]", "=", "str", "(", "syst", "[", "'SystemID'", "]", ")", "if", "syst", ".", "get", "(", "'SystemDescription'", ")", ":", "system_obj", "[", "'system_description'", "]", "=", "str", "(", "syst", "[", "'SystemDescription'", "]", ")", "if", "syst", ".", "get", "(", "'Comments'", ")", ":", "system_obj", "[", "'comments'", "]", "=", "syst", "[", "'Comments'", "]", "if", "base", ".", "get", "(", "'@Version'", ")", ":", "system_obj", "[", "'version'", "]", "=", "base", "[", "'@Version'", "]", "return", "[", "system_obj", "]", "elif", "metadata_type", "==", "'METADATA-CLASS'", ":", "key", "=", "'class'", "elif", "metadata_type", "==", "'METADATA-RESOURCE'", ":", "key", "=", "'resource'", "elif", "metadata_type", "==", "'METADATA-LOOKUP_TYPE'", ":", "key", "=", "'lookuptype'", "elif", "metadata_type", "==", "'METADATA-OBJECT'", ":", "key", "=", "'object'", "elif", "metadata_type", "==", "'METADATA-TABLE'", ":", "key", "=", "'field'", "else", ":", "msg", "=", "\"Got an unknown metadata type of {0!s}\"", ".", "format", "(", "metadata_type", ")", "raise", "ParseError", "(", "msg", ")", "# Get the version with the right capitalization from the dictionary", "key_cap", "=", "None", "for", "k", "in", "base", ".", "keys", "(", ")", ":", "if", "k", ".", "lower", "(", ")", "==", "key", ":", "key_cap", "=", "k", "if", "not", "key_cap", ":", "msg", "=", "'Could not find {0!s} in the response XML'", ".", "format", "(", "key", ")", "raise", "ParseError", "(", "msg", ")", "if", "isinstance", "(", "base", "[", "key_cap", "]", ",", "list", ")", ":", "return", "base", "[", "key_cap", "]", "else", ":", "return", "[", "base", "[", "key_cap", "]", "]" ]
Parses RETS metadata using the STANDARD-XML format :param response: requests Response object :param metadata_type: string :return parsed: list
[ "Parses", "RETS", "metadata", "using", "the", "STANDARD", "-", "XML", "format", ":", "param", "response", ":", "requests", "Response", "object", ":", "param", "metadata_type", ":", "string", ":", "return", "parsed", ":", "list" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/metadata.py#L62-L116
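A similar hedged sketch for the STANDARD-XML path: here the entries live under RETS/METADATA/<type>, and the case-insensitive key lookup above is what finds 'Resource' for the 'resource' key. The body is invented for illustration.

import xmltodict

body = (
    '<RETS ReplyCode="0" ReplyText="Operation Successful">'
    '<METADATA><METADATA-RESOURCE Version="1.11.76004">'
    '<Resource><ResourceID>Property</ResourceID></Resource>'
    '</METADATA-RESOURCE></METADATA></RETS>'
)
base = xmltodict.parse(body)['RETS']['METADATA']['METADATA-RESOURCE']
key_cap = next(k for k in base.keys() if k.lower() == 'resource')
print([base[key_cap]])   # a one-element list holding the Resource entry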
refindlyllc/rets
rets/parsers/get_object.py
MultipleObjectParser._get_multiparts
def _get_multiparts(response): """ From this 'multipart/parallel; boundary="874e43d27ec6d83f30f37841bdaf90c7"; charset=utf-8' get this --874e43d27ec6d83f30f37841bdaf90c7 """ boundary = None for part in response.headers.get('Content-Type', '').split(';'): if 'boundary=' in part: boundary = '--{}'.format(part.split('=', 1)[1].strip('\"')) break if not boundary: raise ParseError("Was not able to find the boundary between objects in a multipart response") if response.content is None: return [] response_string = response.content if six.PY3: # Python3 returns bytes, decode for string operations response_string = response_string.decode('latin-1') # help bad responses be more multipart compliant whole_body = response_string.strip('\r\n') no_front_boundary = whole_body.strip(boundary) # The boundary comes with some characters multi_parts = [] for part in no_front_boundary.split(boundary): multi_parts.append(part.strip('\r\n')) return multi_parts
python
def _get_multiparts(response): """ From this 'multipart/parallel; boundary="874e43d27ec6d83f30f37841bdaf90c7"; charset=utf-8' get this --874e43d27ec6d83f30f37841bdaf90c7 """ boundary = None for part in response.headers.get('Content-Type', '').split(';'): if 'boundary=' in part: boundary = '--{}'.format(part.split('=', 1)[1].strip('\"')) break if not boundary: raise ParseError("Was not able to find the boundary between objects in a multipart response") if response.content is None: return [] response_string = response.content if six.PY3: # Python3 returns bytes, decode for string operations response_string = response_string.decode('latin-1') # help bad responses be more multipart compliant whole_body = response_string.strip('\r\n') no_front_boundary = whole_body.strip(boundary) # The boundary comes with some characters multi_parts = [] for part in no_front_boundary.split(boundary): multi_parts.append(part.strip('\r\n')) return multi_parts
[ "def", "_get_multiparts", "(", "response", ")", ":", "boundary", "=", "None", "for", "part", "in", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", ".", "split", "(", "';'", ")", ":", "if", "'boundary='", "in", "part", ":", "boundary", "=", "'--{}'", ".", "format", "(", "part", ".", "split", "(", "'='", ",", "1", ")", "[", "1", "]", ".", "strip", "(", "'\\\"'", ")", ")", "break", "if", "not", "boundary", ":", "raise", "ParseError", "(", "\"Was not able to find the boundary between objects in a multipart response\"", ")", "if", "response", ".", "content", "is", "None", ":", "return", "[", "]", "response_string", "=", "response", ".", "content", "if", "six", ".", "PY3", ":", "# Python3 returns bytes, decode for string operations", "response_string", "=", "response_string", ".", "decode", "(", "'latin-1'", ")", "# help bad responses be more multipart compliant", "whole_body", "=", "response_string", ".", "strip", "(", "'\\r\\n'", ")", "no_front_boundary", "=", "whole_body", ".", "strip", "(", "boundary", ")", "# The boundary comes with some characters", "multi_parts", "=", "[", "]", "for", "part", "in", "no_front_boundary", ".", "split", "(", "boundary", ")", ":", "multi_parts", ".", "append", "(", "part", ".", "strip", "(", "'\\r\\n'", ")", ")", "return", "multi_parts" ]
From this 'multipart/parallel; boundary="874e43d27ec6d83f30f37841bdaf90c7"; charset=utf-8' get this --874e43d27ec6d83f30f37841bdaf90c7
[ "From", "this", "multipart", "/", "parallel", ";", "boundary", "=", "874e43d27ec6d83f30f37841bdaf90c7", ";", "charset", "=", "utf", "-", "8", "get", "this", "--", "874e43d27ec6d83f30f37841bdaf90c7" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/get_object.py#L39-L73
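A standalone sketch of the boundary extraction above, using the Content-Type value quoted in the docstring:

content_type = 'multipart/parallel; boundary="874e43d27ec6d83f30f37841bdaf90c7"; charset=utf-8'
boundary = None
for part in content_type.split(';'):
    if 'boundary=' in part:
        boundary = '--{}'.format(part.split('=', 1)[1].strip('"'))
        break
print(boundary)   # --874e43d27ec6d83f30f37841bdaf90c7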
refindlyllc/rets
rets/parsers/get_object.py
MultipleObjectParser.parse_image_response
def parse_image_response(self, response): """ Parse multiple objects from the RETS feed. A lot of string methods are used to handle the response before encoding it back into bytes for the object. :param response: The response from the feed :return: list of SingleObjectParser """ if 'xml' in response.headers.get('Content-Type'): # Got an XML response, likely an error code. xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) multi_parts = self._get_multiparts(response) parsed = [] # go through each part of the multipart message for part in multi_parts: clean_part = part.strip('\r\n\r\n') if '\r\n\r\n' in clean_part: header, body = clean_part.split('\r\n\r\n', 1) else: header = clean_part body = None part_header_dict = {k.strip(): v.strip() for k, v in (h.split(':', 1) for h in header.split('\r\n'))} # Some multipart requests respond with a text/XML part stating an error if 'xml' in part_header_dict.get('Content-Type'): # Got an XML response, likely an error code. # Some rets servers give characters after the closing brace. body = body[:body.index('/>') + 2] if '/>' in body else body xml = xmltodict.parse(body) try: self.analyze_reply_code(xml_response_dict=xml) except RETSException as e: if e.reply_code == '20403': # The requested object_id was not found. continue raise e if body: obj = self._response_object_from_header( obj_head_dict=part_header_dict, content=body.encode('latin-1') if six.PY3 else body) else: obj = self._response_object_from_header(obj_head_dict=part_header_dict) parsed.append(obj) return parsed
python
def parse_image_response(self, response): """ Parse multiple objects from the RETS feed. A lot of string methods are used to handle the response before encoding it back into bytes for the object. :param response: The response from the feed :return: list of SingleObjectParser """ if 'xml' in response.headers.get('Content-Type'): # Got an XML response, likely an error code. xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) multi_parts = self._get_multiparts(response) parsed = [] # go through each part of the multipart message for part in multi_parts: clean_part = part.strip('\r\n\r\n') if '\r\n\r\n' in clean_part: header, body = clean_part.split('\r\n\r\n', 1) else: header = clean_part body = None part_header_dict = {k.strip(): v.strip() for k, v in (h.split(':', 1) for h in header.split('\r\n'))} # Some multipart requests respond with a text/XML part stating an error if 'xml' in part_header_dict.get('Content-Type'): # Got an XML response, likely an error code. # Some rets servers give characters after the closing brace. body = body[:body.index('/>') + 2] if '/>' in body else body xml = xmltodict.parse(body) try: self.analyze_reply_code(xml_response_dict=xml) except RETSException as e: if e.reply_code == '20403': # The requested object_id was not found. continue raise e if body: obj = self._response_object_from_header( obj_head_dict=part_header_dict, content=body.encode('latin-1') if six.PY3 else body) else: obj = self._response_object_from_header(obj_head_dict=part_header_dict) parsed.append(obj) return parsed
[ "def", "parse_image_response", "(", "self", ",", "response", ")", ":", "if", "'xml'", "in", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ")", ":", "# Got an XML response, likely an error code.", "xml", "=", "xmltodict", ".", "parse", "(", "response", ".", "text", ")", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "multi_parts", "=", "self", ".", "_get_multiparts", "(", "response", ")", "parsed", "=", "[", "]", "# go through each part of the multipart message", "for", "part", "in", "multi_parts", ":", "clean_part", "=", "part", ".", "strip", "(", "'\\r\\n\\r\\n'", ")", "if", "'\\r\\n\\r\\n'", "in", "clean_part", ":", "header", ",", "body", "=", "clean_part", ".", "split", "(", "'\\r\\n\\r\\n'", ",", "1", ")", "else", ":", "header", "=", "clean_part", "body", "=", "None", "part_header_dict", "=", "{", "k", ".", "strip", "(", ")", ":", "v", ".", "strip", "(", ")", "for", "k", ",", "v", "in", "(", "h", ".", "split", "(", "':'", ",", "1", ")", "for", "h", "in", "header", ".", "split", "(", "'\\r\\n'", ")", ")", "}", "# Some multipart requests respond with a text/XML part stating an error", "if", "'xml'", "in", "part_header_dict", ".", "get", "(", "'Content-Type'", ")", ":", "# Got an XML response, likely an error code.", "# Some rets servers give characters after the closing brace.", "body", "=", "body", "[", ":", "body", ".", "index", "(", "'/>'", ")", "+", "2", "]", "if", "'/>'", "in", "body", "else", "body", "xml", "=", "xmltodict", ".", "parse", "(", "body", ")", "try", ":", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "except", "RETSException", "as", "e", ":", "if", "e", ".", "reply_code", "==", "'20403'", ":", "# The requested object_id was not found.", "continue", "raise", "e", "if", "body", ":", "obj", "=", "self", ".", "_response_object_from_header", "(", "obj_head_dict", "=", "part_header_dict", ",", "content", "=", "body", ".", "encode", "(", "'latin-1'", ")", "if", "six", ".", "PY3", "else", "body", ")", "else", ":", "obj", "=", "self", ".", "_response_object_from_header", "(", "obj_head_dict", "=", "part_header_dict", ")", "parsed", ".", "append", "(", "obj", ")", "return", "parsed" ]
Parse multiple objects from the RETS feed. A lot of string methods are used to handle the response before encoding it back into bytes for the object. :param response: The response from the feed :return: list of SingleObjectParser
[ "Parse", "multiple", "objects", "from", "the", "RETS", "feed", ".", "A", "lot", "of", "string", "methods", "are", "used", "to", "handle", "the", "response", "before", "encoding", "it", "back", "into", "bytes", "for", "the", "object", ".", ":", "param", "response", ":", "The", "response", "from", "the", "feed", ":", "return", ":", "list", "of", "SingleObjectParser" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/get_object.py#L75-L120
refindlyllc/rets
rets/parsers/get_object.py
SingleObjectParser.parse_image_response
def parse_image_response(self, response): """ Parse a single object from the RETS feed :param response: The response from the RETS server :return: Object """ if 'xml' in response.headers.get('Content-Type'): # Got an XML response, likely an error code. xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) obj = self._response_object_from_header( obj_head_dict=response.headers, content=response.content) return obj
python
def parse_image_response(self, response): """ Parse a single object from the RETS feed :param response: The response from the RETS server :return: Object """ if 'xml' in response.headers.get('Content-Type'): # Got an XML response, likely an error code. xml = xmltodict.parse(response.text) self.analyze_reply_code(xml_response_dict=xml) obj = self._response_object_from_header( obj_head_dict=response.headers, content=response.content) return obj
[ "def", "parse_image_response", "(", "self", ",", "response", ")", ":", "if", "'xml'", "in", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ")", ":", "# Got an XML response, likely an error code.", "xml", "=", "xmltodict", ".", "parse", "(", "response", ".", "text", ")", "self", ".", "analyze_reply_code", "(", "xml_response_dict", "=", "xml", ")", "obj", "=", "self", ".", "_response_object_from_header", "(", "obj_head_dict", "=", "response", ".", "headers", ",", "content", "=", "response", ".", "content", ")", "return", "obj" ]
Parse a single object from the RETS feed :param response: The response from the RETS server :return: Object
[ "Parse", "a", "single", "object", "from", "the", "RETS", "feed", ":", "param", "response", ":", "The", "response", "from", "the", "RETS", "server", ":", "return", ":", "Object" ]
train
https://github.com/refindlyllc/rets/blob/c615dfc272cff0825fd3b50863c46afc3e33916f/rets/parsers/get_object.py#L125-L139
roanuz/py-cricket
src/pycricket.py
RcaApp.auth
def auth(self): """ Auth is used to call the AUTH API of CricketAPI. An access token is required for every request call to CricketAPI. The auth function will post the user Cricket API app details to the server and return the access token. Return: Access token """ if not self.store_handler.has_value('access_token'): params = {} params["access_key"] = self.access_key params["secret_key"] = self.secret_key params["app_id"] = self.app_id params["device_id"] = self.device_id auth_url = self.api_path + "auth/" response = self.get_response(auth_url, params, "post") if 'auth' in response: self.store_handler.set_value("access_token", response['auth']['access_token']) self.store_handler.set_value("expires", response['auth']['expires']) logger.info('Getting new access token') else: msg = "Error getting access_token, " + \ "please verify your access_key, secret_key and app_id" logger.error(msg) raise Exception("Auth Failed, please check your access details")
python
def auth(self): """ Auth is used to call the AUTH API of CricketAPI. An access token is required for every request call to CricketAPI. The auth function will post the user Cricket API app details to the server and return the access token. Return: Access token """ if not self.store_handler.has_value('access_token'): params = {} params["access_key"] = self.access_key params["secret_key"] = self.secret_key params["app_id"] = self.app_id params["device_id"] = self.device_id auth_url = self.api_path + "auth/" response = self.get_response(auth_url, params, "post") if 'auth' in response: self.store_handler.set_value("access_token", response['auth']['access_token']) self.store_handler.set_value("expires", response['auth']['expires']) logger.info('Getting new access token') else: msg = "Error getting access_token, " + \ "please verify your access_key, secret_key and app_id" logger.error(msg) raise Exception("Auth Failed, please check your access details")
[ "def", "auth", "(", "self", ")", ":", "if", "not", "self", ".", "store_handler", ".", "has_value", "(", "'access_token'", ")", ":", "params", "=", "{", "}", "params", "[", "\"access_key\"", "]", "=", "self", ".", "access_key", "params", "[", "\"secret_key\"", "]", "=", "self", ".", "secret_key", "params", "[", "\"app_id\"", "]", "=", "self", ".", "app_id", "params", "[", "\"device_id\"", "]", "=", "self", ".", "device_id", "auth_url", "=", "self", ".", "api_path", "+", "\"auth/\"", "response", "=", "self", ".", "get_response", "(", "auth_url", ",", "params", ",", "\"post\"", ")", "if", "'auth'", "in", "response", ":", "self", ".", "store_handler", ".", "set_value", "(", "\"access_token\"", ",", "response", "[", "'auth'", "]", "[", "'access_token'", "]", ")", "self", ".", "store_handler", ".", "set_value", "(", "\"expires\"", ",", "response", "[", "'auth'", "]", "[", "'expires'", "]", ")", "logger", ".", "info", "(", "'Getting new access token'", ")", "else", ":", "msg", "=", "\"Error getting access_token, \"", "+", "\"please verify your access_key, secret_key and app_id\"", "logger", ".", "error", "(", "msg", ")", "raise", "Exception", "(", "\"Auth Failed, please check your access details\"", ")" ]
Auth is used to call the AUTH API of CricketAPI. An access token is required for every request call to CricketAPI. The auth function will post the user Cricket API app details to the server and return the access token. Return: Access token
[ "Auth", "is", "used", "to", "call", "the", "AUTH", "API", "of", "CricketAPI", ".", "An", "access", "token", "is", "required", "for", "every", "request", "call", "to", "CricketAPI", ".", "The", "auth", "function", "will", "post", "the", "user", "Cricket", "API", "app", "details", "to", "the", "server", "and", "return", "the", "access", "token", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L89-L117
roanuz/py-cricket
src/pycricket.py
RcaApp.get_response
def get_response(self, url, params={}, method="get"): """ It will return json response based on given url, params and methods. Arg: params: 'dictionary' url: 'url' format method: default 'get', support method 'post' Return: json data """ if method == "post": response_data = json.loads(requests.post(url, params=params).text) else: params["access_token"] = self.get_active_token() response_data = json.loads(requests.get(url, params=params).text) if not response_data['status_code'] == 200: if "status_msg" in response_data: logger.error("Bad response: " + response_data['status_msg']) else: logger.error("Something went wrong, please check your " + \ "request params Example: card_type and date") return response_data
python
def get_response(self, url, params={}, method="get"): """ It will return json response based on given url, params and methods. Arg: params: 'dictionary' url: 'url' format method: default 'get', support method 'post' Return: json data """ if method == "post": response_data = json.loads(requests.post(url, params=params).text) else: params["access_token"] = self.get_active_token() response_data = json.loads(requests.get(url, params=params).text) if not response_data['status_code'] == 200: if "status_msg" in response_data: logger.error("Bad response: " + response_data['status_msg']) else: logger.error("Something went wrong, please check your " + \ "request params Example: card_type and date") return response_data
[ "def", "get_response", "(", "self", ",", "url", ",", "params", "=", "{", "}", ",", "method", "=", "\"get\"", ")", ":", "if", "method", "==", "\"post\"", ":", "response_data", "=", "json", ".", "loads", "(", "requests", ".", "post", "(", "url", ",", "params", "=", "params", ")", ".", "text", ")", "else", ":", "params", "[", "\"access_token\"", "]", "=", "self", ".", "get_active_token", "(", ")", "response_data", "=", "json", ".", "loads", "(", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", ".", "text", ")", "if", "not", "response_data", "[", "'status_code'", "]", "==", "200", ":", "if", "\"status_msg\"", "in", "response_data", ":", "logger", ".", "error", "(", "\"Bad response: \"", "+", "response_data", "[", "'status_msg'", "]", ")", "else", ":", "logger", ".", "error", "(", "\"Something went wrong, please check your \"", "+", "\"request params Example: card_type and date\"", ")", "return", "response_data" ]
It will return json response based on given url, params and methods. Arg: params: 'dictionary' url: 'url' format method: default 'get', support method 'post' Return: json data
[ "It", "will", "return", "json", "response", "based", "on", "given", "url", "params", "and", "methods", ".", "Arg", ":", "params", ":", "dictionary", "url", ":", "url", "format", "method", ":", "default", "get", "support", "method", "post", "Return", ":", "json", "data" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L119-L144
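A small aside on the signature above: params={} is a mutable default, and the non-post branch writes access_token into it, so calls that omit params share (and mutate) the same dict. A minimal standalone sketch of that pitfall, unrelated to any Cricket API call:

def fetch(params={}):
    params['access_token'] = 'token'
    return params

first = fetch()
second = fetch()
print(first is second)   # True: both calls returned the same shared default dict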
roanuz/py-cricket
src/pycricket.py
RcaApp.get_active_token
def get_active_token(self): """ Getting the valid access token. The access token expires every 24 hours; if it expires, a new token will be generated. Return: active access token """ expire_time = self.store_handler.has_value("expires") access_token = self.store_handler.has_value("access_token") if expire_time and access_token: expire_time = self.store_handler.get_value("expires") if not datetime.now() < datetime.fromtimestamp(float(expire_time)): self.store_handler.delete_value("access_token") self.store_handler.delete_value("expires") logger.info('Access token expired, going to get new token') self.auth() else: logger.info('Access token not expired yet') else: self.auth() return self.store_handler.get_value("access_token")
python
def get_active_token(self): """ Getting the valid access token. The access token expires every 24 hours; if it expires, a new token will be generated. Return: active access token """ expire_time = self.store_handler.has_value("expires") access_token = self.store_handler.has_value("access_token") if expire_time and access_token: expire_time = self.store_handler.get_value("expires") if not datetime.now() < datetime.fromtimestamp(float(expire_time)): self.store_handler.delete_value("access_token") self.store_handler.delete_value("expires") logger.info('Access token expired, going to get new token') self.auth() else: logger.info('Access token not expired yet') else: self.auth() return self.store_handler.get_value("access_token")
[ "def", "get_active_token", "(", "self", ")", ":", "expire_time", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"expires\"", ")", "access_token", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"access_token\"", ")", "if", "expire_time", "and", "access_token", ":", "expire_time", "=", "self", ".", "store_handler", ".", "get_value", "(", "\"expires\"", ")", "if", "not", "datetime", ".", "now", "(", ")", "<", "datetime", ".", "fromtimestamp", "(", "float", "(", "expire_time", ")", ")", ":", "self", ".", "store_handler", ".", "delete_value", "(", "\"access_token\"", ")", "self", ".", "store_handler", ".", "delete_value", "(", "\"expires\"", ")", "logger", ".", "info", "(", "'Access token expired, going to get new token'", ")", "self", ".", "auth", "(", ")", "else", ":", "logger", ".", "info", "(", "'Access token not expired yet'", ")", "else", ":", "self", ".", "auth", "(", ")", "return", "self", ".", "store_handler", ".", "get_value", "(", "\"access_token\"", ")" ]
Getting the valid access token. The access token expires every 24 hours; when it expires, a new token is generated. Return: active access token
[ "Getting", "the", "valid", "access", "token", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L146-L169
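A simplified sketch of the token-caching logic in get_active_token, with a plain dictionary standing in for store_handler and a stubbed auth(); it illustrates the check-expiry-then-refresh pattern rather than the class's real storage backend.

from datetime import datetime, timedelta


class TokenCache:
    # Illustrative stand-in for the store_handler plus auth() pair.
    def __init__(self):
        self._store = {}

    def auth(self):
        # Hypothetical refresh: a real client would call the auth endpoint here.
        self._store['access_token'] = 'NEW_TOKEN'
        self._store['expires'] = (datetime.now() + timedelta(hours=24)).timestamp()

    def get_active_token(self):
        expires = self._store.get('expires')
        token = self._store.get('access_token')
        # Re-authenticate when the cached token is missing or past its expiry time.
        if not (expires and token) or datetime.now() >= datetime.fromtimestamp(float(expires)):
            self.auth()
        return self._store['access_token']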
roanuz/py-cricket
src/pycricket.py
RcaApp.get_match
def get_match(self, match_key, card_type="full_card"): """ Calling the Match API. Arg: match_key: key of the match card_type: optional, default to full_card. Accepted values are micro_card, summary_card & full_card. Return: json data """ match_url = self.api_path + "match/" + match_key + "/" params = {} params["card_type"] = card_type response = self.get_response(match_url, params) return response
python
def get_match(self, match_key, card_type="full_card"): """ Calling the Match API. Arg: match_key: key of the match card_type: optional, default to full_card. Accepted values are micro_card, summary_card & full_card. Return: json data """ match_url = self.api_path + "match/" + match_key + "/" params = {} params["card_type"] = card_type response = self.get_response(match_url, params) return response
[ "def", "get_match", "(", "self", ",", "match_key", ",", "card_type", "=", "\"full_card\"", ")", ":", "match_url", "=", "self", ".", "api_path", "+", "\"match/\"", "+", "match_key", "+", "\"/\"", "params", "=", "{", "}", "params", "[", "\"card_type\"", "]", "=", "card_type", "response", "=", "self", ".", "get_response", "(", "match_url", ",", "params", ")", "return", "response" ]
Calling the Match API. Arg: match_key: key of the match card_type: optional, default to full_card. Accepted values are micro_card, summary_card & full_card. Return: json data
[ "Calling", "the", "Match", "API", ".", "Arg", ":", "match_key", ":", "key", "of", "the", "match", "card_type", ":", "optional", "default", "to", "full_card", ".", "Accepted", "values", "are", "micro_card", "summary_card", "&", "full_card", ".", "Return", ":", "json", "data" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L171-L187
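A hedged sketch of how the Match endpoint and its card_type parameter fit together; the base path and match key below are placeholders, not values taken from the library.

def build_match_url(api_path, match_key, card_type='full_card'):
    # Mirrors get_match: endpoint is <api_path>match/<match_key>/ with card_type as a query parameter.
    url = api_path + 'match/' + match_key + '/'
    params = {'card_type': card_type}
    return url, params


print(build_match_url('https://api.example.com/v2/', 'sample_match_key', 'micro_card'))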
roanuz/py-cricket
src/pycricket.py
RcaApp.get_recent_matches
def get_recent_matches(self, card_type="micro_card"): """ Calling the Recent Matches API. Arg: card_type: optional, default to micro_card. Accepted values are micro_card & summary_card. Return: json data """ recent_matches_url = self.api_path + "recent_matches/" params = {} params["card_type"] = card_type response = self.get_response(recent_matches_url, params) return response
python
def get_recent_matches(self, card_type="micro_card"): """ Calling the Recent Matches API. Arg: card_type: optional, default to micro_card. Accepted values are micro_card & summary_card. Return: json data """ recent_matches_url = self.api_path + "recent_matches/" params = {} params["card_type"] = card_type response = self.get_response(recent_matches_url, params) return response
[ "def", "get_recent_matches", "(", "self", ",", "card_type", "=", "\"micro_card\"", ")", ":", "recent_matches_url", "=", "self", ".", "api_path", "+", "\"recent_matches/\"", "params", "=", "{", "}", "params", "[", "\"card_type\"", "]", "=", "card_type", "response", "=", "self", ".", "get_response", "(", "recent_matches_url", ",", "params", ")", "return", "response" ]
Calling the Recent Matches API. Arg: card_type: optional, default to micro_card. Accepted values are micro_card & summary_card. Return: json data
[ "Calling", "the", "Recent", "Matches", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L189-L204
roanuz/py-cricket
src/pycricket.py
RcaApp.get_player_stats
def get_player_stats(self, player_key, board_key): """ Calling the Player Stats API Args: player_key: Key of the player board_key: key of the board Return: json data """ player_stats_url = self.api_path + 'player/' + player_key + '/league/' + board_key + '/stats/' response = self.get_response(player_stats_url) return response
python
def get_player_stats(self, player_key, board_key): """ Calling the Player Stats API Args: player_key: Key of the player board_key: key of the board Return: json data """ player_stats_url = self.api_path + 'player/' + player_key + '/league/' + board_key + '/stats/' response = self.get_response(player_stats_url) return response
[ "def", "get_player_stats", "(", "self", ",", "player_key", ",", "board_key", ")", ":", "player_stats_url", "=", "self", ".", "api_path", "+", "'player/'", "+", "player_key", "+", "'/league/'", "+", "board_key", "+", "'/stats/'", "response", "=", "self", ".", "get_response", "(", "player_stats_url", ")", "return", "response" ]
Calling the Player Stats API Args: player_key: Key of the player board_key: key of the board Return: json data
[ "Calling", "the", "Player", "Stats", "API", "Args", ":", "player_key", ":", "Key", "of", "the", "player", "board_key", ":", "key", "of", "the", "board", "Return", ":", "json", "data" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L206-L217
roanuz/py-cricket
src/pycricket.py
RcaApp.get_ball_by_ball
def get_ball_by_ball(self, match_key, over_key=None): """ match_key: key of the match over_key : key of the over Return: json data: """ if over_key: ball_by_ball_url = "{base_path}match/{match_key}/balls/{over_key}/".format(base_path=self.api_path, match_key=match_key, over_key=over_key) else: ball_by_ball_url = "{base_path}match/{match_key}/balls/".format(base_path=self.api_path, match_key=match_key) response = self.get_response(ball_by_ball_url) return response
python
def get_ball_by_ball(self, match_key, over_key=None): """ match_key: key of the match over_key : key of the over Return: json data: """ if over_key: ball_by_ball_url = "{base_path}match/{match_key}/balls/{over_key}/".format(base_path=self.api_path, match_key=match_key, over_key=over_key) else: ball_by_ball_url = "{base_path}match/{match_key}/balls/".format(base_path=self.api_path, match_key=match_key) response = self.get_response(ball_by_ball_url) return response
[ "def", "get_ball_by_ball", "(", "self", ",", "match_key", ",", "over_key", "=", "None", ")", ":", "if", "over_key", ":", "ball_by_ball_url", "=", "\"{base_path}match/{match_key}/balls/{over_key}/\"", ".", "format", "(", "base_path", "=", "self", ".", "api_path", ",", "match_key", "=", "match_key", ",", "over_key", "=", "over_key", ")", "else", ":", "ball_by_ball_url", "=", "\"{base_path}match/{match_key}/balls/\"", ".", "format", "(", "base_path", "=", "self", ".", "api_path", ",", "match_key", "=", "match_key", ")", "response", "=", "self", ".", "get_response", "(", "ball_by_ball_url", ")", "return", "response" ]
match_key: key of the match over_key : key of the over Return: json data:
[ "match_key", ":", "key", "of", "the", "match", "over_key", ":", "key", "of", "the", "over", "Return", ":", "json", "data", ":" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L219-L233
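A small sketch of the two URL shapes produced by get_ball_by_ball, depending on whether an over key is supplied; the keys and base path are placeholders.

def build_balls_url(base_path, match_key, over_key=None):
    # With an over key the endpoint narrows to one over; without it, it covers the whole match.
    if over_key:
        return '{base_path}match/{match_key}/balls/{over_key}/'.format(
            base_path=base_path, match_key=match_key, over_key=over_key)
    return '{base_path}match/{match_key}/balls/'.format(
        base_path=base_path, match_key=match_key)


print(build_balls_url('https://api.example.com/v2/', 'sample_match_key'))
print(build_balls_url('https://api.example.com/v2/', 'sample_match_key', 'sample_over_key'))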
roanuz/py-cricket
src/pycricket.py
RcaApp.get_recent_season_matches
def get_recent_season_matches(self, season_key): """ Calling specific season recent matches. Arg: season_key: key of the season. Return: json data """ season_recent_matches_url = self.api_path + "season/" + season_key + "/recent_matches/" response = self.get_response(season_recent_matches_url) return response
python
def get_recent_season_matches(self, season_key): """ Calling specific season recent matches. Arg: season_key: key of the season. Return: json data """ season_recent_matches_url = self.api_path + "season/" + season_key + "/recent_matches/" response = self.get_response(season_recent_matches_url) return response
[ "def", "get_recent_season_matches", "(", "self", ",", "season_key", ")", ":", "season_recent_matches_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/recent_matches/\"", "response", "=", "self", ".", "get_response", "(", "season_recent_matches_url", ")", "return", "response" ]
Calling specific season recent matches. Arg: season_key: key of the season. Return: json data
[ "Calling", "specific", "season", "recent", "matches", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L235-L247
roanuz/py-cricket
src/pycricket.py
RcaApp.get_recent_seasons
def get_recent_seasons(self): """ Calling the Recent Season API. Return: json data """ recent_seasons_url = self.api_path + "recent_seasons/" response = self.get_response(recent_seasons_url) return response
python
def get_recent_seasons(self): """ Calling the Recent Season API. Return: json data """ recent_seasons_url = self.api_path + "recent_seasons/" response = self.get_response(recent_seasons_url) return response
[ "def", "get_recent_seasons", "(", "self", ")", ":", "recent_seasons_url", "=", "self", ".", "api_path", "+", "\"recent_seasons/\"", "response", "=", "self", ".", "get_response", "(", "recent_seasons_url", ")", "return", "response" ]
Calling the Recent Season API. Return: json data
[ "Calling", "the", "Recent", "Season", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L249-L259
roanuz/py-cricket
src/pycricket.py
RcaApp.get_schedule
def get_schedule(self, date=None): """ Calling the Schedule API. Return: json data """ schedule_url = self.api_path + "schedule/" params = {} if date: params['date'] = date response = self.get_response(schedule_url, params) return response
python
def get_schedule(self, date=None): """ Calling the Schedule API. Return: json data """ schedule_url = self.api_path + "schedule/" params = {} if date: params['date'] = date response = self.get_response(schedule_url, params) return response
[ "def", "get_schedule", "(", "self", ",", "date", "=", "None", ")", ":", "schedule_url", "=", "self", ".", "api_path", "+", "\"schedule/\"", "params", "=", "{", "}", "if", "date", ":", "params", "[", "'date'", "]", "=", "date", "response", "=", "self", ".", "get_response", "(", "schedule_url", ",", "params", ")", "return", "response" ]
Calling the Schedule API. Return: json data
[ "Calling", "the", "Schedule", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L261-L274
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season_schedule
def get_season_schedule(self, season_key): """ Calling specific season schedule Arg: season_key: key of the season Return: json data """ schedule_url = self.api_path + "season/" + season_key + "/schedule/" response = self.get_response(schedule_url) return response
python
def get_season_schedule(self, season_key): """ Calling specific season schedule Arg: season_key: key of the season Return: json data """ schedule_url = self.api_path + "season/" + season_key + "/schedule/" response = self.get_response(schedule_url) return response
[ "def", "get_season_schedule", "(", "self", ",", "season_key", ")", ":", "schedule_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/schedule/\"", "response", "=", "self", ".", "get_response", "(", "schedule_url", ")", "return", "response" ]
Calling specific season schedule Arg: season_key: key of the season Return: json data
[ "Calling", "specific", "season", "schedule" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L276-L288
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season
def get_season(self, season_key, card_type="micro_card"): """ Calling Season API. Arg: season_key: key of the season card_type: optional, default to micro_card. Accepted values are micro_card & summary_card Return: json data """ season_url = self.api_path + "season/" + season_key + "/" params = {} params["card_type"] = card_type response = self.get_response(season_url, params) return response
python
def get_season(self, season_key, card_type="micro_card"): """ Calling Season API. Arg: season_key: key of the season card_type: optional, default to micro_card. Accepted values are micro_card & summary_card Return: json data """ season_url = self.api_path + "season/" + season_key + "/" params = {} params["card_type"] = card_type response = self.get_response(season_url, params) return response
[ "def", "get_season", "(", "self", ",", "season_key", ",", "card_type", "=", "\"micro_card\"", ")", ":", "season_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/\"", "params", "=", "{", "}", "params", "[", "\"card_type\"", "]", "=", "card_type", "response", "=", "self", ".", "get_response", "(", "season_url", ",", "params", ")", "return", "response" ]
Calling Season API. Arg: season_key: key of the season card_type: optional, default to micro_card. Accepted values are micro_card & summary_card Return: json data
[ "Calling", "Season", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L290-L306
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season_stats
def get_season_stats(self, season_key): """ Calling Season Stats API. Arg: season_key: key of the season Return: json data """ season_stats_url = self.api_path + "season/" + season_key + "/stats/" response = self.get_response(season_stats_url) return response
python
def get_season_stats(self, season_key): """ Calling Season Stats API. Arg: season_key: key of the season Return: json data """ season_stats_url = self.api_path + "season/" + season_key + "/stats/" response = self.get_response(season_stats_url) return response
[ "def", "get_season_stats", "(", "self", ",", "season_key", ")", ":", "season_stats_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/stats/\"", "response", "=", "self", ".", "get_response", "(", "season_stats_url", ")", "return", "response" ]
Calling Season Stats API. Arg: season_key: key of the season Return: json data
[ "Calling", "Season", "Stats", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L308-L320
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season_team
def get_season_team(self, season_key, season_team_key,stats_type=None): """ Calling Season teams API Arg: season_key: key of the season Return: json data """ params = {"stats_type": stats_type} season_team_url = self.api_path + 'season/' + season_key + '/team/' + season_team_key + '/' response = self.get_response(season_team_url, params=params) return response
python
def get_season_team(self, season_key, season_team_key,stats_type=None): """ Calling Season teams API Arg: season_key: key of the season Return: json data """ params = {"stats_type": stats_type} season_team_url = self.api_path + 'season/' + season_key + '/team/' + season_team_key + '/' response = self.get_response(season_team_url, params=params) return response
[ "def", "get_season_team", "(", "self", ",", "season_key", ",", "season_team_key", ",", "stats_type", "=", "None", ")", ":", "params", "=", "{", "\"stats_type\"", ":", "stats_type", "}", "season_team_url", "=", "self", ".", "api_path", "+", "'season/'", "+", "season_key", "+", "'/team/'", "+", "season_team_key", "+", "'/'", "response", "=", "self", ".", "get_response", "(", "season_team_url", ",", "params", "=", "params", ")", "return", "response" ]
Calling Season teams API Arg: season_key: key of the season Return: json data
[ "Calling", "Season", "teams", "API" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L322-L334
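A sketch of the season-team endpoint with its optional stats_type query parameter; the keys and the 'batting' value are invented examples, and unlike the recorded code this helper only includes stats_type when it is actually set.

def build_season_team_url(api_path, season_key, season_team_key, stats_type=None):
    # e.g. <api_path>season/<season_key>/team/<season_team_key>/?stats_type=batting
    url = api_path + 'season/' + season_key + '/team/' + season_team_key + '/'
    params = {'stats_type': stats_type} if stats_type else {}
    return url, params


print(build_season_team_url('https://api.example.com/v2/', 'sample_season', 'sample_team', 'batting'))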
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season_points
def get_season_points(self, season_key): """ Calling Season Points API. Arg: season_key: key of the season Return: json data """ season_points_url = self.api_path + "season/" + season_key + "/points/" response = self.get_response(season_points_url) return response
python
def get_season_points(self, season_key): """ Calling Season Points API. Arg: season_key: key of the season Return: json data """ season_points_url = self.api_path + "season/" + season_key + "/points/" response = self.get_response(season_points_url) return response
[ "def", "get_season_points", "(", "self", ",", "season_key", ")", ":", "season_points_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/points/\"", "response", "=", "self", ".", "get_response", "(", "season_points_url", ")", "return", "response" ]
Calling Season Points API. Arg: season_key: key of the season Return: json data
[ "Calling", "Season", "Points", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L336-L348
roanuz/py-cricket
src/pycricket.py
RcaApp.get_season_player_stats
def get_season_player_stats(self, season_key, player_key): """ Calling Season Player Stats API. Arg: season_key: key of the season player_key: key of the player Return: json data """ season_player_stats_url = self.api_path + "season/" + season_key + "/player/" + player_key + "/stats/" response = self.get_response(season_player_stats_url) return response
python
def get_season_player_stats(self, season_key, player_key): """ Calling Season Player Stats API. Arg: season_key: key of the season player_key: key of the player Return: json data """ season_player_stats_url = self.api_path + "season/" + season_key + "/player/" + player_key + "/stats/" response = self.get_response(season_player_stats_url) return response
[ "def", "get_season_player_stats", "(", "self", ",", "season_key", ",", "player_key", ")", ":", "season_player_stats_url", "=", "self", ".", "api_path", "+", "\"season/\"", "+", "season_key", "+", "\"/player/\"", "+", "player_key", "+", "\"/stats/\"", "response", "=", "self", ".", "get_response", "(", "season_player_stats_url", ")", "return", "response" ]
Calling Season Player Stats API. Arg: season_key: key of the season player_key: key of the player Return: json data
[ "Calling", "Season", "Player", "Stats", "API", "." ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L350-L363
roanuz/py-cricket
src/pycricket.py
RcaApp.get_overs_summary
def get_overs_summary(self, match_key): """ Calling Overs Summary API Arg: match_key: key of the match Return: json data """ overs_summary_url = self.api_path + "match/" + match_key + "/overs_summary/" response = self.get_response(overs_summary_url) return response
python
def get_overs_summary(self, match_key): """ Calling Overs Summary API Arg: match_key: key of the match Return: json data """ overs_summary_url = self.api_path + "match/" + match_key + "/overs_summary/" response = self.get_response(overs_summary_url) return response
[ "def", "get_overs_summary", "(", "self", ",", "match_key", ")", ":", "overs_summary_url", "=", "self", ".", "api_path", "+", "\"match/\"", "+", "match_key", "+", "\"/overs_summary/\"", "response", "=", "self", ".", "get_response", "(", "overs_summary_url", ")", "return", "response" ]
Calling Overs Summary API Arg: match_key: key of the match Return: json data
[ "Calling", "Overs", "Summary", "API" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L365-L376
roanuz/py-cricket
src/pycricket.py
RcaApp.get_news_aggregation
def get_news_aggregation(self): """ Calling News Aggregation API Return: json data """ news_aggregation_url = self.api_path + "news_aggregation" + "/" response = self.get_response(news_aggregation_url) return response
python
def get_news_aggregation(self): """ Calling News Aggregation API Return: json data """ news_aggregation_url = self.api_path + "news_aggregation" + "/" response = self.get_response(news_aggregation_url) return response
[ "def", "get_news_aggregation", "(", "self", ")", ":", "news_aggregation_url", "=", "self", ".", "api_path", "+", "\"news_aggregation\"", "+", "\"/\"", "response", "=", "self", ".", "get_response", "(", "news_aggregation_url", ")", "return", "response" ]
Calling News Aggregation API Return: json data
[ "Calling", "News", "Aggregation", "API" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L378-L388
roanuz/py-cricket
src/pycricket.py
RcaApp.get_fantasy_credits
def get_fantasy_credits(self, match_key): """ Calling Fantasy Credit API Arg: match_key: key of the match Return: json data """ fantasy_credit_url = self.api_path_v3 + "fantasy-match-credits/" + match_key + "/" response = self.get_response(fantasy_credit_url) return response
python
def get_fantasy_credits(self, match_key): """ Calling Fantasy Credit API Arg: match_key: key of the match Return: json data """ fantasy_credit_url = self.api_path_v3 + "fantasy-match-credits/" + match_key + "/" response = self.get_response(fantasy_credit_url) return response
[ "def", "get_fantasy_credits", "(", "self", ",", "match_key", ")", ":", "fantasy_credit_url", "=", "self", ".", "api_path_v3", "+", "\"fantasy-match-credits/\"", "+", "match_key", "+", "\"/\"", "response", "=", "self", ".", "get_response", "(", "fantasy_credit_url", ")", "return", "response" ]
Calling Fantasy Credit API Arg: match_key: key of the match Return: json data
[ "Calling", "Fantasy", "Credit", "API" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L390-L402
roanuz/py-cricket
src/pycricket.py
RcaApp.get_fantasy_points
def get_fantasy_points(self, match_key): """ Calling Fantasy Points API Arg: match_key: key of the match Return: json data """ fantasy_points_url = self.api_path_v3 + "fantasy-match-points/" + match_key + "/" response = self.get_response(fantasy_points_url) return response
python
def get_fantasy_points(self, match_key): """ Calling Fantasy Points API Arg: match_key: key of the match Return: json data """ fantasy_points_url = self.api_path_v3 + "fantasy-match-points/" + match_key + "/" response = self.get_response(fantasy_points_url) return response
[ "def", "get_fantasy_points", "(", "self", ",", "match_key", ")", ":", "fantasy_points_url", "=", "self", ".", "api_path_v3", "+", "\"fantasy-match-points/\"", "+", "match_key", "+", "\"/\"", "response", "=", "self", ".", "get_response", "(", "fantasy_points_url", ")", "return", "response" ]
Calling Fantasy Points API Arg: match_key: key of the match Return: json data
[ "Calling", "Fantasy", "Points", "API" ]
train
https://github.com/roanuz/py-cricket/blob/fa47fe2e92915fc58db38898213e974742af55d4/src/pycricket.py#L404-L416
HarveyHunt/i3situation
i3situation/plugins/reddit.py
RedditPlugin.main
def main(self): """ Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary. """ self.manage_submissions() out_string = self.options['format'] # Pop until we get something which len(title) <= max-chars length = float('inf') while length > self.options['max_chars']: self.selected_submission = self.submissions.pop() length = len(self.selected_submission['title']) for k, v in self.selected_submission.items(): out_string = out_string.replace(k, self.h.unescape(str(v))) return self.output(out_string, out_string)
python
def main(self): """ Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary. """ self.manage_submissions() out_string = self.options['format'] # Pop until we get something which len(title) <= max-chars length = float('inf') while length > self.options['max_chars']: self.selected_submission = self.submissions.pop() length = len(self.selected_submission['title']) for k, v in self.selected_submission.items(): out_string = out_string.replace(k, self.h.unescape(str(v))) return self.output(out_string, out_string)
[ "def", "main", "(", "self", ")", ":", "self", ".", "manage_submissions", "(", ")", "out_string", "=", "self", ".", "options", "[", "'format'", "]", "# Pop until we get something which len(title) <= max-chars", "length", "=", "float", "(", "'inf'", ")", "while", "length", ">", "self", ".", "options", "[", "'max_chars'", "]", ":", "self", ".", "selected_submission", "=", "self", ".", "submissions", ".", "pop", "(", ")", "length", "=", "len", "(", "self", ".", "selected_submission", "[", "'title'", "]", ")", "for", "k", ",", "v", "in", "self", ".", "selected_submission", ".", "items", "(", ")", ":", "out_string", "=", "out_string", ".", "replace", "(", "k", ",", "self", ".", "h", ".", "unescape", "(", "str", "(", "v", ")", ")", ")", "return", "self", ".", "output", "(", "out_string", ",", "out_string", ")" ]
Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary.
[ "Generates", "an", "output", "string", "by", "replacing", "the", "keywords", "in", "the", "format", "string", "with", "the", "corresponding", "values", "from", "a", "submission", "dictionary", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/reddit.py#L66-L82
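A self-contained sketch of the selection loop in the Reddit plugin's main: pop submissions until one has a short enough title, then substitute its fields into a format string. The sample submissions and format string are made up, and html.unescape stands in for the plugin's HTML parser instance.

import html


def pick_and_format(submissions, fmt, max_chars):
    # Pop from the end until a title fits; like the original, this raises IndexError
    # if the list runs out before a short enough title is found.
    while True:
        submission = submissions.pop()
        if len(submission['title']) <= max_chars:
            break
    out = fmt
    for key, value in submission.items():
        out = out.replace(key, html.unescape(str(value)))
    return out


subs = [{'title': 'An extremely long submission title that will never fit on the bar', 'score': 1200},
        {'title': 'Short title', 'score': 42}]
print(pick_and_format(subs, 'title (score)', max_chars=40))  # -> Short title (42)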
HarveyHunt/i3situation
i3situation/plugins/reddit.py
RedditPlugin.login
def login(self): """ Logs into Reddit in order to display a personalised front page. """ data = {'user': self.options['username'], 'passwd': self.options['password'], 'api_type': 'json'} response = self.client.post('http://www.reddit.com/api/login', data=data) self.client.modhash = response.json()['json']['data']['modhash']
python
def login(self): """ Logs into Reddit in order to display a personalised front page. """ data = {'user': self.options['username'], 'passwd': self.options['password'], 'api_type': 'json'} response = self.client.post('http://www.reddit.com/api/login', data=data) self.client.modhash = response.json()['json']['data']['modhash']
[ "def", "login", "(", "self", ")", ":", "data", "=", "{", "'user'", ":", "self", ".", "options", "[", "'username'", "]", ",", "'passwd'", ":", "self", ".", "options", "[", "'password'", "]", ",", "'api_type'", ":", "'json'", "}", "response", "=", "self", ".", "client", ".", "post", "(", "'http://www.reddit.com/api/login'", ",", "data", "=", "data", ")", "self", ".", "client", ".", "modhash", "=", "response", ".", "json", "(", ")", "[", "'json'", "]", "[", "'data'", "]", "[", "'modhash'", "]" ]
Logs into Reddit in order to display a personalised front page.
[ "Logs", "into", "Reddit", "in", "order", "to", "display", "a", "personalised", "front", "page", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/reddit.py#L84-L91
HarveyHunt/i3situation
i3situation/plugins/reddit.py
RedditPlugin.manage_submissions
def manage_submissions(self): """ If there are no or only one submissions left, get new submissions. This function manages URL creation and the specifics for front page or subreddit mode. """ if not hasattr(self, 'submissions') or len(self.submissions) == 1: self.submissions = [] if self.options['mode'] == 'front': # If there are no login details, the standard front # page will be displayed. if self.options['password'] and self.options['username']: self.login() url = 'http://reddit.com/.json?sort={0}'.format(self.options['sort']) self.submissions = self.get_submissions(url) elif self.options['mode'] == 'subreddit': for subreddit in self.options['subreddits']: url = 'http://reddit.com/r/{0}/.json?sort={1}'.format( subreddit, self.options['limit']) self.submissions += self.get_submissions(url) else: return
python
def manage_submissions(self): """ If there are no or only one submissions left, get new submissions. This function manages URL creation and the specifics for front page or subreddit mode. """ if not hasattr(self, 'submissions') or len(self.submissions) == 1: self.submissions = [] if self.options['mode'] == 'front': # If there are no login details, the standard front # page will be displayed. if self.options['password'] and self.options['username']: self.login() url = 'http://reddit.com/.json?sort={0}'.format(self.options['sort']) self.submissions = self.get_submissions(url) elif self.options['mode'] == 'subreddit': for subreddit in self.options['subreddits']: url = 'http://reddit.com/r/{0}/.json?sort={1}'.format( subreddit, self.options['limit']) self.submissions += self.get_submissions(url) else: return
[ "def", "manage_submissions", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'submissions'", ")", "or", "len", "(", "self", ".", "submissions", ")", "==", "1", ":", "self", ".", "submissions", "=", "[", "]", "if", "self", ".", "options", "[", "'mode'", "]", "==", "'front'", ":", "# If there are no login details, the standard front", "# page will be displayed.", "if", "self", ".", "options", "[", "'password'", "]", "and", "self", ".", "options", "[", "'username'", "]", ":", "self", ".", "login", "(", ")", "url", "=", "'http://reddit.com/.json?sort={0}'", ".", "format", "(", "self", ".", "options", "[", "'sort'", "]", ")", "self", ".", "submissions", "=", "self", ".", "get_submissions", "(", "url", ")", "elif", "self", ".", "options", "[", "'mode'", "]", "==", "'subreddit'", ":", "for", "subreddit", "in", "self", ".", "options", "[", "'subreddits'", "]", ":", "url", "=", "'http://reddit.com/r/{0}/.json?sort={1}'", ".", "format", "(", "subreddit", ",", "self", ".", "options", "[", "'limit'", "]", ")", "self", ".", "submissions", "+=", "self", ".", "get_submissions", "(", "url", ")", "else", ":", "return" ]
If there are no or only one submissions left, get new submissions. This function manages URL creation and the specifics for front page or subreddit mode.
[ "If", "there", "are", "no", "or", "only", "one", "submissions", "left", "get", "new", "submissions", ".", "This", "function", "manages", "URL", "creation", "and", "the", "specifics", "for", "front", "page", "or", "subreddit", "mode", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/reddit.py#L93-L114
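A sketch of the URL construction behind manage_submissions for the two modes. In the recorded code the subreddit URL interpolates self.options['limit'] into the sort parameter, which looks unintended, so this sketch passes the sort option instead; the subreddit names are placeholders.

def build_listing_urls(mode, sort, subreddits=()):
    # Front-page mode yields one URL; subreddit mode yields one URL per configured subreddit.
    if mode == 'front':
        return ['http://reddit.com/.json?sort={0}'.format(sort)]
    if mode == 'subreddit':
        return ['http://reddit.com/r/{0}/.json?sort={1}'.format(name, sort)
                for name in subreddits]
    return []


print(build_listing_urls('subreddit', 'hot', ['python', 'linux']))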
HarveyHunt/i3situation
i3situation/plugins/reddit.py
RedditPlugin.get_submissions
def get_submissions(self, url): """ Connects to Reddit and gets a JSON representation of submissions. This JSON data is then processed and returned. url: A url that requests for submissions should be sent to. """ response = self.client.get(url, params={'limit': self.options['limit']}) submissions = [x['data'] for x in response.json()['data']['children']] return submissions
python
def get_submissions(self, url): """ Connects to Reddit and gets a JSON representation of submissions. This JSON data is then processed and returned. url: A url that requests for submissions should be sent to. """ response = self.client.get(url, params={'limit': self.options['limit']}) submissions = [x['data'] for x in response.json()['data']['children']] return submissions
[ "def", "get_submissions", "(", "self", ",", "url", ")", ":", "response", "=", "self", ".", "client", ".", "get", "(", "url", ",", "params", "=", "{", "'limit'", ":", "self", ".", "options", "[", "'limit'", "]", "}", ")", "submissions", "=", "[", "x", "[", "'data'", "]", "for", "x", "in", "response", ".", "json", "(", ")", "[", "'data'", "]", "[", "'children'", "]", "]", "return", "submissions" ]
Connects to Reddit and gets a JSON representation of submissions. This JSON data is then processed and returned. url: A url that requests for submissions should be sent to.
[ "Connects", "to", "Reddit", "and", "gets", "a", "JSON", "representation", "of", "submissions", ".", "This", "JSON", "data", "is", "then", "processed", "and", "returned", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/reddit.py#L116-L125
HarveyHunt/i3situation
i3situation/plugins/cmus.py
CmusPlugin.main
def main(self): """ A compulsory function that gets the output of the cmus-remote -Q command and converts it to unicode in order for it to be processed and finally output. """ try: # Setting stderr to subprocess.STDOUT seems to stop the error # message returned by the process from being output to STDOUT. cmus_output = subprocess.check_output(['cmus-remote', '-Q'], stderr=subprocess.STDOUT).decode('utf-8') except subprocess.CalledProcessError: return self.output(None, None) if 'duration' in cmus_output: status = self.convert_cmus_output(cmus_output) out_string = self.options['format'] for k, v in status.items(): out_string = out_string.replace(k, v) else: out_string = None return self.output(out_string, out_string)
python
def main(self): """ A compulsory function that gets the output of the cmus-remote -Q command and converts it to unicode in order for it to be processed and finally output. """ try: # Setting stderr to subprocess.STDOUT seems to stop the error # message returned by the process from being output to STDOUT. cmus_output = subprocess.check_output(['cmus-remote', '-Q'], stderr=subprocess.STDOUT).decode('utf-8') except subprocess.CalledProcessError: return self.output(None, None) if 'duration' in cmus_output: status = self.convert_cmus_output(cmus_output) out_string = self.options['format'] for k, v in status.items(): out_string = out_string.replace(k, v) else: out_string = None return self.output(out_string, out_string)
[ "def", "main", "(", "self", ")", ":", "try", ":", "# Setting stderr to subprocess.STDOUT seems to stop the error", "# message returned by the process from being output to STDOUT.", "cmus_output", "=", "subprocess", ".", "check_output", "(", "[", "'cmus-remote'", ",", "'-Q'", "]", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", ".", "decode", "(", "'utf-8'", ")", "except", "subprocess", ".", "CalledProcessError", ":", "return", "self", ".", "output", "(", "None", ",", "None", ")", "if", "'duration'", "in", "cmus_output", ":", "status", "=", "self", ".", "convert_cmus_output", "(", "cmus_output", ")", "out_string", "=", "self", ".", "options", "[", "'format'", "]", "for", "k", ",", "v", "in", "status", ".", "items", "(", ")", ":", "out_string", "=", "out_string", ".", "replace", "(", "k", ",", "v", ")", "else", ":", "out_string", "=", "None", "return", "self", ".", "output", "(", "out_string", ",", "out_string", ")" ]
A compulsory function that gets the output of the cmus-remote -Q command and converts it to unicode in order for it to be processed and finally output.
[ "A", "compulsory", "function", "that", "gets", "the", "output", "of", "the", "cmus", "-", "remote", "-", "Q", "command", "and", "converts", "it", "to", "unicode", "in", "order", "for", "it", "to", "be", "processed", "and", "finally", "output", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/cmus.py#L44-L64
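A minimal sketch of the subprocess call used by the cmus plugin, with the same stderr redirection; it only reports whether cmus is reachable, and it additionally guards against cmus-remote being absent, which the recorded code does not.

import subprocess


def cmus_status_text():
    try:
        # Redirecting stderr keeps cmus-remote's own error text off the status bar.
        return subprocess.check_output(['cmus-remote', '-Q'],
                                       stderr=subprocess.STDOUT).decode('utf-8')
    except (subprocess.CalledProcessError, FileNotFoundError):
        return None


print('cmus running' if cmus_status_text() else 'cmus unavailable')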
HarveyHunt/i3situation
i3situation/plugins/cmus.py
CmusPlugin.convert_cmus_output
def convert_cmus_output(self, cmus_output): """ Change the newline separated string of output data into a dictionary which can then be used to replace the strings in the config format. cmus_output: A string with information about cmus that is newline separated. Running cmus-remote -Q in a terminal will show you what you're dealing with. """ cmus_output = cmus_output.split('\n') cmus_output = [x.replace('tag ', '') for x in cmus_output if not x in ''] cmus_output = [x.replace('set ', '') for x in cmus_output] status = {} partitioned = (item.partition(' ') for item in cmus_output) status = {item[0]: item[2] for item in partitioned} status['duration'] = self.convert_time(status['duration']) status['position'] = self.convert_time(status['position']) return status
python
def convert_cmus_output(self, cmus_output): """ Change the newline separated string of output data into a dictionary which can then be used to replace the strings in the config format. cmus_output: A string with information about cmus that is newline separated. Running cmus-remote -Q in a terminal will show you what you're dealing with. """ cmus_output = cmus_output.split('\n') cmus_output = [x.replace('tag ', '') for x in cmus_output if not x in ''] cmus_output = [x.replace('set ', '') for x in cmus_output] status = {} partitioned = (item.partition(' ') for item in cmus_output) status = {item[0]: item[2] for item in partitioned} status['duration'] = self.convert_time(status['duration']) status['position'] = self.convert_time(status['position']) return status
[ "def", "convert_cmus_output", "(", "self", ",", "cmus_output", ")", ":", "cmus_output", "=", "cmus_output", ".", "split", "(", "'\\n'", ")", "cmus_output", "=", "[", "x", ".", "replace", "(", "'tag '", ",", "''", ")", "for", "x", "in", "cmus_output", "if", "not", "x", "in", "''", "]", "cmus_output", "=", "[", "x", ".", "replace", "(", "'set '", ",", "''", ")", "for", "x", "in", "cmus_output", "]", "status", "=", "{", "}", "partitioned", "=", "(", "item", ".", "partition", "(", "' '", ")", "for", "item", "in", "cmus_output", ")", "status", "=", "{", "item", "[", "0", "]", ":", "item", "[", "2", "]", "for", "item", "in", "partitioned", "}", "status", "[", "'duration'", "]", "=", "self", ".", "convert_time", "(", "status", "[", "'duration'", "]", ")", "status", "[", "'position'", "]", "=", "self", ".", "convert_time", "(", "status", "[", "'position'", "]", ")", "return", "status" ]
Change the newline separated string of output data into a dictionary which can then be used to replace the strings in the config format. cmus_output: A string with information about cmus that is newline separated. Running cmus-remote -Q in a terminal will show you what you're dealing with.
[ "Change", "the", "newline", "separated", "string", "of", "output", "data", "into", "a", "dictionary", "which", "can", "then", "be", "used", "to", "replace", "the", "strings", "in", "the", "config", "format", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/cmus.py#L66-L84
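A standalone sketch of the parsing step in convert_cmus_output, fed a canned cmus-remote -Q snippet instead of live output so it can run anywhere; the time fields are left as plain seconds here.

SAMPLE = 'status playing\nfile /music/song.flac\nduration 254\nposition 17\ntag artist Some Artist\ntag title Some Song\nset shuffle true'


def parse_cmus_output(text):
    status = {}
    for line in text.split('\n'):
        if not line:
            continue  # skip blank lines, as the plugin's filter does
        line = line.replace('tag ', '').replace('set ', '')
        key, _, value = line.partition(' ')
        status[key] = value
    return status


print(parse_cmus_output(SAMPLE)['title'])  # -> Some Song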
HarveyHunt/i3situation
i3situation/plugins/cmus.py
CmusPlugin.convert_time
def convert_time(self, time): """ A helper function to convert seconds into hh:mm:ss for better readability. time: A string representing time in seconds. """ time_string = str(datetime.timedelta(seconds=int(time))) if time_string.split(':')[0] == '0': time_string = time_string.partition(':')[2] return time_string
python
def convert_time(self, time): """ A helper function to convert seconds into hh:mm:ss for better readability. time: A string representing time in seconds. """ time_string = str(datetime.timedelta(seconds=int(time))) if time_string.split(':')[0] == '0': time_string = time_string.partition(':')[2] return time_string
[ "def", "convert_time", "(", "self", ",", "time", ")", ":", "time_string", "=", "str", "(", "datetime", ".", "timedelta", "(", "seconds", "=", "int", "(", "time", ")", ")", ")", "if", "time_string", ".", "split", "(", "':'", ")", "[", "0", "]", "==", "'0'", ":", "time_string", "=", "time_string", ".", "partition", "(", "':'", ")", "[", "2", "]", "return", "time_string" ]
A helper function to convert seconds into hh:mm:ss for better readability. time: A string representing time in seconds.
[ "A", "helper", "function", "to", "convert", "seconds", "into", "hh", ":", "mm", ":", "ss", "for", "better", "readability", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/cmus.py#L86-L96
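The same hh:mm:ss trick in isolation, so the hour-stripping behaviour is easy to see; the input values are arbitrary.

import datetime


def humanize_seconds(seconds):
    # timedelta renders as 'H:MM:SS'; drop the hour field when it is zero.
    text = str(datetime.timedelta(seconds=int(seconds)))
    if text.split(':')[0] == '0':
        text = text.partition(':')[2]
    return text


print(humanize_seconds(254))   # -> 04:14
print(humanize_seconds(3725))  # -> 1:02:05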
HarveyHunt/i3situation
i3situation/plugins/_plugin.py
Plugin.output
def output(self, full_text, short_text): """ Output all of the options and data for a segment. full_text: A string representing the data that should be output to i3bar. short_text: A more concise version of full_text, in case there is minimal room on the i3bar. """ full_text = full_text.replace('\n', '') short_text = short_text.replace('\n', '') self.output_options.update({'full_text': full_text, 'short_text': short_text}) self.output_options = {k: v for k, v in self.output_options.items() if v} return self.output_options
python
def output(self, full_text, short_text): """ Output all of the options and data for a segment. full_text: A string representing the data that should be output to i3bar. short_text: A more concise version of full_text, in case there is minimal room on the i3bar. """ full_text = full_text.replace('\n', '') short_text = short_text.replace('\n', '') self.output_options.update({'full_text': full_text, 'short_text': short_text}) self.output_options = {k: v for k, v in self.output_options.items() if v} return self.output_options
[ "def", "output", "(", "self", ",", "full_text", ",", "short_text", ")", ":", "full_text", "=", "full_text", ".", "replace", "(", "'\\n'", ",", "''", ")", "short_text", "=", "short_text", ".", "replace", "(", "'\\n'", ",", "''", ")", "self", ".", "output_options", ".", "update", "(", "{", "'full_text'", ":", "full_text", ",", "'short_text'", ":", "short_text", "}", ")", "self", ".", "output_options", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "output_options", ".", "items", "(", ")", "if", "v", "}", "return", "self", ".", "output_options" ]
Output all of the options and data for a segment. full_text: A string representing the data that should be output to i3bar. short_text: A more concise version of full_text, in case there is minimal room on the i3bar.
[ "Output", "all", "of", "the", "options", "and", "data", "for", "a", "segment", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/_plugin.py#L29-L41
HarveyHunt/i3situation
i3situation/plugins/_plugin.py
Plugin.on_click
def on_click(self, event): """ A function that should be overwritten by a plugin that wishes to react to events, if it wants to perform any action other than running the supplied command related to a button. event: A dictionary passed from i3bar (after being decoded from JSON) that has the following format: event = {'name': 'my_plugin', 'x': 231, 'y': 423} Note: It is also possible to have an instance key, but i3situation doesn't set it. """ if event['button'] == 1 and 'button1' in self.options: subprocess.call(self.options['button1'].split()) elif event['button'] == 2 and 'button2' in self.options: subprocess.call(self.options['button2'].split()) elif event['button'] == 3 and 'button3' in self.options: subprocess.call(self.options['button3'].split())
python
def on_click(self, event): """ A function that should be overwritten by a plugin that wishes to react to events, if it wants to perform any action other than running the supplied command related to a button. event: A dictionary passed from i3bar (after being decoded from JSON) that has the following format: event = {'name': 'my_plugin', 'x': 231, 'y': 423} Note: It is also possible to have an instance key, but i3situation doesn't set it. """ if event['button'] == 1 and 'button1' in self.options: subprocess.call(self.options['button1'].split()) elif event['button'] == 2 and 'button2' in self.options: subprocess.call(self.options['button2'].split()) elif event['button'] == 3 and 'button3' in self.options: subprocess.call(self.options['button3'].split())
[ "def", "on_click", "(", "self", ",", "event", ")", ":", "if", "event", "[", "'button'", "]", "==", "1", "and", "'button1'", "in", "self", ".", "options", ":", "subprocess", ".", "call", "(", "self", ".", "options", "[", "'button1'", "]", ".", "split", "(", ")", ")", "elif", "event", "[", "'button'", "]", "==", "2", "and", "'button2'", "in", "self", ".", "options", ":", "subprocess", ".", "call", "(", "self", ".", "options", "[", "'button2'", "]", ".", "split", "(", ")", ")", "elif", "event", "[", "'button'", "]", "==", "3", "and", "'button3'", "in", "self", ".", "options", ":", "subprocess", ".", "call", "(", "self", ".", "options", "[", "'button3'", "]", ".", "split", "(", ")", ")" ]
A function that should be overwritten by a plugin that wishes to react to events, if it wants to perform any action other than running the supplied command related to a button. event: A dictionary passed from i3bar (after being decoded from JSON) that has the following format: event = {'name': 'my_plugin', 'x': 231, 'y': 423} Note: It is also possible to have an instance key, but i3situation doesn't set it.
[ "A", "function", "that", "should", "be", "overwritten", "by", "a", "plugin", "that", "wishes", "to", "react", "to", "events", "if", "it", "wants", "to", "perform", "any", "action", "other", "than", "running", "the", "supplied", "command", "related", "to", "a", "button", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/_plugin.py#L43-L61
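A sketch of how a plugin's buttonN options map to shell commands when a click event arrives; the event dictionary and the echo command are invented for illustration.

import subprocess


def handle_click(event, options):
    # i3bar numbers mouse buttons 1 (left), 2 (middle) and 3 (right).
    command = options.get('button{0}'.format(event.get('button')))
    if command:
        subprocess.call(command.split())


handle_click({'name': 'my_plugin', 'button': 1, 'x': 231, 'y': 423},
             {'button1': 'echo clicked'})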
nephila/django-ckeditor-filebrowser-filer
ckeditor_filebrowser_filer/views.py
url_reverse
def url_reverse(request): """ Reverse the requested URL (passed via GET / POST as `url_name` parameter) :param request: Request object :return: The reversed path """ if request.method in ('GET', 'POST'): data = getattr(request, request.method) url_name = data.get('url_name') try: path = urls.reverse(url_name, args=data.getlist('args')) (view_func, args, kwargs) = urls.resolve(path) return http.HttpResponse(path, content_type='text/plain') except urls.NoReverseMatch: return http.HttpResponse('Error', content_type='text/plain') return http.HttpResponseNotAllowed(('GET', 'POST'))
python
def url_reverse(request): """ Reverse the requested URL (passed via GET / POST as `url_name` parameter) :param request: Request object :return: The reversed path """ if request.method in ('GET', 'POST'): data = getattr(request, request.method) url_name = data.get('url_name') try: path = urls.reverse(url_name, args=data.getlist('args')) (view_func, args, kwargs) = urls.resolve(path) return http.HttpResponse(path, content_type='text/plain') except urls.NoReverseMatch: return http.HttpResponse('Error', content_type='text/plain') return http.HttpResponseNotAllowed(('GET', 'POST'))
[ "def", "url_reverse", "(", "request", ")", ":", "if", "request", ".", "method", "in", "(", "'GET'", ",", "'POST'", ")", ":", "data", "=", "getattr", "(", "request", ",", "request", ".", "method", ")", "url_name", "=", "data", ".", "get", "(", "'url_name'", ")", "try", ":", "path", "=", "urls", ".", "reverse", "(", "url_name", ",", "args", "=", "data", ".", "getlist", "(", "'args'", ")", ")", "(", "view_func", ",", "args", ",", "kwargs", ")", "=", "urls", ".", "resolve", "(", "path", ")", "return", "http", ".", "HttpResponse", "(", "path", ",", "content_type", "=", "'text/plain'", ")", "except", "urls", ".", "NoReverseMatch", ":", "return", "http", ".", "HttpResponse", "(", "'Error'", ",", "content_type", "=", "'text/plain'", ")", "return", "http", ".", "HttpResponseNotAllowed", "(", "(", "'GET'", ",", "'POST'", ")", ")" ]
Reverse the requested URL (passed via GET / POST as `url_name` parameter) :param request: Request object :return: The reversed path
[ "Reverse", "the", "requested", "URL", "(", "passed", "via", "GET", "/", "POST", "as", "url_name", "parameter", ")" ]
train
https://github.com/nephila/django-ckeditor-filebrowser-filer/blob/336ea259fbc6f9d338e3798375299f654a4d6995/ckeditor_filebrowser_filer/views.py#L41-L57
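A hedged sketch of the reverse-then-resolve core of the view above, stripped of the HTTP layer; it assumes it runs inside a configured Django project, and the url_name and args are whatever the caller supplies.

from django import urls


def reverse_or_error(url_name, args=()):
    try:
        path = urls.reverse(url_name, args=args)
        urls.resolve(path)  # raises Resolver404 if the path is not routable
        return path
    except urls.NoReverseMatch:
        return 'Error'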
nephila/django-ckeditor-filebrowser-filer
ckeditor_filebrowser_filer/views.py
url_image
def url_image(request, image_id, thumb_options=None, width=None, height=None): """ Converts a filer image ID into a complete path :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height') """ image = File.objects.get(pk=image_id) if getattr(image, 'canonical_url', None): url = image.canonical_url else: url = image.url thumb = _return_thumbnail(image, thumb_options, width, height) if thumb: image = thumb url = image.url data = { 'url': url, 'width': image.width, 'height': image.height, } return http.HttpResponse(json.dumps(data), content_type='application/json')
python
def url_image(request, image_id, thumb_options=None, width=None, height=None): """ Converts a filer image ID into a complete path :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height') """ image = File.objects.get(pk=image_id) if getattr(image, 'canonical_url', None): url = image.canonical_url else: url = image.url thumb = _return_thumbnail(image, thumb_options, width, height) if thumb: image = thumb url = image.url data = { 'url': url, 'width': image.width, 'height': image.height, } return http.HttpResponse(json.dumps(data), content_type='application/json')
[ "def", "url_image", "(", "request", ",", "image_id", ",", "thumb_options", "=", "None", ",", "width", "=", "None", ",", "height", "=", "None", ")", ":", "image", "=", "File", ".", "objects", ".", "get", "(", "pk", "=", "image_id", ")", "if", "getattr", "(", "image", ",", "'canonical_url'", ",", "None", ")", ":", "url", "=", "image", ".", "canonical_url", "else", ":", "url", "=", "image", ".", "url", "thumb", "=", "_return_thumbnail", "(", "image", ",", "thumb_options", ",", "width", ",", "height", ")", "if", "thumb", ":", "image", "=", "thumb", "url", "=", "image", ".", "url", "data", "=", "{", "'url'", ":", "url", ",", "'width'", ":", "image", ".", "width", ",", "'height'", ":", "image", ".", "height", ",", "}", "return", "http", ".", "HttpResponse", "(", "json", ".", "dumps", "(", "data", ")", ",", "content_type", "=", "'application/json'", ")" ]
Converts a filer image ID into a complete path :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height')
[ "Converts", "a", "filer", "image", "ID", "into", "a", "complete", "path" ]
train
https://github.com/nephila/django-ckeditor-filebrowser-filer/blob/336ea259fbc6f9d338e3798375299f654a4d6995/ckeditor_filebrowser_filer/views.py#L79-L104
nephila/django-ckeditor-filebrowser-filer
ckeditor_filebrowser_filer/views.py
thumbnail_options
def thumbnail_options(request): """ Returns the requested ThumbnailOption as JSON :param request: Request object :return: JSON serialized ThumbnailOption """ response_data = [{'id': opt.pk, 'name': opt.name} for opt in ThumbnailOption.objects.all()] return http.HttpResponse(json.dumps(response_data), content_type="application/json")
python
def thumbnail_options(request): """ Returns the requested ThumbnailOption as JSON :param request: Request object :return: JSON serialized ThumbnailOption """ response_data = [{'id': opt.pk, 'name': opt.name} for opt in ThumbnailOption.objects.all()] return http.HttpResponse(json.dumps(response_data), content_type="application/json")
[ "def", "thumbnail_options", "(", "request", ")", ":", "response_data", "=", "[", "{", "'id'", ":", "opt", ".", "pk", ",", "'name'", ":", "opt", ".", "name", "}", "for", "opt", "in", "ThumbnailOption", ".", "objects", ".", "all", "(", ")", "]", "return", "http", ".", "HttpResponse", "(", "json", ".", "dumps", "(", "response_data", ")", ",", "content_type", "=", "\"application/json\"", ")" ]
Returns the requested ThumbnailOption as JSON :param request: Request object :return: JSON serialized ThumbnailOption
[ "Returns", "the", "requested", "ThumbnailOption", "as", "JSON" ]
train
https://github.com/nephila/django-ckeditor-filebrowser-filer/blob/336ea259fbc6f9d338e3798375299f654a4d6995/ckeditor_filebrowser_filer/views.py#L107-L115
nephila/django-ckeditor-filebrowser-filer
ckeditor_filebrowser_filer/views.py
serve_image
def serve_image(request, image_id, thumb_options=None, width=None, height=None): """ returns the content of an image sized according to the parameters :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height') """ image = File.objects.get(pk=image_id) if getattr(image, 'canonical_url', None): url = image.canonical_url else: url = image.url thumb = _return_thumbnail(image, thumb_options, width, height) if thumb: return server.serve(request, file_obj=thumb, save_as=False) else: return HttpResponseRedirect(url)
python
def serve_image(request, image_id, thumb_options=None, width=None, height=None): """ returns the content of an image sized according to the parameters :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height') """ image = File.objects.get(pk=image_id) if getattr(image, 'canonical_url', None): url = image.canonical_url else: url = image.url thumb = _return_thumbnail(image, thumb_options, width, height) if thumb: return server.serve(request, file_obj=thumb, save_as=False) else: return HttpResponseRedirect(url)
[ "def", "serve_image", "(", "request", ",", "image_id", ",", "thumb_options", "=", "None", ",", "width", "=", "None", ",", "height", "=", "None", ")", ":", "image", "=", "File", ".", "objects", ".", "get", "(", "pk", "=", "image_id", ")", "if", "getattr", "(", "image", ",", "'canonical_url'", ",", "None", ")", ":", "url", "=", "image", ".", "canonical_url", "else", ":", "url", "=", "image", ".", "url", "thumb", "=", "_return_thumbnail", "(", "image", ",", "thumb_options", ",", "width", ",", "height", ")", "if", "thumb", ":", "return", "server", ".", "serve", "(", "request", ",", "file_obj", "=", "thumb", ",", "save_as", "=", "False", ")", "else", ":", "return", "HttpResponseRedirect", "(", "url", ")" ]
returns the content of an image sized according to the parameters :param request: Request object :param image_id: Filer image ID :param thumb_options: ThumbnailOption ID :param width: user-provided width :param height: user-provided height :return: JSON serialized URL components ('url', 'width', 'height')
[ "returns", "the", "content", "of", "an", "image", "sized", "according", "to", "the", "parameters" ]
train
https://github.com/nephila/django-ckeditor-filebrowser-filer/blob/336ea259fbc6f9d338e3798375299f654a4d6995/ckeditor_filebrowser_filer/views.py#L118-L138
HarveyHunt/i3situation
i3situation/core/config.py
Config._touch_dir
def _touch_dir(self, path): """ A helper function to create a directory if it doesn't exist. path: A string containing a full path to the directory to be created. """ try: os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise
python
def _touch_dir(self, path): """ A helper function to create a directory if it doesn't exist. path: A string containing a full path to the directory to be created. """ try: os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise
[ "def", "_touch_dir", "(", "self", ",", "path", ")", ":", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "!=", "errno", ".", "EEXIST", ":", "raise" ]
A helper function to create a directory if it doesn't exist. path: A string containing a full path to the directory to be created.
[ "A", "helper", "function", "to", "create", "a", "directory", "if", "it", "doesn", "t", "exist", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/config.py#L32-L42
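On Python 3.2+ the same guard can be written with exist_ok, shown here as an alternative sketch rather than a change to the plugin code; other OSError cases (such as permission errors) still propagate.

import os


def touch_dir(path):
    # Create the directory tree, ignoring only the 'already exists' case.
    os.makedirs(path, exist_ok=True)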
HarveyHunt/i3situation
i3situation/core/config.py
Config.reload
def reload(self): """ Reload the configuration from the file. This is in its own function so that it can be called at any time by another class. """ self._conf = configparser.ConfigParser() # Preserve the case of sections and keys. self._conf.optionxform = str self._conf.read(self.config_file_path) if 'general' not in self._conf.keys(): raise IncompleteConfigurationFile('Missing the general section') general = self._replace_data_types(dict(self._conf.items('general'))) self._conf.remove_section('general') plugin = [] for section in self._conf.sections(): plugin.append(dict(self._conf.items(section))) plugin[-1].update({'name': section}) plugin[-1] = self._replace_data_types(plugin[-1]) return (plugin, general)
python
def reload(self): """ Reload the configuration from the file. This is in its own function so that it can be called at any time by another class. """ self._conf = configparser.ConfigParser() # Preserve the case of sections and keys. self._conf.optionxform = str self._conf.read(self.config_file_path) if 'general' not in self._conf.keys(): raise IncompleteConfigurationFile('Missing the general section') general = self._replace_data_types(dict(self._conf.items('general'))) self._conf.remove_section('general') plugin = [] for section in self._conf.sections(): plugin.append(dict(self._conf.items(section))) plugin[-1].update({'name': section}) plugin[-1] = self._replace_data_types(plugin[-1]) return (plugin, general)
[ "def", "reload", "(", "self", ")", ":", "self", ".", "_conf", "=", "configparser", ".", "ConfigParser", "(", ")", "# Preserve the case of sections and keys.", "self", ".", "_conf", ".", "optionxform", "=", "str", "self", ".", "_conf", ".", "read", "(", "self", ".", "config_file_path", ")", "if", "'general'", "not", "in", "self", ".", "_conf", ".", "keys", "(", ")", ":", "raise", "IncompleteConfigurationFile", "(", "'Missing the general section'", ")", "general", "=", "self", ".", "_replace_data_types", "(", "dict", "(", "self", ".", "_conf", ".", "items", "(", "'general'", ")", ")", ")", "self", ".", "_conf", ".", "remove_section", "(", "'general'", ")", "plugin", "=", "[", "]", "for", "section", "in", "self", ".", "_conf", ".", "sections", "(", ")", ":", "plugin", ".", "append", "(", "dict", "(", "self", ".", "_conf", ".", "items", "(", "section", ")", ")", ")", "plugin", "[", "-", "1", "]", ".", "update", "(", "{", "'name'", ":", "section", "}", ")", "plugin", "[", "-", "1", "]", "=", "self", ".", "_replace_data_types", "(", "plugin", "[", "-", "1", "]", ")", "return", "(", "plugin", ",", "general", ")" ]
Reload the configuration from the file. This is in its own function so that it can be called at any time by another class.
[ "Reload", "the", "configuration", "from", "the", "file", ".", "This", "is", "in", "its", "own", "function", "so", "that", "it", "can", "be", "called", "at", "any", "time", "by", "another", "class", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/config.py#L50-L68
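A hedged sketch of the parsing behaviour that `Config.reload` above relies on: a mandatory `[general]` section plus one section per plugin, read with `optionxform = str` so key case is preserved. The section and key names below are illustrative, not taken from the repository.

import configparser

SAMPLE = """
[general]
interval = 1
loggingLevel = ERROR

[clock]
plugin = clock
interval = 1
"""

conf = configparser.ConfigParser()
conf.optionxform = str               # preserve key case, as reload() does
conf.read_string(SAMPLE)
print(dict(conf.items('general')))   # {'interval': '1', 'loggingLevel': 'ERROR'}
print(conf.sections())               # ['general', 'clock']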
HarveyHunt/i3situation
i3situation/core/config.py
Config._replace_data_types
def _replace_data_types(dictionary):
    """
    Replaces strings with appropriate data types (int, boolean).
    Also replaces the human readable logging levels with the integer form.

    dictionary: A dictionary returned from the config file.
    """
    logging_levels = {'NONE': 0, 'NULL': 0, 'DEBUG': 10, 'INFO': 20,
                      'WARNING': 30, 'ERROR': 40, 'CRITICAL': 50}
    for k, v in dictionary.items():
        if v in ['true', 'True', 'on']:
            dictionary[k] = True
        elif v in ['false', 'False', 'off']:
            dictionary[k] = False
        elif k == 'log_file' and '~' in v:
            dictionary[k] = v.replace('~', os.path.expanduser('~'))
        elif v in logging_levels:
            dictionary[k] = logging_levels[v]
        elif isinstance(v, str) and v.isnumeric():
            dictionary[k] = int(v)
        elif ',' in v:
            dictionary[k] = [x.strip() for x in v.split(',')]
    return dictionary
python
def _replace_data_types(dictionary):
    """
    Replaces strings with appropriate data types (int, boolean).
    Also replaces the human readable logging levels with the integer form.

    dictionary: A dictionary returned from the config file.
    """
    logging_levels = {'NONE': 0, 'NULL': 0, 'DEBUG': 10, 'INFO': 20,
                      'WARNING': 30, 'ERROR': 40, 'CRITICAL': 50}
    for k, v in dictionary.items():
        if v in ['true', 'True', 'on']:
            dictionary[k] = True
        elif v in ['false', 'False', 'off']:
            dictionary[k] = False
        elif k == 'log_file' and '~' in v:
            dictionary[k] = v.replace('~', os.path.expanduser('~'))
        elif v in logging_levels:
            dictionary[k] = logging_levels[v]
        elif isinstance(v, str) and v.isnumeric():
            dictionary[k] = int(v)
        elif ',' in v:
            dictionary[k] = [x.strip() for x in v.split(',')]
    return dictionary
[ "def", "_replace_data_types", "(", "dictionary", ")", ":", "logging_levels", "=", "{", "'NONE'", ":", "0", ",", "'NULL'", ":", "0", ",", "'DEBUG'", ":", "10", ",", "'INFO'", ":", "20", ",", "'WARNING'", ":", "30", ",", "'ERROR'", ":", "40", ",", "'CRITICAL'", ":", "50", "}", "for", "k", ",", "v", "in", "dictionary", ".", "items", "(", ")", ":", "if", "v", "in", "[", "'true'", ",", "'True'", ",", "'on'", "]", ":", "dictionary", "[", "k", "]", "=", "True", "elif", "v", "in", "[", "'false'", ",", "'False'", ",", "'off'", "]", ":", "dictionary", "[", "k", "]", "=", "False", "elif", "k", "==", "'log_file'", "and", "'~'", "in", "v", ":", "dictionary", "[", "k", "]", "=", "v", ".", "replace", "(", "'~'", ",", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ")", "elif", "v", "in", "logging_levels", ":", "dictionary", "[", "k", "]", "=", "logging_levels", "[", "v", "]", "elif", "isinstance", "(", "v", ",", "str", ")", "and", "v", ".", "isnumeric", "(", ")", ":", "dictionary", "[", "k", "]", "=", "int", "(", "v", ")", "elif", "','", "in", "v", ":", "dictionary", "[", "k", "]", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "v", ".", "split", "(", "','", ")", "]", "return", "dictionary" ]
Replaces strings with appropriate data types (int, boolean). Also replaces the human readable logging levels with the integer form. dictionary: A dictionary returned from the config file.
[ "Replaces", "strings", "with", "appropriate", "data", "types", "(", "int", "boolean", ")", ".", "Also", "replaces", "the", "human", "readable", "logging", "levels", "with", "the", "integer", "form", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/config.py#L71-L93
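A small sketch of the coercions performed by `_replace_data_types` above. The input keys and values are illustrative, the module path is inferred from the file path shown in this row, and the example assumes the method is exposed as a staticmethod.

from i3situation.core.config import Config   # module path inferred from the file path above

raw = {
    'interval': '5',                 # numeric string -> int
    'loggingLevel': 'DEBUG',         # human readable level -> 10
    'colour': 'on',                  # 'on' -> True
    'plugins': 'clock, battery',     # comma separated -> list of strings
    'log_file': '~/.i3situation.log' # '~' expanded to the user's home directory
}
clean = Config._replace_data_types(raw)
# Expected shape of the result:
# {'interval': 5, 'loggingLevel': 10, 'colour': True,
#  'plugins': ['clock', 'battery'], 'log_file': '/home/<user>/.i3situation.log'}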
xolox/python-rotate-backups
rotate_backups/cli.py
main
def main():
    """Command line interface for the ``rotate-backups`` program."""
    coloredlogs.install(syslog=True)
    # Command line option defaults.
    rotation_scheme = {}
    kw = dict(include_list=[], exclude_list=[])
    parallel = False
    use_sudo = False
    # Internal state.
    selected_locations = []
    # Parse the command line arguments.
    try:
        options, arguments = getopt.getopt(sys.argv[1:], 'M:H:d:w:m:y:I:x:jpri:c:r:uC:nvqh', [
            'minutely=', 'hourly=', 'daily=', 'weekly=', 'monthly=', 'yearly=',
            'include=', 'exclude=', 'parallel', 'prefer-recent', 'relaxed',
            'ionice=', 'config=', 'use-sudo', 'dry-run', 'removal-command=',
            'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-M', '--minutely'):
                rotation_scheme['minutely'] = coerce_retention_period(value)
            elif option in ('-H', '--hourly'):
                rotation_scheme['hourly'] = coerce_retention_period(value)
            elif option in ('-d', '--daily'):
                rotation_scheme['daily'] = coerce_retention_period(value)
            elif option in ('-w', '--weekly'):
                rotation_scheme['weekly'] = coerce_retention_period(value)
            elif option in ('-m', '--monthly'):
                rotation_scheme['monthly'] = coerce_retention_period(value)
            elif option in ('-y', '--yearly'):
                rotation_scheme['yearly'] = coerce_retention_period(value)
            elif option in ('-I', '--include'):
                kw['include_list'].append(value)
            elif option in ('-x', '--exclude'):
                kw['exclude_list'].append(value)
            elif option in ('-j', '--parallel'):
                parallel = True
            elif option in ('-p', '--prefer-recent'):
                kw['prefer_recent'] = True
            elif option in ('-r', '--relaxed'):
                kw['strict'] = False
            elif option in ('-i', '--ionice'):
                value = validate_ionice_class(value.lower().strip())
                kw['io_scheduling_class'] = value
            elif option in ('-c', '--config'):
                kw['config_file'] = parse_path(value)
            elif option in ('-u', '--use-sudo'):
                use_sudo = True
            elif option in ('-n', '--dry-run'):
                logger.info("Performing a dry run (because of %s option) ..", option)
                kw['dry_run'] = True
            elif option in ('-C', '--removal-command'):
                removal_command = shlex.split(value)
                logger.info("Using custom removal command: %s", removal_command)
                kw['removal_command'] = removal_command
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                usage(__doc__)
                return
            else:
                assert False, "Unhandled option! (programming error)"
        if rotation_scheme:
            logger.verbose("Rotation scheme defined on command line: %s", rotation_scheme)
        if arguments:
            # Rotation of the locations given on the command line.
            location_source = 'command line arguments'
            selected_locations.extend(coerce_location(value, sudo=use_sudo) for value in arguments)
        else:
            # Rotation of all configured locations.
            location_source = 'configuration file'
            selected_locations.extend(
                location for location, rotation_scheme, options
                in load_config_file(configuration_file=kw.get('config_file'), expand=True)
            )
        # Inform the user which location(s) will be rotated.
        if selected_locations:
            logger.verbose("Selected %s based on %s:",
                           pluralize(len(selected_locations), "location"),
                           location_source)
            for number, location in enumerate(selected_locations, start=1):
                logger.verbose(" %i. %s", number, location)
        else:
            # Show the usage message when no directories are given nor configured.
            logger.verbose("No location(s) to rotate selected.")
            usage(__doc__)
            return
    except Exception as e:
        logger.error("%s", e)
        sys.exit(1)
    # Rotate the backups in the selected directories.
    program = RotateBackups(rotation_scheme, **kw)
    if parallel:
        program.rotate_concurrent(*selected_locations)
    else:
        for location in selected_locations:
            program.rotate_backups(location)
python
def main():
    """Command line interface for the ``rotate-backups`` program."""
    coloredlogs.install(syslog=True)
    # Command line option defaults.
    rotation_scheme = {}
    kw = dict(include_list=[], exclude_list=[])
    parallel = False
    use_sudo = False
    # Internal state.
    selected_locations = []
    # Parse the command line arguments.
    try:
        options, arguments = getopt.getopt(sys.argv[1:], 'M:H:d:w:m:y:I:x:jpri:c:r:uC:nvqh', [
            'minutely=', 'hourly=', 'daily=', 'weekly=', 'monthly=', 'yearly=',
            'include=', 'exclude=', 'parallel', 'prefer-recent', 'relaxed',
            'ionice=', 'config=', 'use-sudo', 'dry-run', 'removal-command=',
            'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-M', '--minutely'):
                rotation_scheme['minutely'] = coerce_retention_period(value)
            elif option in ('-H', '--hourly'):
                rotation_scheme['hourly'] = coerce_retention_period(value)
            elif option in ('-d', '--daily'):
                rotation_scheme['daily'] = coerce_retention_period(value)
            elif option in ('-w', '--weekly'):
                rotation_scheme['weekly'] = coerce_retention_period(value)
            elif option in ('-m', '--monthly'):
                rotation_scheme['monthly'] = coerce_retention_period(value)
            elif option in ('-y', '--yearly'):
                rotation_scheme['yearly'] = coerce_retention_period(value)
            elif option in ('-I', '--include'):
                kw['include_list'].append(value)
            elif option in ('-x', '--exclude'):
                kw['exclude_list'].append(value)
            elif option in ('-j', '--parallel'):
                parallel = True
            elif option in ('-p', '--prefer-recent'):
                kw['prefer_recent'] = True
            elif option in ('-r', '--relaxed'):
                kw['strict'] = False
            elif option in ('-i', '--ionice'):
                value = validate_ionice_class(value.lower().strip())
                kw['io_scheduling_class'] = value
            elif option in ('-c', '--config'):
                kw['config_file'] = parse_path(value)
            elif option in ('-u', '--use-sudo'):
                use_sudo = True
            elif option in ('-n', '--dry-run'):
                logger.info("Performing a dry run (because of %s option) ..", option)
                kw['dry_run'] = True
            elif option in ('-C', '--removal-command'):
                removal_command = shlex.split(value)
                logger.info("Using custom removal command: %s", removal_command)
                kw['removal_command'] = removal_command
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                usage(__doc__)
                return
            else:
                assert False, "Unhandled option! (programming error)"
        if rotation_scheme:
            logger.verbose("Rotation scheme defined on command line: %s", rotation_scheme)
        if arguments:
            # Rotation of the locations given on the command line.
            location_source = 'command line arguments'
            selected_locations.extend(coerce_location(value, sudo=use_sudo) for value in arguments)
        else:
            # Rotation of all configured locations.
            location_source = 'configuration file'
            selected_locations.extend(
                location for location, rotation_scheme, options
                in load_config_file(configuration_file=kw.get('config_file'), expand=True)
            )
        # Inform the user which location(s) will be rotated.
        if selected_locations:
            logger.verbose("Selected %s based on %s:",
                           pluralize(len(selected_locations), "location"),
                           location_source)
            for number, location in enumerate(selected_locations, start=1):
                logger.verbose(" %i. %s", number, location)
        else:
            # Show the usage message when no directories are given nor configured.
            logger.verbose("No location(s) to rotate selected.")
            usage(__doc__)
            return
    except Exception as e:
        logger.error("%s", e)
        sys.exit(1)
    # Rotate the backups in the selected directories.
    program = RotateBackups(rotation_scheme, **kw)
    if parallel:
        program.rotate_concurrent(*selected_locations)
    else:
        for location in selected_locations:
            program.rotate_backups(location)
[ "def", "main", "(", ")", ":", "coloredlogs", ".", "install", "(", "syslog", "=", "True", ")", "# Command line option defaults.", "rotation_scheme", "=", "{", "}", "kw", "=", "dict", "(", "include_list", "=", "[", "]", ",", "exclude_list", "=", "[", "]", ")", "parallel", "=", "False", "use_sudo", "=", "False", "# Internal state.", "selected_locations", "=", "[", "]", "# Parse the command line arguments.", "try", ":", "options", ",", "arguments", "=", "getopt", ".", "getopt", "(", "sys", ".", "argv", "[", "1", ":", "]", ",", "'M:H:d:w:m:y:I:x:jpri:c:r:uC:nvqh'", ",", "[", "'minutely='", ",", "'hourly='", ",", "'daily='", ",", "'weekly='", ",", "'monthly='", ",", "'yearly='", ",", "'include='", ",", "'exclude='", ",", "'parallel'", ",", "'prefer-recent'", ",", "'relaxed'", ",", "'ionice='", ",", "'config='", ",", "'use-sudo'", ",", "'dry-run'", ",", "'removal-command='", ",", "'verbose'", ",", "'quiet'", ",", "'help'", ",", "]", ")", "for", "option", ",", "value", "in", "options", ":", "if", "option", "in", "(", "'-M'", ",", "'--minutely'", ")", ":", "rotation_scheme", "[", "'minutely'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-H'", ",", "'--hourly'", ")", ":", "rotation_scheme", "[", "'hourly'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-d'", ",", "'--daily'", ")", ":", "rotation_scheme", "[", "'daily'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-w'", ",", "'--weekly'", ")", ":", "rotation_scheme", "[", "'weekly'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-m'", ",", "'--monthly'", ")", ":", "rotation_scheme", "[", "'monthly'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-y'", ",", "'--yearly'", ")", ":", "rotation_scheme", "[", "'yearly'", "]", "=", "coerce_retention_period", "(", "value", ")", "elif", "option", "in", "(", "'-I'", ",", "'--include'", ")", ":", "kw", "[", "'include_list'", "]", ".", "append", "(", "value", ")", "elif", "option", "in", "(", "'-x'", ",", "'--exclude'", ")", ":", "kw", "[", "'exclude_list'", "]", ".", "append", "(", "value", ")", "elif", "option", "in", "(", "'-j'", ",", "'--parallel'", ")", ":", "parallel", "=", "True", "elif", "option", "in", "(", "'-p'", ",", "'--prefer-recent'", ")", ":", "kw", "[", "'prefer_recent'", "]", "=", "True", "elif", "option", "in", "(", "'-r'", ",", "'--relaxed'", ")", ":", "kw", "[", "'strict'", "]", "=", "False", "elif", "option", "in", "(", "'-i'", ",", "'--ionice'", ")", ":", "value", "=", "validate_ionice_class", "(", "value", ".", "lower", "(", ")", ".", "strip", "(", ")", ")", "kw", "[", "'io_scheduling_class'", "]", "=", "value", "elif", "option", "in", "(", "'-c'", ",", "'--config'", ")", ":", "kw", "[", "'config_file'", "]", "=", "parse_path", "(", "value", ")", "elif", "option", "in", "(", "'-u'", ",", "'--use-sudo'", ")", ":", "use_sudo", "=", "True", "elif", "option", "in", "(", "'-n'", ",", "'--dry-run'", ")", ":", "logger", ".", "info", "(", "\"Performing a dry run (because of %s option) ..\"", ",", "option", ")", "kw", "[", "'dry_run'", "]", "=", "True", "elif", "option", "in", "(", "'-C'", ",", "'--removal-command'", ")", ":", "removal_command", "=", "shlex", ".", "split", "(", "value", ")", "logger", ".", "info", "(", "\"Using custom removal command: %s\"", ",", "removal_command", ")", "kw", "[", "'removal_command'", "]", "=", "removal_command", "elif", "option", "in", "(", "'-v'", ",", 
"'--verbose'", ")", ":", "coloredlogs", ".", "increase_verbosity", "(", ")", "elif", "option", "in", "(", "'-q'", ",", "'--quiet'", ")", ":", "coloredlogs", ".", "decrease_verbosity", "(", ")", "elif", "option", "in", "(", "'-h'", ",", "'--help'", ")", ":", "usage", "(", "__doc__", ")", "return", "else", ":", "assert", "False", ",", "\"Unhandled option! (programming error)\"", "if", "rotation_scheme", ":", "logger", ".", "verbose", "(", "\"Rotation scheme defined on command line: %s\"", ",", "rotation_scheme", ")", "if", "arguments", ":", "# Rotation of the locations given on the command line.", "location_source", "=", "'command line arguments'", "selected_locations", ".", "extend", "(", "coerce_location", "(", "value", ",", "sudo", "=", "use_sudo", ")", "for", "value", "in", "arguments", ")", "else", ":", "# Rotation of all configured locations.", "location_source", "=", "'configuration file'", "selected_locations", ".", "extend", "(", "location", "for", "location", ",", "rotation_scheme", ",", "options", "in", "load_config_file", "(", "configuration_file", "=", "kw", ".", "get", "(", "'config_file'", ")", ",", "expand", "=", "True", ")", ")", "# Inform the user which location(s) will be rotated.", "if", "selected_locations", ":", "logger", ".", "verbose", "(", "\"Selected %s based on %s:\"", ",", "pluralize", "(", "len", "(", "selected_locations", ")", ",", "\"location\"", ")", ",", "location_source", ")", "for", "number", ",", "location", "in", "enumerate", "(", "selected_locations", ",", "start", "=", "1", ")", ":", "logger", ".", "verbose", "(", "\" %i. %s\"", ",", "number", ",", "location", ")", "else", ":", "# Show the usage message when no directories are given nor configured.", "logger", ".", "verbose", "(", "\"No location(s) to rotate selected.\"", ")", "usage", "(", "__doc__", ")", "return", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "\"%s\"", ",", "e", ")", "sys", ".", "exit", "(", "1", ")", "# Rotate the backups in the selected directories.", "program", "=", "RotateBackups", "(", "rotation_scheme", ",", "*", "*", "kw", ")", "if", "parallel", ":", "program", ".", "rotate_concurrent", "(", "*", "selected_locations", ")", "else", ":", "for", "location", "in", "selected_locations", ":", "program", ".", "rotate_backups", "(", "location", ")" ]
Command line interface for the ``rotate-backups`` program.
[ "Command", "line", "interface", "for", "the", "rotate", "-", "backups", "program", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/cli.py#L205-L303
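A hedged sketch of exercising the CLI entry point above from Python by patching `sys.argv`. The option names come straight from the getopt specification in `main()`; the backup directory is hypothetical and `--dry-run` keeps the run harmless.

import sys
from rotate_backups.cli import main   # module path taken from the row above

# Simulate: rotate-backups --daily=7 --weekly=4 --dry-run /mnt/backups
sys.argv = ['rotate-backups', '--daily=7', '--weekly=4', '--dry-run', '/mnt/backups']
main()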
HarveyHunt/i3situation
i3situation/plugins/battery.py
BatteryPlugin.get_battery_state
def get_battery_state(self, prop):
    """
    Return the first line from the file located at battery_path/prop
    as a string.
    """
    with open(os.path.join(self.options['battery_path'], prop), 'r') as f:
        return f.readline().strip()
python
def get_battery_state(self, prop):
    """
    Return the first line from the file located at battery_path/prop
    as a string.
    """
    with open(os.path.join(self.options['battery_path'], prop), 'r') as f:
        return f.readline().strip()
[ "def", "get_battery_state", "(", "self", ",", "prop", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "options", "[", "'battery_path'", "]", ",", "prop", ")", ",", "'r'", ")", "as", "f", ":", "return", "f", ".", "readline", "(", ")", ".", "strip", "(", ")" ]
Return the first line from the file located at battery_path/prop as a string.
[ "Return", "the", "first", "line", "from", "the", "file", "located", "at", "battery_path", "/", "prop", "as", "a", "string", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/plugins/battery.py#L52-L58
xolox/python-rotate-backups
rotate_backups/__init__.py
coerce_location
def coerce_location(value, **options):
    """
    Coerce a string to a :class:`Location` object.

    :param value: The value to coerce (a string or :class:`Location` object).
    :param options: Any keyword arguments are passed on to
                    :func:`~executor.contexts.create_context()`.
    :returns: A :class:`Location` object.
    """
    # Location objects pass through untouched.
    if not isinstance(value, Location):
        # Other values are expected to be strings.
        if not isinstance(value, string_types):
            msg = "Expected Location object or string, got %s instead!"
            raise ValueError(msg % type(value))
        # Try to parse a remote location.
        ssh_alias, _, directory = value.partition(':')
        if ssh_alias and directory and '/' not in ssh_alias:
            options['ssh_alias'] = ssh_alias
        else:
            directory = value
        # Create the location object.
        value = Location(
            context=create_context(**options),
            directory=parse_path(directory),
        )
    return value
python
def coerce_location(value, **options):
    """
    Coerce a string to a :class:`Location` object.

    :param value: The value to coerce (a string or :class:`Location` object).
    :param options: Any keyword arguments are passed on to
                    :func:`~executor.contexts.create_context()`.
    :returns: A :class:`Location` object.
    """
    # Location objects pass through untouched.
    if not isinstance(value, Location):
        # Other values are expected to be strings.
        if not isinstance(value, string_types):
            msg = "Expected Location object or string, got %s instead!"
            raise ValueError(msg % type(value))
        # Try to parse a remote location.
        ssh_alias, _, directory = value.partition(':')
        if ssh_alias and directory and '/' not in ssh_alias:
            options['ssh_alias'] = ssh_alias
        else:
            directory = value
        # Create the location object.
        value = Location(
            context=create_context(**options),
            directory=parse_path(directory),
        )
    return value
[ "def", "coerce_location", "(", "value", ",", "*", "*", "options", ")", ":", "# Location objects pass through untouched.", "if", "not", "isinstance", "(", "value", ",", "Location", ")", ":", "# Other values are expected to be strings.", "if", "not", "isinstance", "(", "value", ",", "string_types", ")", ":", "msg", "=", "\"Expected Location object or string, got %s instead!\"", "raise", "ValueError", "(", "msg", "%", "type", "(", "value", ")", ")", "# Try to parse a remote location.", "ssh_alias", ",", "_", ",", "directory", "=", "value", ".", "partition", "(", "':'", ")", "if", "ssh_alias", "and", "directory", "and", "'/'", "not", "in", "ssh_alias", ":", "options", "[", "'ssh_alias'", "]", "=", "ssh_alias", "else", ":", "directory", "=", "value", "# Create the location object.", "value", "=", "Location", "(", "context", "=", "create_context", "(", "*", "*", "options", ")", ",", "directory", "=", "parse_path", "(", "directory", ")", ",", ")", "return", "value" ]
Coerce a string to a :class:`Location` object. :param value: The value to coerce (a string or :class:`Location` object). :param options: Any keyword arguments are passed on to :func:`~executor.contexts.create_context()`. :returns: A :class:`Location` object.
[ "Coerce", "a", "string", "to", "a", ":", "class", ":", "Location", "object", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L93-L119
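A short sketch of how `coerce_location` above distinguishes local directories from `alias:directory` style remote locations. The directory names and the SSH alias are hypothetical; the function lives in `rotate_backups/__init__.py`, so it is imported from the package root.

from rotate_backups import coerce_location

local = coerce_location('/mnt/backups/laptop')
remote = coerce_location('backup-server:/srv/backups/laptop')   # 'backup-server' is an SSH alias

print(local.directory)    # /mnt/backups/laptop (local execution context)
print(remote.directory)   # /srv/backups/laptop (remote context for 'backup-server')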
xolox/python-rotate-backups
rotate_backups/__init__.py
coerce_retention_period
def coerce_retention_period(value):
    """
    Coerce a retention period to a Python value.

    :param value: A string containing the text 'always', a number or an
                  expression that can be evaluated to a number.
    :returns: A number or the string 'always'.
    :raises: :exc:`~exceptions.ValueError` when the string can't be coerced.
    """
    # Numbers pass through untouched.
    if not isinstance(value, numbers.Number):
        # Other values are expected to be strings.
        if not isinstance(value, string_types):
            msg = "Expected string, got %s instead!"
            raise ValueError(msg % type(value))
        # Check for the literal string `always'.
        value = value.strip()
        if value.lower() == 'always':
            value = 'always'
        else:
            # Evaluate other strings as expressions.
            value = simple_eval(value)
            if not isinstance(value, numbers.Number):
                msg = "Expected numeric result, got %s instead!"
                raise ValueError(msg % type(value))
    return value
python
def coerce_retention_period(value):
    """
    Coerce a retention period to a Python value.

    :param value: A string containing the text 'always', a number or an
                  expression that can be evaluated to a number.
    :returns: A number or the string 'always'.
    :raises: :exc:`~exceptions.ValueError` when the string can't be coerced.
    """
    # Numbers pass through untouched.
    if not isinstance(value, numbers.Number):
        # Other values are expected to be strings.
        if not isinstance(value, string_types):
            msg = "Expected string, got %s instead!"
            raise ValueError(msg % type(value))
        # Check for the literal string `always'.
        value = value.strip()
        if value.lower() == 'always':
            value = 'always'
        else:
            # Evaluate other strings as expressions.
            value = simple_eval(value)
            if not isinstance(value, numbers.Number):
                msg = "Expected numeric result, got %s instead!"
                raise ValueError(msg % type(value))
    return value
[ "def", "coerce_retention_period", "(", "value", ")", ":", "# Numbers pass through untouched.", "if", "not", "isinstance", "(", "value", ",", "numbers", ".", "Number", ")", ":", "# Other values are expected to be strings.", "if", "not", "isinstance", "(", "value", ",", "string_types", ")", ":", "msg", "=", "\"Expected string, got %s instead!\"", "raise", "ValueError", "(", "msg", "%", "type", "(", "value", ")", ")", "# Check for the literal string `always'.", "value", "=", "value", ".", "strip", "(", ")", "if", "value", ".", "lower", "(", ")", "==", "'always'", ":", "value", "=", "'always'", "else", ":", "# Evaluate other strings as expressions.", "value", "=", "simple_eval", "(", "value", ")", "if", "not", "isinstance", "(", "value", ",", "numbers", ".", "Number", ")", ":", "msg", "=", "\"Expected numeric result, got %s instead!\"", "raise", "ValueError", "(", "msg", "%", "type", "(", "value", ")", ")", "return", "value" ]
Coerce a retention period to a Python value. :param value: A string containing the text 'always', a number or an expression that can be evaluated to a number. :returns: A number or the string 'always'. :raises: :exc:`~exceptions.ValueError` when the string can't be coerced.
[ "Coerce", "a", "retention", "period", "to", "a", "Python", "value", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L122-L147
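A few representative inputs for `coerce_retention_period` above: numbers pass through, the literal 'always' is preserved, and other strings are evaluated as expressions.

from rotate_backups import coerce_retention_period

coerce_retention_period(7)          # -> 7 (numbers pass through untouched)
coerce_retention_period('always')   # -> 'always'
coerce_retention_period(' 4 ')      # -> 4 (stripped, then evaluated)
coerce_retention_period('52 / 2')   # -> 26.0 (evaluated with simple_eval)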
xolox/python-rotate-backups
rotate_backups/__init__.py
load_config_file
def load_config_file(configuration_file=None, expand=True):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :param expand: :data:`True` to expand filename patterns to their matches,
                   :data:`False` otherwise.
    :returns: A generator of tuples with four values each:

              1. An execution context created using :mod:`executor.contexts`.
              2. The pathname of a directory with backups (a string).
              3. A dictionary with the rotation scheme.
              4. A dictionary with additional options.

    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    This function is used by :class:`RotateBackups` to discover user defined
    rotation schemes and by :mod:`rotate_backups.cli` to discover directories
    for which backup rotation is configured. When `configuration_file` isn't
    given :class:`~update_dotdee.ConfigLoader` is used to search for
    configuration files in the following locations:

    - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini``
    - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini``
    - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini``

    All of the available configuration files are loaded in the order given
    above, so that sections in user-specific configuration files override
    sections by the same name in system-wide configuration files.
    """
    expand_notice_given = False
    if configuration_file:
        loader = ConfigLoader(available_files=[configuration_file], strict=True)
    else:
        loader = ConfigLoader(program_name='rotate-backups', strict=False)
    for section in loader.section_names:
        items = dict(loader.get_options(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        # Don't override the value of the 'removal_command' property unless the
        # 'removal-command' configuration file option has a value set.
        if items.get('removal-command'):
            options['removal_command'] = shlex.split(items['removal-command'])
        # Expand filename patterns?
        if expand and location.have_wildcards:
            logger.verbose("Expanding filename pattern %s on %s ..",
                           location.directory, location.context)
            if location.is_remote and not expand_notice_given:
                logger.notice("Expanding remote filename patterns (may be slow) ..")
                expand_notice_given = True
            for match in sorted(location.context.glob(location.directory)):
                if location.context.is_directory(match):
                    logger.verbose("Matched directory: %s", match)
                    expanded = Location(context=location.context, directory=match)
                    yield expanded, rotation_scheme, options
                else:
                    logger.verbose("Ignoring match (not a directory): %s", match)
        else:
            yield location, rotation_scheme, options
python
def load_config_file(configuration_file=None, expand=True):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :param expand: :data:`True` to expand filename patterns to their matches,
                   :data:`False` otherwise.
    :returns: A generator of tuples with four values each:

              1. An execution context created using :mod:`executor.contexts`.
              2. The pathname of a directory with backups (a string).
              3. A dictionary with the rotation scheme.
              4. A dictionary with additional options.

    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    This function is used by :class:`RotateBackups` to discover user defined
    rotation schemes and by :mod:`rotate_backups.cli` to discover directories
    for which backup rotation is configured. When `configuration_file` isn't
    given :class:`~update_dotdee.ConfigLoader` is used to search for
    configuration files in the following locations:

    - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini``
    - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini``
    - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini``

    All of the available configuration files are loaded in the order given
    above, so that sections in user-specific configuration files override
    sections by the same name in system-wide configuration files.
    """
    expand_notice_given = False
    if configuration_file:
        loader = ConfigLoader(available_files=[configuration_file], strict=True)
    else:
        loader = ConfigLoader(program_name='rotate-backups', strict=False)
    for section in loader.section_names:
        items = dict(loader.get_options(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        # Don't override the value of the 'removal_command' property unless the
        # 'removal-command' configuration file option has a value set.
        if items.get('removal-command'):
            options['removal_command'] = shlex.split(items['removal-command'])
        # Expand filename patterns?
        if expand and location.have_wildcards:
            logger.verbose("Expanding filename pattern %s on %s ..",
                           location.directory, location.context)
            if location.is_remote and not expand_notice_given:
                logger.notice("Expanding remote filename patterns (may be slow) ..")
                expand_notice_given = True
            for match in sorted(location.context.glob(location.directory)):
                if location.context.is_directory(match):
                    logger.verbose("Matched directory: %s", match)
                    expanded = Location(context=location.context, directory=match)
                    yield expanded, rotation_scheme, options
                else:
                    logger.verbose("Ignoring match (not a directory): %s", match)
        else:
            yield location, rotation_scheme, options
[ "def", "load_config_file", "(", "configuration_file", "=", "None", ",", "expand", "=", "True", ")", ":", "expand_notice_given", "=", "False", "if", "configuration_file", ":", "loader", "=", "ConfigLoader", "(", "available_files", "=", "[", "configuration_file", "]", ",", "strict", "=", "True", ")", "else", ":", "loader", "=", "ConfigLoader", "(", "program_name", "=", "'rotate-backups'", ",", "strict", "=", "False", ")", "for", "section", "in", "loader", ".", "section_names", ":", "items", "=", "dict", "(", "loader", ".", "get_options", "(", "section", ")", ")", "context_options", "=", "{", "}", "if", "coerce_boolean", "(", "items", ".", "get", "(", "'use-sudo'", ")", ")", ":", "context_options", "[", "'sudo'", "]", "=", "True", "if", "items", ".", "get", "(", "'ssh-user'", ")", ":", "context_options", "[", "'ssh_user'", "]", "=", "items", "[", "'ssh-user'", "]", "location", "=", "coerce_location", "(", "section", ",", "*", "*", "context_options", ")", "rotation_scheme", "=", "dict", "(", "(", "name", ",", "coerce_retention_period", "(", "items", "[", "name", "]", ")", ")", "for", "name", "in", "SUPPORTED_FREQUENCIES", "if", "name", "in", "items", ")", "options", "=", "dict", "(", "include_list", "=", "split", "(", "items", ".", "get", "(", "'include-list'", ",", "''", ")", ")", ",", "exclude_list", "=", "split", "(", "items", ".", "get", "(", "'exclude-list'", ",", "''", ")", ")", ",", "io_scheduling_class", "=", "items", ".", "get", "(", "'ionice'", ")", ",", "strict", "=", "coerce_boolean", "(", "items", ".", "get", "(", "'strict'", ",", "'yes'", ")", ")", ",", "prefer_recent", "=", "coerce_boolean", "(", "items", ".", "get", "(", "'prefer-recent'", ",", "'no'", ")", ")", ")", "# Don't override the value of the 'removal_command' property unless the", "# 'removal-command' configuration file option has a value set.", "if", "items", ".", "get", "(", "'removal-command'", ")", ":", "options", "[", "'removal_command'", "]", "=", "shlex", ".", "split", "(", "items", "[", "'removal-command'", "]", ")", "# Expand filename patterns?", "if", "expand", "and", "location", ".", "have_wildcards", ":", "logger", ".", "verbose", "(", "\"Expanding filename pattern %s on %s ..\"", ",", "location", ".", "directory", ",", "location", ".", "context", ")", "if", "location", ".", "is_remote", "and", "not", "expand_notice_given", ":", "logger", ".", "notice", "(", "\"Expanding remote filename patterns (may be slow) ..\"", ")", "expand_notice_given", "=", "True", "for", "match", "in", "sorted", "(", "location", ".", "context", ".", "glob", "(", "location", ".", "directory", ")", ")", ":", "if", "location", ".", "context", ".", "is_directory", "(", "match", ")", ":", "logger", ".", "verbose", "(", "\"Matched directory: %s\"", ",", "match", ")", "expanded", "=", "Location", "(", "context", "=", "location", ".", "context", ",", "directory", "=", "match", ")", "yield", "expanded", ",", "rotation_scheme", ",", "options", "else", ":", "logger", ".", "verbose", "(", "\"Ignoring match (not a directory): %s\"", ",", "match", ")", "else", ":", "yield", "location", ",", "rotation_scheme", ",", "options" ]
Load a configuration file with backup directories and rotation schemes. :param configuration_file: Override the pathname of the configuration file to load (a string or :data:`None`). :param expand: :data:`True` to expand filename patterns to their matches, :data:`False` otherwise. :returns: A generator of tuples with four values each: 1. An execution context created using :mod:`executor.contexts`. 2. The pathname of a directory with backups (a string). 3. A dictionary with the rotation scheme. 4. A dictionary with additional options. :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given but doesn't exist or can't be loaded. This function is used by :class:`RotateBackups` to discover user defined rotation schemes and by :mod:`rotate_backups.cli` to discover directories for which backup rotation is configured. When `configuration_file` isn't given :class:`~update_dotdee.ConfigLoader` is used to search for configuration files in the following locations: - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini`` - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini`` - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini`` All of the available configuration files are loaded in the order given above, so that sections in user-specific configuration files override sections by the same name in system-wide configuration files.
[ "Load", "a", "configuration", "file", "with", "backup", "directories", "and", "rotation", "schemes", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L150-L220
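A hedged sketch of consuming the generator returned by `load_config_file` above. The configuration file contents shown in the comment are illustrative; the option names are the ones read by the function, and the frequency names are assumed to match SUPPORTED_FREQUENCIES.

from rotate_backups import load_config_file

# With a hypothetical /etc/rotate-backups.d/laptop.ini containing:
#
#   [/mnt/backups/laptop]
#   daily = 7
#   weekly = 4
#   monthly = always
#   ionice = idle
#
# the generator yields one (Location, rotation_scheme, options) tuple per section:
for location, rotation_scheme, options in load_config_file():
    print(location, rotation_scheme, options.get('io_scheduling_class'))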
xolox/python-rotate-backups
rotate_backups/__init__.py
rotate_backups
def rotate_backups(directory, rotation_scheme, **options):
    """
    Rotate the backups in a directory according to a flexible rotation scheme.

    .. note:: This function exists to preserve backwards compatibility with
              older versions of the `rotate-backups` package where all of the
              logic was exposed as a single function. Please refer to the
              documentation of the :class:`RotateBackups` initializer and the
              :func:`~RotateBackups.rotate_backups()` method for an
              explanation of this function's parameters.
    """
    program = RotateBackups(rotation_scheme=rotation_scheme, **options)
    program.rotate_backups(directory)
python
def rotate_backups(directory, rotation_scheme, **options):
    """
    Rotate the backups in a directory according to a flexible rotation scheme.

    .. note:: This function exists to preserve backwards compatibility with
              older versions of the `rotate-backups` package where all of the
              logic was exposed as a single function. Please refer to the
              documentation of the :class:`RotateBackups` initializer and the
              :func:`~RotateBackups.rotate_backups()` method for an
              explanation of this function's parameters.
    """
    program = RotateBackups(rotation_scheme=rotation_scheme, **options)
    program.rotate_backups(directory)
[ "def", "rotate_backups", "(", "directory", ",", "rotation_scheme", ",", "*", "*", "options", ")", ":", "program", "=", "RotateBackups", "(", "rotation_scheme", "=", "rotation_scheme", ",", "*", "*", "options", ")", "program", ".", "rotate_backups", "(", "directory", ")" ]
Rotate the backups in a directory according to a flexible rotation scheme. .. note:: This function exists to preserve backwards compatibility with older versions of the `rotate-backups` package where all of the logic was exposed as a single function. Please refer to the documentation of the :class:`RotateBackups` initializer and the :func:`~RotateBackups.rotate_backups()` method for an explanation of this function's parameters.
[ "Rotate", "the", "backups", "in", "a", "directory", "according", "to", "a", "flexible", "rotation", "scheme", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L223-L235
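A minimal usage sketch of the backwards compatible module level function above; the directory is hypothetical and `dry_run=True` (an option accepted by the RotateBackups initializer, as seen in the CLI row) keeps the call from deleting anything.

from rotate_backups import rotate_backups

rotate_backups('/mnt/backups/laptop',
               {'daily': 7, 'weekly': 4, 'monthly': 'always'},
               dry_run=True)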
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.rotate_concurrent
def rotate_concurrent(self, *locations, **kw):
    """
    Rotate the backups in the given locations concurrently.

    :param locations: One or more values accepted by :func:`coerce_location()`.
    :param kw: Any keyword arguments are passed on to :func:`rotate_backups()`.

    This function uses :func:`rotate_backups()` to prepare rotation commands
    for the given locations and then it removes backups in parallel, one
    backup per mount point at a time.

    The idea behind this approach is that parallel rotation is most useful
    when the files to be removed are on different disks and so multiple
    devices can be utilized at the same time.

    Because mount points are per system :func:`rotate_concurrent()` will
    also parallelize over backups located on multiple remote systems.
    """
    timer = Timer()
    pool = CommandPool(concurrency=10)
    logger.info("Scanning %s ..", pluralize(len(locations), "backup location"))
    for location in locations:
        for cmd in self.rotate_backups(location, prepare=True, **kw):
            pool.add(cmd)
    if pool.num_commands > 0:
        backups = pluralize(pool.num_commands, "backup")
        logger.info("Preparing to rotate %s (in parallel) ..", backups)
        pool.run()
        logger.info("Successfully rotated %s in %s.", backups, timer)
python
def rotate_concurrent(self, *locations, **kw):
    """
    Rotate the backups in the given locations concurrently.

    :param locations: One or more values accepted by :func:`coerce_location()`.
    :param kw: Any keyword arguments are passed on to :func:`rotate_backups()`.

    This function uses :func:`rotate_backups()` to prepare rotation commands
    for the given locations and then it removes backups in parallel, one
    backup per mount point at a time.

    The idea behind this approach is that parallel rotation is most useful
    when the files to be removed are on different disks and so multiple
    devices can be utilized at the same time.

    Because mount points are per system :func:`rotate_concurrent()` will
    also parallelize over backups located on multiple remote systems.
    """
    timer = Timer()
    pool = CommandPool(concurrency=10)
    logger.info("Scanning %s ..", pluralize(len(locations), "backup location"))
    for location in locations:
        for cmd in self.rotate_backups(location, prepare=True, **kw):
            pool.add(cmd)
    if pool.num_commands > 0:
        backups = pluralize(pool.num_commands, "backup")
        logger.info("Preparing to rotate %s (in parallel) ..", backups)
        pool.run()
        logger.info("Successfully rotated %s in %s.", backups, timer)
[ "def", "rotate_concurrent", "(", "self", ",", "*", "locations", ",", "*", "*", "kw", ")", ":", "timer", "=", "Timer", "(", ")", "pool", "=", "CommandPool", "(", "concurrency", "=", "10", ")", "logger", ".", "info", "(", "\"Scanning %s ..\"", ",", "pluralize", "(", "len", "(", "locations", ")", ",", "\"backup location\"", ")", ")", "for", "location", "in", "locations", ":", "for", "cmd", "in", "self", ".", "rotate_backups", "(", "location", ",", "prepare", "=", "True", ",", "*", "*", "kw", ")", ":", "pool", ".", "add", "(", "cmd", ")", "if", "pool", ".", "num_commands", ">", "0", ":", "backups", "=", "pluralize", "(", "pool", ".", "num_commands", ",", "\"backup\"", ")", "logger", ".", "info", "(", "\"Preparing to rotate %s (in parallel) ..\"", ",", "backups", ")", "pool", ".", "run", "(", ")", "logger", ".", "info", "(", "\"Successfully rotated %s in %s.\"", ",", "backups", ",", "timer", ")" ]
Rotate the backups in the given locations concurrently. :param locations: One or more values accepted by :func:`coerce_location()`. :param kw: Any keyword arguments are passed on to :func:`rotate_backups()`. This function uses :func:`rotate_backups()` to prepare rotation commands for the given locations and then it removes backups in parallel, one backup per mount point at a time. The idea behind this approach is that parallel rotation is most useful when the files to be removed are on different disks and so multiple devices can be utilized at the same time. Because mount points are per system :func:`rotate_concurrent()` will also parallelize over backups located on multiple remote systems.
[ "Rotate", "the", "backups", "in", "the", "given", "locations", "concurrently", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L403-L431
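A short sketch of calling `rotate_concurrent` above with one local and one remote location (both names hypothetical); removals are prepared per location and then executed in parallel by the command pool, grouped by mount point and SSH alias.

from rotate_backups import RotateBackups

program = RotateBackups({'daily': 7, 'weekly': 4})
program.rotate_concurrent('/mnt/backups/laptop',
                          'backup-server:/srv/backups/laptop')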
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.rotate_backups
def rotate_backups(self, location, load_config=True, prepare=False):
    """
    Rotate the backups in a directory according to a flexible rotation scheme.

    :param location: Any value accepted by :func:`coerce_location()`.
    :param load_config: If :data:`True` (so by default) the rotation scheme
                        and other options can be customized by the user in a
                        configuration file. In this case the caller's
                        arguments are only used when the configuration file
                        doesn't define a configuration for the location.
    :param prepare: If this is :data:`True` (not the default) then
                    :func:`rotate_backups()` will prepare the required
                    rotation commands without running them.
    :returns: A list with the rotation commands
              (:class:`~executor.ExternalCommand` objects).
    :raises: :exc:`~exceptions.ValueError` when the given location doesn't
             exist, isn't readable or isn't writable. The third check is only
             performed when dry run isn't enabled.

    This function binds the main methods of the :class:`RotateBackups` class
    together to implement backup rotation with an easy to use Python API. If
    you're using `rotate-backups` as a Python API and the default behavior is
    not satisfactory, consider writing your own :func:`rotate_backups()`
    function based on the underlying :func:`collect_backups()`,
    :func:`group_backups()`, :func:`apply_rotation_scheme()` and
    :func:`find_preservation_criteria()` methods.
    """
    rotation_commands = []
    location = coerce_location(location)
    # Load configuration overrides by user?
    if load_config:
        location = self.load_config_file(location)
    # Collect the backups in the given directory.
    sorted_backups = self.collect_backups(location)
    if not sorted_backups:
        logger.info("No backups found in %s.", location)
        return
    # Make sure the directory is writable.
    if not self.dry_run:
        location.ensure_writable()
    most_recent_backup = sorted_backups[-1]
    # Group the backups by the rotation frequencies.
    backups_by_frequency = self.group_backups(sorted_backups)
    # Apply the user defined rotation scheme.
    self.apply_rotation_scheme(backups_by_frequency, most_recent_backup.timestamp)
    # Find which backups to preserve and why.
    backups_to_preserve = self.find_preservation_criteria(backups_by_frequency)
    # Apply the calculated rotation scheme.
    for backup in sorted_backups:
        friendly_name = backup.pathname
        if not location.is_remote:
            # Use human friendly pathname formatting for local backups.
            friendly_name = format_path(backup.pathname)
        if backup in backups_to_preserve:
            matching_periods = backups_to_preserve[backup]
            logger.info("Preserving %s (matches %s retention %s) ..",
                        friendly_name,
                        concatenate(map(repr, matching_periods)),
                        "period" if len(matching_periods) == 1 else "periods")
        else:
            logger.info("Deleting %s ..", friendly_name)
            if not self.dry_run:
                # Copy the list with the (possibly user defined) removal command.
                removal_command = list(self.removal_command)
                # Add the pathname of the backup as the final argument.
                removal_command.append(backup.pathname)
                # Construct the command object.
                command = location.context.prepare(
                    command=removal_command,
                    group_by=(location.ssh_alias, location.mount_point),
                    ionice=self.io_scheduling_class,
                )
                rotation_commands.append(command)
                if not prepare:
                    timer = Timer()
                    command.wait()
                    logger.verbose("Deleted %s in %s.", friendly_name, timer)
    if len(backups_to_preserve) == len(sorted_backups):
        logger.info("Nothing to do! (all backups preserved)")
    return rotation_commands
python
def rotate_backups(self, location, load_config=True, prepare=False):
    """
    Rotate the backups in a directory according to a flexible rotation scheme.

    :param location: Any value accepted by :func:`coerce_location()`.
    :param load_config: If :data:`True` (so by default) the rotation scheme
                        and other options can be customized by the user in a
                        configuration file. In this case the caller's
                        arguments are only used when the configuration file
                        doesn't define a configuration for the location.
    :param prepare: If this is :data:`True` (not the default) then
                    :func:`rotate_backups()` will prepare the required
                    rotation commands without running them.
    :returns: A list with the rotation commands
              (:class:`~executor.ExternalCommand` objects).
    :raises: :exc:`~exceptions.ValueError` when the given location doesn't
             exist, isn't readable or isn't writable. The third check is only
             performed when dry run isn't enabled.

    This function binds the main methods of the :class:`RotateBackups` class
    together to implement backup rotation with an easy to use Python API. If
    you're using `rotate-backups` as a Python API and the default behavior is
    not satisfactory, consider writing your own :func:`rotate_backups()`
    function based on the underlying :func:`collect_backups()`,
    :func:`group_backups()`, :func:`apply_rotation_scheme()` and
    :func:`find_preservation_criteria()` methods.
    """
    rotation_commands = []
    location = coerce_location(location)
    # Load configuration overrides by user?
    if load_config:
        location = self.load_config_file(location)
    # Collect the backups in the given directory.
    sorted_backups = self.collect_backups(location)
    if not sorted_backups:
        logger.info("No backups found in %s.", location)
        return
    # Make sure the directory is writable.
    if not self.dry_run:
        location.ensure_writable()
    most_recent_backup = sorted_backups[-1]
    # Group the backups by the rotation frequencies.
    backups_by_frequency = self.group_backups(sorted_backups)
    # Apply the user defined rotation scheme.
    self.apply_rotation_scheme(backups_by_frequency, most_recent_backup.timestamp)
    # Find which backups to preserve and why.
    backups_to_preserve = self.find_preservation_criteria(backups_by_frequency)
    # Apply the calculated rotation scheme.
    for backup in sorted_backups:
        friendly_name = backup.pathname
        if not location.is_remote:
            # Use human friendly pathname formatting for local backups.
            friendly_name = format_path(backup.pathname)
        if backup in backups_to_preserve:
            matching_periods = backups_to_preserve[backup]
            logger.info("Preserving %s (matches %s retention %s) ..",
                        friendly_name,
                        concatenate(map(repr, matching_periods)),
                        "period" if len(matching_periods) == 1 else "periods")
        else:
            logger.info("Deleting %s ..", friendly_name)
            if not self.dry_run:
                # Copy the list with the (possibly user defined) removal command.
                removal_command = list(self.removal_command)
                # Add the pathname of the backup as the final argument.
                removal_command.append(backup.pathname)
                # Construct the command object.
                command = location.context.prepare(
                    command=removal_command,
                    group_by=(location.ssh_alias, location.mount_point),
                    ionice=self.io_scheduling_class,
                )
                rotation_commands.append(command)
                if not prepare:
                    timer = Timer()
                    command.wait()
                    logger.verbose("Deleted %s in %s.", friendly_name, timer)
    if len(backups_to_preserve) == len(sorted_backups):
        logger.info("Nothing to do! (all backups preserved)")
    return rotation_commands
[ "def", "rotate_backups", "(", "self", ",", "location", ",", "load_config", "=", "True", ",", "prepare", "=", "False", ")", ":", "rotation_commands", "=", "[", "]", "location", "=", "coerce_location", "(", "location", ")", "# Load configuration overrides by user?", "if", "load_config", ":", "location", "=", "self", ".", "load_config_file", "(", "location", ")", "# Collect the backups in the given directory.", "sorted_backups", "=", "self", ".", "collect_backups", "(", "location", ")", "if", "not", "sorted_backups", ":", "logger", ".", "info", "(", "\"No backups found in %s.\"", ",", "location", ")", "return", "# Make sure the directory is writable.", "if", "not", "self", ".", "dry_run", ":", "location", ".", "ensure_writable", "(", ")", "most_recent_backup", "=", "sorted_backups", "[", "-", "1", "]", "# Group the backups by the rotation frequencies.", "backups_by_frequency", "=", "self", ".", "group_backups", "(", "sorted_backups", ")", "# Apply the user defined rotation scheme.", "self", ".", "apply_rotation_scheme", "(", "backups_by_frequency", ",", "most_recent_backup", ".", "timestamp", ")", "# Find which backups to preserve and why.", "backups_to_preserve", "=", "self", ".", "find_preservation_criteria", "(", "backups_by_frequency", ")", "# Apply the calculated rotation scheme.", "for", "backup", "in", "sorted_backups", ":", "friendly_name", "=", "backup", ".", "pathname", "if", "not", "location", ".", "is_remote", ":", "# Use human friendly pathname formatting for local backups.", "friendly_name", "=", "format_path", "(", "backup", ".", "pathname", ")", "if", "backup", "in", "backups_to_preserve", ":", "matching_periods", "=", "backups_to_preserve", "[", "backup", "]", "logger", ".", "info", "(", "\"Preserving %s (matches %s retention %s) ..\"", ",", "friendly_name", ",", "concatenate", "(", "map", "(", "repr", ",", "matching_periods", ")", ")", ",", "\"period\"", "if", "len", "(", "matching_periods", ")", "==", "1", "else", "\"periods\"", ")", "else", ":", "logger", ".", "info", "(", "\"Deleting %s ..\"", ",", "friendly_name", ")", "if", "not", "self", ".", "dry_run", ":", "# Copy the list with the (possibly user defined) removal command.", "removal_command", "=", "list", "(", "self", ".", "removal_command", ")", "# Add the pathname of the backup as the final argument.", "removal_command", ".", "append", "(", "backup", ".", "pathname", ")", "# Construct the command object.", "command", "=", "location", ".", "context", ".", "prepare", "(", "command", "=", "removal_command", ",", "group_by", "=", "(", "location", ".", "ssh_alias", ",", "location", ".", "mount_point", ")", ",", "ionice", "=", "self", ".", "io_scheduling_class", ",", ")", "rotation_commands", ".", "append", "(", "command", ")", "if", "not", "prepare", ":", "timer", "=", "Timer", "(", ")", "command", ".", "wait", "(", ")", "logger", ".", "verbose", "(", "\"Deleted %s in %s.\"", ",", "friendly_name", ",", "timer", ")", "if", "len", "(", "backups_to_preserve", ")", "==", "len", "(", "sorted_backups", ")", ":", "logger", ".", "info", "(", "\"Nothing to do! (all backups preserved)\"", ")", "return", "rotation_commands" ]
Rotate the backups in a directory according to a flexible rotation scheme. :param location: Any value accepted by :func:`coerce_location()`. :param load_config: If :data:`True` (so by default) the rotation scheme and other options can be customized by the user in a configuration file. In this case the caller's arguments are only used when the configuration file doesn't define a configuration for the location. :param prepare: If this is :data:`True` (not the default) then :func:`rotate_backups()` will prepare the required rotation commands without running them. :returns: A list with the rotation commands (:class:`~executor.ExternalCommand` objects). :raises: :exc:`~exceptions.ValueError` when the given location doesn't exist, isn't readable or isn't writable. The third check is only performed when dry run isn't enabled. This function binds the main methods of the :class:`RotateBackups` class together to implement backup rotation with an easy to use Python API. If you're using `rotate-backups` as a Python API and the default behavior is not satisfactory, consider writing your own :func:`rotate_backups()` function based on the underlying :func:`collect_backups()`, :func:`group_backups()`, :func:`apply_rotation_scheme()` and :func:`find_preservation_criteria()` methods.
[ "Rotate", "the", "backups", "in", "a", "directory", "according", "to", "a", "flexible", "rotation", "scheme", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L433-L512
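A brief sketch of the `prepare=True` mode of the method above, which returns the removal commands without running them (this is what `rotate_concurrent()` feeds into its command pool). The directory is hypothetical, and the method may return None when no backups are found, hence the `or []`.

from rotate_backups import RotateBackups

program = RotateBackups({'daily': 7, 'weekly': 4})
commands = program.rotate_backups('/mnt/backups/laptop', prepare=True)
for cmd in commands or []:
    print(cmd)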
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.load_config_file
def load_config_file(self, location):
    """
    Load a rotation scheme and other options from a configuration file.

    :param location: Any value accepted by :func:`coerce_location()`.
    :returns: The configured or given :class:`Location` object.
    """
    location = coerce_location(location)
    for configured_location, rotation_scheme, options in load_config_file(self.config_file, expand=False):
        if configured_location.match(location):
            logger.verbose("Loading configuration for %s ..", location)
            if rotation_scheme:
                self.rotation_scheme = rotation_scheme
            for name, value in options.items():
                if value:
                    setattr(self, name, value)
            # Create a new Location object based on the directory of the
            # given location and the execution context of the configured
            # location, because:
            #
            # 1. The directory of the configured location may be a filename
            #    pattern whereas we are interested in the expanded name.
            #
            # 2. The execution context of the given location may lack some
            #    details of the configured location.
            return Location(
                context=configured_location.context,
                directory=location.directory,
            )
    logger.verbose("No configuration found for %s.", location)
    return location
python
def load_config_file(self, location): """ Load a rotation scheme and other options from a configuration file. :param location: Any value accepted by :func:`coerce_location()`. :returns: The configured or given :class:`Location` object. """ location = coerce_location(location) for configured_location, rotation_scheme, options in load_config_file(self.config_file, expand=False): if configured_location.match(location): logger.verbose("Loading configuration for %s ..", location) if rotation_scheme: self.rotation_scheme = rotation_scheme for name, value in options.items(): if value: setattr(self, name, value) # Create a new Location object based on the directory of the # given location and the execution context of the configured # location, because: # # 1. The directory of the configured location may be a filename # pattern whereas we are interested in the expanded name. # # 2. The execution context of the given location may lack some # details of the configured location. return Location( context=configured_location.context, directory=location.directory, ) logger.verbose("No configuration found for %s.", location) return location
[ "def", "load_config_file", "(", "self", ",", "location", ")", ":", "location", "=", "coerce_location", "(", "location", ")", "for", "configured_location", ",", "rotation_scheme", ",", "options", "in", "load_config_file", "(", "self", ".", "config_file", ",", "expand", "=", "False", ")", ":", "if", "configured_location", ".", "match", "(", "location", ")", ":", "logger", ".", "verbose", "(", "\"Loading configuration for %s ..\"", ",", "location", ")", "if", "rotation_scheme", ":", "self", ".", "rotation_scheme", "=", "rotation_scheme", "for", "name", ",", "value", "in", "options", ".", "items", "(", ")", ":", "if", "value", ":", "setattr", "(", "self", ",", "name", ",", "value", ")", "# Create a new Location object based on the directory of the", "# given location and the execution context of the configured", "# location, because:", "#", "# 1. The directory of the configured location may be a filename", "# pattern whereas we are interested in the expanded name.", "#", "# 2. The execution context of the given location may lack some", "# details of the configured location.", "return", "Location", "(", "context", "=", "configured_location", ".", "context", ",", "directory", "=", "location", ".", "directory", ",", ")", "logger", ".", "verbose", "(", "\"No configuration found for %s.\"", ",", "location", ")", "return", "location" ]
Load a rotation scheme and other options from a configuration file. :param location: Any value accepted by :func:`coerce_location()`. :returns: The configured or given :class:`Location` object.
[ "Load", "a", "rotation", "scheme", "and", "other", "options", "from", "a", "configuration", "file", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L514-L544
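A short sketch of how the configuration-file override described in load_config_file() might be exercised; the config_file constructor keyword is an assumption inferred from the self.config_file reference above, and the paths are hypothetical:

from rotate_backups import RotateBackups

# Let a configuration file decide the rotation scheme per location
# instead of hard-coding it in Python.
rotator = RotateBackups(config_file='/etc/rotate-backups.ini')  # assumed keyword, hypothetical path

# load_config=True (the default) makes rotate_backups() call
# load_config_file(), which may replace the rotation scheme and other
# options and returns a Location bound to the configured execution context.
rotator.rotate_backups('/mnt/backups/mysql', load_config=True)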
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.collect_backups
def collect_backups(self, location): """ Collect the backups at the given location. :param location: Any value accepted by :func:`coerce_location()`. :returns: A sorted :class:`list` of :class:`Backup` objects (the backups are sorted by their date). :raises: :exc:`~exceptions.ValueError` when the given directory doesn't exist or isn't readable. """ backups = [] location = coerce_location(location) logger.info("Scanning %s for backups ..", location) location.ensure_readable() for entry in natsort(location.context.list_entries(location.directory)): match = TIMESTAMP_PATTERN.search(entry) if match: if self.exclude_list and any(fnmatch.fnmatch(entry, p) for p in self.exclude_list): logger.verbose("Excluded %s (it matched the exclude list).", entry) elif self.include_list and not any(fnmatch.fnmatch(entry, p) for p in self.include_list): logger.verbose("Excluded %s (it didn't match the include list).", entry) else: try: backups.append(Backup( pathname=os.path.join(location.directory, entry), timestamp=datetime.datetime(*(int(group, 10) for group in match.groups('0'))), )) except ValueError as e: logger.notice("Ignoring %s due to invalid date (%s).", entry, e) else: logger.debug("Failed to match time stamp in filename: %s", entry) if backups: logger.info("Found %i timestamped backups in %s.", len(backups), location) return sorted(backups)
python
def collect_backups(self, location): """ Collect the backups at the given location. :param location: Any value accepted by :func:`coerce_location()`. :returns: A sorted :class:`list` of :class:`Backup` objects (the backups are sorted by their date). :raises: :exc:`~exceptions.ValueError` when the given directory doesn't exist or isn't readable. """ backups = [] location = coerce_location(location) logger.info("Scanning %s for backups ..", location) location.ensure_readable() for entry in natsort(location.context.list_entries(location.directory)): match = TIMESTAMP_PATTERN.search(entry) if match: if self.exclude_list and any(fnmatch.fnmatch(entry, p) for p in self.exclude_list): logger.verbose("Excluded %s (it matched the exclude list).", entry) elif self.include_list and not any(fnmatch.fnmatch(entry, p) for p in self.include_list): logger.verbose("Excluded %s (it didn't match the include list).", entry) else: try: backups.append(Backup( pathname=os.path.join(location.directory, entry), timestamp=datetime.datetime(*(int(group, 10) for group in match.groups('0'))), )) except ValueError as e: logger.notice("Ignoring %s due to invalid date (%s).", entry, e) else: logger.debug("Failed to match time stamp in filename: %s", entry) if backups: logger.info("Found %i timestamped backups in %s.", len(backups), location) return sorted(backups)
[ "def", "collect_backups", "(", "self", ",", "location", ")", ":", "backups", "=", "[", "]", "location", "=", "coerce_location", "(", "location", ")", "logger", ".", "info", "(", "\"Scanning %s for backups ..\"", ",", "location", ")", "location", ".", "ensure_readable", "(", ")", "for", "entry", "in", "natsort", "(", "location", ".", "context", ".", "list_entries", "(", "location", ".", "directory", ")", ")", ":", "match", "=", "TIMESTAMP_PATTERN", ".", "search", "(", "entry", ")", "if", "match", ":", "if", "self", ".", "exclude_list", "and", "any", "(", "fnmatch", ".", "fnmatch", "(", "entry", ",", "p", ")", "for", "p", "in", "self", ".", "exclude_list", ")", ":", "logger", ".", "verbose", "(", "\"Excluded %s (it matched the exclude list).\"", ",", "entry", ")", "elif", "self", ".", "include_list", "and", "not", "any", "(", "fnmatch", ".", "fnmatch", "(", "entry", ",", "p", ")", "for", "p", "in", "self", ".", "include_list", ")", ":", "logger", ".", "verbose", "(", "\"Excluded %s (it didn't match the include list).\"", ",", "entry", ")", "else", ":", "try", ":", "backups", ".", "append", "(", "Backup", "(", "pathname", "=", "os", ".", "path", ".", "join", "(", "location", ".", "directory", ",", "entry", ")", ",", "timestamp", "=", "datetime", ".", "datetime", "(", "*", "(", "int", "(", "group", ",", "10", ")", "for", "group", "in", "match", ".", "groups", "(", "'0'", ")", ")", ")", ",", ")", ")", "except", "ValueError", "as", "e", ":", "logger", ".", "notice", "(", "\"Ignoring %s due to invalid date (%s).\"", ",", "entry", ",", "e", ")", "else", ":", "logger", ".", "debug", "(", "\"Failed to match time stamp in filename: %s\"", ",", "entry", ")", "if", "backups", ":", "logger", ".", "info", "(", "\"Found %i timestamped backups in %s.\"", ",", "len", "(", "backups", ")", ",", "location", ")", "return", "sorted", "(", "backups", ")" ]
Collect the backups at the given location. :param location: Any value accepted by :func:`coerce_location()`. :returns: A sorted :class:`list` of :class:`Backup` objects (the backups are sorted by their date). :raises: :exc:`~exceptions.ValueError` when the given directory doesn't exist or isn't readable.
[ "Collect", "the", "backups", "at", "the", "given", "location", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L546-L579
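To make the return value of collect_backups() concrete, a hedged sketch; the directory is hypothetical, the rotation_scheme constructor keyword is assumed, and the Backup attributes used (pathname, timestamp) are the ones constructed in the method above:

from rotate_backups import RotateBackups

rotator = RotateBackups(rotation_scheme={'daily': 7})  # assumed keyword argument
backups = rotator.collect_backups('/mnt/backups')      # hypothetical directory

# The list is sorted by timestamp, oldest first, so the most recent
# backup is the last element (which rotate_backups() relies on).
for backup in backups:
    print(backup.timestamp, backup.pathname)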
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.group_backups
def group_backups(self, backups):
        """
        Group backups collected by :func:`collect_backups()` by rotation frequencies.

        :param backups: A :class:`set` of :class:`Backup` objects.
        :returns: A :class:`dict` whose keys are the names of rotation
                  frequencies ('hourly', 'daily', etc.) and whose values are
                  dictionaries. Each nested dictionary contains lists of
                  :class:`Backup` objects that are grouped together because
                  they belong to the same time unit for the corresponding
                  rotation frequency.
        """
        backups_by_frequency = dict((frequency, collections.defaultdict(list)) for frequency in SUPPORTED_FREQUENCIES)
        for b in backups:
            backups_by_frequency['minutely'][(b.year, b.month, b.day, b.hour, b.minute)].append(b)
            backups_by_frequency['hourly'][(b.year, b.month, b.day, b.hour)].append(b)
            backups_by_frequency['daily'][(b.year, b.month, b.day)].append(b)
            backups_by_frequency['weekly'][(b.year, b.week)].append(b)
            backups_by_frequency['monthly'][(b.year, b.month)].append(b)
            backups_by_frequency['yearly'][b.year].append(b)
        return backups_by_frequency
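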
python
def group_backups(self, backups):
        """
        Group backups collected by :func:`collect_backups()` by rotation frequencies.

        :param backups: A :class:`set` of :class:`Backup` objects.
        :returns: A :class:`dict` whose keys are the names of rotation
                  frequencies ('hourly', 'daily', etc.) and whose values are
                  dictionaries. Each nested dictionary contains lists of
                  :class:`Backup` objects that are grouped together because
                  they belong to the same time unit for the corresponding
                  rotation frequency.
        """
        backups_by_frequency = dict((frequency, collections.defaultdict(list)) for frequency in SUPPORTED_FREQUENCIES)
        for b in backups:
            backups_by_frequency['minutely'][(b.year, b.month, b.day, b.hour, b.minute)].append(b)
            backups_by_frequency['hourly'][(b.year, b.month, b.day, b.hour)].append(b)
            backups_by_frequency['daily'][(b.year, b.month, b.day)].append(b)
            backups_by_frequency['weekly'][(b.year, b.week)].append(b)
            backups_by_frequency['monthly'][(b.year, b.month)].append(b)
            backups_by_frequency['yearly'][b.year].append(b)
        return backups_by_frequency
[ "def", "group_backups", "(", "self", ",", "backups", ")", ":", "backups_by_frequency", "=", "dict", "(", "(", "frequency", ",", "collections", ".", "defaultdict", "(", "list", ")", ")", "for", "frequency", "in", "SUPPORTED_FREQUENCIES", ")", "for", "b", "in", "backups", ":", "backups_by_frequency", "[", "'minutely'", "]", "[", "(", "b", ".", "year", ",", "b", ".", "month", ",", "b", ".", "day", ",", "b", ".", "hour", ",", "b", ".", "minute", ")", "]", ".", "append", "(", "b", ")", "backups_by_frequency", "[", "'hourly'", "]", "[", "(", "b", ".", "year", ",", "b", ".", "month", ",", "b", ".", "day", ",", "b", ".", "hour", ")", "]", ".", "append", "(", "b", ")", "backups_by_frequency", "[", "'daily'", "]", "[", "(", "b", ".", "year", ",", "b", ".", "month", ",", "b", ".", "day", ")", "]", ".", "append", "(", "b", ")", "backups_by_frequency", "[", "'weekly'", "]", "[", "(", "b", ".", "year", ",", "b", ".", "week", ")", "]", ".", "append", "(", "b", ")", "backups_by_frequency", "[", "'monthly'", "]", "[", "(", "b", ".", "year", ",", "b", ".", "month", ")", "]", ".", "append", "(", "b", ")", "backups_by_frequency", "[", "'yearly'", "]", "[", "b", ".", "year", "]", ".", "append", "(", "b", ")", "return", "backups_by_frequency" ]
Group backups collected by :func:`collect_backups()` by rotation frequencies.

:param backups: A :class:`set` of :class:`Backup` objects.
:returns: A :class:`dict` whose keys are the names of rotation
          frequencies ('hourly', 'daily', etc.) and whose values are
          dictionaries. Each nested dictionary contains lists of
          :class:`Backup` objects that are grouped together because
          they belong to the same time unit for the corresponding
          rotation frequency.
[ "Group", "backups", "collected", "by", ":", "func", ":", "collect_backups", "()", "by", "rotation", "frequencies", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L581-L601
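The nested structure produced by group_backups() is easier to picture with an example; a self-contained sketch under the same assumptions as above (hypothetical directory, assumed constructor keyword), with illustrative dates:

from rotate_backups import RotateBackups

rotator = RotateBackups(rotation_scheme={'daily': 7})  # assumed keyword argument
backups = rotator.collect_backups('/mnt/backups')      # hypothetical directory
grouped = rotator.group_backups(backups)

# Keys follow the tuples built above; a backup taken on 2024-05-01 10:30
# would end up in, among others:
#   grouped['daily'][(2024, 5, 1)]
#   grouped['hourly'][(2024, 5, 1, 10)]
#   grouped['yearly'][2024]
for period, members in sorted(grouped['daily'].items()):
    print('daily period', period, '->', len(members), 'backup(s)')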
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.apply_rotation_scheme
def apply_rotation_scheme(self, backups_by_frequency, most_recent_backup): """ Apply the user defined rotation scheme to the result of :func:`group_backups()`. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()`. :param most_recent_backup: The :class:`~datetime.datetime` of the most recent backup. :raises: :exc:`~exceptions.ValueError` when the rotation scheme dictionary is empty (this would cause all backups to be deleted). .. note:: This method mutates the given data structure by removing all backups that should be removed to apply the user defined rotation scheme. """ if not self.rotation_scheme: raise ValueError("Refusing to use empty rotation scheme! (all backups would be deleted)") for frequency, backups in backups_by_frequency.items(): # Ignore frequencies not specified by the user. if frequency not in self.rotation_scheme: backups.clear() else: # Reduce the number of backups in each time slot of this # rotation frequency to a single backup (the oldest one or the # newest one). for period, backups_in_period in backups.items(): index = -1 if self.prefer_recent else 0 selected_backup = sorted(backups_in_period)[index] backups[period] = [selected_backup] # Check if we need to rotate away backups in old periods. retention_period = self.rotation_scheme[frequency] if retention_period != 'always': # Remove backups created before the minimum date of this # rotation frequency? (relative to the most recent backup) if self.strict: minimum_date = most_recent_backup - SUPPORTED_FREQUENCIES[frequency] * retention_period for period, backups_in_period in list(backups.items()): for backup in backups_in_period: if backup.timestamp < minimum_date: backups_in_period.remove(backup) if not backups_in_period: backups.pop(period) # If there are more periods remaining than the user # requested to be preserved we delete the oldest one(s). items_to_preserve = sorted(backups.items())[-retention_period:] backups_by_frequency[frequency] = dict(items_to_preserve)
python
def apply_rotation_scheme(self, backups_by_frequency, most_recent_backup): """ Apply the user defined rotation scheme to the result of :func:`group_backups()`. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()`. :param most_recent_backup: The :class:`~datetime.datetime` of the most recent backup. :raises: :exc:`~exceptions.ValueError` when the rotation scheme dictionary is empty (this would cause all backups to be deleted). .. note:: This method mutates the given data structure by removing all backups that should be removed to apply the user defined rotation scheme. """ if not self.rotation_scheme: raise ValueError("Refusing to use empty rotation scheme! (all backups would be deleted)") for frequency, backups in backups_by_frequency.items(): # Ignore frequencies not specified by the user. if frequency not in self.rotation_scheme: backups.clear() else: # Reduce the number of backups in each time slot of this # rotation frequency to a single backup (the oldest one or the # newest one). for period, backups_in_period in backups.items(): index = -1 if self.prefer_recent else 0 selected_backup = sorted(backups_in_period)[index] backups[period] = [selected_backup] # Check if we need to rotate away backups in old periods. retention_period = self.rotation_scheme[frequency] if retention_period != 'always': # Remove backups created before the minimum date of this # rotation frequency? (relative to the most recent backup) if self.strict: minimum_date = most_recent_backup - SUPPORTED_FREQUENCIES[frequency] * retention_period for period, backups_in_period in list(backups.items()): for backup in backups_in_period: if backup.timestamp < minimum_date: backups_in_period.remove(backup) if not backups_in_period: backups.pop(period) # If there are more periods remaining than the user # requested to be preserved we delete the oldest one(s). items_to_preserve = sorted(backups.items())[-retention_period:] backups_by_frequency[frequency] = dict(items_to_preserve)
[ "def", "apply_rotation_scheme", "(", "self", ",", "backups_by_frequency", ",", "most_recent_backup", ")", ":", "if", "not", "self", ".", "rotation_scheme", ":", "raise", "ValueError", "(", "\"Refusing to use empty rotation scheme! (all backups would be deleted)\"", ")", "for", "frequency", ",", "backups", "in", "backups_by_frequency", ".", "items", "(", ")", ":", "# Ignore frequencies not specified by the user.", "if", "frequency", "not", "in", "self", ".", "rotation_scheme", ":", "backups", ".", "clear", "(", ")", "else", ":", "# Reduce the number of backups in each time slot of this", "# rotation frequency to a single backup (the oldest one or the", "# newest one).", "for", "period", ",", "backups_in_period", "in", "backups", ".", "items", "(", ")", ":", "index", "=", "-", "1", "if", "self", ".", "prefer_recent", "else", "0", "selected_backup", "=", "sorted", "(", "backups_in_period", ")", "[", "index", "]", "backups", "[", "period", "]", "=", "[", "selected_backup", "]", "# Check if we need to rotate away backups in old periods.", "retention_period", "=", "self", ".", "rotation_scheme", "[", "frequency", "]", "if", "retention_period", "!=", "'always'", ":", "# Remove backups created before the minimum date of this", "# rotation frequency? (relative to the most recent backup)", "if", "self", ".", "strict", ":", "minimum_date", "=", "most_recent_backup", "-", "SUPPORTED_FREQUENCIES", "[", "frequency", "]", "*", "retention_period", "for", "period", ",", "backups_in_period", "in", "list", "(", "backups", ".", "items", "(", ")", ")", ":", "for", "backup", "in", "backups_in_period", ":", "if", "backup", ".", "timestamp", "<", "minimum_date", ":", "backups_in_period", ".", "remove", "(", "backup", ")", "if", "not", "backups_in_period", ":", "backups", ".", "pop", "(", "period", ")", "# If there are more periods remaining than the user", "# requested to be preserved we delete the oldest one(s).", "items_to_preserve", "=", "sorted", "(", "backups", ".", "items", "(", ")", ")", "[", "-", "retention_period", ":", "]", "backups_by_frequency", "[", "frequency", "]", "=", "dict", "(", "items_to_preserve", ")" ]
Apply the user defined rotation scheme to the result of :func:`group_backups()`. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()`. :param most_recent_backup: The :class:`~datetime.datetime` of the most recent backup. :raises: :exc:`~exceptions.ValueError` when the rotation scheme dictionary is empty (this would cause all backups to be deleted). .. note:: This method mutates the given data structure by removing all backups that should be removed to apply the user defined rotation scheme.
[ "Apply", "the", "user", "defined", "rotation", "scheme", "to", "the", "result", "of", ":", "func", ":", "group_backups", "()", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L603-L649
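A sketch of the rotation scheme values that apply_rotation_scheme() expects (integers or the string 'always') and of the fact that it mutates the grouping in place; same assumptions as the sketches above (assumed constructor keyword, hypothetical directory):

from rotate_backups import RotateBackups

# Illustrative scheme: keep 24 hourly, 7 daily and 4 weekly periods and
# never rotate monthly backups away ('always' disables rotation).
scheme = {'hourly': 24, 'daily': 7, 'weekly': 4, 'monthly': 'always'}

rotator = RotateBackups(rotation_scheme=scheme)        # assumed keyword argument
backups = rotator.collect_backups('/mnt/backups')      # hypothetical directory
grouped = rotator.group_backups(backups)

if backups:
    # Mutates `grouped` in place: frequencies missing from the scheme are
    # cleared, each period keeps a single backup, and only the most recent
    # periods survive, counted back from the newest backup.
    rotator.apply_rotation_scheme(grouped, backups[-1].timestamp)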
xolox/python-rotate-backups
rotate_backups/__init__.py
RotateBackups.find_preservation_criteria
def find_preservation_criteria(self, backups_by_frequency): """ Collect the criteria used to decide which backups to preserve. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()` which has been processed by :func:`apply_rotation_scheme()`. :returns: A :class:`dict` with :class:`Backup` objects as keys and :class:`list` objects containing strings (rotation frequencies) as values. """ backups_to_preserve = collections.defaultdict(list) for frequency, delta in ORDERED_FREQUENCIES: for period in backups_by_frequency[frequency].values(): for backup in period: backups_to_preserve[backup].append(frequency) return backups_to_preserve
python
def find_preservation_criteria(self, backups_by_frequency): """ Collect the criteria used to decide which backups to preserve. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()` which has been processed by :func:`apply_rotation_scheme()`. :returns: A :class:`dict` with :class:`Backup` objects as keys and :class:`list` objects containing strings (rotation frequencies) as values. """ backups_to_preserve = collections.defaultdict(list) for frequency, delta in ORDERED_FREQUENCIES: for period in backups_by_frequency[frequency].values(): for backup in period: backups_to_preserve[backup].append(frequency) return backups_to_preserve
[ "def", "find_preservation_criteria", "(", "self", ",", "backups_by_frequency", ")", ":", "backups_to_preserve", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "frequency", ",", "delta", "in", "ORDERED_FREQUENCIES", ":", "for", "period", "in", "backups_by_frequency", "[", "frequency", "]", ".", "values", "(", ")", ":", "for", "backup", "in", "period", ":", "backups_to_preserve", "[", "backup", "]", ".", "append", "(", "frequency", ")", "return", "backups_to_preserve" ]
Collect the criteria used to decide which backups to preserve. :param backups_by_frequency: A :class:`dict` in the format generated by :func:`group_backups()` which has been processed by :func:`apply_rotation_scheme()`. :returns: A :class:`dict` with :class:`Backup` objects as keys and :class:`list` objects containing strings (rotation frequencies) as values.
[ "Collect", "the", "criteria", "used", "to", "decide", "which", "backups", "to", "preserve", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L651-L667
xolox/python-rotate-backups
rotate_backups/__init__.py
Location.mount_point
def mount_point(self): """ The pathname of the mount point of :attr:`directory` (a string or :data:`None`). If the ``stat --format=%m ...`` command that is used to determine the mount point fails, the value of this property defaults to :data:`None`. This enables graceful degradation on e.g. Mac OS X whose ``stat`` implementation is rather bare bones compared to GNU/Linux. """ try: return self.context.capture('stat', '--format=%m', self.directory, silent=True) except ExternalCommandFailed: return None
python
def mount_point(self): """ The pathname of the mount point of :attr:`directory` (a string or :data:`None`). If the ``stat --format=%m ...`` command that is used to determine the mount point fails, the value of this property defaults to :data:`None`. This enables graceful degradation on e.g. Mac OS X whose ``stat`` implementation is rather bare bones compared to GNU/Linux. """ try: return self.context.capture('stat', '--format=%m', self.directory, silent=True) except ExternalCommandFailed: return None
[ "def", "mount_point", "(", "self", ")", ":", "try", ":", "return", "self", ".", "context", ".", "capture", "(", "'stat'", ",", "'--format=%m'", ",", "self", ".", "directory", ",", "silent", "=", "True", ")", "except", "ExternalCommandFailed", ":", "return", "None" ]
The pathname of the mount point of :attr:`directory` (a string or :data:`None`). If the ``stat --format=%m ...`` command that is used to determine the mount point fails, the value of this property defaults to :data:`None`. This enables graceful degradation on e.g. Mac OS X whose ``stat`` implementation is rather bare bones compared to GNU/Linux.
[ "The", "pathname", "of", "the", "mount", "point", "of", ":", "attr", ":", "directory", "(", "a", "string", "or", ":", "data", ":", "None", ")", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L693-L705
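The mount_point property shells out to `stat --format=%m` and falls back to None when that fails (e.g. on macOS). A standalone illustration of the same graceful-degradation pattern using only the standard library (not the library's own code path); the directory is hypothetical:

import subprocess

def mount_point_of(directory):
    """Best-effort mount point lookup for a local directory."""
    try:
        # GNU coreutils 'stat' supports --format=%m; BSD/macOS 'stat' does
        # not, in which case we return None just like the property above.
        result = subprocess.run(
            ['stat', '--format=%m', directory],
            capture_output=True, text=True, check=True,
        )
        return result.stdout.strip()
    except (OSError, subprocess.CalledProcessError):
        return None

print(mount_point_of('/var/backups'))  # hypothetical directory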
xolox/python-rotate-backups
rotate_backups/__init__.py
Location.ensure_exists
def ensure_exists(self): """Make sure the location exists.""" if not self.context.is_directory(self.directory): # This can also happen when we don't have permission to one of the # parent directories so we'll point that out in the error message # when it seems applicable (so as not to confuse users). if self.context.have_superuser_privileges: msg = "The directory %s doesn't exist!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't accessible, most likely because it doesn't exist or because of permissions. If you're sure the directory exists you can use the --use-sudo option. """, location=self))
python
def ensure_exists(self): """Make sure the location exists.""" if not self.context.is_directory(self.directory): # This can also happen when we don't have permission to one of the # parent directories so we'll point that out in the error message # when it seems applicable (so as not to confuse users). if self.context.have_superuser_privileges: msg = "The directory %s doesn't exist!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't accessible, most likely because it doesn't exist or because of permissions. If you're sure the directory exists you can use the --use-sudo option. """, location=self))
[ "def", "ensure_exists", "(", "self", ")", ":", "if", "not", "self", ".", "context", ".", "is_directory", "(", "self", ".", "directory", ")", ":", "# This can also happen when we don't have permission to one of the", "# parent directories so we'll point that out in the error message", "# when it seems applicable (so as not to confuse users).", "if", "self", ".", "context", ".", "have_superuser_privileges", ":", "msg", "=", "\"The directory %s doesn't exist!\"", "raise", "ValueError", "(", "msg", "%", "self", ")", "else", ":", "raise", "ValueError", "(", "compact", "(", "\"\"\"\n The directory {location} isn't accessible, most likely\n because it doesn't exist or because of permissions. If\n you're sure the directory exists you can use the\n --use-sudo option.\n \"\"\"", ",", "location", "=", "self", ")", ")" ]
Make sure the location exists.
[ "Make", "sure", "the", "location", "exists", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L729-L744
xolox/python-rotate-backups
rotate_backups/__init__.py
Location.ensure_readable
def ensure_readable(self): """Make sure the location exists and is readable.""" self.ensure_exists() if not self.context.is_readable(self.directory): if self.context.have_superuser_privileges: msg = "The directory %s isn't readable!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't readable, most likely because of permissions. Consider using the --use-sudo option. """, location=self))
python
def ensure_readable(self): """Make sure the location exists and is readable.""" self.ensure_exists() if not self.context.is_readable(self.directory): if self.context.have_superuser_privileges: msg = "The directory %s isn't readable!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't readable, most likely because of permissions. Consider using the --use-sudo option. """, location=self))
[ "def", "ensure_readable", "(", "self", ")", ":", "self", ".", "ensure_exists", "(", ")", "if", "not", "self", ".", "context", ".", "is_readable", "(", "self", ".", "directory", ")", ":", "if", "self", ".", "context", ".", "have_superuser_privileges", ":", "msg", "=", "\"The directory %s isn't readable!\"", "raise", "ValueError", "(", "msg", "%", "self", ")", "else", ":", "raise", "ValueError", "(", "compact", "(", "\"\"\"\n The directory {location} isn't readable, most likely\n because of permissions. Consider using the --use-sudo\n option.\n \"\"\"", ",", "location", "=", "self", ")", ")" ]
Make sure the location exists and is readable.
[ "Make", "sure", "the", "location", "exists", "and", "is", "readable", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L746-L758
xolox/python-rotate-backups
rotate_backups/__init__.py
Location.ensure_writable
def ensure_writable(self): """Make sure the directory exists and is writable.""" self.ensure_exists() if not self.context.is_writable(self.directory): if self.context.have_superuser_privileges: msg = "The directory %s isn't writable!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't writable, most likely due to permissions. Consider using the --use-sudo option. """, location=self))
python
def ensure_writable(self): """Make sure the directory exists and is writable.""" self.ensure_exists() if not self.context.is_writable(self.directory): if self.context.have_superuser_privileges: msg = "The directory %s isn't writable!" raise ValueError(msg % self) else: raise ValueError(compact(""" The directory {location} isn't writable, most likely due to permissions. Consider using the --use-sudo option. """, location=self))
[ "def", "ensure_writable", "(", "self", ")", ":", "self", ".", "ensure_exists", "(", ")", "if", "not", "self", ".", "context", ".", "is_writable", "(", "self", ".", "directory", ")", ":", "if", "self", ".", "context", ".", "have_superuser_privileges", ":", "msg", "=", "\"The directory %s isn't writable!\"", "raise", "ValueError", "(", "msg", "%", "self", ")", "else", ":", "raise", "ValueError", "(", "compact", "(", "\"\"\"\n The directory {location} isn't writable, most likely due\n to permissions. Consider using the --use-sudo option.\n \"\"\"", ",", "location", "=", "self", ")", ")" ]
Make sure the directory exists and is writable.
[ "Make", "sure", "the", "directory", "exists", "and", "is", "writable", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L760-L771
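The three ensure_*() checks above all raise ValueError with a human-readable explanation, which is what rotate_backups() surfaces when a location can't be used. A small sketch of catching that from calling code; the directory is hypothetical:

from rotate_backups import coerce_location

location = coerce_location('/mnt/backups')  # hypothetical directory

try:
    location.ensure_writable()  # also runs the ensure_exists() check
except ValueError as error:
    # The message already distinguishes "doesn't exist" from permission
    # problems and hints at --use-sudo when not running with privileges.
    print('Cannot rotate backups here:', error)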
xolox/python-rotate-backups
rotate_backups/__init__.py
Location.match
def match(self, location): """ Check if the given location "matches". :param location: The :class:`Location` object to try to match. :returns: :data:`True` if the two locations are on the same system and the :attr:`directory` can be matched as a filename pattern or a literal match on the normalized pathname. """ if self.ssh_alias != location.ssh_alias: # Never match locations on other systems. return False elif self.have_wildcards: # Match filename patterns using fnmatch(). return fnmatch.fnmatch(location.directory, self.directory) else: # Compare normalized directory pathnames. self = os.path.normpath(self.directory) other = os.path.normpath(location.directory) return self == other
python
def match(self, location): """ Check if the given location "matches". :param location: The :class:`Location` object to try to match. :returns: :data:`True` if the two locations are on the same system and the :attr:`directory` can be matched as a filename pattern or a literal match on the normalized pathname. """ if self.ssh_alias != location.ssh_alias: # Never match locations on other systems. return False elif self.have_wildcards: # Match filename patterns using fnmatch(). return fnmatch.fnmatch(location.directory, self.directory) else: # Compare normalized directory pathnames. self = os.path.normpath(self.directory) other = os.path.normpath(location.directory) return self == other
[ "def", "match", "(", "self", ",", "location", ")", ":", "if", "self", ".", "ssh_alias", "!=", "location", ".", "ssh_alias", ":", "# Never match locations on other systems.", "return", "False", "elif", "self", ".", "have_wildcards", ":", "# Match filename patterns using fnmatch().", "return", "fnmatch", ".", "fnmatch", "(", "location", ".", "directory", ",", "self", ".", "directory", ")", "else", ":", "# Compare normalized directory pathnames.", "self", "=", "os", ".", "path", ".", "normpath", "(", "self", ".", "directory", ")", "other", "=", "os", ".", "path", ".", "normpath", "(", "location", ".", "directory", ")", "return", "self", "==", "other" ]
Check if the given location "matches". :param location: The :class:`Location` object to try to match. :returns: :data:`True` if the two locations are on the same system and the :attr:`directory` can be matched as a filename pattern or a literal match on the normalized pathname.
[ "Check", "if", "the", "given", "location", "matches", "." ]
train
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L773-L792
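Location.match() either treats the configured directory as an fnmatch() pattern or compares normalized pathnames. A standalone illustration of both branches using only the standard library (the paths are illustrative):

import fnmatch
import os

configured = '/mnt/backups/*'     # pattern as it might appear in a config file
candidate = '/mnt/backups/mysql'  # location given on the command line

# Wildcard branch: the configured directory acts as a filename pattern.
print(fnmatch.fnmatch(candidate, configured))  # True

# Literal branch: both sides are normalized first, so a trailing slash or
# a './' segment doesn't prevent a match.
print(os.path.normpath('/mnt/backups/mysql/') ==
      os.path.normpath('/mnt/backups/./mysql'))  # True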
HarveyHunt/i3situation
i3situation/core/status.py
setup_file_logger
def setup_file_logger(filename, formatting, log_level): """ A helper function for creating a file logger. Accepts arguments, as it is used in Status and LoggingWriter. """ logger = logging.getLogger() # If a stream handler has been attached, remove it. if logger.handlers: logger.removeHandler(logger.handlers[0]) handler = logging.FileHandler(filename) logger.addHandler(handler) formatter = logging.Formatter(*formatting) handler.setFormatter(formatter) logger.setLevel(log_level) handler.setLevel(log_level) return logger
python
def setup_file_logger(filename, formatting, log_level): """ A helper function for creating a file logger. Accepts arguments, as it is used in Status and LoggingWriter. """ logger = logging.getLogger() # If a stream handler has been attached, remove it. if logger.handlers: logger.removeHandler(logger.handlers[0]) handler = logging.FileHandler(filename) logger.addHandler(handler) formatter = logging.Formatter(*formatting) handler.setFormatter(formatter) logger.setLevel(log_level) handler.setLevel(log_level) return logger
[ "def", "setup_file_logger", "(", "filename", ",", "formatting", ",", "log_level", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "# If a stream handler has been attached, remove it.", "if", "logger", ".", "handlers", ":", "logger", ".", "removeHandler", "(", "logger", ".", "handlers", "[", "0", "]", ")", "handler", "=", "logging", ".", "FileHandler", "(", "filename", ")", "logger", ".", "addHandler", "(", "handler", ")", "formatter", "=", "logging", ".", "Formatter", "(", "*", "formatting", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "setLevel", "(", "log_level", ")", "handler", ".", "setLevel", "(", "log_level", ")", "return", "logger" ]
A helper function for creating a file logger. Accepts arguments, as it is used in Status and LoggingWriter.
[ "A", "helper", "function", "for", "creating", "a", "file", "logger", ".", "Accepts", "arguments", "as", "it", "is", "used", "in", "Status", "and", "LoggingWriter", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L12-L27
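Because setup_file_logger() unpacks its formatting argument into logging.Formatter(*formatting), a one-element tuple holding a format string is enough. A hedged usage sketch; the import path is assumed from the record above and the log file path is hypothetical:

import logging
from i3situation.core.status import setup_file_logger  # assumed module path

logger = setup_file_logger(
    '/tmp/i3situation.log',                      # hypothetical log file
    ('%(asctime)s %(levelname)s %(message)s',),  # becomes Formatter(fmt)
    logging.DEBUG,
)
logger.debug('file logging configured')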
HarveyHunt/i3situation
i3situation/core/status.py
Status.output_to_bar
def output_to_bar(self, message, comma=True): """ Outputs data to stdout, without buffering. message: A string containing the data to be output. comma: Whether or not a comma should be placed at the end of the output. """ if comma: message += ',' sys.stdout.write(message + '\n') sys.stdout.flush()
python
def output_to_bar(self, message, comma=True): """ Outputs data to stdout, without buffering. message: A string containing the data to be output. comma: Whether or not a comma should be placed at the end of the output. """ if comma: message += ',' sys.stdout.write(message + '\n') sys.stdout.flush()
[ "def", "output_to_bar", "(", "self", ",", "message", ",", "comma", "=", "True", ")", ":", "if", "comma", ":", "message", "+=", "','", "sys", ".", "stdout", ".", "write", "(", "message", "+", "'\\n'", ")", "sys", ".", "stdout", ".", "flush", "(", ")" ]
Outputs data to stdout, without buffering. message: A string containing the data to be output. comma: Whether or not a comma should be placed at the end of the output.
[ "Outputs", "data", "to", "stdout", "without", "buffering", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L76-L86
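output_to_bar() writes one JSON array per update and, by default, terminates it with a comma so that consecutive updates form elements of i3bar's never-ending JSON array. A standalone sketch of the same idea (not the Status class itself):

import json
import sys

def emit_status_line(blocks, comma=True):
    """Write one status line the way Status.output_to_bar() does."""
    message = json.dumps(blocks)
    if comma:
        # Consecutive updates are separated by commas in the i3bar stream.
        message += ','
    sys.stdout.write(message + '\n')
    sys.stdout.flush()

emit_status_line([{'name': 'clock', 'full_text': '12:00'}])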
HarveyHunt/i3situation
i3situation/core/status.py
Status.reload
def reload(self): """ Reload the installed plugins and the configuration file. This is called when either the plugins or config get updated. """ logging.debug('Reloading config file as files have been modified.') self.config.plugin, self.config.general = self.config.reload() logging.debug('Reloading plugins as files have been modified.') self.loader = plugin_manager.PluginLoader( self._plugin_path, self.config.plugin) self._plugin_mod_time = os.path.getmtime(self._plugin_path) self._config_mod_time = os.path.getmtime(self._config_file_path)
python
def reload(self): """ Reload the installed plugins and the configuration file. This is called when either the plugins or config get updated. """ logging.debug('Reloading config file as files have been modified.') self.config.plugin, self.config.general = self.config.reload() logging.debug('Reloading plugins as files have been modified.') self.loader = plugin_manager.PluginLoader( self._plugin_path, self.config.plugin) self._plugin_mod_time = os.path.getmtime(self._plugin_path) self._config_mod_time = os.path.getmtime(self._config_file_path)
[ "def", "reload", "(", "self", ")", ":", "logging", ".", "debug", "(", "'Reloading config file as files have been modified.'", ")", "self", ".", "config", ".", "plugin", ",", "self", ".", "config", ".", "general", "=", "self", ".", "config", ".", "reload", "(", ")", "logging", ".", "debug", "(", "'Reloading plugins as files have been modified.'", ")", "self", ".", "loader", "=", "plugin_manager", ".", "PluginLoader", "(", "self", ".", "_plugin_path", ",", "self", ".", "config", ".", "plugin", ")", "self", ".", "_plugin_mod_time", "=", "os", ".", "path", ".", "getmtime", "(", "self", ".", "_plugin_path", ")", "self", ".", "_config_mod_time", "=", "os", ".", "path", ".", "getmtime", "(", "self", ".", "_config_file_path", ")" ]
Reload the installed plugins and the configuration file. This is called when either the plugins or config get updated.
[ "Reload", "the", "installed", "plugins", "and", "the", "configuration", "file", ".", "This", "is", "called", "when", "either", "the", "plugins", "or", "config", "get", "updated", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L88-L99
HarveyHunt/i3situation
i3situation/core/status.py
Status.run_plugins
def run_plugins(self): """ Creates a thread for each plugin and lets the thread_manager handle it. """ for obj in self.loader.objects: # Reserve a slot in the output_dict in order to ensure that the # items are in the correct order. self.output_dict[obj.output_options['name']] = None self.thread_manager.add_thread(obj.main, obj.options['interval'])
python
def run_plugins(self): """ Creates a thread for each plugin and lets the thread_manager handle it. """ for obj in self.loader.objects: # Reserve a slot in the output_dict in order to ensure that the # items are in the correct order. self.output_dict[obj.output_options['name']] = None self.thread_manager.add_thread(obj.main, obj.options['interval'])
[ "def", "run_plugins", "(", "self", ")", ":", "for", "obj", "in", "self", ".", "loader", ".", "objects", ":", "# Reserve a slot in the output_dict in order to ensure that the", "# items are in the correct order.", "self", ".", "output_dict", "[", "obj", ".", "output_options", "[", "'name'", "]", "]", "=", "None", "self", ".", "thread_manager", ".", "add_thread", "(", "obj", ".", "main", ",", "obj", ".", "options", "[", "'interval'", "]", ")" ]
Creates a thread for each plugin and lets the thread_manager handle it.
[ "Creates", "a", "thread", "for", "each", "plugin", "and", "lets", "the", "thread_manager", "handle", "it", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L101-L109
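run_plugins() reserves a slot in output_dict for every plugin before any thread produces output, so the bar keeps the plugins' load order no matter which thread finishes first. A tiny illustration of that ordering trick (plugin names are illustrative; the real class may use an ordered mapping, but plain dicts preserve insertion order on Python 3.7+):

# Reserve slots in load order; threads fill them in whenever they finish.
output_dict = {}
for name in ('clock', 'battery', 'mail'):
    output_dict[name] = None

# A late-arriving update doesn't change the display order.
output_dict['mail'] = {'name': 'mail', 'full_text': 'inbox: 3'}
print(list(output_dict))  # ['clock', 'battery', 'mail']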
HarveyHunt/i3situation
i3situation/core/status.py
Status.run
def run(self): """ Monitors if the config file or plugins are updated. Also outputs the JSON data generated by the plugins, without needing to poll the threads. """ self.run_plugins() while True: # Reload plugins and config if either the config file or plugin # directory are modified. if self._config_mod_time != os.path.getmtime(self._config_file_path) or \ self._plugin_mod_time != os.path.getmtime(self._plugin_path): self.thread_manager.kill_all_threads() self.output_dict.clear() self.reload() self.run_plugins() self.output_to_bar(json.dumps(self._remove_empty_output())) time.sleep(self.config.general['interval'])
python
def run(self): """ Monitors if the config file or plugins are updated. Also outputs the JSON data generated by the plugins, without needing to poll the threads. """ self.run_plugins() while True: # Reload plugins and config if either the config file or plugin # directory are modified. if self._config_mod_time != os.path.getmtime(self._config_file_path) or \ self._plugin_mod_time != os.path.getmtime(self._plugin_path): self.thread_manager.kill_all_threads() self.output_dict.clear() self.reload() self.run_plugins() self.output_to_bar(json.dumps(self._remove_empty_output())) time.sleep(self.config.general['interval'])
[ "def", "run", "(", "self", ")", ":", "self", ".", "run_plugins", "(", ")", "while", "True", ":", "# Reload plugins and config if either the config file or plugin", "# directory are modified.", "if", "self", ".", "_config_mod_time", "!=", "os", ".", "path", ".", "getmtime", "(", "self", ".", "_config_file_path", ")", "or", "self", ".", "_plugin_mod_time", "!=", "os", ".", "path", ".", "getmtime", "(", "self", ".", "_plugin_path", ")", ":", "self", ".", "thread_manager", ".", "kill_all_threads", "(", ")", "self", ".", "output_dict", ".", "clear", "(", ")", "self", ".", "reload", "(", ")", "self", ".", "run_plugins", "(", ")", "self", ".", "output_to_bar", "(", "json", ".", "dumps", "(", "self", ".", "_remove_empty_output", "(", ")", ")", ")", "time", ".", "sleep", "(", "self", ".", "config", ".", "general", "[", "'interval'", "]", ")" ]
Monitors if the config file or plugins are updated. Also outputs the JSON data generated by the plugins, without needing to poll the threads.
[ "Monitors", "if", "the", "config", "file", "or", "plugins", "are", "updated", ".", "Also", "outputs", "the", "JSON", "data", "generated", "by", "the", "plugins", "without", "needing", "to", "poll", "the", "threads", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L111-L127
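The reload trigger in Status.run() is a plain os.path.getmtime() comparison performed once per output interval. A standalone sketch of that polling pattern with a hypothetical path (the real method also kills the plugin threads and clears the output before reloading):

import os
import time

CONFIG = '/home/user/.config/i3situation/config'  # hypothetical path
last_seen = os.path.getmtime(CONFIG)

while True:
    current = os.path.getmtime(CONFIG)
    if current != last_seen:
        last_seen = current
        print('configuration changed on disk, reloading ...')
    time.sleep(5)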
HarveyHunt/i3situation
i3situation/core/status.py
Status._remove_empty_output
def _remove_empty_output(self): """ If plugins haven't been initialised and therefore not sending output or their output is None, there is no reason to take up extra room on the bar. """ output = [] for key in self.output_dict: if self.output_dict[key] is not None and 'full_text' in self.output_dict[key]: output.append(self.output_dict[key]) return output
python
def _remove_empty_output(self): """ If plugins haven't been initialised and therefore not sending output or their output is None, there is no reason to take up extra room on the bar. """ output = [] for key in self.output_dict: if self.output_dict[key] is not None and 'full_text' in self.output_dict[key]: output.append(self.output_dict[key]) return output
[ "def", "_remove_empty_output", "(", "self", ")", ":", "output", "=", "[", "]", "for", "key", "in", "self", ".", "output_dict", ":", "if", "self", ".", "output_dict", "[", "key", "]", "is", "not", "None", "and", "'full_text'", "in", "self", ".", "output_dict", "[", "key", "]", ":", "output", ".", "append", "(", "self", ".", "output_dict", "[", "key", "]", ")", "return", "output" ]
If plugins haven't been initialised and therefore not sending output or their output is None, there is no reason to take up extra room on the bar.
[ "If", "plugins", "haven", "t", "been", "initialised", "and", "therefore", "not", "sending", "output", "or", "their", "output", "is", "None", "there", "is", "no", "reason", "to", "take", "up", "extra", "room", "on", "the", "bar", "." ]
train
https://github.com/HarveyHunt/i3situation/blob/3160a21006fcc6961f240988874e228a5ec6f18e/i3situation/core/status.py#L129-L139
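_remove_empty_output() drops entries that are still None (plugin not initialised yet) or that lack a 'full_text' key, so they take up no room on the bar. A standalone illustration of the same filter with made-up plugin output:

output_dict = {
    'clock': {'name': 'clock', 'full_text': '12:00'},
    'mail': None,                      # plugin thread hasn't produced output yet
    'battery': {'name': 'battery'},    # output present but nothing to display
}

visible = [block for block in output_dict.values()
           if block is not None and 'full_text' in block]
print(visible)  # only the clock block survives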