keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.set_display
def set_display(self, brightness=100, brightness_mode="auto"):
    """
    allows to modify display state (change brightness)

    :param int brightness: display brightness [0, 100] (default: 100)
    :param str brightness_mode: the brightness mode of the display
                                [auto, manual] (default: auto)
    """
    assert brightness_mode in ("auto", "manual")
    assert brightness in range(101)

    log.debug("setting display information...")
    cmd, url = DEVICE_URLS["set_display"]
    json_data = {
        "brightness_mode": brightness_mode,
        "brightness": brightness
    }

    return self._exec(cmd, url, json_data=json_data)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L350-L369
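A minimal usage sketch. It assumes `lmn` is a LaMetricManager instance that has already been constructed with valid LaMetric API credentials; the constructor is not shown in this excerpt.

# Sketch: `lmn` is assumed to be a configured LaMetricManager instance.
lmn.set_display(brightness=75, brightness_mode="manual")  # fixed brightness
lmn.set_display()  # back to defaults: brightness 100, mode "auto"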
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.set_screensaver
def set_screensaver(
    self, mode, is_mode_enabled, start_time=None, end_time=None,
    is_screensaver_enabled=True
):
    """
    set the display's screensaver mode

    :param str mode: mode of the screensaver [when_dark, time_based]
    :param bool is_mode_enabled: specifies if mode is enabled or disabled
    :param str start_time: start time, only used in time_based mode
                           (format: %H:%M:%S)
    :param str end_time: end time, only used in time_based mode
                         (format: %H:%M:%S)
    :param bool is_screensaver_enabled: whether the screensaver is enabled
                                        overall (overrules the mode-specific
                                        settings)
    """
    assert mode in ("when_dark", "time_based")

    log.debug("setting screensaver to '{}'...".format(mode))
    cmd, url = DEVICE_URLS["set_display"]

    json_data = {
        "screensaver": {
            "enabled": is_screensaver_enabled,
            "mode": mode,
            "mode_params": {
                "enabled": is_mode_enabled
            },
        }
    }

    if mode == "time_based":
        # TODO: add time checks
        assert (start_time is not None) and (end_time is not None)
        json_data["screensaver"]["mode_params"]["start_time"] = start_time
        json_data["screensaver"]["mode_params"]["end_time"] = end_time

    return self._exec(cmd, url, json_data=json_data)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L371-L408
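A sketch of the time-based variant, which requires both boundary times (again assuming a configured `lmn` instance):

# Sketch: enable the screensaver between 22:00 and 07:00.
lmn.set_screensaver(
    mode="time_based",
    is_mode_enabled=True,
    start_time="22:00:00",
    end_time="07:00:00",
)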
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.get_volume
def get_volume(self):
    """
    returns the current volume
    """
    log.debug("getting volume...")
    cmd, url = DEVICE_URLS["get_volume"]
    return self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L410-L416
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.set_volume
def set_volume(self, volume=50):
    """
    allows to change the volume

    :param int volume: volume to be set for the current device
                       [0..100] (default: 50)
    """
    assert volume in range(101)

    log.debug("setting volume...")
    cmd, url = DEVICE_URLS["set_volume"]

    json_data = {
        "volume": volume,
    }

    return self._exec(cmd, url, json_data=json_data)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L418-L433
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.get_bluetooth_state
def get_bluetooth_state(self):
    """
    returns the bluetooth state
    """
    log.debug("getting bluetooth state...")
    cmd, url = DEVICE_URLS["get_bluetooth_state"]
    return self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L435-L441
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.set_bluetooth
def set_bluetooth(self, active=None, name=None):
    """
    allows to activate/deactivate bluetooth and change the name
    """
    assert active is not None or name is not None

    log.debug("setting bluetooth state...")
    cmd, url = DEVICE_URLS["set_bluetooth"]

    json_data = {}
    if name is not None:
        json_data["name"] = name
    if active is not None:
        json_data["active"] = active

    return self._exec(cmd, url, json_data=json_data)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L443-L458
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.get_wifi_state
def get_wifi_state(self):
    """
    returns the state of the Wi-Fi network the device is connected to
    """
    log.debug("getting wifi state...")
    cmd, url = DEVICE_URLS["get_wifi_state"]
    return self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L460-L466
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.set_apps_list
def set_apps_list(self):
    """
    gets installed apps and puts them into the available_apps list
    """
    log.debug("getting apps and setting them in the internal app list...")
    cmd, url = DEVICE_URLS["get_apps_list"]
    result = self._exec(cmd, url)

    self.available_apps = [
        AppModel(result[app])
        for app in result
    ]
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L469-L481
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.switch_to_app
def switch_to_app(self, package):
    """
    activates an app that is specified by package. Selects the first
    app it finds in the app list

    :param package: name of package/app
    :type package: str
    :return: None
    :rtype: None
    """
    log.debug("switching to app '{}'...".format(package))
    cmd, url = DEVICE_URLS["switch_to_app"]
    widget_id = self._get_widget_id(package)
    url = url.format('{}', package, widget_id)

    self.result = self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L489-L505
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.switch_to_next_app
def switch_to_next_app(self):
    """
    switches to the next app
    """
    log.debug("switching to next app...")
    cmd, url = DEVICE_URLS["switch_to_next_app"]
    self.result = self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L507-L513
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.activate_widget
def activate_widget(self, package):
    """
    activate the widget of the given package

    :param str package: name of the package
    """
    cmd, url = DEVICE_URLS["activate_widget"]

    # get widget id for the package
    widget_id = self._get_widget_id(package)
    url = url.format('{}', package, widget_id)

    self.result = self._exec(cmd, url)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L523-L535
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager._app_exec
def _app_exec(self, package, action, params=None):
    """
    meta method for all interactions with apps

    :param package: name of package/app
    :type package: str
    :param action: the action to be executed
    :type action: str
    :param params: optional parameters for this action
    :type params: dict
    :return: None
    :rtype: None
    """
    # get list of possible commands from app.actions
    allowed_commands = []
    for app in self.get_apps_list():
        if app.package == package:
            allowed_commands = list(app.actions.keys())
            break

    # check if action is in this list
    assert action in allowed_commands

    cmd, url = DEVICE_URLS["do_action"]

    # get widget id for the package
    widget_id = self._get_widget_id(package)
    url = url.format('{}', package, widget_id)

    json_data = {"id": action}
    if params is not None:
        json_data["params"] = params

    self.result = self._exec(cmd, url, json_data=json_data)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L537-L569
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.alarm_set
def alarm_set(self, time, wake_with_radio=False):
    """
    set the alarm clock

    :param str time: time of the alarm (format: %H:%M:%S)
    :param bool wake_with_radio: if True, radio will be used for the
                                 alarm instead of beep sound
    """
    # TODO: check for correct time format
    log.debug("alarm => set...")
    params = {
        "enabled": True,
        "time": time,
        "wake_with_radio": wake_with_radio
    }
    self._app_exec("com.lametric.clock", "clock.alarm", params=params)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L599-L614
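Usage sketch, again assuming a configured `lmn` LaMetricManager instance:

# Sketch: set a 06:30 alarm that wakes with the radio instead of a beep.
lmn.alarm_set("06:30:00", wake_with_radio=True)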
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.alarm_disable
def alarm_disable(self):
    """
    disable the alarm
    """
    log.debug("alarm => disable...")
    params = {"enabled": False}
    self._app_exec("com.lametric.clock", "clock.alarm", params=params)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L616-L622
keans/lmnotify
lmnotify/lmnotify.py
LaMetricManager.countdown_set
def countdown_set(self, duration, start_now):
    """
    set the countdown

    :param str duration:
    :param str start_now:
    """
    log.debug("countdown => set...")
    params = {'duration': duration, 'start_now': start_now}
    self._app_exec(
        "com.lametric.countdown", "countdown.configure", params
    )
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/lmnotify.py#L645-L656
comtihon/catcher
catcher/steps/external.py
External.action
def action(self, includes: dict, variables: dict) -> tuple:
    """
    Call external script.

    :param includes: testcase's includes
    :param variables: variables
    :return: script's output
    """
    json_args = fill_template_str(json.dumps(self.data), variables)
    p = subprocess.Popen([self.module, json_args],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    if p.wait() == 0:
        out = p.stdout.read().decode()
        debug(out)
        return variables, json.loads(out)
    else:
        out = p.stdout.read().decode()
        warning(out)
        raise Exception('Execution failed.')
https://github.com/comtihon/catcher/blob/5124e69d11cb6987daca595a61a4062d2b5f5ecc/catcher/steps/external.py#L17-L34
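The subprocess contract implied by this method: the external script receives the rendered step data as a JSON string in its first argument, must print only JSON to stdout, and must exit with status 0 on success. A minimal sketch of a compliant script (the file name and payload are hypothetical):

#!/usr/bin/env python3
# echo_step.py - hypothetical external step for catcher.
# Receives the step's data as JSON in argv[1]; prints JSON to stdout.
import json
import sys

data = json.loads(sys.argv[1])
# ... do the actual work here ...
print(json.dumps({"ok": True, "received": data}))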
keans/lmnotify
lmnotify/session.py
CloudSession.set_credentials
def set_credentials(self, client_id=None, client_secret=None):
    """
    set given credentials and reset the session
    """
    self._client_id = client_id
    self._client_secret = client_secret

    # make sure to reset session due to credential change
    self._session = None
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/session.py#L82-L90
keans/lmnotify
lmnotify/session.py
CloudSession.init_session
def init_session(self, get_token=True):
    """
    init a new oauth2 session that is required to access the cloud

    :param bool get_token: if True, a token will be obtained after the
                           session has been created
    """
    if (self._client_id is None) or (self._client_secret is None):
        sys.exit(
            "Please make sure to set the client id and client secret "
            "via the constructor, the environment variables or the config "
            "file; otherwise, the LaMetric cloud cannot be accessed. "
            "Abort!"
        )

    self._session = OAuth2Session(
        client=BackendApplicationClient(client_id=self._client_id)
    )

    if get_token is True:
        # get oauth token
        self.get_token()
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/session.py#L98-L119
keans/lmnotify
lmnotify/session.py
CloudSession.get_token
def get_token(self):
    """
    get current oauth token
    """
    self.token = self._session.fetch_token(
        token_url=CLOUD_URLS["get_token"][1],
        client_id=self._client_id,
        client_secret=self._client_secret
    )
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/session.py#L121-L129
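Under the hood this is the standard OAuth2 client-credentials flow from requests-oauthlib. A standalone sketch of that flow; the token URL below is a placeholder, since the real one lives in CLOUD_URLS, which this excerpt does not show:

from oauthlib.oauth2 import BackendApplicationClient
from requests_oauthlib import OAuth2Session

# Sketch of the client-credentials flow the session implements.
session = OAuth2Session(
    client=BackendApplicationClient(client_id="my-client-id")
)
token = session.fetch_token(
    token_url="https://cloud.example.com/oauth2/token",  # placeholder URL
    client_id="my-client-id",
    client_secret="my-client-secret",
)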
comtihon/catcher
catcher/steps/external_step.py
ExternalStep.simple_input
def simple_input(self, variables):
    """
    Use this method to get simple input as python object, with all
    templates filled in

    :param variables:
    :return: python object
    """
    json_args = fill_template_str(json.dumps(self.data), variables)
    return try_get_objects(json_args)
https://github.com/comtihon/catcher/blob/5124e69d11cb6987daca595a61a4062d2b5f5ecc/catcher/steps/external_step.py#L19-L29
keans/lmnotify
lmnotify/config.py
Config.create
def create(self):
    """
    creates an empty configuration file
    """
    if not self.exists():
        # create new empty config file based on template
        self.config.add_section("lametric")
        self.config.set("lametric", "client_id", "")
        self.config.set("lametric", "client_secret", "")

        # save new config
        self.save()

        # stop here, so the user can set the config
        sys.exit(
            "An empty config file '{}' has been created. Please set "
            "the corresponding LaMetric API credentials.".format(
                self._filename
            )
        )
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/config.py#L61-L80
keans/lmnotify
lmnotify/config.py
Config.save
def save(self):
    """
    save current config to the file
    """
    with open(self._filename, "w") as f:
        self.config.write(f)
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/config.py#L88-L93
jwass/directions.py
directions/base.py
Router.rate_limit_wait
def rate_limit_wait(self):
    """
    Sleep if rate limiting is required based on current time and last
    query.
    """
    if self._rate_limit_dt and self._last_query is not None:
        dt = time.time() - self._last_query
        wait = self._rate_limit_dt - dt
        if wait > 0:
            time.sleep(wait)
https://github.com/jwass/directions.py/blob/c3734e4cb499fe80e27b2a26575a91ac4e834e95/directions/base.py#L22-L32
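The arithmetic: if queries must be at least _rate_limit_dt seconds apart and the last query finished dt seconds ago, the method sleeps for the remaining _rate_limit_dt - dt. An illustrative sketch; `router` is assumed to be a Router instance and the attribute values are made up:

import time

# Sketch: with a 1.0 s rate limit and a query issued 0.4 s ago,
# rate_limit_wait() sleeps for the remaining ~0.6 s.
router._rate_limit_dt = 1.0
router._last_query = time.time() - 0.4
router.rate_limit_wait()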
jwass/directions.py
directions/base.py
Router.route
def route(self, arg, destination=None, waypoints=None, raw=False,
          **kwargs):
    """
    Query a route.

    route(locations): points can be
        - a sequence of locations
        - a Shapely LineString

    route(origin, destination, waypoints=None)
        - origin and destination are each a single location
        - waypoints are the points to be inserted between the
          origin and destination

        If waypoints is specified, destination must also be specified

    Each location can be:
        - string (will be geocoded by the routing provider. Not all
          providers accept this as input)
        - (longitude, latitude) sequence (tuple, list, numpy array,
          etc.)
        - Shapely Point with x as longitude, y as latitude

    Additional parameters
    ---------------------
    raw : bool, default False
        Return the raw json dict response from the service

    Returns
    -------
    list of Route objects
        If raw is True, returns the json dict instead of converting to
        Route objects

    Examples
    --------
    mq = directions.Mapquest(key)
    routes = mq.route('1 magazine st. cambridge, ma',
                      'south station boston, ma')

    routes = mq.route('1 magazine st. cambridge, ma',
                      'south station boston, ma',
                      waypoints=['700 commonwealth ave. boston, ma'])

    # Uses each point in the line as a waypoint. There is a limit to
    # the number of waypoints for each service. Consult the docs.
    line = LineString(...)
    routes = mq.route(line)

    # Feel free to mix different location types
    routes = mq.route(line.coords[0], 'south station boston, ma',
                      waypoints=[(-71.103972, 42.349324)])
    """
    points = _parse_points(arg, destination, waypoints)
    if len(points) < 2:
        raise ValueError('You must specify at least 2 points')

    self.rate_limit_wait()
    data = self.raw_query(points, **kwargs)
    self._last_query = time.time()

    if raw:
        return data
    return self.format_output(data)
https://github.com/jwass/directions.py/blob/c3734e4cb499fe80e27b2a26575a91ac4e834e95/directions/base.py#L38-L99
jwass/directions.py
directions/base.py
Route.from_geojson
def from_geojson(cls, data):
    """
    Return a Route from a GeoJSON dictionary, as returned by
    Route.geojson()
    """
    properties = data['properties']
    distance = properties.pop('distance')
    duration = properties.pop('duration')

    maneuvers = []
    for feature in data['features']:
        geom = feature['geometry']
        if geom['type'] == 'LineString':
            coords = geom['coordinates']
        else:
            maneuvers.append(Maneuver.from_geojson(feature))

    return Route(coords, distance, duration, maneuvers, **properties)
https://github.com/jwass/directions.py/blob/c3734e4cb499fe80e27b2a26575a91ac4e834e95/directions/base.py#L193-L210
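A sketch of the minimal GeoJSON layout this expects: top-level 'properties' carrying 'distance' and 'duration', and a 'features' list in which a LineString feature supplies the route coordinates (any non-LineString feature is parsed as a Maneuver). The values below are made up, and the call assumes from_geojson is exposed as a classmethod on Route, as the `cls` parameter suggests:

data = {
    "properties": {"distance": 1200, "duration": 300},
    "features": [
        {"geometry": {
            "type": "LineString",
            "coordinates": [[-71.10, 42.35], [-71.06, 42.35]],
        }},
    ],
}
route = Route.from_geojson(data)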
comtihon/catcher
catcher/utils/module_utils.py
prepare_modules
def prepare_modules(module_paths: list, available: dict) -> dict:
    """
    Scan all paths for external modules and form key-value dict.

    :param module_paths: list of external modules (either python
                         packages or third-party scripts)
    :param available: dict of all registered python modules (can contain
                      python modules from module_paths)
    :return: dict of external modules, where keys are filenames (same as
             stepnames) and values are the paths
    """
    indexed = {}
    for path in module_paths:
        if not os.path.exists(path) and path not in available:
            err = 'No such path: ' + path
            error(err)
        else:
            for f in os.listdir(path):
                mod_path = join(path, f)
                if f in indexed:
                    warning('Override ' + indexed[f] + ' with ' + mod_path)
                indexed[f] = mod_path
    return indexed
https://github.com/comtihon/catcher/blob/5124e69d11cb6987daca595a61a4062d2b5f5ecc/catcher/utils/module_utils.py#L14-L32
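Usage sketch; the directory names are hypothetical. On a filename collision the later path wins and a warning is logged:

# Sketch: index external step modules from two directories.
modules = prepare_modules(['steps', 'more_steps'], available={})
# e.g. {'my_step.py': 'more_steps/my_step.py', ...}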
keans/lmnotify
lmnotify/ssdp.py
SSDPManager.discover_upnp_devices
def discover_upnp_devices(
    self, st="upnp:rootdevice", timeout=2, mx=1, retries=1
):
    """
    sends an SSDP discovery packet to the network and collects the
    devices that reply to it. A dictionary is returned using each
    device's unique USN as key
    """
    # prepare UDP socket to transfer the SSDP packets
    s = socket.socket(
        socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP
    )
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
    s.settimeout(timeout)

    # prepare SSDP discover message
    msg = SSDPDiscoveryMessage(mx=mx, st=st)

    # try to get devices with multiple retries in case of failure
    devices = {}
    for _ in range(retries):
        # send SSDP discovery message
        s.sendto(msg.bytes, SSDP_MULTICAST_ADDR)

        devices = {}
        try:
            while True:
                # parse response and store it in dict
                r = SSDPResponse(s.recvfrom(65507))
                devices[r.usn] = r
        except socket.timeout:
            break

    return devices
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/ssdp.py#L80-L115
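Usage sketch, assuming SSDPManager can be constructed without arguments (not shown in this excerpt); the keys of the returned dict are the devices' USNs and each value is an SSDPResponse:

# Sketch: discover all UPnP root devices, waiting up to 2 s for replies.
mgr = SSDPManager()
devices = mgr.discover_upnp_devices(st="upnp:rootdevice", timeout=2)
for usn, response in devices.items():
    print(usn, response.location)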
keans/lmnotify
lmnotify/ssdp.py
SSDPManager.get_filtered_devices
def get_filtered_devices(
    self, model_name, device_types="upnp:rootdevice", timeout=2
):
    """
    returns a dict of devices that contain the given model name
    """
    # get list of all UPNP devices in the network
    upnp_devices = self.discover_upnp_devices(st=device_types)

    # go through all UPNP devices and filter wanted devices
    filtered_devices = collections.defaultdict(dict)
    for dev in upnp_devices.values():
        try:
            # download XML file with information about the device
            # from the device's location
            r = requests.get(dev.location, timeout=timeout)
            if r.status_code == requests.codes.ok:
                # parse returned XML
                root = ET.fromstring(r.text)

                # add shortcut for XML namespace to access sub nodes
                ns = {"upnp": "urn:schemas-upnp-org:device-1-0"}

                # get device element
                device = root.find("upnp:device", ns)

                if model_name in device.find("upnp:modelName", ns).text:
                    # model name is wanted => add to list

                    # get unique UDN of the device that is used as key
                    udn = device.find("upnp:UDN", ns).text

                    # add url base
                    url_base = root.find("upnp:URLBase", ns)
                    if url_base is not None:
                        filtered_devices[udn]["URLBase"] = url_base.text

                    # add interesting device attributes and
                    # use unique UDN as key
                    for attr in (
                        "deviceType", "friendlyName", "manufacturer",
                        "manufacturerURL", "modelDescription",
                        "modelName", "modelNumber"
                    ):
                        el = device.find("upnp:%s" % attr, ns)
                        if el is not None:
                            filtered_devices[udn][attr] = el.text.strip()

        except ET.ParseError:
            # just skip devices that are invalid xml
            pass

        except requests.exceptions.ConnectTimeout:
            # just skip devices that are not replying in time
            print("Timeout for '%s'. Skipping." % dev.location)

    return filtered_devices
python
def get_filtered_devices( self, model_name, device_types="upnp:rootdevice", timeout=2 ): """ returns a dict of devices that contain the given model name """ # get list of all UPNP devices in the network upnp_devices = self.discover_upnp_devices(st=device_types) # go through all UPNP devices and filter wanted devices filtered_devices = collections.defaultdict(dict) for dev in upnp_devices.values(): try: # download XML file with information about the device # from the device's location r = requests.get(dev.location, timeout=timeout) if r.status_code == requests.codes.ok: # parse returned XML root = ET.fromstring(r.text) # add shortcut for XML namespace to access sub nodes ns = {"upnp": "urn:schemas-upnp-org:device-1-0"} # get device element device = root.find("upnp:device", ns) if model_name in device.find( "upnp:modelName", ns ).text: # model name is wanted => add to list # get unique UDN of the device that is used as key udn = device.find("upnp:UDN", ns).text # add url base url_base = root.find("upnp:URLBase", ns) if url_base is not None: filtered_devices[udn][ "URLBase" ] = url_base.text # add interesting device attributes and # use unique UDN as key for attr in ( "deviceType", "friendlyName", "manufacturer", "manufacturerURL", "modelDescription", "modelName", "modelNumber" ): el = device.find("upnp:%s" % attr, ns) if el is not None: filtered_devices[udn][ attr ] = el.text.strip() except ET.ParseError: # just skip devices that are invalid xml pass except requests.exceptions.ConnectTimeout: # just skip devices that are not replying in time print("Timeout for '%s'. Skipping." % dev.location) return filtered_devices
[ "def", "get_filtered_devices", "(", "self", ",", "model_name", ",", "device_types", "=", "\"upnp:rootdevice\"", ",", "timeout", "=", "2", ")", ":", "# get list of all UPNP devices in the network", "upnp_devices", "=", "self", ".", "discover_upnp_devices", "(", "st", "=", "device_types", ")", "# go through all UPNP devices and filter wanted devices", "filtered_devices", "=", "collections", ".", "defaultdict", "(", "dict", ")", "for", "dev", "in", "upnp_devices", ".", "values", "(", ")", ":", "try", ":", "# download XML file with information about the device", "# from the device's location", "r", "=", "requests", ".", "get", "(", "dev", ".", "location", ",", "timeout", "=", "timeout", ")", "if", "r", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "# parse returned XML", "root", "=", "ET", ".", "fromstring", "(", "r", ".", "text", ")", "# add shortcut for XML namespace to access sub nodes", "ns", "=", "{", "\"upnp\"", ":", "\"urn:schemas-upnp-org:device-1-0\"", "}", "# get device element", "device", "=", "root", ".", "find", "(", "\"upnp:device\"", ",", "ns", ")", "if", "model_name", "in", "device", ".", "find", "(", "\"upnp:modelName\"", ",", "ns", ")", ".", "text", ":", "# model name is wanted => add to list", "# get unique UDN of the device that is used as key", "udn", "=", "device", ".", "find", "(", "\"upnp:UDN\"", ",", "ns", ")", ".", "text", "# add url base", "url_base", "=", "root", ".", "find", "(", "\"upnp:URLBase\"", ",", "ns", ")", "if", "url_base", "is", "not", "None", ":", "filtered_devices", "[", "udn", "]", "[", "\"URLBase\"", "]", "=", "url_base", ".", "text", "# add interesting device attributes and", "# use unique UDN as key", "for", "attr", "in", "(", "\"deviceType\"", ",", "\"friendlyName\"", ",", "\"manufacturer\"", ",", "\"manufacturerURL\"", ",", "\"modelDescription\"", ",", "\"modelName\"", ",", "\"modelNumber\"", ")", ":", "el", "=", "device", ".", "find", "(", "\"upnp:%s\"", "%", "attr", ",", "ns", ")", "if", "el", "is", "not", "None", ":", "filtered_devices", "[", "udn", "]", "[", "attr", "]", "=", "el", ".", "text", ".", "strip", "(", ")", "except", "ET", ".", "ParseError", ":", "# just skip devices that are invalid xml", "pass", "except", "requests", ".", "exceptions", ".", "ConnectTimeout", ":", "# just skip devices that are not replying in time", "print", "(", "\"Timeout for '%s'. Skipping.\"", "%", "dev", ".", "location", ")", "return", "filtered_devices" ]
returns a dict of UPnP devices whose model name contains the given string
[ "returns", "a", "dict", "of", "devices", "that", "contain", "the", "given", "model", "name" ]
train
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/ssdp.py#L117-L180
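A short usage sketch for the filter above. The class name `SSDPManager` and its construction are assumptions for illustration; only the `get_filtered_devices` signature and the collected attribute names come from the source.

# hypothetical usage; SSDPManager is an assumed name for the class
# that defines get_filtered_devices above
ssdp = SSDPManager()
devices = ssdp.get_filtered_devices("LaMetric", timeout=2)
for udn, attrs in devices.items():
    # each value holds the attributes collected above (friendlyName, URLBase, ...)
    print(udn, attrs.get("friendlyName"), attrs.get("URLBase"))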
rasbt/mputil
mputil/map.py
lazy_map
def lazy_map(data_processor, data_generator, n_cpus=1, stepsize=None):
    """A variant of multiprocessing.Pool.map that supports lazy evaluation

    As with the regular multiprocessing.Pool.map, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.map, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Returns
    =========
    list : A Python list containing the results
        returned by the `data_processor` function
        when called on all elements yielded by the `data_generator`,
        in order. Note that the final batch fetched from `data_generator`
        may contain fewer than `stepsize` items if the number of elements
        is not evenly divisible by `stepsize`.

    """
    if not n_cpus:
        n_cpus = mp.cpu_count()
    elif n_cpus < 0:
        n_cpus = mp.cpu_count() - n_cpus

    if stepsize is None:
        stepsize = n_cpus

    results = []
    with mp.Pool(processes=n_cpus) as p:
        while True:
            r = p.map(data_processor, islice(data_generator, stepsize))
            if r:
                results.extend(r)
            else:
                break
    return results
python
def lazy_map(data_processor, data_generator, n_cpus=1, stepsize=None):
    """A variant of multiprocessing.Pool.map that supports lazy evaluation

    As with the regular multiprocessing.Pool.map, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.map, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Returns
    =========
    list : A Python list containing the results
        returned by the `data_processor` function
        when called on all elements yielded by the `data_generator`,
        in order. Note that the final batch fetched from `data_generator`
        may contain fewer than `stepsize` items if the number of elements
        is not evenly divisible by `stepsize`.

    """
    if not n_cpus:
        n_cpus = mp.cpu_count()
    elif n_cpus < 0:
        n_cpus = mp.cpu_count() - n_cpus

    if stepsize is None:
        stepsize = n_cpus

    results = []
    with mp.Pool(processes=n_cpus) as p:
        while True:
            r = p.map(data_processor, islice(data_generator, stepsize))
            if r:
                results.extend(r)
            else:
                break
    return results
[ "def", "lazy_map", "(", "data_processor", ",", "data_generator", ",", "n_cpus", "=", "1", ",", "stepsize", "=", "None", ")", ":", "if", "not", "n_cpus", ":", "n_cpus", "=", "mp", ".", "cpu_count", "(", ")", "elif", "n_cpus", "<", "0", ":", "n_cpus", "=", "mp", ".", "cpu_count", "(", ")", "-", "n_cpus", "if", "stepsize", "is", "None", ":", "stepsize", "=", "n_cpus", "results", "=", "[", "]", "with", "mp", ".", "Pool", "(", "processes", "=", "n_cpus", ")", "as", "p", ":", "while", "True", ":", "r", "=", "p", ".", "map", "(", "data_processor", ",", "islice", "(", "data_generator", ",", "stepsize", ")", ")", "if", "r", ":", "results", ".", "extend", "(", "r", ")", "else", ":", "break", "return", "results" ]
A variant of multiprocessing.Pool.map that supports lazy evaluation

    As with the regular multiprocessing.Pool.map, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.map, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Returns
    =========
    list : A Python list containing the results
        returned by the `data_processor` function
        when called on all elements yielded by the `data_generator`,
        in order. Note that the final batch fetched from `data_generator`
        may contain fewer than `stepsize` items if the number of elements
        is not evenly divisible by `stepsize`.
[ "A", "variant", "of", "multiprocessing", ".", "Pool", ".", "map", "that", "supports", "lazy", "evaluation" ]
train
https://github.com/rasbt/mputil/blob/952844980d4083eabe7b387de69c3d50b04abb9a/mputil/map.py#L12-L68
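A minimal usage sketch for the function above. The import path is an assumption based on the repository layout (mputil/map.py); everything else follows from the documented behavior.

import math
from mputil import lazy_map  # import path assumed from the repository layout

def square_root(x):
    # worker functions must be picklable, hence defined at module level
    return math.sqrt(x)

if __name__ == '__main__':
    # the generator is consumed `stepsize` items at a time, never all at once
    numbers = (i * i for i in range(10000))
    results = lazy_map(square_root, numbers, n_cpus=2)
    print(len(results), results[:3])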
rasbt/mputil
mputil/map.py
lazy_imap
def lazy_imap(data_processor, data_generator, n_cpus=1, stepsize=None):
    """A variant of multiprocessing.Pool.imap that supports lazy evaluation

    As with the regular multiprocessing.Pool.imap, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.imap, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Yields
    ======
    list : A Python list containing the *n* results
        returned by the `data_processor` function
        when called on elements yielded by the `data_generator`,
        in order; *n* is at most `stepsize` (the final batch may
        be smaller). If `stepsize` is None, *n* is at most `n_cpus`.

    """
    if not n_cpus:
        n_cpus = mp.cpu_count()
    elif n_cpus < 0:
        n_cpus = mp.cpu_count() - n_cpus

    if stepsize is None:
        stepsize = n_cpus

    with mp.Pool(processes=n_cpus) as p:
        while True:
            r = p.map(data_processor, islice(data_generator, stepsize))
            if r:
                yield r
            else:
                break
python
def lazy_imap(data_processor, data_generator, n_cpus=1, stepsize=None):
    """A variant of multiprocessing.Pool.imap that supports lazy evaluation

    As with the regular multiprocessing.Pool.imap, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.imap, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Yields
    ======
    list : A Python list containing the *n* results
        returned by the `data_processor` function
        when called on elements yielded by the `data_generator`,
        in order; *n* is at most `stepsize` (the final batch may
        be smaller). If `stepsize` is None, *n* is at most `n_cpus`.

    """
    if not n_cpus:
        n_cpus = mp.cpu_count()
    elif n_cpus < 0:
        n_cpus = mp.cpu_count() - n_cpus

    if stepsize is None:
        stepsize = n_cpus

    with mp.Pool(processes=n_cpus) as p:
        while True:
            r = p.map(data_processor, islice(data_generator, stepsize))
            if r:
                yield r
            else:
                break
[ "def", "lazy_imap", "(", "data_processor", ",", "data_generator", ",", "n_cpus", "=", "1", ",", "stepsize", "=", "None", ")", ":", "if", "not", "n_cpus", ":", "n_cpus", "=", "mp", ".", "cpu_count", "(", ")", "elif", "n_cpus", "<", "0", ":", "n_cpus", "=", "mp", ".", "cpu_count", "(", ")", "-", "n_cpus", "if", "stepsize", "is", "None", ":", "stepsize", "=", "n_cpus", "with", "mp", ".", "Pool", "(", "processes", "=", "n_cpus", ")", "as", "p", ":", "while", "True", ":", "r", "=", "p", ".", "map", "(", "data_processor", ",", "islice", "(", "data_generator", ",", "stepsize", ")", ")", "if", "r", ":", "yield", "r", "else", ":", "break" ]
A variant of multiprocessing.Pool.imap that supports lazy evaluation

    As with the regular multiprocessing.Pool.imap, the processes are spawned off
    asynchronously while the results are returned in order. In contrast to
    multiprocessing.Pool.imap, the iterator (here: data_generator) is not
    consumed at once but evaluated lazily which is useful if the iterator
    (for example, a generator) contains objects with a large memory footprint.

    Parameters
    ==========
    data_processor : func
        A processing function that is applied to objects in `data_generator`

    data_generator : iterator or generator
        A Python iterator or generator that yields objects to be fed into the
        `data_processor` function for processing.

    n_cpus=1 : int (default: 1)
        Number of processes to run in parallel.
        - If `n_cpus` > 0, the specified number of CPUs will be used.
        - If `n_cpus=0`, all available CPUs will be used.
        - If `n_cpus` < 0, all available CPUs - `n_cpus` will be used.

    stepsize : int or None (default: None)
        The number of items to fetch from the iterator to pass on to the
        workers at a time.
        If `stepsize=None` (default), the stepsize will be set equal to
        `n_cpus`.

    Yields
    ======
    list : A Python list containing the *n* results
        returned by the `data_processor` function
        when called on elements yielded by the `data_generator`,
        in order; *n* is at most `stepsize` (the final batch may
        be smaller). If `stepsize` is None, *n* is at most `n_cpus`.
[ "A", "variant", "of", "multiprocessing", ".", "Pool", ".", "imap", "that", "supports", "lazy", "evaluation" ]
train
https://github.com/rasbt/mputil/blob/952844980d4083eabe7b387de69c3d50b04abb9a/mputil/map.py#L71-L123
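The generator variant can be driven the same way; again the import path is an assumption based on the repository layout.

import math
from mputil import lazy_imap  # import path assumed from the repository layout

def square_root(x):
    # worker functions must be picklable, hence defined at module level
    return math.sqrt(x)

if __name__ == '__main__':
    numbers = (i * i for i in range(100))
    # each iteration yields one batch of at most `stepsize` results
    for batch in lazy_imap(square_root, numbers, n_cpus=2, stepsize=8):
        print(len(batch), batch[:2])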
comtihon/catcher
catcher/steps/step.py
update_variables
def update_variables(func): """ Use this decorator on Step.action implementation. Your action method should always return variables, or both variables and output. This decorator will update variables with output. """ @wraps(func) def wrapper(self, *args, **kwargs): result = func(self, *args, **kwargs) if isinstance(result, tuple): return self.process_register(result[0], result[1]) else: return self.process_register(result) return wrapper
python
def update_variables(func): """ Use this decorator on Step.action implementation. Your action method should always return variables, or both variables and output. This decorator will update variables with output. """ @wraps(func) def wrapper(self, *args, **kwargs): result = func(self, *args, **kwargs) if isinstance(result, tuple): return self.process_register(result[0], result[1]) else: return self.process_register(result) return wrapper
[ "def", "update_variables", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "result", "=", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "isinstance", "(", "result", ",", "tuple", ")", ":", "return", "self", ".", "process_register", "(", "result", "[", "0", "]", ",", "result", "[", "1", "]", ")", "else", ":", "return", "self", ".", "process_register", "(", "result", ")", "return", "wrapper" ]
Use this decorator on Step.action implementation. Your action method should always return variables, or both variables and output. This decorator will update variables with output.
[ "Use", "this", "decorator", "on", "Step", ".", "action", "implementation", "." ]
train
https://github.com/comtihon/catcher/blob/5124e69d11cb6987daca595a61a4062d2b5f5ecc/catcher/steps/step.py#L156-L175
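A minimal sketch of an action method using the decorator above. The `Echo` step and the exact `action` signature are assumptions for illustration; only the return contract (variables, or variables plus output) comes from the source.

# hypothetical Step subclass; the action signature here is an assumption
class Echo(Step):
    @update_variables
    def action(self, includes: dict, variables: dict) -> tuple:
        output = variables.get('message', '')
        # returning (variables, output) routes both through process_register
        return variables, output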
keans/lmnotify
lmnotify/models.py
AppModel._set_properties
def _set_properties(self, data):
        """
        set the properties of the app model from the given data dict
        """
        for property in data.keys():
            if property in vars(self):
                setattr(self, property, data[property])
python
def _set_properties(self, data):
        """
        set the properties of the app model from the given data dict
        """
        for property in data.keys():
            if property in vars(self):
                setattr(self, property, data[property])
[ "def", "_set_properties", "(", "self", ",", "data", ")", ":", "for", "property", "in", "data", ".", "keys", "(", ")", ":", "if", "property", "in", "vars", "(", "self", ")", ":", "setattr", "(", "self", ",", "property", ",", "data", "[", "property", "]", ")" ]
set the properties of the app model from the given data dict
[ "set", "the", "properties", "of", "the", "app", "model", "by", "the", "given", "data", "dict" ]
train
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/lmnotify/models.py#L18-L24
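A sketch of the guarded copy the method performs. The construction and attribute names are illustrative assumptions; only the "set existing attributes only" behavior comes from the source.

# illustrative only: attributes must already exist on the instance to be set
app = AppModel()                       # assumed default construction
app._set_properties({"package": "com.lametric.clock", "bogus": 1})
# "package" is copied only if AppModel defines it in __init__; "bogus" is ignored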
keans/lmnotify
setup.py
get_long_description
def get_long_description(): """ get long description from README.rst file """ with codecs.open(os.path.join(here, "README.rst"), "r", "utf-8") as f: return f.read()
python
def get_long_description(): """ get long description from README.rst file """ with codecs.open(os.path.join(here, "README.rst"), "r", "utf-8") as f: return f.read()
[ "def", "get_long_description", "(", ")", ":", "with", "codecs", ".", "open", "(", "os", ".", "path", ".", "join", "(", "here", ",", "\"README.rst\"", ")", ",", "\"r\"", ",", "\"utf-8\"", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
get long description from README.rst file
[ "get", "long", "description", "from", "README", ".", "rst", "file" ]
train
https://github.com/keans/lmnotify/blob/b0a5282a582e5090852dc20fea8a135ca258d0d3/setup.py#L9-L14
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/rollCallHandler.py
roll_call_handler
async def roll_call_handler(service, action_type, payload, props, **kwds): """ This action handler responds to the "roll call" emitted by the api gateway when it is brought up with the normal summary produced by the service. """ # if the action type corresponds to a roll call if action_type == roll_call_type(): # then announce the service await service.announce()
python
async def roll_call_handler(service, action_type, payload, props, **kwds): """ This action handler responds to the "roll call" emitted by the api gateway when it is brought up with the normal summary produced by the service. """ # if the action type corresponds to a roll call if action_type == roll_call_type(): # then announce the service await service.announce()
[ "async", "def", "roll_call_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "*", "*", "kwds", ")", ":", "# if the action type corresponds to a roll call", "if", "action_type", "==", "roll_call_type", "(", ")", ":", "# then announce the service", "await", "service", ".", "announce", "(", ")" ]
This action handler responds to the "roll call" emitted by the api gateway when it is brought up with the normal summary produced by the service.
[ "This", "action", "handler", "responds", "to", "the", "roll", "call", "emitted", "by", "the", "api", "gateway", "when", "it", "is", "brought", "up", "with", "the", "normal", "summary", "produced", "by", "the", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/rollCallHandler.py#L3-L12
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/flexibleAPIHandler.py
flexible_api_handler
async def flexible_api_handler(service, action_type, payload, props, **kwds):
    """
        This query handler builds the dynamic picture of available services.
    """
    # if the action represents a new service
    if action_type == intialize_service_action():
        # treat the payload like JSON if it's a string
        model = json.loads(payload) if isinstance(payload, str) else payload

        # the list of known models
        models = service._external_service_data['models']
        # the list of known connections
        connections = service._external_service_data['connections']
        # the list of known mutations
        mutations = service._external_service_data['mutations']

        # if the model is a connection
        if 'connection' in model:
            # if we haven't seen the connection before
            if not [conn for conn in connections if conn['name'] == model['name']]:
                # add it to the list
                connections.append(model)

        # or if there are registered fields
        elif 'fields' in model and not [mod for mod in models if mod['name'] == model['name']]:
            # add it to the model list
            models.append(model)

        # the service could provide mutations as well as affect the topology
        if 'mutations' in model:
            # go over each announced mutation
            for mutation in model['mutations']:
                # if there isn't a mutation by the same name in the local cache
                if not [mut for mut in mutations if mut['name'] == mutation['name']]:
                    # add it to the local cache
                    mutations.append(mutation)

        # if there are models
        if models:
            # create a new schema corresponding to the models and connections
            service.schema = generate_api_schema(
                models=models,
                connections=connections,
                mutations=mutations,
            )
python
async def flexible_api_handler(service, action_type, payload, props, **kwds):
    """
        This query handler builds the dynamic picture of available services.
    """
    # if the action represents a new service
    if action_type == intialize_service_action():
        # treat the payload like JSON if it's a string
        model = json.loads(payload) if isinstance(payload, str) else payload

        # the list of known models
        models = service._external_service_data['models']
        # the list of known connections
        connections = service._external_service_data['connections']
        # the list of known mutations
        mutations = service._external_service_data['mutations']

        # if the model is a connection
        if 'connection' in model:
            # if we haven't seen the connection before
            if not [conn for conn in connections if conn['name'] == model['name']]:
                # add it to the list
                connections.append(model)

        # or if there are registered fields
        elif 'fields' in model and not [mod for mod in models if mod['name'] == model['name']]:
            # add it to the model list
            models.append(model)

        # the service could provide mutations as well as affect the topology
        if 'mutations' in model:
            # go over each announced mutation
            for mutation in model['mutations']:
                # if there isn't a mutation by the same name in the local cache
                if not [mut for mut in mutations if mut['name'] == mutation['name']]:
                    # add it to the local cache
                    mutations.append(mutation)

        # if there are models
        if models:
            # create a new schema corresponding to the models and connections
            service.schema = generate_api_schema(
                models=models,
                connections=connections,
                mutations=mutations,
            )
[ "async", "def", "flexible_api_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "*", "*", "kwds", ")", ":", "# if the action represents a new service", "if", "action_type", "==", "intialize_service_action", "(", ")", ":", "# the treat the payload like json if its a string", "model", "=", "json", ".", "loads", "(", "payload", ")", "if", "isinstance", "(", "payload", ",", "str", ")", "else", "payload", "# the list of known models", "models", "=", "service", ".", "_external_service_data", "[", "'models'", "]", "# the list of known connections", "connections", "=", "service", ".", "_external_service_data", "[", "'connections'", "]", "# the list of known mutations", "mutations", "=", "service", ".", "_external_service_data", "[", "'mutations'", "]", "# if the model is a connection", "if", "'connection'", "in", "model", ":", "# if we haven't seen the connection before", "if", "not", "[", "conn", "for", "conn", "in", "connections", "if", "conn", "[", "'name'", "]", "==", "model", "[", "'name'", "]", "]", ":", "# add it to the list", "connections", ".", "append", "(", "model", ")", "# or if there are registered fields", "elif", "'fields'", "in", "model", "and", "not", "[", "mod", "for", "mod", "in", "models", "if", "mod", "[", "'name'", "]", "==", "model", "[", "'name'", "]", "]", ":", "# add it to the model list", "models", ".", "append", "(", "model", ")", "# the service could provide mutations as well as affect the topology", "if", "'mutations'", "in", "model", ":", "# go over each mutation announce", "for", "mutation", "in", "model", "[", "'mutations'", "]", ":", "# if there isn't a mutation by the same name in the local cache", "if", "not", "[", "mut", "for", "mut", "in", "mutations", "if", "mut", "[", "'name'", "]", "==", "mutation", "[", "'name'", "]", "]", ":", "# add it to the local cache", "mutations", ".", "append", "(", "mutation", ")", "# if there are models", "if", "models", ":", "# create a new schema corresponding to the models and connections", "service", ".", "schema", "=", "generate_api_schema", "(", "models", "=", "models", ",", "connections", "=", "connections", ",", "mutations", "=", "mutations", ",", ")" ]
This query handler builds the dynamic picture of available services.
[ "This", "query", "handler", "builds", "the", "dynamic", "picture", "of", "availible", "services", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/flexibleAPIHandler.py#L7-L51
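For illustration, a sketch of the kind of initialization payload the handler above consumes. Only the top-level keys (`name`, `fields`, `connection`, `mutations`) and the per-entry `name` keys come from the source; the concrete values are assumptions.

# illustrative payload; inner values are assumptions
payload = {
    'name': 'recipe',
    'fields': [
        {'name': 'id', 'type': 'ID'},
        {'name': 'title', 'type': 'String'},
    ],
    'mutations': [
        {'name': 'createRecipe'},   # deduplicated by 'name' in the handler above
    ],
}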
AlecAivazis/graphql-over-kafka
nautilus/api/filter.py
_parse_order_by
def _parse_order_by(model, order_by):
    """
        This function figures out the list of orderings for the given model and
        argument.

        Args:
            model (nautilus.BaseModel): The model to compute ordering against
            order_by (list of str): the list of fields to order_by. If the field
                starts with a `+` then the order is ascending, if `-` descending,
                if no character precedes the field, the ordering is assumed to
                be ascending.

        Returns:
            (list of filters): the model filters to apply to the query
    """
    # the list of filters for the models
    out = []
    # for each attribute we have to order by
    for key in order_by:
        # remove any whitespace
        key = key.strip()
        # if the key starts with a plus
        if key.startswith("+"):
            # add the ascending filter to the list
            out.append(getattr(model, key[1:]))
        # otherwise if the key starts with a minus
        elif key.startswith("-"):
            # add the descending filter to the list
            out.append(getattr(model, key[1:]).desc())
        # otherwise the key needs the default filter
        else:
            # add the default filter to the list
            out.append(getattr(model, key))
    # return the list of filters
    return out
python
def _parse_order_by(model, order_by):
    """
        This function figures out the list of orderings for the given model and
        argument.

        Args:
            model (nautilus.BaseModel): The model to compute ordering against
            order_by (list of str): the list of fields to order_by. If the field
                starts with a `+` then the order is ascending, if `-` descending,
                if no character precedes the field, the ordering is assumed to
                be ascending.

        Returns:
            (list of filters): the model filters to apply to the query
    """
    # the list of filters for the models
    out = []
    # for each attribute we have to order by
    for key in order_by:
        # remove any whitespace
        key = key.strip()
        # if the key starts with a plus
        if key.startswith("+"):
            # add the ascending filter to the list
            out.append(getattr(model, key[1:]))
        # otherwise if the key starts with a minus
        elif key.startswith("-"):
            # add the descending filter to the list
            out.append(getattr(model, key[1:]).desc())
        # otherwise the key needs the default filter
        else:
            # add the default filter to the list
            out.append(getattr(model, key))
    # return the list of filters
    return out
[ "def", "_parse_order_by", "(", "model", ",", "order_by", ")", ":", "# the list of filters for the models", "out", "=", "[", "]", "# for each attribute we have to order by", "for", "key", "in", "order_by", ":", "# remove any whitespace", "key", "=", "key", ".", "strip", "(", ")", "# if the key starts with a plus", "if", "key", ".", "startswith", "(", "\"+\"", ")", ":", "# add the ascending filter to the list", "out", ".", "append", "(", "getattr", "(", "model", ",", "key", "[", "1", ":", "]", ")", ")", "# otherwise if the key starts with a minus", "elif", "key", ".", "startswith", "(", "\"-\"", ")", ":", "# add the descending filter to the list", "out", ".", "append", "(", "getattr", "(", "model", ",", "key", "[", "1", ":", "]", ")", ".", "desc", "(", ")", ")", "# otherwise the key needs the default filter", "else", ":", "# add the default filter to the list", "out", ".", "append", "(", "getattr", "(", "model", ",", "key", ")", ")", "# returnt the list of filters", "return", "out" ]
This function figures out the list of orderings for the given model and
        argument.

        Args:
            model (nautilus.BaseModel): The model to compute ordering against
            order_by (list of str): the list of fields to order_by. If the field
                starts with a `+` then the order is ascending, if `-` descending,
                if no character precedes the field, the ordering is assumed to
                be ascending.

        Returns:
            (list of filters): the model filters to apply to the query
[ "This", "function", "figures", "out", "the", "list", "of", "orderings", "for", "the", "given", "model", "and", "argument", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/filter.py#L112-L147
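A usage sketch under the assumption of a peewee-backed nautilus model; `Recipe` and its fields are hypothetical, and only the prefix convention comes from the function above.

# Recipe is a hypothetical model with name/created_at/id fields
filters = _parse_order_by(Recipe, ['+name', '-created_at', 'id'])
# -> [Recipe.name, Recipe.created_at.desc(), Recipe.id]
query = Recipe.select().order_by(*filters)   # peewee-style chaining, assumed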
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/create.py
model
def model(model_names): """ Creates the example directory structure necessary for a model service. """ # for each model name we need to create for model_name in model_names: # the template context context = { 'name': model_name, } # render the model template render_template(template='common', context=context) render_template(template='model', context=context)
python
def model(model_names): """ Creates the example directory structure necessary for a model service. """ # for each model name we need to create for model_name in model_names: # the template context context = { 'name': model_name, } # render the model template render_template(template='common', context=context) render_template(template='model', context=context)
[ "def", "model", "(", "model_names", ")", ":", "# for each model name we need to create", "for", "model_name", "in", "model_names", ":", "# the template context", "context", "=", "{", "'name'", ":", "model_name", ",", "}", "# render the model template", "render_template", "(", "template", "=", "'common'", ",", "context", "=", "context", ")", "render_template", "(", "template", "=", "'model'", ",", "context", "=", "context", ")" ]
Creates the example directory structure necessary for a model service.
[ "Creates", "the", "example", "directory", "structure", "necessary", "for", "a", "model", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/create.py#L19-L32
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/create.py
api
def api(): """ Create the folder/directories for an ApiGateway service. """ # the template context context = { 'name': 'api', 'secret_key': random_string(32) } render_template(template='common', context=context) render_template(template='api', context=context)
python
def api(): """ Create the folder/directories for an ApiGateway service. """ # the template context context = { 'name': 'api', 'secret_key': random_string(32) } render_template(template='common', context=context) render_template(template='api', context=context)
[ "def", "api", "(", ")", ":", "# the template context", "context", "=", "{", "'name'", ":", "'api'", ",", "'secret_key'", ":", "random_string", "(", "32", ")", "}", "render_template", "(", "template", "=", "'common'", ",", "context", "=", "context", ")", "render_template", "(", "template", "=", "'api'", ",", "context", "=", "context", ")" ]
Create the folder/directories for an ApiGateway service.
[ "Create", "the", "folder", "/", "directories", "for", "an", "ApiGateway", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/create.py#L36-L47
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/create.py
auth
def auth(): """ Create the folder/directories for an Auth service. """ # the template context context = { 'name': 'auth', } render_template(template='common', context=context) render_template(template='auth', context=context)
python
def auth(): """ Create the folder/directories for an Auth service. """ # the template context context = { 'name': 'auth', } render_template(template='common', context=context) render_template(template='auth', context=context)
[ "def", "auth", "(", ")", ":", "# the template context", "context", "=", "{", "'name'", ":", "'auth'", ",", "}", "render_template", "(", "template", "=", "'common'", ",", "context", "=", "context", ")", "render_template", "(", "template", "=", "'auth'", ",", "context", "=", "context", ")" ]
Create the folder/directories for an Auth service.
[ "Create", "the", "folder", "/", "directories", "for", "an", "Auth", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/create.py#L51-L61
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/create.py
connection
def connection(model_connections): """ Creates the example directory structure necessary for a connection service. """ # for each connection group for connection_str in model_connections: # the services to connect services = connection_str.split(':') services.sort() service_name = ''.join([service.title() for service in services]) # the template context context = { # make sure the first letter is lowercase 'name': service_name[0].lower() + service_name[1:], 'services': services, } render_template(template='common', context=context) render_template(template='connection', context=context)
python
def connection(model_connections): """ Creates the example directory structure necessary for a connection service. """ # for each connection group for connection_str in model_connections: # the services to connect services = connection_str.split(':') services.sort() service_name = ''.join([service.title() for service in services]) # the template context context = { # make sure the first letter is lowercase 'name': service_name[0].lower() + service_name[1:], 'services': services, } render_template(template='common', context=context) render_template(template='connection', context=context)
[ "def", "connection", "(", "model_connections", ")", ":", "# for each connection group", "for", "connection_str", "in", "model_connections", ":", "# the services to connect", "services", "=", "connection_str", ".", "split", "(", "':'", ")", "services", ".", "sort", "(", ")", "service_name", "=", "''", ".", "join", "(", "[", "service", ".", "title", "(", ")", "for", "service", "in", "services", "]", ")", "# the template context", "context", "=", "{", "# make sure the first letter is lowercase", "'name'", ":", "service_name", "[", "0", "]", ".", "lower", "(", ")", "+", "service_name", "[", "1", ":", "]", ",", "'services'", ":", "services", ",", "}", "render_template", "(", "template", "=", "'common'", ",", "context", "=", "context", ")", "render_template", "(", "template", "=", "'connection'", ",", "context", "=", "context", ")" ]
Creates the example directory structure necessary for a connection service.
[ "Creates", "the", "example", "directory", "structure", "necessary", "for", "a", "connection", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/create.py#L66-L89
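Taken together, the four factories above scaffold a project. A hedged sketch of calling them directly (in practice they are presumably wired up as CLI subcommands):

# direct calls; the CLI wiring is an assumption
model(['recipe', 'ingredient'])      # one model service skeleton per name
api()                                # api gateway skeleton with a random 32-char secret
auth()                               # auth service skeleton
connection(['recipe:ingredient'])    # sorted + joined -> service named 'ingredientRecipe'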
AlecAivazis/graphql-over-kafka
nautilus/conventions/models.py
get_model_string
def get_model_string(model): """ This function returns the conventional action designator for a given model. """ name = model if isinstance(model, str) else model.__name__ return normalize_string(name)
python
def get_model_string(model): """ This function returns the conventional action designator for a given model. """ name = model if isinstance(model, str) else model.__name__ return normalize_string(name)
[ "def", "get_model_string", "(", "model", ")", ":", "name", "=", "model", "if", "isinstance", "(", "model", ",", "str", ")", "else", "model", ".", "__name__", "return", "normalize_string", "(", "name", ")" ]
This function returns the conventional action designator for a given model.
[ "This", "function", "returns", "the", "conventional", "action", "designator", "for", "a", "given", "model", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/models.py#L8-L13
AlecAivazis/graphql-over-kafka
nautilus/api/util/build_native_type_dictionary.py
build_native_type_dictionary
def build_native_type_dictionary(fields, respect_required=False, wrap_field=True, name=''): """ This function takes a list of type summaries and builds a dictionary with native representations of each entry. Useful for dynamically building native class records from summaries. """ # a place to start when building the input field attributes input_fields = {} # go over every input in the summary for field in fields: field_name = name + field['name'] field_type = field['type'] # if the type field is a string if isinstance(field_type, str): # compute the native api type for the field field_type = convert_typestring_to_api_native(field_type)( # required=respect_required and field['required'] ) # add an entry in the attributes input_fields[field['name']] = field_type # we could also be looking at a dictionary elif isinstance(field_type, dict): object_fields = field_type['fields'] # add the dictionary to the parent as a graphql object type input_fields[field['name']] = graphql_type_from_summary( summary={ 'name': field_name+"ArgType", 'fields': object_fields } ) # if we are supposed to wrap the object in a field if wrap_field: # then wrap the value we just added input_fields[field['name']] = graphene.Field(input_fields[field['name']]) # we're done return input_fields
python
def build_native_type_dictionary(fields, respect_required=False, wrap_field=True, name=''): """ This function takes a list of type summaries and builds a dictionary with native representations of each entry. Useful for dynamically building native class records from summaries. """ # a place to start when building the input field attributes input_fields = {} # go over every input in the summary for field in fields: field_name = name + field['name'] field_type = field['type'] # if the type field is a string if isinstance(field_type, str): # compute the native api type for the field field_type = convert_typestring_to_api_native(field_type)( # required=respect_required and field['required'] ) # add an entry in the attributes input_fields[field['name']] = field_type # we could also be looking at a dictionary elif isinstance(field_type, dict): object_fields = field_type['fields'] # add the dictionary to the parent as a graphql object type input_fields[field['name']] = graphql_type_from_summary( summary={ 'name': field_name+"ArgType", 'fields': object_fields } ) # if we are supposed to wrap the object in a field if wrap_field: # then wrap the value we just added input_fields[field['name']] = graphene.Field(input_fields[field['name']]) # we're done return input_fields
[ "def", "build_native_type_dictionary", "(", "fields", ",", "respect_required", "=", "False", ",", "wrap_field", "=", "True", ",", "name", "=", "''", ")", ":", "# a place to start when building the input field attributes", "input_fields", "=", "{", "}", "# go over every input in the summary", "for", "field", "in", "fields", ":", "field_name", "=", "name", "+", "field", "[", "'name'", "]", "field_type", "=", "field", "[", "'type'", "]", "# if the type field is a string", "if", "isinstance", "(", "field_type", ",", "str", ")", ":", "# compute the native api type for the field", "field_type", "=", "convert_typestring_to_api_native", "(", "field_type", ")", "(", "# required=respect_required and field['required']", ")", "# add an entry in the attributes", "input_fields", "[", "field", "[", "'name'", "]", "]", "=", "field_type", "# we could also be looking at a dictionary", "elif", "isinstance", "(", "field_type", ",", "dict", ")", ":", "object_fields", "=", "field_type", "[", "'fields'", "]", "# add the dictionary to the parent as a graphql object type", "input_fields", "[", "field", "[", "'name'", "]", "]", "=", "graphql_type_from_summary", "(", "summary", "=", "{", "'name'", ":", "field_name", "+", "\"ArgType\"", ",", "'fields'", ":", "object_fields", "}", ")", "# if we are supposed to wrap the object in a field", "if", "wrap_field", ":", "# then wrap the value we just added", "input_fields", "[", "field", "[", "'name'", "]", "]", "=", "graphene", ".", "Field", "(", "input_fields", "[", "field", "[", "'name'", "]", "]", ")", "# we're done", "return", "input_fields" ]
This function takes a list of type summaries and builds a dictionary with native representations of each entry. Useful for dynamically building native class records from summaries.
[ "This", "function", "takes", "a", "list", "of", "type", "summaries", "and", "builds", "a", "dictionary", "with", "native", "representations", "of", "each", "entry", ".", "Useful", "for", "dynamically", "building", "native", "class", "records", "from", "summaries", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/build_native_type_dictionary.py#L7-L49
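An illustrative call exercising both branches of the function above; the `'String'` typestring is an assumption about what `convert_typestring_to_api_native` accepts.

fields = [
    {'name': 'title', 'type': 'String'},       # string branch (typestring assumed)
    {'name': 'author', 'type': {'fields': [    # dict branch
        {'name': 'name', 'type': 'String'},
    ]}},
]
attrs = build_native_type_dictionary(fields, name='Recipe')
# 'author' is built as a 'RecipeauthorArgType' object type and, since
# wrap_field defaults to True, wrapped in graphene.Field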
AlecAivazis/graphql-over-kafka
nautilus/api/util/summarize_crud_mutation.py
summarize_crud_mutation
def summarize_crud_mutation(method, model, isAsync=False):
    """
        This function provides the standard form for crud mutations.
    """
    # create the appropriate action type
    action_type = get_crud_action(method=method, model=model)
    # the name of the mutation
    name = crud_mutation_name(model=model, action=method)
    # a mapping of methods to input factories
    input_map = {
        'create': create_mutation_inputs,
        'update': update_mutation_inputs,
        'delete': delete_mutation_inputs,
    }
    # a mapping of methods to output factories
    output_map = {
        'create': create_mutation_outputs,
        'update': update_mutation_outputs,
        'delete': delete_mutation_outputs,
    }
    # the inputs for the mutation
    inputs = input_map[method](model)
    # the mutation outputs
    outputs = output_map[method](model)

    # return the appropriate summary
    return summarize_mutation(
        mutation_name=name,
        event=action_type,
        isAsync=isAsync,
        inputs=inputs,
        outputs=outputs
    )
python
def summarize_crud_mutation(method, model, isAsync=False):
    """
        This function provides the standard form for crud mutations.
    """
    # create the appropriate action type
    action_type = get_crud_action(method=method, model=model)
    # the name of the mutation
    name = crud_mutation_name(model=model, action=method)
    # a mapping of methods to input factories
    input_map = {
        'create': create_mutation_inputs,
        'update': update_mutation_inputs,
        'delete': delete_mutation_inputs,
    }
    # a mapping of methods to output factories
    output_map = {
        'create': create_mutation_outputs,
        'update': update_mutation_outputs,
        'delete': delete_mutation_outputs,
    }
    # the inputs for the mutation
    inputs = input_map[method](model)
    # the mutation outputs
    outputs = output_map[method](model)

    # return the appropriate summary
    return summarize_mutation(
        mutation_name=name,
        event=action_type,
        isAsync=isAsync,
        inputs=inputs,
        outputs=outputs
    )
[ "def", "summarize_crud_mutation", "(", "method", ",", "model", ",", "isAsync", "=", "False", ")", ":", "# create the approrpriate action type", "action_type", "=", "get_crud_action", "(", "method", "=", "method", ",", "model", "=", "model", ")", "# the name of the mutation", "name", "=", "crud_mutation_name", "(", "model", "=", "model", ",", "action", "=", "method", ")", "# a mapping of methods to input factories", "input_map", "=", "{", "'create'", ":", "create_mutation_inputs", ",", "'update'", ":", "update_mutation_inputs", ",", "'delete'", ":", "delete_mutation_inputs", ",", "}", "# a mappting of methods to output factories", "output_map", "=", "{", "'create'", ":", "create_mutation_outputs", ",", "'update'", ":", "update_mutation_outputs", ",", "'delete'", ":", "delete_mutation_outputs", ",", "}", "# the inputs for the mutation", "inputs", "=", "input_map", "[", "method", "]", "(", "model", ")", "# the mutation outputs", "outputs", "=", "output_map", "[", "method", "]", "(", "model", ")", "# return the appropriate summary", "return", "summarize_mutation", "(", "mutation_name", "=", "name", ",", "event", "=", "action_type", ",", "isAsync", "=", "isAsync", ",", "inputs", "=", "inputs", ",", "outputs", "=", "outputs", ")" ]
This function provides the standard form for crud mutations.
[ "This", "function", "provides", "the", "standard", "form", "for", "crud", "mutations", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/summarize_crud_mutation.py#L15-L48
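A hedged usage sketch; the exact summary dictionary depends on `summarize_mutation` and the input/output factories, which are not shown in this excerpt.

# 'recipe' stands in for a model; the resulting dict shape is only sketched
summary = summarize_crud_mutation(method='create', model='recipe')
# the summary bundles: the mutation name, the triggering action type
# (passed as `event`), the isAsync flag, and the input/output field summaries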
AlecAivazis/graphql-over-kafka
nautilus/network/events/consumers/kafka.py
KafkaBroker.start
def start(self):
        """
            This function starts the broker's interaction with the Kafka stream
        """
        self.loop.run_until_complete(self._consumer.start())
        self.loop.run_until_complete(self._producer.start())

        self._consumer_task = self.loop.create_task(self._consume_event_callback())
python
def start(self):
        """
            This function starts the broker's interaction with the Kafka stream
        """
        self.loop.run_until_complete(self._consumer.start())
        self.loop.run_until_complete(self._producer.start())

        self._consumer_task = self.loop.create_task(self._consume_event_callback())
[ "def", "start", "(", "self", ")", ":", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_consumer", ".", "start", "(", ")", ")", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_producer", ".", "start", "(", ")", ")", "self", ".", "_consumer_task", "=", "self", ".", "loop", ".", "create_task", "(", "self", ".", "_consume_event_callback", "(", ")", ")" ]
This function starts the broker's interaction with the Kafka stream
[ "This", "function", "starts", "the", "brokers", "interaction", "with", "the", "kafka", "stream" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/consumers/kafka.py#L88-L94
AlecAivazis/graphql-over-kafka
nautilus/network/events/consumers/kafka.py
KafkaBroker.stop
def stop(self):
        """
            This method stops the broker's interaction with the Kafka stream
        """
        self.loop.run_until_complete(self._consumer.stop())
        self.loop.run_until_complete(self._producer.stop())

        # attempt
        try:
            # to cancel the consumer task
            self._consumer_task.cancel()
        # if there was no task
        except AttributeError:
            # keep going
            pass
python
def stop(self):
        """
            This method stops the broker's interaction with the Kafka stream
        """
        self.loop.run_until_complete(self._consumer.stop())
        self.loop.run_until_complete(self._producer.stop())

        # attempt
        try:
            # to cancel the consumer task
            self._consumer_task.cancel()
        # if there was no task
        except AttributeError:
            # keep going
            pass
[ "def", "stop", "(", "self", ")", ":", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_consumer", ".", "stop", "(", ")", ")", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_producer", ".", "stop", "(", ")", ")", "# attempt", "try", ":", "# to cancel the service", "self", ".", "_consumer_task", ".", "cancel", "(", ")", "# if there was no service", "except", "AttributeError", ":", "# keep going", "pass" ]
This method stops the broker's interaction with the Kafka stream
[ "This", "method", "stops", "the", "brokers", "interaction", "with", "the", "kafka", "stream" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/consumers/kafka.py#L97-L111
AlecAivazis/graphql-over-kafka
nautilus/network/events/consumers/kafka.py
KafkaBroker.send
async def send(self, payload='', action_type='', channel=None, **kwds):
        """
            This method sends a message over the Kafka stream.
        """
        # use a custom channel if one was provided
        channel = channel or self.producer_channel

        # serialize the action type and payload into a message
        message = serialize_action(action_type=action_type, payload=payload, **kwds)

        # send the message
        return await self._producer.send(channel, message.encode())
python
async def send(self, payload='', action_type='', channel=None, **kwds):
        """
            This method sends a message over the Kafka stream.
        """
        # use a custom channel if one was provided
        channel = channel or self.producer_channel

        # serialize the action type and payload into a message
        message = serialize_action(action_type=action_type, payload=payload, **kwds)

        # send the message
        return await self._producer.send(channel, message.encode())
[ "async", "def", "send", "(", "self", ",", "payload", "=", "''", ",", "action_type", "=", "''", ",", "channel", "=", "None", ",", "*", "*", "kwds", ")", ":", "# use a custom channel if one was provided", "channel", "=", "channel", "or", "self", ".", "producer_channel", "# serialize the action type for the", "message", "=", "serialize_action", "(", "action_type", "=", "action_type", ",", "payload", "=", "payload", ",", "*", "*", "kwds", ")", "# send the message", "return", "await", "self", ".", "_producer", ".", "send", "(", "channel", ",", "message", ".", "encode", "(", ")", ")" ]
This method sends a message over the Kafka stream.
[ "This", "method", "sends", "a", "message", "over", "the", "kafka", "stream", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/consumers/kafka.py#L114-L124
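A lifecycle sketch tying the three methods together. The construction of `KafkaBroker` and the action type string are assumptions; only `start`, `send`, `stop`, and the `loop` attribute come from the source.

broker = KafkaBroker()   # assumed default construction
broker.start()           # brings up consumer + producer on broker.loop
broker.loop.run_until_complete(
    broker.send(action_type='recipe.create.pending', payload='{}')
)
broker.stop()            # tears down both clients and cancels the consume task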
AlecAivazis/graphql-over-kafka
nautilus/conventions/actions.py
serialize_action
def serialize_action(action_type, payload, **extra_fields): """ This function returns the conventional form of the actions. """ action_dict = dict( action_type=action_type, payload=payload, **extra_fields ) # return a serializable version return json.dumps(action_dict)
python
def serialize_action(action_type, payload, **extra_fields): """ This function returns the conventional form of the actions. """ action_dict = dict( action_type=action_type, payload=payload, **extra_fields ) # return a serializable version return json.dumps(action_dict)
[ "def", "serialize_action", "(", "action_type", ",", "payload", ",", "*", "*", "extra_fields", ")", ":", "action_dict", "=", "dict", "(", "action_type", "=", "action_type", ",", "payload", "=", "payload", ",", "*", "*", "extra_fields", ")", "# return a serializable version", "return", "json", ".", "dumps", "(", "action_dict", ")" ]
This function returns the conventional form of the actions.
[ "This", "function", "returns", "the", "conventional", "form", "of", "the", "actions", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/actions.py#L27-L37
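A concrete example of the wire format produced above (dict insertion order is preserved on Python 3.7+, so the keys appear in this order); the action type string is illustrative.

serialize_action('recipe.created', {'id': 1}, correlation_id='abc')
# -> '{"action_type": "recipe.created", "payload": {"id": 1}, "correlation_id": "abc"}'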
AlecAivazis/graphql-over-kafka
nautilus/api/util/fields_for_model.py
fields_for_model
def fields_for_model(model): """ This function returns the fields for a schema that matches the provided nautilus model. Args: model (nautilus.model.BaseModel): The model to base the field list on Returns: (dict<field_name: str, graphqlType>): A mapping of field names to graphql types """ # the attribute arguments (no filters) args = {field.name.lower() : convert_peewee_field(field) \ for field in model.fields()} # use the field arguments, without the segments return args
python
def fields_for_model(model): """ This function returns the fields for a schema that matches the provided nautilus model. Args: model (nautilus.model.BaseModel): The model to base the field list on Returns: (dict<field_name: str, graphqlType>): A mapping of field names to graphql types """ # the attribute arguments (no filters) args = {field.name.lower() : convert_peewee_field(field) \ for field in model.fields()} # use the field arguments, without the segments return args
[ "def", "fields_for_model", "(", "model", ")", ":", "# the attribute arguments (no filters)", "args", "=", "{", "field", ".", "name", ".", "lower", "(", ")", ":", "convert_peewee_field", "(", "field", ")", "for", "field", "in", "model", ".", "fields", "(", ")", "}", "# use the field arguments, without the segments", "return", "args" ]
This function returns the fields for a schema that matches the provided nautilus model. Args: model (nautilus.model.BaseModel): The model to base the field list on Returns: (dict<field_name: str, graphqlType>): A mapping of field names to graphql types
[ "This", "function", "returns", "the", "fields", "for", "a", "schema", "that", "matches", "the", "provided", "nautilus", "model", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/fields_for_model.py#L4-L21
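A sketch assuming a peewee-backed nautilus model; `Recipe` and its field are hypothetical, while `model.fields()` and the nautilus `fields.CharField` usage are taken from the surrounding source.

# hypothetical model; fields_for_model maps each peewee field to a graphene type
class Recipe(BaseModel):
    title = fields.CharField()

args = fields_for_model(Recipe)
# -> {'title': <graphene type converted from CharField>, ...}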
AlecAivazis/graphql-over-kafka
nautilus/models/util.py
create_connection_model
def create_connection_model(service):
    """ Create an SQL Alchemy table that connects the provided services """
    # the services connected
    services = service._services

    # the mixins / base for the model
    bases = (BaseModel,)
    # the fields of the derived model
    attributes = {model_service_name(service): fields.CharField() for service in services}

    # create an instance of base model with the right attributes
    return type(BaseModel)(connection_service_name(service), bases, attributes)
python
def create_connection_model(service):
    """ Create an SQL Alchemy table that connects the provided services """
    # the services connected
    services = service._services

    # the mixins / base for the model
    bases = (BaseModel,)
    # the fields of the derived model
    attributes = {model_service_name(service): fields.CharField() for service in services}

    # create an instance of base model with the right attributes
    return type(BaseModel)(connection_service_name(service), bases, attributes)
[ "def", "create_connection_model", "(", "service", ")", ":", "# the services connected", "services", "=", "service", ".", "_services", "# the mixins / base for the model", "bases", "=", "(", "BaseModel", ",", ")", "# the fields of the derived", "attributes", "=", "{", "model_service_name", "(", "service", ")", ":", "fields", ".", "CharField", "(", ")", "for", "service", "in", "services", "}", "# create an instance of base model with the right attributes", "return", "type", "(", "BaseModel", ")", "(", "connection_service_name", "(", "service", ")", ",", "bases", ",", "attributes", ")" ]
Create an SQL Alchemy table that connects the provided services
[ "Create", "an", "SQL", "Alchemy", "table", "that", "connects", "the", "provides", "services" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/models/util.py#L5-L16
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/createHandler.py
create_handler
def create_handler(Model, name=None, **kwds):
    """
        This factory returns an action handler that creates a new instance of
        the specified model when a create action is received, assuming the
        action follows nautilus conventions.

        Args:
            Model (nautilus.BaseModel): The model to create when the action
                is received.

        Returns:
            function(action_type, payload): The action handler for this model
    """
    async def action_handler(service, action_type, payload, props, notify=True, **kwds):
        # if the payload represents a new instance of `Model`
        if action_type == get_crud_action('create', name or Model):
            # print('handling create for ' + name or Model)
            try:
                # the props of the message
                message_props = {}
                # if there was a correlation id in the request
                if 'correlation_id' in props:
                    # make sure it ends up in the reply
                    message_props['correlation_id'] = props['correlation_id']

                # for each required field
                for requirement in Model.required_fields():

                    # save the name of the field
                    field_name = requirement.name
                    # ensure the value is in the payload
                    # TODO: check all required fields rather than failing on the first
                    if not field_name in payload and field_name != 'id':
                        # yell loudly
                        raise ValueError(
                            "Required field not found in payload: %s" %field_name
                        )

                # create a new model
                new_model = Model(**payload)

                # save the new model instance
                new_model.save()

                # if we need to tell someone about what happened
                if notify:
                    # publish the success event
                    await service.event_broker.send(
                        payload=ModelSerializer().serialize(new_model),
                        action_type=change_action_status(action_type, success_status()),
                        **message_props
                    )

            # if something goes wrong
            except Exception as err:
                # if we need to tell someone about what happened
                if notify:
                    # publish the error as an event
                    await service.event_broker.send(
                        payload=str(err),
                        action_type=change_action_status(action_type, error_status()),
                        **message_props
                    )

                # otherwise we aren't supposed to notify
                else:
                    # raise the exception normally
                    raise err

    # return the handler
    return action_handler
python
def create_handler(Model, name=None, **kwds):
    """
        This factory returns an action handler that creates a new instance of
        the specified model when a create action is received, assuming the
        action follows nautilus conventions.

        Args:
            Model (nautilus.BaseModel): The model to create when the action
                is received.

        Returns:
            function(action_type, payload): The action handler for this model
    """
    async def action_handler(service, action_type, payload, props, notify=True, **kwds):
        # if the payload represents a new instance of `Model`
        if action_type == get_crud_action('create', name or Model):
            # print('handling create for ' + name or Model)
            try:
                # the props of the message
                message_props = {}
                # if there was a correlation id in the request
                if 'correlation_id' in props:
                    # make sure it ends up in the reply
                    message_props['correlation_id'] = props['correlation_id']

                # for each required field
                for requirement in Model.required_fields():

                    # save the name of the field
                    field_name = requirement.name
                    # ensure the value is in the payload
                    # TODO: check all required fields rather than failing on the first
                    if not field_name in payload and field_name != 'id':
                        # yell loudly
                        raise ValueError(
                            "Required field not found in payload: %s" %field_name
                        )

                # create a new model
                new_model = Model(**payload)

                # save the new model instance
                new_model.save()

                # if we need to tell someone about what happened
                if notify:
                    # publish the success event
                    await service.event_broker.send(
                        payload=ModelSerializer().serialize(new_model),
                        action_type=change_action_status(action_type, success_status()),
                        **message_props
                    )

            # if something goes wrong
            except Exception as err:
                # if we need to tell someone about what happened
                if notify:
                    # publish the error as an event
                    await service.event_broker.send(
                        payload=str(err),
                        action_type=change_action_status(action_type, error_status()),
                        **message_props
                    )

                # otherwise we aren't supposed to notify
                else:
                    # raise the exception normally
                    raise err

    # return the handler
    return action_handler
[ "def", "create_handler", "(", "Model", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "async", "def", "action_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "notify", "=", "True", ",", "*", "*", "kwds", ")", ":", "# if the payload represents a new instance of `Model`", "if", "action_type", "==", "get_crud_action", "(", "'create'", ",", "name", "or", "Model", ")", ":", "# print('handling create for ' + name or Model)", "try", ":", "# the props of the message", "message_props", "=", "{", "}", "# if there was a correlation id in the request", "if", "'correlation_id'", "in", "props", ":", "# make sure it ends up in the reply", "message_props", "[", "'correlation_id'", "]", "=", "props", "[", "'correlation_id'", "]", "# for each required field", "for", "requirement", "in", "Model", ".", "required_fields", "(", ")", ":", "# save the name of the field", "field_name", "=", "requirement", ".", "name", "# ensure the value is in the payload", "# TODO: check all required fields rather than failing on the first", "if", "not", "field_name", "in", "payload", "and", "field_name", "!=", "'id'", ":", "# yell loudly", "raise", "ValueError", "(", "\"Required field not found in payload: %s\"", "%", "field_name", ")", "# create a new model", "new_model", "=", "Model", "(", "*", "*", "payload", ")", "# save the new model instance", "new_model", ".", "save", "(", ")", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the scucess event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "ModelSerializer", "(", ")", ".", "serialize", "(", "new_model", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "success_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# if something goes wrong", "except", "Exception", "as", "err", ":", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the error as an event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "str", "(", "err", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "error_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# otherwise we aren't supposed to notify", "else", ":", "# raise the exception normally", "raise", "err", "# return the handler", "return", "action_handler" ]
This factory returns an action handler that creates a new instance of
    the specified model when a create action is received, assuming the
    action follows nautilus conventions.

    Args:
        Model (nautilus.BaseModel): The model to create when the action
            is received.

    Returns:
        function(action_type, payload): The action handler for this model
[ "This", "factory", "returns", "an", "action", "handler", "that", "creates", "a", "new", "instance", "of", "the", "specified", "model", "when", "a", "create", "action", "is", "recieved", "assuming", "the", "action", "follows", "nautilus", "convetions", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/createHandler.py#L12-L83
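For orientation, here is a minimal sketch of how a handler produced by this factory might be driven. The `User` model, the `service` object, and the exact action-type string are hypothetical stand-ins: `get_crud_action` and the event broker are defined elsewhere in nautilus, so only the call shape shown in the record above is assumed.

import asyncio

async def demo(service, User):
    # build a handler for a hypothetical `User` model
    handler = create_handler(User, name='user')
    # dispatch a payload the way the event broker would; a correlation_id
    # in props is echoed back on the success/error reply
    await handler(
        service,
        action_type=get_crud_action('create', 'user'),
        payload={'username': 'alice'},
        props={'correlation_id': '42'},
    )

# asyncio.get_event_loop().run_until_complete(demo(service, User))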
AlecAivazis/graphql-over-kafka
nautilus/api/util/graph_entity.py
GraphEntity._has_id
async def _has_id(self, *args, **kwds):
    """
        Equality checks are overwritten to perform the actual check in a
        semantic way.
    """
    # if there is only one positional argument
    if len(args) == 1:
        # parse the appropriate query
        result = await parse_string(
            self._query,
            self.service.object_resolver,
            self.service.connection_resolver,
            self.service.mutation_resolver,
            obey_auth=False
        )
        # go to the bottom of the result for the list of matching ids
        return self._find_id(result['data'], args[0])
    # otherwise
    else:
        # treat the attribute like a normal filter
        return self._has_id(**kwds)
python
async def _has_id(self, *args, **kwds):
    """
        Equality checks are overwritten to perform the actual check in a
        semantic way.
    """
    # if there is only one positional argument
    if len(args) == 1:
        # parse the appropriate query
        result = await parse_string(
            self._query,
            self.service.object_resolver,
            self.service.connection_resolver,
            self.service.mutation_resolver,
            obey_auth=False
        )
        # go to the bottom of the result for the list of matching ids
        return self._find_id(result['data'], args[0])
    # otherwise
    else:
        # treat the attribute like a normal filter
        return self._has_id(**kwds)
[ "async", "def", "_has_id", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "# if there is only one positional argument", "if", "len", "(", "args", ")", "==", "1", ":", "# parse the appropriate query", "result", "=", "await", "parse_string", "(", "self", ".", "_query", ",", "self", ".", "service", ".", "object_resolver", ",", "self", ".", "service", ".", "connection_resolver", ",", "self", ".", "service", ".", "mutation_resolver", ",", "obey_auth", "=", "False", ")", "# go to the bottom of the result for the list of matching ids", "return", "self", ".", "_find_id", "(", "result", "[", "'data'", "]", ",", "args", "[", "0", "]", ")", "# otherwise", "else", ":", "# treat the attribute like a normal filter", "return", "self", ".", "_has_id", "(", "*", "*", "kwds", ")" ]
Equality checks are overwritten to perform the actual check in a semantic way.
[ "Equality", "checks", "are", "overwitten", "to", "perform", "the", "actual", "check", "in", "a", "semantic", "way", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/graph_entity.py#L102-L122
AlecAivazis/graphql-over-kafka
nautilus/api/util/graph_entity.py
GraphEntity._find_id
def _find_id(self, result, uid): """ This method performs a depth-first search for the given uid in the dictionary of results. """ # if the result is a list if isinstance(result, list): # if the list has a valid entry if any([self._find_id(value, uid) for value in result]): # then we're done return True # otherwise results could be dictionaries if isinstance(result, dict): # the children of the result that are lists list_children = [value for value in result.values() if isinstance(value, list)] # go to every value that is a list for value in list_children: # if the value is a match if self._find_id(value, uid): # we're done return True # the children of the result that are dicts dict_children = [value for value in result.values() if isinstance(value, dict)] # perform the check on every child that is a dict for value in dict_children: # if the child is a match if self._find_id(value, uid): # we're done return True # if there are no values that are lists and there is an id key if not list_children and not dict_children and 'id' in result: # the value of the remote id field result_id = result['id'] # we've found a match if the id field matches (cast to match type) return result_id == type(result_id)(uid) # we didn't find the result return False
python
def _find_id(self, result, uid): """ This method performs a depth-first search for the given uid in the dictionary of results. """ # if the result is a list if isinstance(result, list): # if the list has a valid entry if any([self._find_id(value, uid) for value in result]): # then we're done return True # otherwise results could be dictionaries if isinstance(result, dict): # the children of the result that are lists list_children = [value for value in result.values() if isinstance(value, list)] # go to every value that is a list for value in list_children: # if the value is a match if self._find_id(value, uid): # we're done return True # the children of the result that are dicts dict_children = [value for value in result.values() if isinstance(value, dict)] # perform the check on every child that is a dict for value in dict_children: # if the child is a match if self._find_id(value, uid): # we're done return True # if there are no values that are lists and there is an id key if not list_children and not dict_children and 'id' in result: # the value of the remote id field result_id = result['id'] # we've found a match if the id field matches (cast to match type) return result_id == type(result_id)(uid) # we didn't find the result return False
[ "def", "_find_id", "(", "self", ",", "result", ",", "uid", ")", ":", "# if the result is a list", "if", "isinstance", "(", "result", ",", "list", ")", ":", "# if the list has a valid entry", "if", "any", "(", "[", "self", ".", "_find_id", "(", "value", ",", "uid", ")", "for", "value", "in", "result", "]", ")", ":", "# then we're done", "return", "True", "# otherwise results could be dictionaries", "if", "isinstance", "(", "result", ",", "dict", ")", ":", "# the children of the result that are lists", "list_children", "=", "[", "value", "for", "value", "in", "result", ".", "values", "(", ")", "if", "isinstance", "(", "value", ",", "list", ")", "]", "# go to every value that is a list", "for", "value", "in", "list_children", ":", "# if the value is a match", "if", "self", ".", "_find_id", "(", "value", ",", "uid", ")", ":", "# we're done", "return", "True", "# the children of the result that are dicts", "dict_children", "=", "[", "value", "for", "value", "in", "result", ".", "values", "(", ")", "if", "isinstance", "(", "value", ",", "dict", ")", "]", "# perform the check on every child that is a dict", "for", "value", "in", "dict_children", ":", "# if the child is a match", "if", "self", ".", "_find_id", "(", "value", ",", "uid", ")", ":", "# we're done", "return", "True", "# if there are no values that are lists and there is an id key", "if", "not", "list_children", "and", "not", "dict_children", "and", "'id'", "in", "result", ":", "# the value of the remote id field", "result_id", "=", "result", "[", "'id'", "]", "# we've found a match if the id field matches (cast to match type)", "return", "result_id", "==", "type", "(", "result_id", ")", "(", "uid", ")", "# we didn't find the result", "return", "False" ]
This method performs a depth-first search for the given uid in the dictionary of results.
[ "This", "method", "performs", "a", "depth", "-", "first", "search", "for", "the", "given", "uid", "in", "the", "dictionary", "of", "results", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/graph_entity.py#L126-L167
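Since `_find_id` depends only on plain lists and dicts, its depth-first search is easy to exercise in isolation. The sketch below re-implements the same logic as a free function and runs it against invented data; note how the uid is cast to the type of the stored id before comparison.

def find_id(result, uid):
    # a list matches if any of its elements match
    if isinstance(result, list):
        return any(find_id(value, uid) for value in result)
    if isinstance(result, dict):
        # recurse into list- and dict-valued children first
        children = [v for v in result.values() if isinstance(v, (list, dict))]
        if children:
            return any(find_id(v, uid) for v in children)
        # a leaf dict matches if its id equals the uid (cast to match type)
        if 'id' in result:
            return result['id'] == type(result['id'])(uid)
    return False

data = {'user': [{'id': 7, 'name': 'alice'}, {'id': 9, 'name': 'bob'}]}
assert find_id(data, '7')       # '7' is cast to int before comparison
assert not find_id(data, '8')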
pyscaffold/configupdater
src/configupdater/configupdater.py
Block.add_before
def add_before(self): """Returns a builder inserting a new block before the current block""" idx = self._container.structure.index(self) return BlockBuilder(self._container, idx)
python
def add_before(self): """Returns a builder inserting a new block before the current block""" idx = self._container.structure.index(self) return BlockBuilder(self._container, idx)
[ "def", "add_before", "(", "self", ")", ":", "idx", "=", "self", ".", "_container", ".", "structure", ".", "index", "(", "self", ")", "return", "BlockBuilder", "(", "self", ".", "_container", ",", "idx", ")" ]
Returns a builder inserting a new block before the current block
[ "Returns", "a", "builder", "inserting", "a", "new", "block", "before", "the", "current", "block" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L115-L118
pyscaffold/configupdater
src/configupdater/configupdater.py
Block.add_after
def add_after(self): """Returns a builder inserting a new block after the current block""" idx = self._container.structure.index(self) return BlockBuilder(self._container, idx+1)
python
def add_after(self): """Returns a builder inserting a new block after the current block""" idx = self._container.structure.index(self) return BlockBuilder(self._container, idx+1)
[ "def", "add_after", "(", "self", ")", ":", "idx", "=", "self", ".", "_container", ".", "structure", ".", "index", "(", "self", ")", "return", "BlockBuilder", "(", "self", ".", "_container", ",", "idx", "+", "1", ")" ]
Returns a builder inserting a new block after the current block
[ "Returns", "a", "builder", "inserting", "a", "new", "block", "after", "the", "current", "block" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L121-L124
pyscaffold/configupdater
src/configupdater/configupdater.py
BlockBuilder.comment
def comment(self, text, comment_prefix='#'): """Creates a comment block Args: text (str): content of comment without # comment_prefix (str): character indicating start of comment Returns: self for chaining """ comment = Comment(self._container) if not text.startswith(comment_prefix): text = "{} {}".format(comment_prefix, text) if not text.endswith('\n'): text = "{}{}".format(text, '\n') comment.add_line(text) self._container.structure.insert(self._idx, comment) self._idx += 1 return self
python
def comment(self, text, comment_prefix='#'): """Creates a comment block Args: text (str): content of comment without # comment_prefix (str): character indicating start of comment Returns: self for chaining """ comment = Comment(self._container) if not text.startswith(comment_prefix): text = "{} {}".format(comment_prefix, text) if not text.endswith('\n'): text = "{}{}".format(text, '\n') comment.add_line(text) self._container.structure.insert(self._idx, comment) self._idx += 1 return self
[ "def", "comment", "(", "self", ",", "text", ",", "comment_prefix", "=", "'#'", ")", ":", "comment", "=", "Comment", "(", "self", ".", "_container", ")", "if", "not", "text", ".", "startswith", "(", "comment_prefix", ")", ":", "text", "=", "\"{} {}\"", ".", "format", "(", "comment_prefix", ",", "text", ")", "if", "not", "text", ".", "endswith", "(", "'\\n'", ")", ":", "text", "=", "\"{}{}\"", ".", "format", "(", "text", ",", "'\\n'", ")", "comment", ".", "add_line", "(", "text", ")", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "comment", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Creates a comment block Args: text (str): content of comment without # comment_prefix (str): character indicating start of comment Returns: self for chaining
[ "Creates", "a", "comment", "block" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L133-L151
pyscaffold/configupdater
src/configupdater/configupdater.py
BlockBuilder.section
def section(self, section): """Creates a section block Args: section (str or :class:`Section`): name of section or object Returns: self for chaining """ if not isinstance(self._container, ConfigUpdater): raise ValueError("Sections can only be added at section level!") if isinstance(section, str): # create a new section section = Section(section, container=self._container) elif not isinstance(section, Section): raise ValueError("Parameter must be a string or Section type!") if section.name in [block.name for block in self._container if isinstance(block, Section)]: raise DuplicateSectionError(section.name) self._container.structure.insert(self._idx, section) self._idx += 1 return self
python
def section(self, section): """Creates a section block Args: section (str or :class:`Section`): name of section or object Returns: self for chaining """ if not isinstance(self._container, ConfigUpdater): raise ValueError("Sections can only be added at section level!") if isinstance(section, str): # create a new section section = Section(section, container=self._container) elif not isinstance(section, Section): raise ValueError("Parameter must be a string or Section type!") if section.name in [block.name for block in self._container if isinstance(block, Section)]: raise DuplicateSectionError(section.name) self._container.structure.insert(self._idx, section) self._idx += 1 return self
[ "def", "section", "(", "self", ",", "section", ")", ":", "if", "not", "isinstance", "(", "self", ".", "_container", ",", "ConfigUpdater", ")", ":", "raise", "ValueError", "(", "\"Sections can only be added at section level!\"", ")", "if", "isinstance", "(", "section", ",", "str", ")", ":", "# create a new section", "section", "=", "Section", "(", "section", ",", "container", "=", "self", ".", "_container", ")", "elif", "not", "isinstance", "(", "section", ",", "Section", ")", ":", "raise", "ValueError", "(", "\"Parameter must be a string or Section type!\"", ")", "if", "section", ".", "name", "in", "[", "block", ".", "name", "for", "block", "in", "self", ".", "_container", "if", "isinstance", "(", "block", ",", "Section", ")", "]", ":", "raise", "DuplicateSectionError", "(", "section", ".", "name", ")", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "section", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Creates a section block Args: section (str or :class:`Section`): name of section or object Returns: self for chaining
[ "Creates", "a", "section", "block" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L153-L174
pyscaffold/configupdater
src/configupdater/configupdater.py
BlockBuilder.space
def space(self, newlines=1): """Creates a vertical space of newlines Args: newlines (int): number of empty lines Returns: self for chaining """ space = Space() for line in range(newlines): space.add_line('\n') self._container.structure.insert(self._idx, space) self._idx += 1 return self
python
def space(self, newlines=1): """Creates a vertical space of newlines Args: newlines (int): number of empty lines Returns: self for chaining """ space = Space() for line in range(newlines): space.add_line('\n') self._container.structure.insert(self._idx, space) self._idx += 1 return self
[ "def", "space", "(", "self", ",", "newlines", "=", "1", ")", ":", "space", "=", "Space", "(", ")", "for", "line", "in", "range", "(", "newlines", ")", ":", "space", ".", "add_line", "(", "'\\n'", ")", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "space", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Creates a vertical space of newlines Args: newlines (int): number of empty lines Returns: self for chaining
[ "Creates", "a", "vertical", "space", "of", "newlines" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L176-L190
pyscaffold/configupdater
src/configupdater/configupdater.py
BlockBuilder.option
def option(self, key, value=None, **kwargs): """Creates a new option inside a section Args: key (str): key of the option value (str or None): value of the option **kwargs: are passed to the constructor of :class:`Option` Returns: self for chaining """ if not isinstance(self._container, Section): raise ValueError("Options can only be added inside a section!") option = Option(key, value, container=self._container, **kwargs) option.value = value self._container.structure.insert(self._idx, option) self._idx += 1 return self
python
def option(self, key, value=None, **kwargs): """Creates a new option inside a section Args: key (str): key of the option value (str or None): value of the option **kwargs: are passed to the constructor of :class:`Option` Returns: self for chaining """ if not isinstance(self._container, Section): raise ValueError("Options can only be added inside a section!") option = Option(key, value, container=self._container, **kwargs) option.value = value self._container.structure.insert(self._idx, option) self._idx += 1 return self
[ "def", "option", "(", "self", ",", "key", ",", "value", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "self", ".", "_container", ",", "Section", ")", ":", "raise", "ValueError", "(", "\"Options can only be added inside a section!\"", ")", "option", "=", "Option", "(", "key", ",", "value", ",", "container", "=", "self", ".", "_container", ",", "*", "*", "kwargs", ")", "option", ".", "value", "=", "value", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "option", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Creates a new option inside a section Args: key (str): key of the option value (str or None): value of the option **kwargs: are passed to the constructor of :class:`Option` Returns: self for chaining
[ "Creates", "a", "new", "option", "inside", "a", "section" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L192-L209
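Taken together, the builder methods above support fluent chaining, since each returns `self`. A hypothetical sketch, assuming `updater` is a `ConfigUpdater` that already contains a `metadata` section, and that `add_after` is invoked as a method as defined in the snippet above (some versions expose it as a property):

# insert a comment, a blank line, and a new section after [metadata]
builder = updater['metadata'].add_after()
builder.comment('added programmatically') \
       .space(newlines=1) \
       .section('options')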
pyscaffold/configupdater
src/configupdater/configupdater.py
Section.add_comment
def add_comment(self, line): """Add a Comment object to the section Used during initial parsing mainly Args: line (str): one line in the comment """ if not isinstance(self.last_item, Comment): comment = Comment(self._structure) self._structure.append(comment) self.last_item.add_line(line) return self
python
def add_comment(self, line): """Add a Comment object to the section Used during initial parsing mainly Args: line (str): one line in the comment """ if not isinstance(self.last_item, Comment): comment = Comment(self._structure) self._structure.append(comment) self.last_item.add_line(line) return self
[ "def", "add_comment", "(", "self", ",", "line", ")", ":", "if", "not", "isinstance", "(", "self", ".", "last_item", ",", "Comment", ")", ":", "comment", "=", "Comment", "(", "self", ".", "_structure", ")", "self", ".", "_structure", ".", "append", "(", "comment", ")", "self", ".", "last_item", ".", "add_line", "(", "line", ")", "return", "self" ]
Add a Comment object to the section Used during initial parsing mainly Args: line (str): one line in the comment
[ "Add", "a", "Comment", "object", "to", "the", "section" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L254-L266
pyscaffold/configupdater
src/configupdater/configupdater.py
Section.add_space
def add_space(self, line): """Add a Space object to the section Used during initial parsing mainly Args: line (str): one line that defines the space, maybe whitespaces """ if not isinstance(self.last_item, Space): space = Space(self._structure) self._structure.append(space) self.last_item.add_line(line) return self
python
def add_space(self, line): """Add a Space object to the section Used during initial parsing mainly Args: line (str): one line that defines the space, maybe whitespaces """ if not isinstance(self.last_item, Space): space = Space(self._structure) self._structure.append(space) self.last_item.add_line(line) return self
[ "def", "add_space", "(", "self", ",", "line", ")", ":", "if", "not", "isinstance", "(", "self", ".", "last_item", ",", "Space", ")", ":", "space", "=", "Space", "(", "self", ".", "_structure", ")", "self", ".", "_structure", ".", "append", "(", "space", ")", "self", ".", "last_item", ".", "add_line", "(", "line", ")", "return", "self" ]
Add a Space object to the section Used during initial parsing mainly Args: line (str): one line that defines the space, maybe whitespaces
[ "Add", "a", "Space", "object", "to", "the", "section" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L268-L280
pyscaffold/configupdater
src/configupdater/configupdater.py
Section.to_dict
def to_dict(self): """Transform to dictionary Returns: dict: dictionary with same content """ return {key: self.__getitem__(key).value for key in self.options()}
python
def to_dict(self): """Transform to dictionary Returns: dict: dictionary with same content """ return {key: self.__getitem__(key).value for key in self.options()}
[ "def", "to_dict", "(", "self", ")", ":", "return", "{", "key", ":", "self", ".", "__getitem__", "(", "key", ")", ".", "value", "for", "key", "in", "self", ".", "options", "(", ")", "}" ]
Transform to dictionary Returns: dict: dictionary with same content
[ "Transform", "to", "dictionary" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L356-L362
pyscaffold/configupdater
src/configupdater/configupdater.py
Section.set
def set(self, option, value=None): """Set an option for chaining. Args: option (str): option name value (str): value, default None """ option = self._container.optionxform(option) if option in self.options(): self.__getitem__(option).value = value else: self.__setitem__(option, value) return self
python
def set(self, option, value=None): """Set an option for chaining. Args: option (str): option name value (str): value, default None """ option = self._container.optionxform(option) if option in self.options(): self.__getitem__(option).value = value else: self.__setitem__(option, value) return self
[ "def", "set", "(", "self", ",", "option", ",", "value", "=", "None", ")", ":", "option", "=", "self", ".", "_container", ".", "optionxform", "(", "option", ")", "if", "option", "in", "self", ".", "options", "(", ")", ":", "self", ".", "__getitem__", "(", "option", ")", ".", "value", "=", "value", "else", ":", "self", ".", "__setitem__", "(", "option", ",", "value", ")", "return", "self" ]
Set an option for chaining. Args: option (str): option name value (str): value, default None
[ "Set", "an", "option", "for", "chaining", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L379-L391
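A small illustrative sketch combining `set` and `to_dict`, assuming `updater` already holds a `metadata` section:

updater['metadata'].set('name', 'myproject').set('version', '0.1')
print(updater['metadata'].to_dict())
# -> {'name': 'myproject', 'version': '0.1'} (plus any pre-existing options)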
pyscaffold/configupdater
src/configupdater/configupdater.py
Option.set_values
def set_values(self, values, separator='\n', indent=4*' '):
    """Sets the value to a given list of values, e.g. multi-line values

    Args:
        values (list): list of values
        separator (str): separator for values, default: line separator
        indent (str): indentation depth in case of line separator
    """
    self._updated = True
    self._multiline_value_joined = True
    self._values = values
    if separator == '\n':
        values.insert(0, '')
        separator = separator + indent
    self._value = separator.join(values)
python
def set_values(self, values, separator='\n', indent=4*' '):
    """Sets the value to a given list of values, e.g. multi-line values

    Args:
        values (list): list of values
        separator (str): separator for values, default: line separator
        indent (str): indentation depth in case of line separator
    """
    self._updated = True
    self._multiline_value_joined = True
    self._values = values
    if separator == '\n':
        values.insert(0, '')
        separator = separator + indent
    self._value = separator.join(values)
[ "def", "set_values", "(", "self", ",", "values", ",", "separator", "=", "'\\n'", ",", "indent", "=", "4", "*", "' '", ")", ":", "self", ".", "_updated", "=", "True", "self", ".", "_multiline_value_joined", "=", "True", "self", ".", "_values", "=", "values", "if", "separator", "==", "'\\n'", ":", "values", ".", "insert", "(", "0", ",", "''", ")", "separator", "=", "separator", "+", "indent", "self", ".", "_value", "=", "separator", ".", "join", "(", "values", ")" ]
Sets the value to a given list of values, e.g. multi-line values

Args:
    values (list): list of values
    separator (str): separator for values, default: line separator
    indent (str): indentation depth in case of line separator
[ "Sets", "the", "value", "to", "a", "given", "list", "of", "options", "e", ".", "g", ".", "multi", "-", "line", "values" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L478-L492
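With the default '\n' separator, an empty first element is inserted so every value starts on its own indented line. A sketch, assuming `opt` is an `Option` fetched from a section; the key name `install_requires` is just an example:

opt.set_values(['requests', 'click'])   # note: mutates the list passed in
# the option now renders roughly as:
#   install_requires =
#       requests
#       click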
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.read
def read(self, filename, encoding=None): """Read and parse a filename. Args: filename (str): path to file encoding (str): encoding of file, default None """ with open(filename, encoding=encoding) as fp: self._read(fp, filename) self._filename = os.path.abspath(filename)
python
def read(self, filename, encoding=None): """Read and parse a filename. Args: filename (str): path to file encoding (str): encoding of file, default None """ with open(filename, encoding=encoding) as fp: self._read(fp, filename) self._filename = os.path.abspath(filename)
[ "def", "read", "(", "self", ",", "filename", ",", "encoding", "=", "None", ")", ":", "with", "open", "(", "filename", ",", "encoding", "=", "encoding", ")", "as", "fp", ":", "self", ".", "_read", "(", "fp", ",", "filename", ")", "self", ".", "_filename", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")" ]
Read and parse a filename. Args: filename (str): path to file encoding (str): encoding of file, default None
[ "Read", "and", "parse", "a", "filename", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L597-L606
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.update_file
def update_file(self): """Update the read-in configuration file. """ if self._filename is None: raise NoConfigFileReadError() with open(self._filename, 'w') as fb: self.write(fb)
python
def update_file(self): """Update the read-in configuration file. """ if self._filename is None: raise NoConfigFileReadError() with open(self._filename, 'w') as fb: self.write(fb)
[ "def", "update_file", "(", "self", ")", ":", "if", "self", ".", "_filename", "is", "None", ":", "raise", "NoConfigFileReadError", "(", ")", "with", "open", "(", "self", ".", "_filename", ",", "'w'", ")", "as", "fb", ":", "self", ".", "write", "(", "fb", ")" ]
Update the read-in configuration file.
[ "Update", "the", "read", "-", "in", "configuration", "file", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L825-L831
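`read` and `update_file` bracket the typical round-trip: parse a file, mutate options in place, and write the result back while preserving comments and layout. A minimal sketch, assuming a `setup.cfg` with a `[metadata]` section exists:

updater = ConfigUpdater()
updater.read('setup.cfg')
updater['metadata']['version'].value = '0.2'
updater.update_file()   # raises NoConfigFileReadError if read() was never called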
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.validate_format
def validate_format(self, **kwargs): """Call ConfigParser to validate config Args: kwargs: are passed to :class:`configparser.ConfigParser` """ args = dict( dict_type=self._dict, allow_no_value=self._allow_no_value, inline_comment_prefixes=self._inline_comment_prefixes, strict=self._strict, empty_lines_in_values=self._empty_lines_in_values ) args.update(kwargs) parser = ConfigParser(**args) updated_cfg = str(self) parser.read_string(updated_cfg)
python
def validate_format(self, **kwargs): """Call ConfigParser to validate config Args: kwargs: are passed to :class:`configparser.ConfigParser` """ args = dict( dict_type=self._dict, allow_no_value=self._allow_no_value, inline_comment_prefixes=self._inline_comment_prefixes, strict=self._strict, empty_lines_in_values=self._empty_lines_in_values ) args.update(kwargs) parser = ConfigParser(**args) updated_cfg = str(self) parser.read_string(updated_cfg)
[ "def", "validate_format", "(", "self", ",", "*", "*", "kwargs", ")", ":", "args", "=", "dict", "(", "dict_type", "=", "self", ".", "_dict", ",", "allow_no_value", "=", "self", ".", "_allow_no_value", ",", "inline_comment_prefixes", "=", "self", ".", "_inline_comment_prefixes", ",", "strict", "=", "self", ".", "_strict", ",", "empty_lines_in_values", "=", "self", ".", "_empty_lines_in_values", ")", "args", ".", "update", "(", "kwargs", ")", "parser", "=", "ConfigParser", "(", "*", "*", "args", ")", "updated_cfg", "=", "str", "(", "self", ")", "parser", ".", "read_string", "(", "updated_cfg", ")" ]
Call ConfigParser to validate config Args: kwargs: are passed to :class:`configparser.ConfigParser`
[ "Call", "ConfigParser", "to", "validate", "config" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L833-L849
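Because the updater itself is lenient, `validate_format` is a convenient final check: it round-trips the current text through a strict `ConfigParser`. A sketch:

try:
    updater.validate_format()
except Exception as err:   # configparser raises ParsingError and friends
    print('resulting config would be invalid:', err)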
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.add_section
def add_section(self, section): """Create a new section in the configuration. Raise DuplicateSectionError if a section by the specified name already exists. Raise ValueError if name is DEFAULT. Args: section (str or :class:`Section`): name or Section type """ if section in self.sections(): raise DuplicateSectionError(section) if isinstance(section, str): # create a new section section = Section(section, container=self) elif not isinstance(section, Section): raise ValueError("Parameter must be a string or Section type!") self._structure.append(section)
python
def add_section(self, section): """Create a new section in the configuration. Raise DuplicateSectionError if a section by the specified name already exists. Raise ValueError if name is DEFAULT. Args: section (str or :class:`Section`): name or Section type """ if section in self.sections(): raise DuplicateSectionError(section) if isinstance(section, str): # create a new section section = Section(section, container=self) elif not isinstance(section, Section): raise ValueError("Parameter must be a string or Section type!") self._structure.append(section)
[ "def", "add_section", "(", "self", ",", "section", ")", ":", "if", "section", "in", "self", ".", "sections", "(", ")", ":", "raise", "DuplicateSectionError", "(", "section", ")", "if", "isinstance", "(", "section", ",", "str", ")", ":", "# create a new section", "section", "=", "Section", "(", "section", ",", "container", "=", "self", ")", "elif", "not", "isinstance", "(", "section", ",", "Section", ")", ":", "raise", "ValueError", "(", "\"Parameter must be a string or Section type!\"", ")", "self", ".", "_structure", ".", "append", "(", "section", ")" ]
Create a new section in the configuration. Raise DuplicateSectionError if a section by the specified name already exists. Raise ValueError if name is DEFAULT. Args: section (str or :class:`Section`): name or Section type
[ "Create", "a", "new", "section", "in", "the", "configuration", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L912-L928
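A sketch of adding a section and populating it, assuming `updater` holds a parsed config; as `Section.set` above suggests, assigning a string through item access creates the option:

updater.add_section('tool:pytest')
updater['tool:pytest']['testpaths'] = 'tests'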
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.options
def options(self, section): """Returns list of configuration options for the named section. Args: section (str): name of section Returns: list: list of option names """ if not self.has_section(section): raise NoSectionError(section) from None return self.__getitem__(section).options()
python
def options(self, section): """Returns list of configuration options for the named section. Args: section (str): name of section Returns: list: list of option names """ if not self.has_section(section): raise NoSectionError(section) from None return self.__getitem__(section).options()
[ "def", "options", "(", "self", ",", "section", ")", ":", "if", "not", "self", ".", "has_section", "(", "section", ")", ":", "raise", "NoSectionError", "(", "section", ")", "from", "None", "return", "self", ".", "__getitem__", "(", "section", ")", ".", "options", "(", ")" ]
Returns list of configuration options for the named section. Args: section (str): name of section Returns: list: list of option names
[ "Returns", "list", "of", "configuration", "options", "for", "the", "named", "section", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L941-L952
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.get
def get(self, section, option): """Gets an option value for a given section. Args: section (str): section name option (str): option name Returns: :class:`Option`: Option object holding key/value pair """ if not self.has_section(section): raise NoSectionError(section) from None section = self.__getitem__(section) option = self.optionxform(option) try: value = section[option] except KeyError: raise NoOptionError(option, section) return value
python
def get(self, section, option): """Gets an option value for a given section. Args: section (str): section name option (str): option name Returns: :class:`Option`: Option object holding key/value pair """ if not self.has_section(section): raise NoSectionError(section) from None section = self.__getitem__(section) option = self.optionxform(option) try: value = section[option] except KeyError: raise NoOptionError(option, section) return value
[ "def", "get", "(", "self", ",", "section", ",", "option", ")", ":", "if", "not", "self", ".", "has_section", "(", "section", ")", ":", "raise", "NoSectionError", "(", "section", ")", "from", "None", "section", "=", "self", ".", "__getitem__", "(", "section", ")", "option", "=", "self", ".", "optionxform", "(", "option", ")", "try", ":", "value", "=", "section", "[", "option", "]", "except", "KeyError", ":", "raise", "NoOptionError", "(", "option", ",", "section", ")", "return", "value" ]
Gets an option value for a given section. Args: section (str): section name option (str): option name Returns: :class:`Option`: Option object holding key/value pair
[ "Gets", "an", "option", "value", "for", "a", "given", "section", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L954-L974
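Note that `get` returns the `Option` block itself rather than a bare string; the value lives on its `value` attribute. A sketch:

option = updater.get('metadata', 'version')
print(option.value)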
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.items
def items(self, section=_UNSET): """Return a list of (name, value) tuples for options or sections. If section is given, return a list of tuples with (name, value) for each option in the section. Otherwise, return a list of tuples with (section_name, section_type) for each section. Args: section (str): optional section name, default UNSET Returns: list: list of :class:`Section` or :class:`Option` objects """ if section is _UNSET: return [(sect.name, sect) for sect in self.sections_blocks()] section = self.__getitem__(section) return [(opt.key, opt) for opt in section.option_blocks()]
python
def items(self, section=_UNSET): """Return a list of (name, value) tuples for options or sections. If section is given, return a list of tuples with (name, value) for each option in the section. Otherwise, return a list of tuples with (section_name, section_type) for each section. Args: section (str): optional section name, default UNSET Returns: list: list of :class:`Section` or :class:`Option` objects """ if section is _UNSET: return [(sect.name, sect) for sect in self.sections_blocks()] section = self.__getitem__(section) return [(opt.key, opt) for opt in section.option_blocks()]
[ "def", "items", "(", "self", ",", "section", "=", "_UNSET", ")", ":", "if", "section", "is", "_UNSET", ":", "return", "[", "(", "sect", ".", "name", ",", "sect", ")", "for", "sect", "in", "self", ".", "sections_blocks", "(", ")", "]", "section", "=", "self", ".", "__getitem__", "(", "section", ")", "return", "[", "(", "opt", ".", "key", ",", "opt", ")", "for", "opt", "in", "section", ".", "option_blocks", "(", ")", "]" ]
Return a list of (name, value) tuples for options or sections. If section is given, return a list of tuples with (name, value) for each option in the section. Otherwise, return a list of tuples with (section_name, section_type) for each section. Args: section (str): optional section name, default UNSET Returns: list: list of :class:`Section` or :class:`Option` objects
[ "Return", "a", "list", "of", "(", "name", "value", ")", "tuples", "for", "options", "or", "sections", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L976-L993
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.has_option
def has_option(self, section, option): """Checks for the existence of a given option in a given section. Args: section (str): name of section option (str): name of option Returns: bool: whether the option exists in the given section """ if section not in self.sections(): return False else: option = self.optionxform(option) return option in self[section]
python
def has_option(self, section, option): """Checks for the existence of a given option in a given section. Args: section (str): name of section option (str): name of option Returns: bool: whether the option exists in the given section """ if section not in self.sections(): return False else: option = self.optionxform(option) return option in self[section]
[ "def", "has_option", "(", "self", ",", "section", ",", "option", ")", ":", "if", "section", "not", "in", "self", ".", "sections", "(", ")", ":", "return", "False", "else", ":", "option", "=", "self", ".", "optionxform", "(", "option", ")", "return", "option", "in", "self", "[", "section", "]" ]
Checks for the existence of a given option in a given section. Args: section (str): name of section option (str): name of option Returns: bool: whether the option exists in the given section
[ "Checks", "for", "the", "existence", "of", "a", "given", "option", "in", "a", "given", "section", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L995-L1009
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.set
def set(self, section, option, value=None): """Set an option. Args: section (str): section name option (str): option name value (str): value, default None """ try: section = self.__getitem__(section) except KeyError: raise NoSectionError(section) from None option = self.optionxform(option) if option in section: section[option].value = value else: section[option] = value return self
python
def set(self, section, option, value=None): """Set an option. Args: section (str): section name option (str): option name value (str): value, default None """ try: section = self.__getitem__(section) except KeyError: raise NoSectionError(section) from None option = self.optionxform(option) if option in section: section[option].value = value else: section[option] = value return self
[ "def", "set", "(", "self", ",", "section", ",", "option", ",", "value", "=", "None", ")", ":", "try", ":", "section", "=", "self", ".", "__getitem__", "(", "section", ")", "except", "KeyError", ":", "raise", "NoSectionError", "(", "section", ")", "from", "None", "option", "=", "self", ".", "optionxform", "(", "option", ")", "if", "option", "in", "section", ":", "section", "[", "option", "]", ".", "value", "=", "value", "else", ":", "section", "[", "option", "]", "=", "value", "return", "self" ]
Set an option. Args: section (str): section name option (str): option name value (str): value, default None
[ "Set", "an", "option", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L1011-L1028
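`set` mirrors the familiar `configparser` call signature but returns the updater itself, so calls can be chained. A sketch:

updater.set('metadata', 'author', 'Jane Doe') \
       .set('metadata', 'license', 'MIT')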
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.remove_option
def remove_option(self, section, option): """Remove an option. Args: section (str): section name option (str): option name Returns: bool: whether the option was actually removed """ try: section = self.__getitem__(section) except KeyError: raise NoSectionError(section) from None option = self.optionxform(option) existed = option in section.options() if existed: del section[option] return existed
python
def remove_option(self, section, option): """Remove an option. Args: section (str): section name option (str): option name Returns: bool: whether the option was actually removed """ try: section = self.__getitem__(section) except KeyError: raise NoSectionError(section) from None option = self.optionxform(option) existed = option in section.options() if existed: del section[option] return existed
[ "def", "remove_option", "(", "self", ",", "section", ",", "option", ")", ":", "try", ":", "section", "=", "self", ".", "__getitem__", "(", "section", ")", "except", "KeyError", ":", "raise", "NoSectionError", "(", "section", ")", "from", "None", "option", "=", "self", ".", "optionxform", "(", "option", ")", "existed", "=", "option", "in", "section", ".", "options", "(", ")", "if", "existed", ":", "del", "section", "[", "option", "]", "return", "existed" ]
Remove an option. Args: section (str): section name option (str): option name Returns: bool: whether the option was actually removed
[ "Remove", "an", "option", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L1030-L1048
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.remove_section
def remove_section(self, name): """Remove a file section. Args: name: name of the section Returns: bool: whether the section was actually removed """ existed = self.has_section(name) if existed: idx = self._get_section_idx(name) del self._structure[idx] return existed
python
def remove_section(self, name): """Remove a file section. Args: name: name of the section Returns: bool: whether the section was actually removed """ existed = self.has_section(name) if existed: idx = self._get_section_idx(name) del self._structure[idx] return existed
[ "def", "remove_section", "(", "self", ",", "name", ")", ":", "existed", "=", "self", ".", "has_section", "(", "name", ")", "if", "existed", ":", "idx", "=", "self", ".", "_get_section_idx", "(", "name", ")", "del", "self", ".", "_structure", "[", "idx", "]", "return", "existed" ]
Remove a file section. Args: name: name of the section Returns: bool: whether the section was actually removed
[ "Remove", "a", "file", "section", "." ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L1050-L1063
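Both removal methods report whether anything was actually deleted, which makes conditional rewrites straightforward. A sketch, with invented option and section names:

if updater.remove_option('metadata', 'obsolete_key'):
    updater.update_file()
updater.remove_section('deprecated_section')   # returns False if absent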
pyscaffold/configupdater
src/configupdater/configupdater.py
ConfigUpdater.to_dict
def to_dict(self): """Transform to dictionary Returns: dict: dictionary with same content """ return {sect: self.__getitem__(sect).to_dict() for sect in self.sections()}
python
def to_dict(self): """Transform to dictionary Returns: dict: dictionary with same content """ return {sect: self.__getitem__(sect).to_dict() for sect in self.sections()}
[ "def", "to_dict", "(", "self", ")", ":", "return", "{", "sect", ":", "self", ".", "__getitem__", "(", "sect", ")", ".", "to_dict", "(", ")", "for", "sect", "in", "self", ".", "sections", "(", ")", "}" ]
Transform to dictionary Returns: dict: dictionary with same content
[ "Transform", "to", "dictionary" ]
train
https://github.com/pyscaffold/configupdater/blob/6ebac0b1fa7b8222baacdd4991d18cfc61659f84/src/configupdater/configupdater.py#L1065-L1072
AlecAivazis/graphql-over-kafka
nautilus/management/util/render_template.py
render_template
def render_template(template, out_dir='.', context=None):
    ''' This function renders the template designated by the argument to the
        designated directory using the given context.

        Args:
            template (string) : the source template to use (relative to ./templates)
            out_dir (string) : the name of the output directory
            context (dict) : the template rendering context
    '''
    # default to an empty rendering context so later render calls don't fail on None
    context = context or {}
    # the directory containing templates
    template_directory = os.path.join(os.path.dirname(os.path.abspath(__file__)),
        '..',
        'templates',
        template
    )
    # the files and empty directories to copy
    files = []
    empty_dirs = []
    for (dirpath, _, filenames) in os.walk(template_directory):
        # if there are no files in the directory
        if len(filenames) == 0:
            # add the directory to the list
            empty_dirs.append(os.path.relpath(dirpath, template_directory))
        # otherwise there are files in this directory
        else:
            # add the files to the list
            files.extend([os.path.join(dirpath, filepath) for filepath in filenames])

    # for each template file
    for source_file in files:
        # open the source template file and read its contents
        with open(source_file, 'r') as file:
            # create a template out of the source file contents
            template = Template(file.read())
            # render the template with the given context
            template_rendered = template.render(**context)

        # the location of the source relative to the template directory
        source_relpath = os.path.relpath(source_file, template_directory)
        # the target filename
        filename = os.path.join(out_dir, source_relpath)
        # create a jinja template out of the file path
        filename_rendered = Template(filename).render(**context)
        # the directory of the target file
        source_dir = os.path.dirname(filename_rendered)
        # if the directory doesn't exist
        if not os.path.exists(source_dir):
            # create the directories
            os.makedirs(source_dir)

        # create the target file
        with open(filename_rendered, 'w') as target_file:
            # write the rendered template to the target file
            target_file.write(template_rendered)

    # for each empty directory
    for dirpath in empty_dirs:
        try:
            # the target directory name
            dirname = os.path.join(out_dir, dirpath)
            # treat the dirname as a jinja template
            dirname_rendered = Template(dirname).render(**context)
            # if the directory doesn't exist
            if not os.path.exists(dirname_rendered):
                # create the directory in the target, replacing the name
                os.makedirs(dirname_rendered)
        except OSError as exc:
            # if the target directory already exists
            if exc.errno == errno.EEXIST and os.path.isdir(dirname_rendered):
                # keep going (noop)
                pass
            # otherwise it's an error we don't handle
            else:
                # pass it along
                raise
python
def render_template(template, out_dir='.', context=None):
    ''' This function renders the template designated by the argument to the
        designated directory using the given context.

        Args:
            template (string) : the source template to use (relative to ./templates)
            out_dir (string) : the name of the output directory
            context (dict) : the template rendering context
    '''
    # default to an empty rendering context so later render calls don't fail on None
    context = context or {}
    # the directory containing templates
    template_directory = os.path.join(os.path.dirname(os.path.abspath(__file__)),
        '..',
        'templates',
        template
    )
    # the files and empty directories to copy
    files = []
    empty_dirs = []
    for (dirpath, _, filenames) in os.walk(template_directory):
        # if there are no files in the directory
        if len(filenames) == 0:
            # add the directory to the list
            empty_dirs.append(os.path.relpath(dirpath, template_directory))
        # otherwise there are files in this directory
        else:
            # add the files to the list
            files.extend([os.path.join(dirpath, filepath) for filepath in filenames])

    # for each template file
    for source_file in files:
        # open the source template file and read its contents
        with open(source_file, 'r') as file:
            # create a template out of the source file contents
            template = Template(file.read())
            # render the template with the given context
            template_rendered = template.render(**context)

        # the location of the source relative to the template directory
        source_relpath = os.path.relpath(source_file, template_directory)
        # the target filename
        filename = os.path.join(out_dir, source_relpath)
        # create a jinja template out of the file path
        filename_rendered = Template(filename).render(**context)
        # the directory of the target file
        source_dir = os.path.dirname(filename_rendered)
        # if the directory doesn't exist
        if not os.path.exists(source_dir):
            # create the directories
            os.makedirs(source_dir)

        # create the target file
        with open(filename_rendered, 'w') as target_file:
            # write the rendered template to the target file
            target_file.write(template_rendered)

    # for each empty directory
    for dirpath in empty_dirs:
        try:
            # the target directory name
            dirname = os.path.join(out_dir, dirpath)
            # treat the dirname as a jinja template
            dirname_rendered = Template(dirname).render(**context)
            # if the directory doesn't exist
            if not os.path.exists(dirname_rendered):
                # create the directory in the target, replacing the name
                os.makedirs(dirname_rendered)
        except OSError as exc:
            # if the target directory already exists
            if exc.errno == errno.EEXIST and os.path.isdir(dirname_rendered):
                # keep going (noop)
                pass
            # otherwise it's an error we don't handle
            else:
                # pass it along
                raise
[ "def", "render_template", "(", "template", ",", "out_dir", "=", "'.'", ",", "context", "=", "None", ")", ":", "# the directory containing templates", "template_directory", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ",", "'..'", ",", "'templates'", ",", "template", ")", "# the files and empty directories to copy", "files", "=", "[", "]", "empty_dirs", "=", "[", "]", "for", "(", "dirpath", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "template_directory", ")", ":", "# if there are no files in the directory", "if", "len", "(", "filenames", ")", "==", "0", ":", "# add the directory to the list", "empty_dirs", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "dirpath", ",", "template_directory", ")", ")", "# otherwise there are files in this directory", "else", ":", "# add the files to the list", "files", ".", "extend", "(", "[", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filepath", ")", "for", "filepath", "in", "filenames", "]", ")", "# for each template file", "for", "source_file", "in", "files", ":", "# open a new file that we are going to write to", "with", "open", "(", "source_file", ",", "'r'", ")", "as", "file", ":", "# create a template out of the source file contents", "template", "=", "Template", "(", "file", ".", "read", "(", ")", ")", "# render the template with the given contents", "template_rendered", "=", "template", ".", "render", "(", "*", "*", "(", "context", "or", "{", "}", ")", ")", "# the location of the source relative to the template directory", "source_relpath", "=", "os", ".", "path", ".", "relpath", "(", "source_file", ",", "template_directory", ")", "# the target filename", "filename", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "source_relpath", ")", "# create a jinja template out of the file path", "filename_rendered", "=", "Template", "(", "filename", ")", ".", "render", "(", "*", "*", "context", ")", "# the directory of the target file", "source_dir", "=", "os", ".", "path", ".", "dirname", "(", "filename_rendered", ")", "# if the directory doesn't exist", "if", "not", "os", ".", "path", ".", "exists", "(", "source_dir", ")", ":", "# create the directories", "os", ".", "makedirs", "(", "source_dir", ")", "# create the target file", "with", "open", "(", "filename_rendered", ",", "'w'", ")", "as", "target_file", ":", "# write the rendered template to the target file", "target_file", ".", "write", "(", "template_rendered", ")", "# for each empty directory", "for", "dirpath", "in", "empty_dirs", ":", "try", ":", "# dirname", "dirname", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "dirpath", ")", "# treat the dirname as a jinja template", "dirname_rendered", "=", "Template", "(", "dirname", ")", ".", "render", "(", "*", "*", "context", ")", "# if the directory doesn't exist", "if", "not", "os", ".", "path", ".", "exists", "(", "dirname_rendered", ")", ":", "# create the directory in the target, replacing the name", "os", ".", "makedirs", "(", "dirname_rendered", ")", "except", "OSError", "as", "exc", ":", "# if the directory already exists", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", "and", "os", ".", "path", ".", "isdir", "(", "dirpath", ")", ":", "# keep going (noop)", "pass", "# otherwise its an error we don't handle", "else", ":", "# pass it along", "raise" ]
This function renders the template designated by the argument to the
    designated directory using the given context.

    Args:
        template (string) : the source template to use (relative to ./templates)
        out_dir (string) : the name of the output directory
        context (dict) : the template rendering context
[ "This", "function", "renders", "the", "template", "desginated", "by", "the", "argument", "to", "the", "designated", "directory", "using", "the", "given", "context", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/util/render_template.py#L6-L86
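A hypothetical invocation; the template name 'service' and the context keys are invented, since the available templates live in the package's ../templates directory relative to this module:

render_template(
    'service',                       # hypothetical template directory name
    out_dir='./my_service',
    context={'name': 'my_service'},  # available to both file contents and paths
)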
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/events/publish.py
publish
def publish(type, payload):
    """
        Publish a message with the specified action_type and payload over
        the event system. Useful for debugging.
    """
    async def _produce():
        # fire an action with the given values
        await producer.send(action_type=type, payload=payload)
        # notify the user that we were successful
        print("Successfully dispatched action with type {}.".format(type))

    # create a producer
    producer = ActionHandler()
    # start the producer
    producer.start()

    # get the current event loop
    loop = asyncio.get_event_loop()
    # run the production sequence
    loop.run_until_complete(_produce())

    # stop the producer
    producer.stop()
python
def publish(type, payload):
    """
        Publish a message with the specified action_type and payload over
        the event system. Useful for debugging.
    """
    async def _produce():
        # fire an action with the given values
        await producer.send(action_type=type, payload=payload)
        # notify the user that we were successful
        print("Successfully dispatched action with type {}.".format(type))

    # create a producer
    producer = ActionHandler()
    # start the producer
    producer.start()

    # get the current event loop
    loop = asyncio.get_event_loop()
    # run the production sequence
    loop.run_until_complete(_produce())

    # stop the producer
    producer.stop()
[ "def", "publish", "(", "type", ",", "payload", ")", ":", "async", "def", "_produce", "(", ")", ":", "# fire an action with the given values", "await", "producer", ".", "send", "(", "action_type", "=", "type", ",", "payload", "=", "payload", ")", "# notify the user that we were successful", "print", "(", "\"Successfully dispatched action with type {}.\"", ".", "format", "(", "type", ")", ")", "# create a producer", "producer", "=", "ActionHandler", "(", ")", "# start the producer", "producer", ".", "start", "(", ")", "# get the current event loop", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "# run the production sequence", "loop", ".", "run_until_complete", "(", "_produce", "(", ")", ")", "# start the producer", "producer", ".", "stop", "(", ")" ]
Publish a message with the specified action_type and payload over the event system. Useful for debugging.
[ "Publish", "a", "message", "with", "the", "specified", "action_type", "and", "payload", "over", "the", "event", "system", ".", "Useful", "for", "debugging", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/events/publish.py#L15-L38
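A hedged sketch of using `publish` for debugging; the action-type string and payload are invented, since real types come from helpers such as `get_crud_action`:

publish('user.create.pending', '{"username": "alice"}')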
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/deleteHandler.py
delete_handler
def delete_handler(Model, name=None, **kwds):
    """
        This factory returns an action handler that deletes an instance of
        the specified model when a delete action is received, assuming the
        action follows nautilus conventions.

        Args:
            Model (nautilus.BaseModel): The model to delete when the action
                is received.

        Returns:
            function(type, payload): The action handler for this model
    """
    # necessary imports
    from nautilus.database import db

    async def action_handler(service, action_type, payload, props, notify=True, **kwds):
        # if the payload represents a delete action for `Model`
        if action_type == get_crud_action('delete', name or Model):
            try:
                # the props of the message
                message_props = {}
                # if there was a correlation id in the request
                if 'correlation_id' in props:
                    # make sure it ends up in the reply
                    message_props['correlation_id'] = props['correlation_id']

                # the id in the payload representing the record to delete
                record_id = payload['id'] if 'id' in payload else payload['pk']

                # get the model matching the payload
                try:
                    model_query = Model.select().where(Model.primary_key() == record_id)
                except KeyError:
                    raise RuntimeError("Could not find appropriate id to remove service record.")

                # remove the model instance
                model_query.get().delete_instance()

                # if we need to tell someone about what happened
                if notify:
                    # publish the success event
                    await service.event_broker.send(
                        payload='{"status":"ok"}',
                        action_type=change_action_status(action_type, success_status()),
                        **message_props
                    )

            # if something goes wrong
            except Exception as err:
                # if we need to tell someone about what happened
                if notify:
                    # publish the error as an event
                    await service.event_broker.send(
                        payload=str(err),
                        action_type=change_action_status(action_type, error_status()),
                        **message_props
                    )
                # otherwise we aren't supposed to notify
                else:
                    # raise the exception normally
                    raise err

    # return the handler
    return action_handler
python
def delete_handler(Model, name=None, **kwds): """ This factory returns an action handler that deletes an instance of the specified model when a delete action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to delete when the action is received. Returns: function(type, payload): The action handler for this model """ # necessary imports from nautilus.database import db async def action_handler(service, action_type, payload, props, notify=True, **kwds): # if the payload represents a new instance of `model` if action_type == get_crud_action('delete', name or Model): try: # the props of the message message_props = {} # if there was a correlation id in the request if 'correlation_id' in props: # make sure it ends up in the reply message_props['correlation_id'] = props['correlation_id'] # the id in the payload representing the record to delete record_id = payload['id'] if 'id' in payload else payload['pk'] # get the model matching the payload try: model_query = Model.select().where(Model.primary_key() == record_id) except KeyError: raise RuntimeError("Could not find appropriate id to remove service record.") # remove the model instance model_query.get().delete_instance() # if we need to tell someone about what happened if notify: # publish the success event await service.event_broker.send( payload='{"status":"ok"}', action_type=change_action_status(action_type, success_status()), **message_props ) # if something goes wrong except Exception as err: # if we need to tell someone about what happened if notify: # publish the error as an event await service.event_broker.send( payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props ) # otherwise we aren't supposed to notify else: # raise the exception normally raise err # return the handler return action_handler
[ "def", "delete_handler", "(", "Model", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "# necessary imports", "from", "nautilus", ".", "database", "import", "db", "async", "def", "action_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "notify", "=", "True", ",", "*", "*", "kwds", ")", ":", "# if the payload represents a new instance of `model`", "if", "action_type", "==", "get_crud_action", "(", "'delete'", ",", "name", "or", "Model", ")", ":", "try", ":", "# the props of the message", "message_props", "=", "{", "}", "# if there was a correlation id in the request", "if", "'correlation_id'", "in", "props", ":", "# make sure it ends up in the reply", "message_props", "[", "'correlation_id'", "]", "=", "props", "[", "'correlation_id'", "]", "# the id in the payload representing the record to delete", "record_id", "=", "payload", "[", "'id'", "]", "if", "'id'", "in", "payload", "else", "payload", "[", "'pk'", "]", "# get the model matching the payload", "try", ":", "model_query", "=", "Model", ".", "select", "(", ")", ".", "where", "(", "Model", ".", "primary_key", "(", ")", "==", "record_id", ")", "except", "KeyError", ":", "raise", "RuntimeError", "(", "\"Could not find appropriate id to remove service record.\"", ")", "# remove the model instance", "model_query", ".", "get", "(", ")", ".", "delete_instance", "(", ")", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the success event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "'{\"status\":\"ok\"}'", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "success_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# if something goes wrong", "except", "Exception", "as", "err", ":", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the error as an event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "str", "(", "err", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "error_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# otherwise we aren't supposed to notify", "else", ":", "# raise the exception normally", "raise", "err", "# return the handler", "return", "action_handler" ]
This factory returns an action handler that deletes an instance of the specified model when a delete action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to delete when the action is received. Returns: function(type, payload): The action handler for this model
[ "This", "factory", "returns", "an", "action", "handler", "that", "deletes", "an", "instance", "of", "the", "specified", "model", "when", "a", "delete", "action", "is", "received", "assuming", "the", "action", "follows", "nautilus", "conventions", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/deleteHandler.py#L10-L71
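A hedged sketch of how a delete-style handler like the one above behaves when driven directly. The action-type strings and the success/error reply convention are assumptions modeled on what `get_crud_action` and `change_action_status` appear to produce; the in-memory `records` dict stands in for the peewee model.

```python
# Sketch of the delete-handler contract: gate on action type, carry the
# correlation id through, reply with a success or error envelope.
# "user.delete" / ".success" / ".error" are assumed convention strings.
import asyncio

records = {1: {"id": 1, "name": "alec"}}

async def delete_action_handler(action_type, payload, props):
    if action_type != "user.delete":
        return  # not for us; another handler may react instead
    reply_props = {}
    if "correlation_id" in props:
        reply_props["correlation_id"] = props["correlation_id"]
    try:
        del records[payload["id"]]
        print("reply:", "user.delete.success", '{"status":"ok"}', reply_props)
    except Exception as err:
        print("reply:", "user.delete.error", str(err), reply_props)

asyncio.run(delete_action_handler("user.delete", {"id": 1}, {"correlation_id": "abc"}))
print("records left:", records)
```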
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/readHandler.py
read_handler
def read_handler(Model, name=None, **kwds): """ This factory returns an action handler that responds to read requests by resolving the payload as a graphql query against the internal schema. Args: Model (nautilus.BaseModel): The model to read when the action is received. Returns: function(type, payload): The action handler for this model """ async def action_handler(service, action_type, payload, props, **kwds): # if the payload represents a new instance of `model` if action_type == get_crud_action('read', name or Model): # the props of the message message_props = {} # if there was a correlation id in the request if 'correlation_id' in props: # make sure it ends up in the reply message_props['correlation_id'] = props['correlation_id'] try: # resolve the query using the service schema resolved = service.schema.execute(payload) # create the string response response = json.dumps({ 'data': {key:value for key,value in resolved.data.items()}, 'errors': resolved.errors }) # publish the success event await service.event_broker.send( payload=response, action_type=change_action_status(action_type, success_status()), **message_props ) # if something goes wrong except Exception as err: # publish the error as an event await service.event_broker.send( payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props ) # return the handler return action_handler
python
def read_handler(Model, name=None, **kwds): """ This factory returns an action handler that responds to read requests by resolving the payload as a graphql query against the internal schema. Args: Model (nautilus.BaseModel): The model to read when the action is received. Returns: function(type, payload): The action handler for this model """ async def action_handler(service, action_type, payload, props, **kwds): # if the payload represents a new instance of `model` if action_type == get_crud_action('read', name or Model): # the props of the message message_props = {} # if there was a correlation id in the request if 'correlation_id' in props: # make sure it ends up in the reply message_props['correlation_id'] = props['correlation_id'] try: # resolve the query using the service schema resolved = service.schema.execute(payload) # create the string response response = json.dumps({ 'data': {key:value for key,value in resolved.data.items()}, 'errors': resolved.errors }) # publish the success event await service.event_broker.send( payload=response, action_type=change_action_status(action_type, success_status()), **message_props ) # if something goes wrong except Exception as err: # publish the error as an event await service.event_broker.send( payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props ) # return the handler return action_handler
[ "def", "read_handler", "(", "Model", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "async", "def", "action_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "*", "*", "kwds", ")", ":", "# if the payload represents a new instance of `model`", "if", "action_type", "==", "get_crud_action", "(", "'read'", ",", "name", "or", "Model", ")", ":", "# the props of the message", "message_props", "=", "{", "}", "# if there was a correlation id in the request", "if", "'correlation_id'", "in", "props", ":", "# make sure it ends up in the reply", "message_props", "[", "'correlation_id'", "]", "=", "props", "[", "'correlation_id'", "]", "try", ":", "# resolve the query using the service schema", "resolved", "=", "service", ".", "schema", ".", "execute", "(", "payload", ")", "# create the string response", "response", "=", "json", ".", "dumps", "(", "{", "'data'", ":", "{", "key", ":", "value", "for", "key", ",", "value", "in", "resolved", ".", "data", ".", "items", "(", ")", "}", ",", "'errors'", ":", "resolved", ".", "errors", "}", ")", "# publish the success event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "response", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "success_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# if something goes wrong", "except", "Exception", "as", "err", ":", "# publish the error as an event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "str", "(", "err", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "error_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# return the handler", "return", "action_handler" ]
This factory returns an action handler that responds to read requests by resolving the payload as a graphql query against the internal schema. Args: Model (nautilus.BaseModel): The model to read when the action is received. Returns: function(type, payload): The action handler for this model
[ "This", "factory", "returns", "an", "action", "handler", "that", "responds", "to", "read", "requests", "by", "resolving", "the", "payload", "as", "a", "graphql", "query", "against", "the", "internal", "schema", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/readHandler.py#L6-L56
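To make the reply envelope concrete, here is a runnable sketch of the read handler's core step with a stub schema whose `execute()` mimics the `data`/`errors` result object the code above consumes; both stub classes are illustrative only.

```python
# Stub of the read handler's resolve-and-serialize step; Result imitates
# the shape of a graphene execution result (data / errors attributes).
import json

class Result:
    def __init__(self, data, errors):
        self.data, self.errors = data, errors

class StubSchema:
    def execute(self, query):
        return Result(data={"allUsers": [{"name": "alec"}]}, errors=[])

resolved = StubSchema().execute("{ allUsers { name } }")
response = json.dumps({
    "data": {key: value for key, value in resolved.data.items()},
    "errors": resolved.errors,
})
print(response)  # {"data": {"allUsers": [{"name": "alec"}]}, "errors": []}
```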
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/crudHandler.py
crud_handler
def crud_handler(Model, name=None, **kwds): """ This action handler factory returns an action handler that responds to actions with CRUD types (following nautilus conventions) and performs the necessary mutation on the model's database. Args: Model (nautilus.BaseModel): The model to perform CRUD operations on when the action is received. Returns: function(type, payload): The action handler for this model """ # import the necessary modules from nautilus.network.events import combine_action_handlers from . import update_handler, create_handler, delete_handler, read_handler # combine them into one handler return combine_action_handlers( create_handler(Model, name=name), read_handler(Model, name=name), update_handler(Model, name=name), delete_handler(Model, name=name), )
python
def crud_handler(Model, name=None, **kwds): """ This action handler factory returns an action handler that responds to actions with CRUD types (following nautilus conventions) and performs the necessary mutation on the model's database. Args: Model (nautilus.BaseModel): The model to perform CRUD operations on when the action is received. Returns: function(type, payload): The action handler for this model """ # import the necessary modules from nautilus.network.events import combine_action_handlers from . import update_handler, create_handler, delete_handler, read_handler # combine them into one handler return combine_action_handlers( create_handler(Model, name=name), read_handler(Model, name=name), update_handler(Model, name=name), delete_handler(Model, name=name), )
[ "def", "crud_handler", "(", "Model", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "# import the necessary modules", "from", "nautilus", ".", "network", ".", "events", "import", "combine_action_handlers", "from", ".", "import", "update_handler", ",", "create_handler", ",", "delete_handler", ",", "read_handler", "# combine them into one handler", "return", "combine_action_handlers", "(", "create_handler", "(", "Model", ",", "name", "=", "name", ")", ",", "read_handler", "(", "Model", ",", "name", "=", "name", ")", ",", "update_handler", "(", "Model", ",", "name", "=", "name", ")", ",", "delete_handler", "(", "Model", ",", "name", "=", "name", ")", ",", ")" ]
This action handler factory returns an action handler that responds to actions with CRUD types (following nautilus conventions) and performs the necessary mutation on the model's database. Args: Model (nautilus.BaseModel): The model to perform CRUD operations on when the action is received. Returns: function(type, payload): The action handler for this model
[ "This", "action", "handler", "factory", "returns", "an", "action", "handler", "that", "responds", "to", "actions", "with", "CRUD", "types", "(", "following", "nautilus", "conventions", ")", "and", "performs", "the", "necessary", "mutation", "on", "the", "model", "s", "database", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/crudHandler.py#L1-L25
AlecAivazis/graphql-over-kafka
nautilus/management/scripts/events/ask.py
ask
def ask(type, payload): """ Publish a message with the specified action_type and payload over the event system and wait for the reply. Useful for debugging. """ async def _produce(): # notify the user that we were successful print("Dispatching action with type {}...".format(type)) # fire an action with the given values response = await producer.ask(action_type=type, payload=payload) # show the user the reply print(response) # create a producer producer = ActionHandler() # start the producer producer.start() # get the current event loop loop = asyncio.get_event_loop() # run the production sequence loop.run_until_complete(_produce()) # stop the producer producer.stop()
python
def ask(type, payload): """ Publish a message with the specified action_type and payload over the event system and wait for the reply. Useful for debugging. """ async def _produce(): # notify the user that we were successful print("Dispatching action with type {}...".format(type)) # fire an action with the given values response = await producer.ask(action_type=type, payload=payload) # show the user the reply print(response) # create a producer producer = ActionHandler() # start the producer producer.start() # get the current event loop loop = asyncio.get_event_loop() # run the production sequence loop.run_until_complete(_produce()) # stop the producer producer.stop()
[ "def", "ask", "(", "type", ",", "payload", ")", ":", "async", "def", "_produce", "(", ")", ":", "# notify the user that we were successful", "print", "(", "\"Dispatching action with type {}...\"", ".", "format", "(", "type", ")", ")", "# fire an action with the given values", "response", "=", "await", "producer", ".", "ask", "(", "action_type", "=", "type", ",", "payload", "=", "payload", ")", "# show the user the reply", "print", "(", "response", ")", "# create a producer", "producer", "=", "ActionHandler", "(", ")", "# start the producer", "producer", ".", "start", "(", ")", "# get the current event loop", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "# run the production sequence", "loop", ".", "run_until_complete", "(", "_produce", "(", ")", ")", "# stop the producer", "producer", ".", "stop", "(", ")" ]
Publish a message with the specified action_type and payload over the event system and wait for the reply. Useful for debugging.
[ "Publish", "a", "message", "with", "the", "specified", "action_type", "and", "payload", "over", "the", "event", "system", "and", "wait", "for", "the", "reply", ".", "Useful", "for", "debugging", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/management/scripts/events/ask.py#L15-L40
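The `ask` pattern above is request/reply over the event bus. Here is a minimal runnable sketch of that idea using an in-memory correlation-id map and `asyncio.Future`; the fake service and all names are assumptions, not the nautilus implementation.

```python
# Request/reply sketch: register a future under a fresh correlation id,
# publish, and await the future until the matching reply resolves it.
import asyncio
import uuid

pending = {}

async def fake_service(action_type, payload, correlation_id):
    # pretend a remote service handled the action and replied
    await asyncio.sleep(0)
    pending[correlation_id].set_result('{"status":"ok"}')

async def ask(action_type, payload):
    correlation_id = uuid.uuid4().hex
    future = asyncio.get_running_loop().create_future()
    pending[correlation_id] = future
    asyncio.create_task(fake_service(action_type, payload, correlation_id))
    return await future

print(asyncio.run(ask("user.read", "{ user { name } }")))
```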
AlecAivazis/graphql-over-kafka
nautilus/config/config.py
Config._from_type
def _from_type(self, config): """ This method converts a type into a dict. """ def is_user_attribute(attr): return ( not attr.startswith('__') and not isinstance(getattr(config, attr), collections.abc.Callable) ) return {attr: getattr(config, attr) for attr in dir(config) \ if is_user_attribute(attr)}
python
def _from_type(self, config): """ This method converts a type into a dict. """ def is_user_attribute(attr): return ( not attr.startswith('__') and not isinstance(getattr(config, attr), collections.abc.Callable) ) return {attr: getattr(config, attr) for attr in dir(config) \ if is_user_attribute(attr)}
[ "def", "_from_type", "(", "self", ",", "config", ")", ":", "def", "is_user_attribute", "(", "attr", ")", ":", "return", "(", "not", "attr", ".", "startswith", "(", "'__'", ")", "and", "not", "isinstance", "(", "getattr", "(", "config", ",", "attr", ")", ",", "collections", ".", "abc", ".", "Callable", ")", ")", "return", "{", "attr", ":", "getattr", "(", "config", ",", "attr", ")", "for", "attr", "in", "dir", "(", "config", ")", "if", "is_user_attribute", "(", "attr", ")", "}" ]
This method converts a type into a dict.
[ "This", "method", "converts", "a", "type", "into", "a", "dict", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/config/config.py#L41-L52
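The `_from_type` filter is easy to verify standalone: dunder attributes and callables are dropped, everything else becomes config data. This runnable snippet inlines the same logic under an illustrative name.

```python
# Runnable illustration of the class-to-dict conversion above: callables
# and dunder attributes are filtered out, everything else is config data.
import collections.abc

class ServiceConfig:
    database_url = "sqlite:///nautilus.db"
    debug = True
    def helper(self):  # callables are skipped
        pass

def from_type(config):
    def is_user_attribute(attr):
        return (not attr.startswith("__")
                and not isinstance(getattr(config, attr), collections.abc.Callable))
    return {attr: getattr(config, attr) for attr in dir(config) if is_user_attribute(attr)}

print(from_type(ServiceConfig))  # {'database_url': 'sqlite:///nautilus.db', 'debug': True}
```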
AlecAivazis/graphql-over-kafka
nautilus/api/util/walk_query.py
walk_query
async def walk_query(obj, object_resolver, connection_resolver, errors, current_user=None, __naut_name=None, obey_auth=True, **filters): """ This function traverses a query and collects the corresponding information in a dictionary. """ # if the object has no selection set if not hasattr(obj, 'selection_set'): # yell loudly raise ValueError("Can only resolve objects, not primitive types") # the name of the node node_name = __naut_name or obj.name.value if obj.name else obj.operation # the selected fields selection_set = obj.selection_set.selections def _build_arg_tree(arg): """ This function recursively builds the arguments for lists and single values """ # TODO: what about object arguments?? # if there is a single value if hasattr(arg, 'value'): # assign the value to the filter return arg.value # otherwise if there are multiple values for the argument elif hasattr(arg, 'values'): return [_build_arg_tree(node) for node in arg.values] # for each argument on this node for arg in obj.arguments: # add it to the query filters filters[arg.name.value] = _build_arg_tree(arg.value) # the fields we have to ask for fields = [field for field in selection_set if not field.selection_set] # the links between objects connections = [field for field in selection_set if field.selection_set] try: # resolve the model with the given fields models = await object_resolver(node_name, [field.name.value for field in fields], current_user=current_user, obey_auth=obey_auth, **filters) # if something went wrong resolving the object except Exception as e: # add the error as a string errors.append(e.__str__()) # stop here return None # add connections to each matching model for model in models: # if is an id for the model if 'pk' in model: # for each connection for connection in connections: # the name of the connection connection_name = connection.name.value # the target of the connection node = { 'name': node_name, 'pk': model['pk'] } try: # go through the connection connected_ids, next_target = await connection_resolver( connection_name, node, ) # if there are connections if connected_ids: # add the id filter to the list filters['pk_in'] = connected_ids # add the connection field value = await walk_query( connection, object_resolver, connection_resolver, errors, current_user=current_user, obey_auth=obey_auth, __naut_name=next_target, **filters ) # there were no connections else: value = [] # if something went wrong except Exception as e: # add the error as a string errors.append(e.__str__()) # stop here value = None # set the connection to the appropriate value model[connection_name] = value # return the list of matching models return models
python
async def walk_query(obj, object_resolver, connection_resolver, errors, current_user=None, __naut_name=None, obey_auth=True, **filters): """ This function traverses a query and collects the corresponding information in a dictionary. """ # if the object has no selection set if not hasattr(obj, 'selection_set'): # yell loudly raise ValueError("Can only resolve objects, not primitive types") # the name of the node node_name = __naut_name or obj.name.value if obj.name else obj.operation # the selected fields selection_set = obj.selection_set.selections def _build_arg_tree(arg): """ This function recursively builds the arguments for lists and single values """ # TODO: what about object arguments?? # if there is a single value if hasattr(arg, 'value'): # assign the value to the filter return arg.value # otherwise if there are multiple values for the argument elif hasattr(arg, 'values'): return [_build_arg_tree(node) for node in arg.values] # for each argument on this node for arg in obj.arguments: # add it to the query filters filters[arg.name.value] = _build_arg_tree(arg.value) # the fields we have to ask for fields = [field for field in selection_set if not field.selection_set] # the links between objects connections = [field for field in selection_set if field.selection_set] try: # resolve the model with the given fields models = await object_resolver(node_name, [field.name.value for field in fields], current_user=current_user, obey_auth=obey_auth, **filters) # if something went wrong resolving the object except Exception as e: # add the error as a string errors.append(e.__str__()) # stop here return None # add connections to each matching model for model in models: # if is an id for the model if 'pk' in model: # for each connection for connection in connections: # the name of the connection connection_name = connection.name.value # the target of the connection node = { 'name': node_name, 'pk': model['pk'] } try: # go through the connection connected_ids, next_target = await connection_resolver( connection_name, node, ) # if there are connections if connected_ids: # add the id filter to the list filters['pk_in'] = connected_ids # add the connection field value = await walk_query( connection, object_resolver, connection_resolver, errors, current_user=current_user, obey_auth=obey_auth, __naut_name=next_target, **filters ) # there were no connections else: value = [] # if something went wrong except Exception as e: # add the error as a string errors.append(e.__str__()) # stop here value = None # set the connection to the appropriate value model[connection_name] = value # return the list of matching models return models
[ "async", "def", "walk_query", "(", "obj", ",", "object_resolver", ",", "connection_resolver", ",", "errors", ",", "current_user", "=", "None", ",", "__naut_name", "=", "None", ",", "obey_auth", "=", "True", ",", "*", "*", "filters", ")", ":", "# if the object has no selection set", "if", "not", "hasattr", "(", "obj", ",", "'selection_set'", ")", ":", "# yell loudly", "raise", "ValueError", "(", "\"Can only resolve objects, not primitive types\"", ")", "# the name of the node", "node_name", "=", "__naut_name", "or", "obj", ".", "name", ".", "value", "if", "obj", ".", "name", "else", "obj", ".", "operation", "# the selected fields", "selection_set", "=", "obj", ".", "selection_set", ".", "selections", "def", "_build_arg_tree", "(", "arg", ")", ":", "\"\"\"\n This function recursively builds the arguments for lists and single values\n \"\"\"", "# TODO: what about object arguments??", "# if there is a single value", "if", "hasattr", "(", "arg", ",", "'value'", ")", ":", "# assign the value to the filter", "return", "arg", ".", "value", "# otherwise if there are multiple values for the argument", "elif", "hasattr", "(", "arg", ",", "'values'", ")", ":", "return", "[", "_build_arg_tree", "(", "node", ")", "for", "node", "in", "arg", ".", "values", "]", "# for each argument on this node", "for", "arg", "in", "obj", ".", "arguments", ":", "# add it to the query filters", "filters", "[", "arg", ".", "name", ".", "value", "]", "=", "_build_arg_tree", "(", "arg", ".", "value", ")", "# the fields we have to ask for", "fields", "=", "[", "field", "for", "field", "in", "selection_set", "if", "not", "field", ".", "selection_set", "]", "# the links between objects", "connections", "=", "[", "field", "for", "field", "in", "selection_set", "if", "field", ".", "selection_set", "]", "try", ":", "# resolve the model with the given fields", "models", "=", "await", "object_resolver", "(", "node_name", ",", "[", "field", ".", "name", ".", "value", "for", "field", "in", "fields", "]", ",", "current_user", "=", "current_user", ",", "obey_auth", "=", "obey_auth", ",", "*", "*", "filters", ")", "# if something went wrong resolving the object", "except", "Exception", "as", "e", ":", "# add the error as a string", "errors", ".", "append", "(", "e", ".", "__str__", "(", ")", ")", "# stop here", "return", "None", "# add connections to each matching model", "for", "model", "in", "models", ":", "# if is an id for the model", "if", "'pk'", "in", "model", ":", "# for each connection", "for", "connection", "in", "connections", ":", "# the name of the connection", "connection_name", "=", "connection", ".", "name", ".", "value", "# the target of the connection", "node", "=", "{", "'name'", ":", "node_name", ",", "'pk'", ":", "model", "[", "'pk'", "]", "}", "try", ":", "# go through the connection", "connected_ids", ",", "next_target", "=", "await", "connection_resolver", "(", "connection_name", ",", "node", ",", ")", "# if there are connections", "if", "connected_ids", ":", "# add the id filter to the list", "filters", "[", "'pk_in'", "]", "=", "connected_ids", "# add the connection field", "value", "=", "await", "walk_query", "(", "connection", ",", "object_resolver", ",", "connection_resolver", ",", "errors", ",", "current_user", "=", "current_user", ",", "obey_auth", "=", "obey_auth", ",", "__naut_name", "=", "next_target", ",", "*", "*", "filters", ")", "# there were no connections", "else", ":", "value", "=", "[", "]", "# if something went wrong", "except", "Exception", "as", "e", ":", "# add the error as a string", 
"errors", ".", "append", "(", "e", ".", "__str__", "(", ")", ")", "# stop here", "value", "=", "None", "# set the connection to the appropriate value", "model", "[", "connection_name", "]", "=", "value", "# return the list of matching models", "return", "models" ]
This function traverses a query and collects the corresponding information in a dictionary.
[ "This", "function", "traverses", "a", "query", "and", "collects", "the", "corresponding", "information", "in", "a", "dictionary", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/walk_query.py#L1-L102
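The `_build_arg_tree` helper inside `walk_query` recurses on whether an AST node exposes `value` or `values`. The namedtuples below are stand-ins for the real graphql-core AST classes, shaped just enough to show that recursion.

```python
# Stand-in AST nodes (not the real graphql-core classes) showing the
# value/values recursion that builds query filters from arguments.
from collections import namedtuple

Value = namedtuple("Value", ["value"])
ListValue = namedtuple("ListValue", ["values"])

def build_arg_tree(arg):
    if hasattr(arg, "value"):
        return arg.value
    elif hasattr(arg, "values"):
        return [build_arg_tree(node) for node in arg.values]

print(build_arg_tree(Value("hello")))                       # 'hello'
print(build_arg_tree(ListValue([Value("a"), Value("b")])))  # ['a', 'b']
```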
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/queryHandler.py
query_handler
async def query_handler(service, action_type, payload, props, **kwds): """ This action handler interprets the payload as a query to be executed by the api gateway service. """ # check that the action type indicates a query if action_type == query_action_type(): print('encountered query event {!r} '.format(payload)) # perform the query result = await parse_string(payload, service.object_resolver, service.connection_resolver, service.mutation_resolver, obey_auth=False ) # the props for the reply message reply_props = {'correlation_id': props['correlation_id']} if 'correlation_id' in props else {} # publish the success event await service.event_broker.send( payload=result, action_type=change_action_status(action_type, success_status()), **reply_props )
python
async def query_handler(service, action_type, payload, props, **kwds): """ This action handler interprets the payload as a query to be executed by the api gateway service. """ # check that the action type indicates a query if action_type == query_action_type(): print('encountered query event {!r} '.format(payload)) # perform the query result = await parse_string(payload, service.object_resolver, service.connection_resolver, service.mutation_resolver, obey_auth=False ) # the props for the reply message reply_props = {'correlation_id': props['correlation_id']} if 'correlation_id' in props else {} # publish the success event await service.event_broker.send( payload=result, action_type=change_action_status(action_type, success_status()), **reply_props )
[ "async", "def", "query_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "*", "*", "kwds", ")", ":", "# check that the action type indicates a query", "if", "action_type", "==", "query_action_type", "(", ")", ":", "print", "(", "'encountered query event {!r} '", ".", "format", "(", "payload", ")", ")", "# perform the query", "result", "=", "await", "parse_string", "(", "payload", ",", "service", ".", "object_resolver", ",", "service", ".", "connection_resolver", ",", "service", ".", "mutation_resolver", ",", "obey_auth", "=", "False", ")", "# the props for the reply message", "reply_props", "=", "{", "'correlation_id'", ":", "props", "[", "'correlation_id'", "]", "}", "if", "'correlation_id'", "in", "props", "else", "{", "}", "# publish the success event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "result", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "success_status", "(", ")", ")", ",", "*", "*", "reply_props", ")" ]
This action handler interprets the payload as a query to be executed by the api gateway service.
[ "This", "action", "handler", "interprets", "the", "payload", "as", "a", "query", "to", "be", "executed", "by", "the", "api", "gateway", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/queryHandler.py#L5-L29
AlecAivazis/graphql-over-kafka
nautilus/api/util/summarize_mutation_io.py
summarize_mutation_io
def summarize_mutation_io(name, type, required=False): """ This function returns the standard summary for mutation inputs and outputs """ return dict( name=name, type=type, required=required )
python
def summarize_mutation_io(name, type, required=False): """ This function returns the standard summary for mutation inputs and outputs """ return dict( name=name, type=type, required=required )
[ "def", "summarize_mutation_io", "(", "name", ",", "type", ",", "required", "=", "False", ")", ":", "return", "dict", "(", "name", "=", "name", ",", "type", "=", "type", ",", "required", "=", "required", ")" ]
This function returns the standard summary for mutation inputs and outputs
[ "This", "function", "returns", "the", "standard", "summary", "for", "mutation", "inputs", "and", "outputs" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/summarize_mutation_io.py#L1-L10
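Usage is as simple as the function suggests; the example below is lifted directly from its shape.

```python
# The summary is just a plain dict with a fixed set of keys.
def summarize_mutation_io(name, type, required=False):
    return dict(name=name, type=type, required=required)

print(summarize_mutation_io(name="pk", type="ID", required=True))
# {'name': 'pk', 'type': 'ID', 'required': True}
```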
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
crud_mutation_name
def crud_mutation_name(action, model): """ This function returns the name of a mutation that performs the specified crud action on the given model service """ model_string = get_model_string(model) # make sure the mutation name is correctly camelcased model_string = model_string[0].upper() + model_string[1:] # return the mutation name return "{}{}".format(action, model_string)
python
def crud_mutation_name(action, model): """ This function returns the name of a mutation that performs the specified crud action on the given model service """ model_string = get_model_string(model) # make sure the mutation name is correctly camelcased model_string = model_string[0].upper() + model_string[1:] # return the mutation name return "{}{}".format(action, model_string)
[ "def", "crud_mutation_name", "(", "action", ",", "model", ")", ":", "model_string", "=", "get_model_string", "(", "model", ")", "# make sure the mutation name is correctly camelcased", "model_string", "=", "model_string", "[", "0", "]", ".", "upper", "(", ")", "+", "model_string", "[", "1", ":", "]", "# return the mutation name", "return", "\"{}{}\"", ".", "format", "(", "action", ",", "model_string", ")" ]
This function returns the name of a mutation that performs the specified crud action on the given model service
[ "This", "function", "returns", "the", "name", "of", "a", "mutation", "that", "performs", "the", "specified", "crud", "action", "on", "the", "given", "model", "service" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L15-L25
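A runnable sketch of the naming convention. `get_model_string` here is a hypothetical stand-in that lower-camelcases a model name, which matches how the surrounding code appears to use it.

```python
# Stand-in for nautilus's get_model_string (assumed behavior: lowercase
# the first letter of the model name, or pass strings through).
def get_model_string(model):
    name = model if isinstance(model, str) else model.__name__
    return name[0].lower() + name[1:]

def crud_mutation_name(action, model):
    model_string = get_model_string(model)
    # re-capitalize so the action prefix reads as camelCase
    model_string = model_string[0].upper() + model_string[1:]
    return "{}{}".format(action, model_string)

class UserProfile: pass

print(crud_mutation_name("create", UserProfile))  # createUserProfile
print(crud_mutation_name("delete", "catPhoto"))   # deleteCatPhoto
```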
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
create_mutation_inputs
def create_mutation_inputs(service): """ Args: service : The service being created by the mutation Returns: (list) : a list of all of the fields available for the service, with the required ones respected. """ # grab the default list of field summaries inputs = _service_mutation_summaries(service) # make sure the pk isn't in the list inputs.remove([field for field in inputs if field['name'] == 'id'][0]) # return the final list return inputs
python
def create_mutation_inputs(service): """ Args: service : The service being created by the mutation Returns: (list) : a list of all of the fields available for the service, with the required ones respected. """ # grab the default list of field summaries inputs = _service_mutation_summaries(service) # make sure the pk isn't in the list inputs.remove([field for field in inputs if field['name'] == 'id'][0]) # return the final list return inputs
[ "def", "create_mutation_inputs", "(", "service", ")", ":", "# grab the default list of field summaries", "inputs", "=", "_service_mutation_summaries", "(", "service", ")", "# make sure the pk isn't in the list", "inputs", ".", "remove", "(", "[", "field", "for", "field", "in", "inputs", "if", "field", "[", "'name'", "]", "==", "'id'", "]", "[", "0", "]", ")", "# return the final list", "return", "inputs" ]
Args: service : The service being created by the mutation Returns: (list) : a list of all of the fields available for the service, with the required ones respected.
[ "Args", ":", "service", ":", "The", "service", "being", "created", "by", "the", "mutation", "Returns", ":", "(", "list", ")", ":", "a", "list", "of", "all", "of", "the", "fields", "available", "for", "the", "service", "with", "the", "required", "ones", "respected", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L28-L42
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
update_mutation_inputs
def update_mutation_inputs(service): """ Args: service : The service being updated by the mutation Returns: (list) : a list of all of the fields available for the service. Pk is a required field in order to filter the results """ # grab the default list of field summaries inputs = _service_mutation_summaries(service) # visit each field for field in inputs: # if we're looking at the id field if field['name'] == 'id': # make sure its required field['required'] = True # but no other field else: # is required field['required'] = False # return the final list return inputs
python
def update_mutation_inputs(service): """ Args: service : The service being updated by the mutation Returns: (list) : a list of all of the fields available for the service. Pk is a required field in order to filter the results """ # grab the default list of field summaries inputs = _service_mutation_summaries(service) # visit each field for field in inputs: # if we're looking at the id field if field['name'] == 'id': # make sure its required field['required'] = True # but no other field else: # is required field['required'] = False # return the final list return inputs
[ "def", "update_mutation_inputs", "(", "service", ")", ":", "# grab the default list of field summaries", "inputs", "=", "_service_mutation_summaries", "(", "service", ")", "# visit each field", "for", "field", "in", "inputs", ":", "# if we're looking at the id field", "if", "field", "[", "'name'", "]", "==", "'id'", ":", "# make sure its required", "field", "[", "'required'", "]", "=", "True", "# but no other field", "else", ":", "# is required", "field", "[", "'required'", "]", "=", "False", "# return the final list", "return", "inputs" ]
Args: service : The service being updated by the mutation Returns: (list) : a list of all of the fields available for the service. Pk is a required field in order to filter the results
[ "Args", ":", "service", ":", "The", "service", "being", "updated", "by", "the", "mutation", "Returns", ":", "(", "list", ")", ":", "a", "list", "of", "all", "of", "the", "fields", "available", "for", "the", "service", ".", "Pk", "is", "a", "required", "field", "in", "order", "to", "filter", "the", "results" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L57-L80
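Putting the create and update input rules side by side makes the contrast clear: create drops `id` entirely, update keeps every field but marks only `id` as required. The field list below is stubbed in place of `_service_mutation_summaries` (hypothetical data).

```python
# Stub field summaries standing in for _service_mutation_summaries.
fields = lambda: [
    {"name": "id", "type": "ID", "required": True},
    {"name": "name", "type": "String", "required": True},
    {"name": "email", "type": "String", "required": False},
]

# create: the pk is removed entirely
create_inputs = [f for f in fields() if f["name"] != "id"]

# update: every field kept, but only the pk is required
update_inputs = fields()
for field in update_inputs:
    field["required"] = field["name"] == "id"

print(create_inputs)
print(update_inputs)
```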
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
delete_mutation_inputs
def delete_mutation_inputs(service): """ Args: service : The service being deleted by the mutation Returns: ([str]): the only input for delete is the pk of the service. """ from nautilus.api.util import summarize_mutation_io # the only input for delete events is the pk of the service record return [summarize_mutation_io(name='pk', type='ID', required=True)]
python
def delete_mutation_inputs(service): """ Args: service : The service being deleted by the mutation Returns: ([str]): the only input for delete is the pk of the service. """ from nautilus.api.util import summarize_mutation_io # the only input for delete events is the pk of the service record return [summarize_mutation_io(name='pk', type='ID', required=True)]
[ "def", "delete_mutation_inputs", "(", "service", ")", ":", "from", "nautilus", ".", "api", ".", "util", "import", "summarize_mutation_io", "# the only input for delete events is the pk of the service record", "return", "[", "summarize_mutation_io", "(", "name", "=", "'pk'", ",", "type", "=", "'ID'", ",", "required", "=", "True", ")", "]" ]
Args: service : The service being deleted by the mutation Returns: ([str]): the only input for delete is the pk of the service.
[ "Args", ":", "service", ":", "The", "service", "being", "deleted", "by", "the", "mutation", "Returns", ":", "(", "[", "str", "]", ")", ":", "the", "only", "input", "for", "delete", "is", "the", "pk", "of", "the", "service", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L96-L106
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
_summarize_o_mutation_type
def _summarize_o_mutation_type(model): """ This function creates the actual mutation io summary corresponding to the model """ from nautilus.api.util import summarize_mutation_io # compute the appropriate name for the object object_type_name = get_model_string(model) # return a mutation io object return summarize_mutation_io( name=object_type_name, type=_summarize_object_type(model), required=False )
python
def _summarize_o_mutation_type(model): """ This function creates the actual mutation io summary corresponding to the model """ from nautilus.api.util import summarize_mutation_io # compute the appropriate name for the object object_type_name = get_model_string(model) # return a mutation io object return summarize_mutation_io( name=object_type_name, type=_summarize_object_type(model), required=False )
[ "def", "_summarize_o_mutation_type", "(", "model", ")", ":", "from", "nautilus", ".", "api", ".", "util", "import", "summarize_mutation_io", "# compute the appropriate name for the object", "object_type_name", "=", "get_model_string", "(", "model", ")", "# return a mutation io object", "return", "summarize_mutation_io", "(", "name", "=", "object_type_name", ",", "type", "=", "_summarize_object_type", "(", "model", ")", ",", "required", "=", "False", ")" ]
This function creates the actual mutation io summary corresponding to the model
[ "This", "function", "creates", "the", "actual", "mutation", "io", "summary", "corresponding", "to", "the", "model" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L134-L147
AlecAivazis/graphql-over-kafka
nautilus/conventions/api.py
_summarize_object_type
def _summarize_object_type(model): """ This function returns the summary for a given model """ # the fields for the service's model model_fields = {field.name: field for field in list(model.fields())} # summarize the model return { 'fields': [{ 'name': key, 'type': type(convert_peewee_field(value)).__name__ } for key, value in model_fields.items() ] }
python
def _summarize_object_type(model): """ This function returns the summary for a given model """ # the fields for the service's model model_fields = {field.name: field for field in list(model.fields())} # summarize the model return { 'fields': [{ 'name': key, 'type': type(convert_peewee_field(value)).__name__ } for key, value in model_fields.items() ] }
[ "def", "_summarize_object_type", "(", "model", ")", ":", "# the fields for the service's model", "model_fields", "=", "{", "field", ".", "name", ":", "field", "for", "field", "in", "list", "(", "model", ".", "fields", "(", ")", ")", "}", "# summarize the model", "return", "{", "'fields'", ":", "[", "{", "'name'", ":", "key", ",", "'type'", ":", "type", "(", "convert_peewee_field", "(", "value", ")", ")", ".", "__name__", "}", "for", "key", ",", "value", "in", "model_fields", ".", "items", "(", ")", "]", "}" ]
This function returns the summary for a given model
[ "This", "function", "returns", "the", "summary", "for", "a", "given", "model" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/api.py#L149-L162
AlecAivazis/graphql-over-kafka
nautilus/network/events/util.py
combine_action_handlers
def combine_action_handlers(*handlers): """ This function combines the given action handlers into a single function which will call all of them. """ # make sure each of the given handlers is callable for handler in handlers: # if the handler is not a function if not (iscoroutinefunction(handler) or iscoroutine(handler)): # yell loudly raise ValueError("Provided handler is not a coroutine: %s" % handler) # the combined action handler async def combined_handler(*args, **kwds): # goes over every given handler for handler in handlers: # call the handler await handler(*args, **kwds) # return the combined action handler return combined_handler
python
def combine_action_handlers(*handlers): """ This function combines the given action handlers into a single function which will call all of them. """ # make sure each of the given handlers is callable for handler in handlers: # if the handler is not a function if not (iscoroutinefunction(handler) or iscoroutine(handler)): # yell loudly raise ValueError("Provided handler is not a coroutine: %s" % handler) # the combined action handler async def combined_handler(*args, **kwds): # goes over every given handler for handler in handlers: # call the handler await handler(*args, **kwds) # return the combined action handler return combined_handler
[ "def", "combine_action_handlers", "(", "*", "handlers", ")", ":", "# make sure each of the given handlers is callable", "for", "handler", "in", "handlers", ":", "# if the handler is not a function", "if", "not", "(", "iscoroutinefunction", "(", "handler", ")", "or", "iscoroutine", "(", "handler", ")", ")", ":", "# yell loudly", "raise", "ValueError", "(", "\"Provided handler is not a coroutine: %s\"", "%", "handler", ")", "# the combined action handler", "async", "def", "combined_handler", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "# goes over every given handler", "for", "handler", "in", "handlers", ":", "# call the handler", "await", "handler", "(", "*", "args", ",", "*", "*", "kwds", ")", "# return the combined action handler", "return", "combined_handler" ]
This function combines the given action handlers into a single function which will call all of them.
[ "This", "function", "combines", "the", "given", "action", "handlers", "into", "a", "single", "function", "which", "will", "call", "all", "of", "them", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/util.py#L6-L26
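`combine_action_handlers` is fully self-contained, so it can be exercised as-is with toy coroutines. The only assumption in this runnable demo is that `iscoroutinefunction`/`iscoroutine` come from `inspect` (the record does not show the import).

```python
# Runnable demo: two coroutine handlers combined into one; the combined
# call awaits each in order, exactly as in the function above.
import asyncio
from inspect import iscoroutine, iscoroutinefunction

def combine_action_handlers(*handlers):
    for handler in handlers:
        if not (iscoroutinefunction(handler) or iscoroutine(handler)):
            raise ValueError("Provided handler is not a coroutine: %s" % handler)
    async def combined_handler(*args, **kwds):
        for handler in handlers:
            await handler(*args, **kwds)
    return combined_handler

async def log_handler(action_type, payload):
    print("log:", action_type, payload)

async def audit_handler(action_type, payload):
    print("audit:", action_type, payload)

handler = combine_action_handlers(log_handler, audit_handler)
asyncio.run(handler("user.create", '{"name": "alec"}'))
```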
AlecAivazis/graphql-over-kafka
nautilus/network/events/actionHandlers/updateHandler.py
update_handler
def update_handler(Model, name=None, **kwds): """ This factory returns an action handler that updates an instance of the specified model when an update action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to update when the action is received. Returns: function(type, payload): The action handler for this model """ async def action_handler(service, action_type, payload, props, notify=True, **kwds): # if the payload represents a new instance of `Model` if action_type == get_crud_action('update', name or Model): try: # the props of the message message_props = {} # if there was a correlation id in the request if 'correlation_id' in props: # make sure it ends up in the reply message_props['correlation_id'] = props['correlation_id'] # grab the name of the primary key for the model pk_field = Model.primary_key() # make sure there is a primary key to id the model if not pk_field.name in payload: # yell loudly raise ValueError("Must specify the pk of the model when updating") # grab the matching model model = Model.select().where(pk_field == payload[pk_field.name]).get() # remove the key from the payload payload.pop(pk_field.name, None) # for every key,value pair for key, value in payload.items(): # TODO: add protection for certain fields from being # changed by the api setattr(model, key, value) # save the updates model.save() # if we need to tell someone about what happened if notify: # publish the success event await service.event_broker.send( payload=ModelSerializer().serialize(model), action_type=change_action_status(action_type, success_status()), **message_props ) # if something goes wrong except Exception as err: # if we need to tell someone about what happened if notify: # publish the error as an event await service.event_broker.send( payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props ) # otherwise we aren't supposed to notify else: # raise the exception normally raise err # return the handler return action_handler
python
def update_handler(Model, name=None, **kwds): """ This factory returns an action handler that updates an instance of the specified model when an update action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to update when the action is received. Returns: function(type, payload): The action handler for this model """ async def action_handler(service, action_type, payload, props, notify=True, **kwds): # if the payload represents a new instance of `Model` if action_type == get_crud_action('update', name or Model): try: # the props of the message message_props = {} # if there was a correlation id in the request if 'correlation_id' in props: # make sure it ends up in the reply message_props['correlation_id'] = props['correlation_id'] # grab the name of the primary key for the model pk_field = Model.primary_key() # make sure there is a primary key to id the model if not pk_field.name in payload: # yell loudly raise ValueError("Must specify the pk of the model when updating") # grab the matching model model = Model.select().where(pk_field == payload[pk_field.name]).get() # remove the key from the payload payload.pop(pk_field.name, None) # for every key,value pair for key, value in payload.items(): # TODO: add protection for certain fields from being # changed by the api setattr(model, key, value) # save the updates model.save() # if we need to tell someone about what happened if notify: # publish the success event await service.event_broker.send( payload=ModelSerializer().serialize(model), action_type=change_action_status(action_type, success_status()), **message_props ) # if something goes wrong except Exception as err: # if we need to tell someone about what happened if notify: # publish the error as an event await service.event_broker.send( payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props ) # otherwise we aren't supposed to notify else: # raise the exception normally raise err # return the handler return action_handler
[ "def", "update_handler", "(", "Model", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "async", "def", "action_handler", "(", "service", ",", "action_type", ",", "payload", ",", "props", ",", "notify", "=", "True", ",", "*", "*", "kwds", ")", ":", "# if the payload represents a new instance of `Model`", "if", "action_type", "==", "get_crud_action", "(", "'update'", ",", "name", "or", "Model", ")", ":", "try", ":", "# the props of the message", "message_props", "=", "{", "}", "# if there was a correlation id in the request", "if", "'correlation_id'", "in", "props", ":", "# make sure it ends up in the reply", "message_props", "[", "'correlation_id'", "]", "=", "props", "[", "'correlation_id'", "]", "# grab the nam eof the primary key for the model", "pk_field", "=", "Model", ".", "primary_key", "(", ")", "# make sure there is a primary key to id the model", "if", "not", "pk_field", ".", "name", "in", "payload", ":", "# yell loudly", "raise", "ValueError", "(", "\"Must specify the pk of the model when updating\"", ")", "# grab the matching model", "model", "=", "Model", ".", "select", "(", ")", ".", "where", "(", "pk_field", "==", "payload", "[", "pk_field", ".", "name", "]", ")", ".", "get", "(", ")", "# remove the key from the payload", "payload", ".", "pop", "(", "pk_field", ".", "name", ",", "None", ")", "# for every key,value pair", "for", "key", ",", "value", "in", "payload", ".", "items", "(", ")", ":", "# TODO: add protection for certain fields from being", "# changed by the api", "setattr", "(", "model", ",", "key", ",", "value", ")", "# save the updates", "model", ".", "save", "(", ")", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the scucess event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "ModelSerializer", "(", ")", ".", "serialize", "(", "model", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "success_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# if something goes wrong", "except", "Exception", "as", "err", ":", "# if we need to tell someone about what happened", "if", "notify", ":", "# publish the error as an event", "await", "service", ".", "event_broker", ".", "send", "(", "payload", "=", "str", "(", "err", ")", ",", "action_type", "=", "change_action_status", "(", "action_type", ",", "error_status", "(", ")", ")", ",", "*", "*", "message_props", ")", "# otherwise we aren't supposed to notify", "else", ":", "# raise the exception normally", "raise", "err", "# return the handler", "return", "action_handler" ]
This factory returns an action handler that updates an instance of the specified model when an update action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to update when the action is received. Returns: function(type, payload): The action handler for this model
[ "This", "factory", "returns", "an", "action", "handler", "that", "updates", "an", "instance", "of", "the", "specified", "model", "when", "an", "update", "action", "is", "received", "assuming", "the", "action", "follows", "nautilus", "conventions", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/updateHandler.py#L10-L83
AlecAivazis/graphql-over-kafka
nautilus/api/util/graphql_mutation_from_summary.py
graphql_mutation_from_summary
def graphql_mutation_from_summary(summary): """ This function returns a graphql mutation corresponding to the provided summary. """ # get the name of the mutation from the summary mutation_name = summary['name'] # print(summary) # the treat the "type" string as a gra input_name = mutation_name + "Input" input_fields = build_native_type_dictionary(summary['inputs'], name=input_name, respect_required=True) # the inputs for the mutation are defined by a class record inputs = type('Input', (object,), input_fields) # the outputs for the mutation are attributes to the class record output_name = mutation_name + "Output" outputs = build_native_type_dictionary(summary['outputs'], name=output_name) # a no-op in order to satisfy the introspection query mutate = classmethod(lambda *_, **__ : 'hello') # create the appropriate mutation class record mutation = type(mutation_name, (graphene.Mutation,), { 'Input': inputs, 'mutate': mutate, **outputs }) # return the newly created mutation record return mutation
python
def graphql_mutation_from_summary(summary): """ This function returns a graphql mutation corresponding to the provided summary. """ # get the name of the mutation from the summary mutation_name = summary['name'] # print(summary) # the treat the "type" string as a gra input_name = mutation_name + "Input" input_fields = build_native_type_dictionary(summary['inputs'], name=input_name, respect_required=True) # the inputs for the mutation are defined by a class record inputs = type('Input', (object,), input_fields) # the outputs for the mutation are attributes to the class record output_name = mutation_name + "Output" outputs = build_native_type_dictionary(summary['outputs'], name=output_name) # a no-op in order to satisfy the introspection query mutate = classmethod(lambda *_, **__ : 'hello') # create the appropriate mutation class record mutation = type(mutation_name, (graphene.Mutation,), { 'Input': inputs, 'mutate': mutate, **outputs }) # return the newly created mutation record return mutation
[ "def", "graphql_mutation_from_summary", "(", "summary", ")", ":", "# get the name of the mutation from the summary", "mutation_name", "=", "summary", "[", "'name'", "]", "# print(summary)", "# the treat the \"type\" string as a gra", "input_name", "=", "mutation_name", "+", "\"Input\"", "input_fields", "=", "build_native_type_dictionary", "(", "summary", "[", "'inputs'", "]", ",", "name", "=", "input_name", ",", "respect_required", "=", "True", ")", "# the inputs for the mutation are defined by a class record", "inputs", "=", "type", "(", "'Input'", ",", "(", "object", ",", ")", ",", "input_fields", ")", "# the outputs for the mutation are attributes to the class record", "output_name", "=", "mutation_name", "+", "\"Output\"", "outputs", "=", "build_native_type_dictionary", "(", "summary", "[", "'outputs'", "]", ",", "name", "=", "output_name", ")", "# a no-op in order to satisfy the introspection query", "mutate", "=", "classmethod", "(", "lambda", "*", "_", ",", "*", "*", "__", ":", "'hello'", ")", "# create the appropriate mutation class record", "mutation", "=", "type", "(", "mutation_name", ",", "(", "graphene", ".", "Mutation", ",", ")", ",", "{", "'Input'", ":", "inputs", ",", "'mutate'", ":", "mutate", ",", "*", "*", "outputs", "}", ")", "# return the newly created mutation record", "return", "mutation" ]
This function returns a graphql mutation corresponding to the provided summary.
[ "This", "function", "returns", "a", "graphql", "mutation", "corresponding", "to", "the", "provided", "summary", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/graphql_mutation_from_summary.py#L6-L38
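Stripped of graphene, the construction above is three `type()` calls. This sketch shows just that dynamic-class pattern with an illustrative summary dict; it is not the real graphene mutation, only the shape of how one is assembled.

```python
# Dynamic class construction sketch (graphene removed; summary dict and
# all field names here are illustrative assumptions).
summary = {
    "name": "createUser",
    "inputs": {"name": str},
    "outputs": {"user": dict},
}

# the inputs become a nested class record
inputs = type("Input", (object,), dict(summary["inputs"]))

# a no-op mutate, as in the original, to satisfy introspection
mutate = classmethod(lambda *_, **__: "hello")

# the mutation itself: inputs, mutate, and the outputs as attributes
mutation = type(summary["name"], (object,), {
    "Input": inputs,
    "mutate": mutate,
    **summary["outputs"],
})

print(mutation.__name__)    # createUser
print(mutation.Input.name)  # <class 'str'>
print(mutation.mutate())    # hello
```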
AlecAivazis/graphql-over-kafka
nautilus/api/util/arg_string_from_dict.py
arg_string_from_dict
def arg_string_from_dict(arg_dict, **kwds): """ This function takes a series of dictionaries and creates an argument string for a graphql query """ # the filters dictionary filters = { **arg_dict, **kwds, } # return the correctly formed string return ", ".join("{}: {}".format(key, json.dumps(value)) for key,value in filters.items())
python
def arg_string_from_dict(arg_dict, **kwds): """ This function takes a series of dictionaries and creates an argument string for a graphql query """ # the filters dictionary filters = { **arg_dict, **kwds, } # return the correctly formed string return ", ".join("{}: {}".format(key, json.dumps(value)) for key,value in filters.items())
[ "def", "arg_string_from_dict", "(", "arg_dict", ",", "*", "*", "kwds", ")", ":", "# the filters dictionary", "filters", "=", "{", "*", "*", "arg_dict", ",", "*", "*", "kwds", ",", "}", "# return the correctly formed string", "return", "\", \"", ".", "join", "(", "\"{}: {}\"", ".", "format", "(", "key", ",", "json", ".", "dumps", "(", "value", ")", ")", "for", "key", ",", "value", "in", "filters", ".", "items", "(", ")", ")" ]
This function takes a series of dictionaries and creates an argument string for a graphql query
[ "This", "function", "takes", "a", "series", "of", "ditionaries", "and", "creates", "an", "argument", "string", "for", "a", "graphql", "query" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/arg_string_from_dict.py#L3-L14
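Because each value is passed through json.dumps, strings come out double-quoted and numbers bare, which is exactly what a GraphQL argument list expects. A small sketch:

# keyword arguments are merged over the positional dictionary
args = arg_string_from_dict({'name': 'bob'}, id=1)
# args == 'name: "bob", id: 1' (subject to dict ordering)
query = "query { all_models(%s) { id } }" % args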
AlecAivazis/graphql-over-kafka
nautilus/api/util/create_model_schema.py
create_model_schema
def create_model_schema(target_model):
    """ This function creates a graphql schema that provides a single model """

    from nautilus.database import db

    # create the schema instance
    schema = graphene.Schema(auto_camelcase=False)

    # grab the primary key from the model
    primary_key = target_model.primary_key()
    primary_key_type = convert_peewee_field(primary_key)

    # create a graphene object
    class ModelObjectType(PeeweeObjectType):
        class Meta:
            model = target_model

        pk = Field(primary_key_type, description="The primary key for this object.")

        @graphene.resolve_only_args
        def resolve_pk(self):
            return getattr(self, self.primary_key().name)

    class Query(graphene.ObjectType):
        """ the root level query """
        all_models = List(ModelObjectType, args=args_for_model(target_model))

        @graphene.resolve_only_args
        def resolve_all_models(self, **args):
            # filter the model query according to the arguments
            # print(filter_model(target_model, args)[0].__dict__)
            return filter_model(target_model, args)

    # add the query to the schema
    schema.query = Query

    return schema
python
def create_model_schema(target_model):
    """ This function creates a graphql schema that provides a single model """

    from nautilus.database import db

    # create the schema instance
    schema = graphene.Schema(auto_camelcase=False)

    # grab the primary key from the model
    primary_key = target_model.primary_key()
    primary_key_type = convert_peewee_field(primary_key)

    # create a graphene object
    class ModelObjectType(PeeweeObjectType):
        class Meta:
            model = target_model

        pk = Field(primary_key_type, description="The primary key for this object.")

        @graphene.resolve_only_args
        def resolve_pk(self):
            return getattr(self, self.primary_key().name)

    class Query(graphene.ObjectType):
        """ the root level query """
        all_models = List(ModelObjectType, args=args_for_model(target_model))

        @graphene.resolve_only_args
        def resolve_all_models(self, **args):
            # filter the model query according to the arguments
            # print(filter_model(target_model, args)[0].__dict__)
            return filter_model(target_model, args)

    # add the query to the schema
    schema.query = Query

    return schema
[ "def", "create_model_schema", "(", "target_model", ")", ":", "from", "nautilus", ".", "database", "import", "db", "# create the schema instance", "schema", "=", "graphene", ".", "Schema", "(", "auto_camelcase", "=", "False", ")", "# grab the primary key from the model", "primary_key", "=", "target_model", ".", "primary_key", "(", ")", "primary_key_type", "=", "convert_peewee_field", "(", "primary_key", ")", "# create a graphene object", "class", "ModelObjectType", "(", "PeeweeObjectType", ")", ":", "class", "Meta", ":", "model", "=", "target_model", "pk", "=", "Field", "(", "primary_key_type", ",", "description", "=", "\"The primary key for this object.\"", ")", "@", "graphene", ".", "resolve_only_args", "def", "resolve_pk", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "self", ".", "primary_key", "(", ")", ".", "name", ")", "class", "Query", "(", "graphene", ".", "ObjectType", ")", ":", "\"\"\" the root level query \"\"\"", "all_models", "=", "List", "(", "ModelObjectType", ",", "args", "=", "args_for_model", "(", "target_model", ")", ")", "@", "graphene", ".", "resolve_only_args", "def", "resolve_all_models", "(", "self", ",", "*", "*", "args", ")", ":", "# filter the model query according to the arguments", "# print(filter_model(target_model, args)[0].__dict__)", "return", "filter_model", "(", "target_model", ",", "args", ")", "# add the query to the schema", "schema", ".", "query", "=", "Query", "return", "schema" ]
This function creates a graphql schema that provides a single model
[ "This", "function", "creates", "a", "graphql", "schema", "that", "provides", "a", "single", "model" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/api/util/create_model_schema.py#L9-L48
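A hedged sketch of wiring a model through create_model_schema; it assumes a peewee model that also carries nautilus's primary_key() classmethod, so the model below is illustrative only:

import peewee

class User(peewee.Model):
    name = peewee.CharField()
    # assumption: nautilus's model base supplies primary_key(); a plain
    # peewee.Model would need that classmethod added before this works

schema = create_model_schema(User)
result = schema.execute('{ all_models { name, pk } }')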
AlecAivazis/graphql-over-kafka
nautilus/conventions/services.py
connection_service_name
def connection_service_name(service, *args):
    ''' the name of a service that manages the connection between services '''
    # if the service is a string
    if isinstance(service, str):
        return service

    return normalize_string(type(service).__name__)
python
def connection_service_name(service, *args):
    ''' the name of a service that manages the connection between services '''
    # if the service is a string
    if isinstance(service, str):
        return service

    return normalize_string(type(service).__name__)
[ "def", "connection_service_name", "(", "service", ",", "*", "args", ")", ":", "# if the service is a string", "if", "isinstance", "(", "service", ",", "str", ")", ":", "return", "service", "return", "normalize_string", "(", "type", "(", "service", ")", ".", "__name__", ")" ]
the name of a service that manages the connection between services
[ "the", "name", "of", "a", "service", "that", "manages", "the", "connection", "between", "services" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/conventions/services.py#L22-L28
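Both call paths in one sketch; exactly how normalize_string renders the class name (camelCase, snake_case, ...) is not pinned down here, so the printed value is indicative only:

class RecipeService:
    pass

# strings pass straight through unchanged
assert connection_service_name("recipes") == "recipes"

# instances are reduced to their normalized class name
print(connection_service_name(RecipeService()))  # e.g. "recipeService"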
AlecAivazis/graphql-over-kafka
nautilus/auth/util/read_session_token.py
read_session_token
def read_session_token(secret_key, token):
    """
        This function verifies the token using the secret key and returns its
        contents.
    """
    return jwt.decode(token.encode('utf-8'),
        secret_key,
        algorithms=[token_encryption_algorithm()]
    )
python
def read_session_token(secret_key, token):
    """
        This function verifies the token using the secret key and returns its
        contents.
    """
    return jwt.decode(token.encode('utf-8'),
        secret_key,
        algorithms=[token_encryption_algorithm()]
    )
[ "def", "read_session_token", "(", "secret_key", ",", "token", ")", ":", "return", "jwt", ".", "decode", "(", "token", ".", "encode", "(", "'utf-8'", ")", ",", "secret_key", ",", "algorithms", "=", "[", "token_encryption_algorithm", "(", ")", "]", ")" ]
This function verifies the token using the secret key and returns its contents.
[ "This", "function", "verifies", "the", "token", "using", "the", "secret", "key", "and", "returns", "its", "contents", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/auth/util/read_session_token.py#L6-L13
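A round-trip sketch using PyJWT directly; HS256 stands in for whatever token_encryption_algorithm() actually returns, which is an assumption:

import jwt

secret = 'super-secret'
token = jwt.encode({'user': 1}, secret, algorithm='HS256')
# PyJWT 1.x returns bytes, 2.x returns str -- normalize to str,
# since read_session_token calls token.encode('utf-8')
if isinstance(token, bytes):
    token = token.decode('utf-8')

payload = read_session_token(secret, token)
assert payload['user'] == 1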
AlecAivazis/graphql-over-kafka
nautilus/services/service.py
ServiceActionHandler.handle_action
async def handle_action(self, action_type, payload, **kwds):
        """
            The default action Handler has no action.
        """
        # if there is a service attached to the action handler
        if hasattr(self, 'service'):
            # handle roll calls
            await roll_call_handler(self.service, action_type, payload, **kwds)
python
async def handle_action(self, action_type, payload, **kwds):
        """
            The default action Handler has no action.
        """
        # if there is a service attached to the action handler
        if hasattr(self, 'service'):
            # handle roll calls
            await roll_call_handler(self.service, action_type, payload, **kwds)
[ "async", "def", "handle_action", "(", "self", ",", "action_type", ",", "payload", ",", "*", "*", "kwds", ")", ":", "# if there is a service attached to the action handler", "if", "hasattr", "(", "self", ",", "'service'", ")", ":", "# handle roll calls", "await", "roll_call_handler", "(", "self", ".", "service", ",", "action_type", ",", "payload", ",", "*", "*", "kwds", ")" ]
The default action Handler has no action.
[ "The", "default", "action", "Handler", "has", "no", "action", "." ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/services/service.py#L47-L54
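Subclasses that override handle_action generally still want the roll-call bookkeeping, so delegating to super() first is the natural pattern; the 'echo' action type below is made up for illustration:

class EchoActionHandler(ServiceActionHandler):
    async def handle_action(self, action_type, payload, **kwds):
        # keep the default roll-call behavior
        await super().handle_action(action_type, payload, **kwds)
        if action_type == 'echo':
            print(payload)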
AlecAivazis/graphql-over-kafka
nautilus/services/service.py
Service.announce
async def announce(self):
        """
            This method is used to announce the existence of the service
        """
        # send a serialized event
        await self.event_broker.send(
            action_type=intialize_service_action(),
            payload=json.dumps(self.summarize())
        )
python
async def announce(self):
        """
            This method is used to announce the existence of the service
        """
        # send a serialized event
        await self.event_broker.send(
            action_type=intialize_service_action(),
            payload=json.dumps(self.summarize())
        )
[ "async", "def", "announce", "(", "self", ")", ":", "# send a serialized event", "await", "self", ".", "event_broker", ".", "send", "(", "action_type", "=", "intialize_service_action", "(", ")", ",", "payload", "=", "json", ".", "dumps", "(", "self", ".", "summarize", "(", ")", ")", ")" ]
This method is used to announce the existence of the service
[ "This", "method", "is", "used", "to", "announce", "the", "existence", "of", "the", "service" ]
train
https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/services/service.py#L166-L174
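Since announce() is a coroutine, it has to be driven by an event loop; `service` below is assumed to be a fully constructed nautilus Service whose event broker is already connected:

import asyncio

asyncio.get_event_loop().run_until_complete(service.announce())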