partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
test
PickleParameter._store
Returns a dictionary for storage. Every element in the dictionary except for 'explored_data' is a pickle dump. Reusage of objects is identified over the object id, i.e. python's built-in id function. 'explored_data' contains the references to the objects to be able to recall the order of objects later on.
pypet/parameter.py
def _store(self):
    """Return a dictionary representation of this parameter for storage.

    Every entry except ``'explored_data'`` is a pickle dump. Duplicate
    objects in the exploration range are stored only once; reuse is
    detected via the built-in ``id()`` of each object. The
    ``'explored_data'`` table records which dump belongs to which
    position so the original order can be reconstructed on load.
    Locks the parameter as a side effect.
    """
    result = {}

    if self._data is not None:
        result['data'] = pickle.dumps(self._data, protocol=self.v_protocol)
        result[PickleParameter.PROTOCOL] = self.v_protocol

    if self.f_has_range():
        table = ObjectTable(columns=['idx'], index=list(range(len(self))))
        result['explored_data'] = table
        seen_ids = {}   # maps id(obj) -> name_id of the already stored dump
        next_id = 0
        for position, item in enumerate(self._explored_range):
            identity = id(item)
            if identity not in seen_ids:
                # First encounter of this object: dump it under a fresh name.
                result[self._build_name(next_id)] = \
                    pickle.dumps(item, protocol=self.v_protocol)
                seen_ids[identity] = next_id
                next_id += 1
            table['idx'][position] = seen_ids[identity]

    self._locked = True
    return result
def _store(self): """Returns a dictionary for storage. Every element in the dictionary except for 'explored_data' is a pickle dump. Reusage of objects is identified over the object id, i.e. python's built-in id function. 'explored_data' contains the references to the objects to be able to recall the order of objects later on. """ store_dict = {} if self._data is not None: dump = pickle.dumps(self._data, protocol=self.v_protocol) store_dict['data'] = dump store_dict[PickleParameter.PROTOCOL] = self.v_protocol if self.f_has_range(): store_dict['explored_data'] = \ ObjectTable(columns=['idx'], index=list(range(len(self)))) smart_dict = {} count = 0 for idx, val in enumerate(self._explored_range): obj_id = id(val) if obj_id in smart_dict: name_id = smart_dict[obj_id] add = False else: name_id = count add = True name = self._build_name(name_id) store_dict['explored_data']['idx'][idx] = name_id if add: store_dict[name] = pickle.dumps(val, protocol=self.v_protocol) smart_dict[obj_id] = name_id count += 1 self._locked = True return store_dict
[ "Returns", "a", "dictionary", "for", "storage", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L1737-L1784
[ "def", "_store", "(", "self", ")", ":", "store_dict", "=", "{", "}", "if", "self", ".", "_data", "is", "not", "None", ":", "dump", "=", "pickle", ".", "dumps", "(", "self", ".", "_data", ",", "protocol", "=", "self", ".", "v_protocol", ")", "store_dict", "[", "'data'", "]", "=", "dump", "store_dict", "[", "PickleParameter", ".", "PROTOCOL", "]", "=", "self", ".", "v_protocol", "if", "self", ".", "f_has_range", "(", ")", ":", "store_dict", "[", "'explored_data'", "]", "=", "ObjectTable", "(", "columns", "=", "[", "'idx'", "]", ",", "index", "=", "list", "(", "range", "(", "len", "(", "self", ")", ")", ")", ")", "smart_dict", "=", "{", "}", "count", "=", "0", "for", "idx", ",", "val", "in", "enumerate", "(", "self", ".", "_explored_range", ")", ":", "obj_id", "=", "id", "(", "val", ")", "if", "obj_id", "in", "smart_dict", ":", "name_id", "=", "smart_dict", "[", "obj_id", "]", "add", "=", "False", "else", ":", "name_id", "=", "count", "add", "=", "True", "name", "=", "self", ".", "_build_name", "(", "name_id", ")", "store_dict", "[", "'explored_data'", "]", "[", "'idx'", "]", "[", "idx", "]", "=", "name_id", "if", "add", ":", "store_dict", "[", "name", "]", "=", "pickle", ".", "dumps", "(", "val", ",", "protocol", "=", "self", ".", "v_protocol", ")", "smart_dict", "[", "obj_id", "]", "=", "name_id", "count", "+=", "1", "self", ".", "_locked", "=", "True", "return", "store_dict" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
PickleParameter._load
Reconstructs objects from the pickle dumps in `load_dict`. The 'explored_data' entry in `load_dict` is used to reconstruct the exploration range in the correct order. Sets the `v_protocol` property to the protocol used to store 'data'.
pypet/parameter.py
def _load(self, load_dict):
    """Reconstructs objects from the pickle dumps in `load_dict`.

    The 'explored_data' entry in `load_dict` is used to reconstruct
    the exploration range in the correct order.

    Sets the `v_protocol` property to the protocol used to store 'data'.

    :raises: pex.ParameterLockedException if the parameter is locked
    """
    if self.v_locked:
        raise pex.ParameterLockedException('Parameter `%s` is locked!' % self.v_full_name)

    dump = None
    if 'data' in load_dict:
        dump = load_dict['data']
        self._data = pickle.loads(dump)
    else:
        self._logger.warning('Your parameter `%s` is empty, '
                             'I did not find any data on disk.' % self.v_full_name)

    try:
        self.v_protocol = load_dict[PickleParameter.PROTOCOL]
    except KeyError:
        # For backwards compatibility with files stored before the PROTOCOL
        # entry existed: infer the protocol from the dump itself.
        # BUG FIX: guard against `dump` being undefined when neither 'data'
        # nor the PROTOCOL entry is present (previously raised NameError).
        if dump is not None:
            self.v_protocol = PickleParameter._get_protocol(dump)

    if 'explored_data' in load_dict:
        explore_table = load_dict['explored_data']
        name_col = explore_table['idx']
        explore_list = []
        for name_id in name_col:
            arrayname = self._build_name(name_id)
            explore_list.append(pickle.loads(load_dict[arrayname]))
        self._explored_range = explore_list
        self._explored = True

    self._default = self._data
    self._locked = True
def _load(self, load_dict): """Reconstructs objects from the pickle dumps in `load_dict`. The 'explored_data' entry in `load_dict` is used to reconstruct the exploration range in the correct order. Sets the `v_protocol` property to the protocol used to store 'data'. """ if self.v_locked: raise pex.ParameterLockedException('Parameter `%s` is locked!' % self.v_full_name) if 'data' in load_dict: dump = load_dict['data'] self._data = pickle.loads(dump) else: self._logger.warning('Your parameter `%s` is empty, ' 'I did not find any data on disk.' % self.v_full_name) try: self.v_protocol = load_dict[PickleParameter.PROTOCOL] except KeyError: # For backwards compatibility self.v_protocol = PickleParameter._get_protocol(dump) if 'explored_data' in load_dict: explore_table = load_dict['explored_data'] name_col = explore_table['idx'] explore_list = [] for name_id in name_col: arrayname = self._build_name(name_id) loaded = pickle.loads(load_dict[arrayname]) explore_list.append(loaded) self._explored_range = explore_list self._explored = True self._default = self._data self._locked = True
[ "Reconstructs", "objects", "from", "the", "pickle", "dumps", "in", "load_dict", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L1792-L1832
[ "def", "_load", "(", "self", ",", "load_dict", ")", ":", "if", "self", ".", "v_locked", ":", "raise", "pex", ".", "ParameterLockedException", "(", "'Parameter `%s` is locked!'", "%", "self", ".", "v_full_name", ")", "if", "'data'", "in", "load_dict", ":", "dump", "=", "load_dict", "[", "'data'", "]", "self", ".", "_data", "=", "pickle", ".", "loads", "(", "dump", ")", "else", ":", "self", ".", "_logger", ".", "warning", "(", "'Your parameter `%s` is empty, '", "'I did not find any data on disk.'", "%", "self", ".", "v_full_name", ")", "try", ":", "self", ".", "v_protocol", "=", "load_dict", "[", "PickleParameter", ".", "PROTOCOL", "]", "except", "KeyError", ":", "# For backwards compatibility", "self", ".", "v_protocol", "=", "PickleParameter", ".", "_get_protocol", "(", "dump", ")", "if", "'explored_data'", "in", "load_dict", ":", "explore_table", "=", "load_dict", "[", "'explored_data'", "]", "name_col", "=", "explore_table", "[", "'idx'", "]", "explore_list", "=", "[", "]", "for", "name_id", "in", "name_col", ":", "arrayname", "=", "self", ".", "_build_name", "(", "name_id", ")", "loaded", "=", "pickle", ".", "loads", "(", "load_dict", "[", "arrayname", "]", ")", "explore_list", ".", "append", "(", "loaded", ")", "self", ".", "_explored_range", "=", "explore_list", "self", ".", "_explored", "=", "True", "self", ".", "_default", "=", "self", ".", "_data", "self", ".", "_locked", "=", "True" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_translate_key
Translates integer indices into the appropriate names
pypet/parameter.py
def f_translate_key(self, key):
    """Translates integer indices into the appropriate names"""
    if not isinstance(key, int):
        return key
    # Index 0 maps to the bare result name, index N to `<name>_N`.
    return self.v_name if key == 0 else '%s_%d' % (self.v_name, key)
def f_translate_key(self, key): """Translates integer indices into the appropriate names""" if isinstance(key, int): if key == 0: key = self.v_name else: key = self.v_name + '_%d' % key return key
[ "Translates", "integer", "indices", "into", "the", "appropriate", "names" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L1983-L1990
[ "def", "f_translate_key", "(", "self", ",", "key", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "if", "key", "==", "0", ":", "key", "=", "self", ".", "v_name", "else", ":", "key", "=", "self", ".", "v_name", "+", "'_%d'", "%", "key", "return", "key" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_val_to_str
Summarizes data handled by the result as a string. Calls `__repr__` on all handled data. Data is NOT ordered. Truncates the string if it is longer than :const:`pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH` :return: string
pypet/parameter.py
def f_val_to_str(self):
    """Summarizes data handled by the result as a string.

    Calls `__repr__` on all handled data. Data is NOT ordered.

    Truncates the string if it is longer than
    :const:`pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH`

    :return: string
    """
    limit = pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH
    pieces = []
    accumulated = 0
    for key in self._data:
        piece = '%s=%s, ' % (key, repr(self._data[key]))
        pieces.append(piece)
        accumulated += len(piece)
        # Stop early once the summary already exceeds the limit.
        if accumulated > limit:
            break

    summary = ''.join(pieces)
    if len(summary) > limit:
        return summary[0:limit - 3] + '...'
    # Drop the trailing `, ` separator.
    return summary[0:-2]
def f_val_to_str(self): """Summarizes data handled by the result as a string. Calls `__repr__` on all handled data. Data is NOT ordered. Truncates the string if it is longer than :const:`pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH` :return: string """ resstrlist = [] strlen = 0 for key in self._data: val = self._data[key] resstr = '%s=%s, ' % (key, repr(val)) resstrlist.append(resstr) strlen += len(resstr) if strlen > pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH: break return_string = "".join(resstrlist) if len(return_string) > pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH: return_string =\ return_string[0:pypetconstants.HDF5_STRCOL_MAX_VALUE_LENGTH - 3] + '...' else: return_string = return_string[0:-2] # Delete the last `, ` return return_string
[ "Summarizes", "data", "handled", "by", "the", "result", "as", "a", "string", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L1996-L2027
[ "def", "f_val_to_str", "(", "self", ")", ":", "resstrlist", "=", "[", "]", "strlen", "=", "0", "for", "key", "in", "self", ".", "_data", ":", "val", "=", "self", ".", "_data", "[", "key", "]", "resstr", "=", "'%s=%s, '", "%", "(", "key", ",", "repr", "(", "val", ")", ")", "resstrlist", ".", "append", "(", "resstr", ")", "strlen", "+=", "len", "(", "resstr", ")", "if", "strlen", ">", "pypetconstants", ".", "HDF5_STRCOL_MAX_VALUE_LENGTH", ":", "break", "return_string", "=", "\"\"", ".", "join", "(", "resstrlist", ")", "if", "len", "(", "return_string", ")", ">", "pypetconstants", ".", "HDF5_STRCOL_MAX_VALUE_LENGTH", ":", "return_string", "=", "return_string", "[", "0", ":", "pypetconstants", ".", "HDF5_STRCOL_MAX_VALUE_LENGTH", "-", "3", "]", "+", "'...'", "else", ":", "return_string", "=", "return_string", "[", "0", ":", "-", "2", "]", "# Delete the last `, `", "return", "return_string" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_to_dict
Returns all handled data as a dictionary. :param copy: Whether the original dictionary or a shallow copy is returned. :return: Data dictionary
pypet/parameter.py
def f_to_dict(self, copy=True):
    """Returns all handled data as a dictionary.

    :param copy: Whether the original dictionary or a shallow copy is returned.

    :return: Data dictionary
    """
    return self._data.copy() if copy else self._data
def f_to_dict(self, copy=True): """Returns all handled data as a dictionary. :param copy: Whether the original dictionary or a shallow copy is returned. :return: Data dictionary """ if copy: return self._data.copy() else: return self._data
[ "Returns", "all", "handled", "data", "as", "a", "dictionary", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2051-L2064
[ "def", "f_to_dict", "(", "self", ",", "copy", "=", "True", ")", ":", "if", "copy", ":", "return", "self", ".", "_data", ".", "copy", "(", ")", "else", ":", "return", "self", ".", "_data" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_set
Method to put data into the result. :param args: The first positional argument is stored with the name of the result. Following arguments are stored with `name_X` where `X` is the position of the argument. :param kwargs: Arguments are stored with the key as name. :raises: TypeError if outer data structure is not understood. Example usage: >>> res = Result('supergroup.subgroup.myresult', comment='I am a neat example!') >>> res.f_set(333,42.0, mystring='String!') >>> res.f_get('myresult') 333 >>> res.f_get('myresult_1') 42.0 >>> res.f_get(1) 42.0 >>> res.f_get('mystring') 'String!'
pypet/parameter.py
def f_set(self, *args, **kwargs):
    """ Method to put data into the result.

    :param args:

        The first positional argument is stored with the name of the result.
        Following arguments are stored with `name_X` where `X` is the position
        of the argument.

    :param kwargs: Arguments are stored with the key as name.

    :raises: TypeError if outer data structure is not understood.

    Example usage:

    >>> res = Result('supergroup.subgroup.myresult', comment='I am a neat example!')
    >>> res.f_set(333,42.0, mystring='String!')
    >>> res.f_get('myresult')
    333
    >>> res.f_get('mystring')
    'String!'

    """
    if args and self.v_name is None:
        raise AttributeError('Cannot set positional value because I do not have a name!')

    # Positional arguments get auto-generated names based on their position.
    for position, value in enumerate(args):
        self.f_set_single(self.f_translate_key(position), value)

    # Keyword arguments are stored under their given names.
    for name, value in kwargs.items():
        self.f_set_single(name, value)
def f_set(self, *args, **kwargs): """ Method to put data into the result. :param args: The first positional argument is stored with the name of the result. Following arguments are stored with `name_X` where `X` is the position of the argument. :param kwargs: Arguments are stored with the key as name. :raises: TypeError if outer data structure is not understood. Example usage: >>> res = Result('supergroup.subgroup.myresult', comment='I am a neat example!') >>> res.f_set(333,42.0, mystring='String!') >>> res.f_get('myresult') 333 >>> res.f_get('myresult_1') 42.0 >>> res.f_get(1) 42.0 >>> res.f_get('mystring') 'String!' """ if args and self.v_name is None: raise AttributeError('Cannot set positional value because I do not have a name!') for idx, arg in enumerate(args): valstr = self.f_translate_key(idx) self.f_set_single(valstr, arg) for key, arg in kwargs.items(): self.f_set_single(key, arg)
[ "Method", "to", "put", "data", "into", "the", "result", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2078-L2112
[ "def", "f_set", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "args", "and", "self", ".", "v_name", "is", "None", ":", "raise", "AttributeError", "(", "'Cannot set positional value because I do not have a name!'", ")", "for", "idx", ",", "arg", "in", "enumerate", "(", "args", ")", ":", "valstr", "=", "self", ".", "f_translate_key", "(", "idx", ")", "self", ".", "f_set_single", "(", "valstr", ",", "arg", ")", "for", "key", ",", "arg", "in", "kwargs", ".", "items", "(", ")", ":", "self", ".", "f_set_single", "(", "key", ",", "arg", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_get
Returns items handled by the result. If only a single name is given, a single data item is returned. If several names are given, a list is returned. For integer inputs the result returns `resultname_X`. If the result contains only a single entry you can call `f_get()` without arguments. If you call `f_get()` and the result contains more than one element a ValueError is thrown. If the requested item(s) cannot be found an AttributeError is thrown. :param args: strings-names or integers :return: Single data item or tuple of data Example: >>> res = Result('supergroup.subgroup.myresult', comment='I am a neat example!' \ [1000,2000], {'a':'b','c':333}, hitchhiker='Arthur Dent') >>> res.f_get('hitchhiker') 'Arthur Dent' >>> res.f_get(0) [1000,2000] >>> res.f_get('hitchhiker', 'myresult') ('Arthur Dent', [1000,2000])
pypet/parameter.py
def f_get(self, *args):
    """Returns items handled by the result.

    If only a single name is given, a single data item is returned; for
    several names a list of items is returned. Integer inputs are
    translated to `resultname_X` names.

    If the result contains only a single entry, `f_get()` may be called
    without arguments. Calling `f_get()` on a result with more than one
    element raises a ValueError; requesting an unknown item raises an
    AttributeError.

    :param args: strings-names or integers

    :return: Single data item or list of data items
    """
    if not args:
        # No names given: only unambiguous if exactly one entry exists.
        if len(self._data) == 1:
            return list(self._data.values())[0]
        if len(self._data) > 1:
            raise ValueError('Your result `%s` contains more than one entry: '
                             '`%s` Please use >>f_get<< with one of these.' %
                             (self.v_full_name, str(list(self._data.keys()))))
        raise AttributeError('Your result `%s` is empty, cannot access data.' %
                             self.v_full_name)

    found = []
    for requested in args:
        key = self.f_translate_key(requested)
        if key not in self._data:
            # Legacy convenience: `data` resolves to the sole entry.
            if key == 'data' and len(self._data) == 1:
                return self._data[list(self._data.keys())[0]]
            raise AttributeError('`%s` is not part of your result `%s`.' %
                                 (key, self.v_full_name))
        found.append(self._data[key])

    return found[0] if len(args) == 1 else found
def f_get(self, *args): """Returns items handled by the result. If only a single name is given, a single data item is returned. If several names are given, a list is returned. For integer inputs the result returns `resultname_X`. If the result contains only a single entry you can call `f_get()` without arguments. If you call `f_get()` and the result contains more than one element a ValueError is thrown. If the requested item(s) cannot be found an AttributeError is thrown. :param args: strings-names or integers :return: Single data item or tuple of data Example: >>> res = Result('supergroup.subgroup.myresult', comment='I am a neat example!' \ [1000,2000], {'a':'b','c':333}, hitchhiker='Arthur Dent') >>> res.f_get('hitchhiker') 'Arthur Dent' >>> res.f_get(0) [1000,2000] >>> res.f_get('hitchhiker', 'myresult') ('Arthur Dent', [1000,2000]) """ if len(args) == 0: if len(self._data) == 1: return list(self._data.values())[0] elif len(self._data) > 1: raise ValueError('Your result `%s` contains more than one entry: ' '`%s` Please use >>f_get<< with one of these.' % (self.v_full_name, str(list(self._data.keys())))) else: raise AttributeError('Your result `%s` is empty, cannot access data.' % self.v_full_name) result_list = [] for name in args: name = self.f_translate_key(name) if not name in self._data: if name == 'data' and len(self._data) == 1: return self._data[list(self._data.keys())[0]] else: raise AttributeError('`%s` is not part of your result `%s`.' % (name, self.v_full_name)) result_list.append(self._data[name]) if len(args) == 1: return result_list[0] else: return result_list
[ "Returns", "items", "handled", "by", "the", "result", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2134-L2189
[ "def", "f_get", "(", "self", ",", "*", "args", ")", ":", "if", "len", "(", "args", ")", "==", "0", ":", "if", "len", "(", "self", ".", "_data", ")", "==", "1", ":", "return", "list", "(", "self", ".", "_data", ".", "values", "(", ")", ")", "[", "0", "]", "elif", "len", "(", "self", ".", "_data", ")", ">", "1", ":", "raise", "ValueError", "(", "'Your result `%s` contains more than one entry: '", "'`%s` Please use >>f_get<< with one of these.'", "%", "(", "self", ".", "v_full_name", ",", "str", "(", "list", "(", "self", ".", "_data", ".", "keys", "(", ")", ")", ")", ")", ")", "else", ":", "raise", "AttributeError", "(", "'Your result `%s` is empty, cannot access data.'", "%", "self", ".", "v_full_name", ")", "result_list", "=", "[", "]", "for", "name", "in", "args", ":", "name", "=", "self", ".", "f_translate_key", "(", "name", ")", "if", "not", "name", "in", "self", ".", "_data", ":", "if", "name", "==", "'data'", "and", "len", "(", "self", ".", "_data", ")", "==", "1", ":", "return", "self", ".", "_data", "[", "list", "(", "self", ".", "_data", ".", "keys", "(", ")", ")", "[", "0", "]", "]", "else", ":", "raise", "AttributeError", "(", "'`%s` is not part of your result `%s`.'", "%", "(", "name", ",", "self", ".", "v_full_name", ")", ")", "result_list", ".", "append", "(", "self", ".", "_data", "[", "name", "]", ")", "if", "len", "(", "args", ")", "==", "1", ":", "return", "result_list", "[", "0", "]", "else", ":", "return", "result_list" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_set_single
Sets a single data item of the result. Raises TypeError if the type of the outer data structure is not understood. Note that the type check is shallow. For example, if the data item is a list, the individual list elements are NOT checked whether their types are appropriate. :param name: The name of the data item :param item: The data item :raises: TypeError Example usage: >>> res.f_set_single('answer', 42) >>> res.f_get('answer') 42
pypet/parameter.py
def f_set_single(self, name, item):
    """Sets a single data item of the result.

    Raises TypeError if the type of the outer data structure is not
    understood. Note that the type check is shallow. For example, if the
    data item is a list, the individual list elements are NOT checked
    whether their types are appropriate.

    :param name: The name of the data item
    :param item: The data item
    :raises: TypeError

    Example usage:

    >>> res.f_set_single('answer', 42)
    >>> res.f_get('answer')
    42
    """
    if self.v_stored:
        # BUG FIX: corrected grammar of the warning message
        # ("If you not explicitly" -> "If you do not explicitly").
        self._logger.debug('You are changing an already stored result. If '
                           'you do not explicitly overwrite the data on disk, this change '
                           'might be lost and not propagated to disk.')

    if not self._supports(item):
        raise TypeError('Your result `%s` of type `%s` is not supported.' %
                        (name, str(type(item))))

    if name in self._data:
        self._logger.debug('Replacing `%s` in result `%s`.' %
                           (name, self.v_full_name))
    self._data[name] = item
def f_set_single(self, name, item): """Sets a single data item of the result. Raises TypeError if the type of the outer data structure is not understood. Note that the type check is shallow. For example, if the data item is a list, the individual list elements are NOT checked whether their types are appropriate. :param name: The name of the data item :param item: The data item :raises: TypeError Example usage: >>> res.f_set_single('answer', 42) >>> res.f_get('answer') 42 """ if self.v_stored: self._logger.debug('You are changing an already stored result. If ' 'you not explicitly overwrite the data on disk, this change ' 'might be lost and not propagated to disk.') if self._supports(item): # self._check_if_empty(item, name) # No longer needed if name in self._data: self._logger.debug('Replacing `%s` in result `%s`.' % (name, self.v_full_name)) self._data[name] = item else: raise TypeError('Your result `%s` of type `%s` is not supported.' % (name, str(type(item))))
[ "Sets", "a", "single", "data", "item", "of", "the", "result", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2191-L2227
[ "def", "f_set_single", "(", "self", ",", "name", ",", "item", ")", ":", "if", "self", ".", "v_stored", ":", "self", ".", "_logger", ".", "debug", "(", "'You are changing an already stored result. If '", "'you not explicitly overwrite the data on disk, this change '", "'might be lost and not propagated to disk.'", ")", "if", "self", ".", "_supports", "(", "item", ")", ":", "# self._check_if_empty(item, name) # No longer needed", "if", "name", "in", "self", ".", "_data", ":", "self", ".", "_logger", ".", "debug", "(", "'Replacing `%s` in result `%s`.'", "%", "(", "name", ",", "self", ".", "v_full_name", ")", ")", "self", ".", "_data", "[", "name", "]", "=", "item", "else", ":", "raise", "TypeError", "(", "'Your result `%s` of type `%s` is not supported.'", "%", "(", "name", ",", "str", "(", "type", "(", "item", ")", ")", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
Result.f_remove
Removes `*args` from the result
pypet/parameter.py
def f_remove(self, *args):
    """Removes `*args` from the result

    :raises: AttributeError if an item is not part of the result
    """
    for arg in args:
        arg = self.f_translate_key(arg)
        if arg not in self._data:
            # BUG FIX: the original referenced the non-existent attribute
            # `self.name_`, so a missing key crashed with a misleading
            # AttributeError instead of the intended message. Use
            # `v_full_name` as every sibling method does.
            raise AttributeError('Your result `%s` does not contain %s.' %
                                 (self.v_full_name, arg))
        del self._data[arg]
def f_remove(self, *args): """Removes `*args` from the result""" for arg in args: arg = self.f_translate_key(arg) if arg in self._data: del self._data[arg] else: raise AttributeError('Your result `%s` does not contain %s.' % (self.name_, arg))
[ "Removes", "*", "args", "from", "the", "result" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2255-L2262
[ "def", "f_remove", "(", "self", ",", "*", "args", ")", ":", "for", "arg", "in", "args", ":", "arg", "=", "self", ".", "f_translate_key", "(", "arg", ")", "if", "arg", "in", "self", ".", "_data", ":", "del", "self", ".", "_data", "[", "arg", "]", "else", ":", "raise", "AttributeError", "(", "'Your result `%s` does not contain %s.'", "%", "(", "self", ".", "name_", ",", "arg", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
SparseResult._supports
Supports everything of parent class and csr, csc, bsr, and dia sparse matrices.
pypet/parameter.py
def _supports(self, item):
    """Supports everything of parent class and csr, csc, bsr, and dia sparse matrices."""
    # Sparse matrices are accepted in addition to whatever the parent allows.
    if SparseParameter._is_supported_matrix(item):
        return True
    return super(SparseResult, self)._supports(item)
def _supports(self, item): """Supports everything of parent class and csr, csc, bsr, and dia sparse matrices.""" if SparseParameter._is_supported_matrix(item): return True else: return super(SparseResult, self)._supports(item)
[ "Supports", "everything", "of", "parent", "class", "and", "csr", "csc", "bsr", "and", "dia", "sparse", "matrices", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2337-L2342
[ "def", "_supports", "(", "self", ",", "item", ")", ":", "if", "SparseParameter", ".", "_is_supported_matrix", "(", "item", ")", ":", "return", "True", "else", ":", "return", "super", "(", "SparseResult", ",", "self", ")", ".", "_supports", "(", "item", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
SparseResult._store
Returns a storage dictionary understood by the storage service. Sparse matrices are extracted similar to the :class:`~pypet.parameter.SparseParameter` and marked with the identifier `__spsp__`.
pypet/parameter.py
def _store(self):
    """Returns a storage dictionary understood by the storage service.

    Sparse matrices are extracted similar to the
    :class:`~pypet.parameter.SparseParameter`
    and marked with the identifier `__spsp__`.
    """
    result = {}
    for key, value in self._data.items():
        if not SparseParameter._is_supported_matrix(value):
            result[key] = value
            continue

        parts, part_names, _ = SparseParameter._serialize_matrix(value)
        full_names = ['%s%s%s' % (key, SparseParameter.IDENTIFIER, part_name)
                      for part_name in part_names]
        # dia matrices serialize into exactly 4 parts; remember the format.
        result[key + SparseResult.IDENTIFIER + 'is_dia'] = int(len(full_names) == 4)
        for full_name, part in zip(full_names, parts):
            result[full_name] = part

    return result
def _store(self): """Returns a storage dictionary understood by the storage service. Sparse matrices are extracted similar to the :class:`~pypet.parameter.SparseParameter` and marked with the identifier `__spsp__`. """ store_dict = {} for key in self._data: val = self._data[key] if SparseParameter._is_supported_matrix(val): data_list, name_list, hash_tuple = SparseParameter._serialize_matrix(val) rename_list = ['%s%s%s' % (key, SparseParameter.IDENTIFIER, name) for name in name_list] is_dia = int(len(rename_list) == 4) store_dict[key + SparseResult.IDENTIFIER + 'is_dia'] = is_dia for idx, name in enumerate(rename_list): store_dict[name] = data_list[idx] else: store_dict[key] = val return store_dict
[ "Returns", "a", "storage", "dictionary", "understood", "by", "the", "storage", "service", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2352-L2377
[ "def", "_store", "(", "self", ")", ":", "store_dict", "=", "{", "}", "for", "key", "in", "self", ".", "_data", ":", "val", "=", "self", ".", "_data", "[", "key", "]", "if", "SparseParameter", ".", "_is_supported_matrix", "(", "val", ")", ":", "data_list", ",", "name_list", ",", "hash_tuple", "=", "SparseParameter", ".", "_serialize_matrix", "(", "val", ")", "rename_list", "=", "[", "'%s%s%s'", "%", "(", "key", ",", "SparseParameter", ".", "IDENTIFIER", ",", "name", ")", "for", "name", "in", "name_list", "]", "is_dia", "=", "int", "(", "len", "(", "rename_list", ")", "==", "4", ")", "store_dict", "[", "key", "+", "SparseResult", ".", "IDENTIFIER", "+", "'is_dia'", "]", "=", "is_dia", "for", "idx", ",", "name", "in", "enumerate", "(", "rename_list", ")", ":", "store_dict", "[", "name", "]", "=", "data_list", "[", "idx", "]", "else", ":", "store_dict", "[", "key", "]", "=", "val", "return", "store_dict" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
SparseResult._load
Loads data from `load_dict` Reconstruction of sparse matrices similar to the :class:`~pypet.parameter.SparseParameter`.
pypet/parameter.py
def _load(self, load_dict):
    """Loads data from `load_dict`

    Reconstruction of sparse matrices similar to the
    :class:`~pypet.parameter.SparseParameter`.
    """
    # Iterate over a snapshot of the keys because matrix parts are
    # popped from `load_dict` as they are consumed.
    for key in list(load_dict.keys()):
        if key not in load_dict:
            # Already consumed by a previous matrix reconstruction.
            continue
        if SparseResult.IDENTIFIER not in key:
            self._data[key] = load_dict[key]
            continue

        base = key.split(SparseResult.IDENTIFIER)[0]
        is_dia = load_dict.pop(base + SparseResult.IDENTIFIER + 'is_dia')
        part_keys = ['%s%s%s' % (base, SparseResult.IDENTIFIER, name)
                     for name in SparseParameter._get_name_list(is_dia)]
        parts = [load_dict.pop(part_key) for part_key in part_keys]
        self._data[base] = SparseParameter._reconstruct_matrix(parts)
def _load(self, load_dict): """Loads data from `load_dict` Reconstruction of sparse matrices similar to the :class:`~pypet.parameter.SparseParameter`. """ for key in list(load_dict.keys()): # We delete keys over time: if key in load_dict: if SparseResult.IDENTIFIER in key: new_key = key.split(SparseResult.IDENTIFIER)[0] is_dia = load_dict.pop(new_key + SparseResult.IDENTIFIER + 'is_dia') name_list = SparseParameter._get_name_list(is_dia) rename_list = ['%s%s%s' % (new_key, SparseResult.IDENTIFIER, name) for name in name_list] data_list = [load_dict.pop(name) for name in rename_list] matrix = SparseParameter._reconstruct_matrix(data_list) self._data[new_key] = matrix else: self._data[key] = load_dict[key]
[ "Loads", "data", "from", "load_dict" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2379-L2401
[ "def", "_load", "(", "self", ",", "load_dict", ")", ":", "for", "key", "in", "list", "(", "load_dict", ".", "keys", "(", ")", ")", ":", "# We delete keys over time:", "if", "key", "in", "load_dict", ":", "if", "SparseResult", ".", "IDENTIFIER", "in", "key", ":", "new_key", "=", "key", ".", "split", "(", "SparseResult", ".", "IDENTIFIER", ")", "[", "0", "]", "is_dia", "=", "load_dict", ".", "pop", "(", "new_key", "+", "SparseResult", ".", "IDENTIFIER", "+", "'is_dia'", ")", "name_list", "=", "SparseParameter", ".", "_get_name_list", "(", "is_dia", ")", "rename_list", "=", "[", "'%s%s%s'", "%", "(", "new_key", ",", "SparseResult", ".", "IDENTIFIER", ",", "name", ")", "for", "name", "in", "name_list", "]", "data_list", "=", "[", "load_dict", ".", "pop", "(", "name", ")", "for", "name", "in", "rename_list", "]", "matrix", "=", "SparseParameter", ".", "_reconstruct_matrix", "(", "data_list", ")", "self", ".", "_data", "[", "new_key", "]", "=", "matrix", "else", ":", "self", ".", "_data", "[", "key", "]", "=", "load_dict", "[", "key", "]" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
PickleResult.f_set_single
Adds a single data item to the pickle result. Note that it is NOT checked if the item can be pickled!
pypet/parameter.py
def f_set_single(self, name, item): """Adds a single data item to the pickle result. Note that it is NOT checked if the item can be pickled! """ if self.v_stored: self._logger.debug('You are changing an already stored result. If ' 'you not explicitly overwrite the data on disk, this change ' 'might be lost and not propagated to disk.') if name == PickleResult.PROTOCOL: raise AttributeError('You cannot name an entry `%s`' % PickleResult.PROTOCOL) self._data[name] = item
def f_set_single(self, name, item): """Adds a single data item to the pickle result. Note that it is NOT checked if the item can be pickled! """ if self.v_stored: self._logger.debug('You are changing an already stored result. If ' 'you not explicitly overwrite the data on disk, this change ' 'might be lost and not propagated to disk.') if name == PickleResult.PROTOCOL: raise AttributeError('You cannot name an entry `%s`' % PickleResult.PROTOCOL) self._data[name] = item
[ "Adds", "a", "single", "data", "item", "to", "the", "pickle", "result", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2448-L2463
[ "def", "f_set_single", "(", "self", ",", "name", ",", "item", ")", ":", "if", "self", ".", "v_stored", ":", "self", ".", "_logger", ".", "debug", "(", "'You are changing an already stored result. If '", "'you not explicitly overwrite the data on disk, this change '", "'might be lost and not propagated to disk.'", ")", "if", "name", "==", "PickleResult", ".", "PROTOCOL", ":", "raise", "AttributeError", "(", "'You cannot name an entry `%s`'", "%", "PickleResult", ".", "PROTOCOL", ")", "self", ".", "_data", "[", "name", "]", "=", "item" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
PickleResult._store
Returns a dictionary containing pickle dumps
pypet/parameter.py
def _store(self): """Returns a dictionary containing pickle dumps""" store_dict = {} for key, val in self._data.items(): store_dict[key] = pickle.dumps(val, protocol=self.v_protocol) store_dict[PickleResult.PROTOCOL] = self.v_protocol return store_dict
def _store(self): """Returns a dictionary containing pickle dumps""" store_dict = {} for key, val in self._data.items(): store_dict[key] = pickle.dumps(val, protocol=self.v_protocol) store_dict[PickleResult.PROTOCOL] = self.v_protocol return store_dict
[ "Returns", "a", "dictionary", "containing", "pickle", "dumps" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2466-L2472
[ "def", "_store", "(", "self", ")", ":", "store_dict", "=", "{", "}", "for", "key", ",", "val", "in", "self", ".", "_data", ".", "items", "(", ")", ":", "store_dict", "[", "key", "]", "=", "pickle", ".", "dumps", "(", "val", ",", "protocol", "=", "self", ".", "v_protocol", ")", "store_dict", "[", "PickleResult", ".", "PROTOCOL", "]", "=", "self", ".", "v_protocol", "return", "store_dict" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
PickleResult._load
Reconstructs all items from the pickle dumps in `load_dict`. Sets the `v_protocol` property to the protocol of the first reconstructed item.
pypet/parameter.py
def _load(self, load_dict): """Reconstructs all items from the pickle dumps in `load_dict`. Sets the `v_protocol` property to the protocol of the first reconstructed item. """ try: self.v_protocol = load_dict.pop(PickleParameter.PROTOCOL) except KeyError: # For backwards compatibility dump = next(load_dict.values()) self.v_protocol = PickleParameter._get_protocol(dump) for key in load_dict: val = load_dict[key] self._data[key] = pickle.loads(val)
def _load(self, load_dict): """Reconstructs all items from the pickle dumps in `load_dict`. Sets the `v_protocol` property to the protocol of the first reconstructed item. """ try: self.v_protocol = load_dict.pop(PickleParameter.PROTOCOL) except KeyError: # For backwards compatibility dump = next(load_dict.values()) self.v_protocol = PickleParameter._get_protocol(dump) for key in load_dict: val = load_dict[key] self._data[key] = pickle.loads(val)
[ "Reconstructs", "all", "items", "from", "the", "pickle", "dumps", "in", "load_dict", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/parameter.py#L2474-L2488
[ "def", "_load", "(", "self", ",", "load_dict", ")", ":", "try", ":", "self", ".", "v_protocol", "=", "load_dict", ".", "pop", "(", "PickleParameter", ".", "PROTOCOL", ")", "except", "KeyError", ":", "# For backwards compatibility", "dump", "=", "next", "(", "load_dict", ".", "values", "(", ")", ")", "self", ".", "v_protocol", "=", "PickleParameter", ".", "_get_protocol", "(", "dump", ")", "for", "key", "in", "load_dict", ":", "val", "=", "load_dict", "[", "key", "]", "self", ".", "_data", "[", "key", "]", "=", "pickle", ".", "loads", "(", "val", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
main
Simply merge all trajectories in the working directory
examples/example_22_saga_python/merge_trajs.py
def main(): """Simply merge all trajectories in the working directory""" folder = os.getcwd() print('Merging all files') merge_all_in_folder(folder, delete_other_files=True, # We will only keep one trajectory dynamic_imports=FunctionParameter, backup=False) print('Done')
def main(): """Simply merge all trajectories in the working directory""" folder = os.getcwd() print('Merging all files') merge_all_in_folder(folder, delete_other_files=True, # We will only keep one trajectory dynamic_imports=FunctionParameter, backup=False) print('Done')
[ "Simply", "merge", "all", "trajectories", "in", "the", "working", "directory" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/merge_trajs.py#L9-L17
[ "def", "main", "(", ")", ":", "folder", "=", "os", ".", "getcwd", "(", ")", "print", "(", "'Merging all files'", ")", "merge_all_in_folder", "(", "folder", ",", "delete_other_files", "=", "True", ",", "# We will only keep one trajectory", "dynamic_imports", "=", "FunctionParameter", ",", "backup", "=", "False", ")", "print", "(", "'Done'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
upload_file
Uploads a file
examples/example_22_saga_python/start_saga.py
def upload_file(filename, session): """ Uploads a file """ print('Uploading file %s' % filename) outfilesource = os.path.join(os.getcwd(), filename) outfiletarget = 'sftp://' + ADDRESS + WORKING_DIR out = saga.filesystem.File(outfilesource, session=session, flags=OVERWRITE) out.copy(outfiletarget) print('Transfer of `%s` to `%s` successful' % (filename, outfiletarget))
def upload_file(filename, session): """ Uploads a file """ print('Uploading file %s' % filename) outfilesource = os.path.join(os.getcwd(), filename) outfiletarget = 'sftp://' + ADDRESS + WORKING_DIR out = saga.filesystem.File(outfilesource, session=session, flags=OVERWRITE) out.copy(outfiletarget) print('Transfer of `%s` to `%s` successful' % (filename, outfiletarget))
[ "Uploads", "a", "file" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/start_saga.py#L25-L32
[ "def", "upload_file", "(", "filename", ",", "session", ")", ":", "print", "(", "'Uploading file %s'", "%", "filename", ")", "outfilesource", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "filename", ")", "outfiletarget", "=", "'sftp://'", "+", "ADDRESS", "+", "WORKING_DIR", "out", "=", "saga", ".", "filesystem", ".", "File", "(", "outfilesource", ",", "session", "=", "session", ",", "flags", "=", "OVERWRITE", ")", "out", ".", "copy", "(", "outfiletarget", ")", "print", "(", "'Transfer of `%s` to `%s` successful'", "%", "(", "filename", ",", "outfiletarget", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
download_file
Downloads a file
examples/example_22_saga_python/start_saga.py
def download_file(filename, session): """ Downloads a file """ print('Downloading file %s' % filename) infilesource = os.path.join('sftp://' + ADDRESS + WORKING_DIR, filename) infiletarget = os.path.join(os.getcwd(), filename) incoming = saga.filesystem.File(infilesource, session=session, flags=OVERWRITE) incoming.copy(infiletarget) print('Transfer of `%s` to `%s` successful' % (filename, infiletarget))
def download_file(filename, session): """ Downloads a file """ print('Downloading file %s' % filename) infilesource = os.path.join('sftp://' + ADDRESS + WORKING_DIR, filename) infiletarget = os.path.join(os.getcwd(), filename) incoming = saga.filesystem.File(infilesource, session=session, flags=OVERWRITE) incoming.copy(infiletarget) print('Transfer of `%s` to `%s` successful' % (filename, infiletarget))
[ "Downloads", "a", "file" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/start_saga.py#L35-L43
[ "def", "download_file", "(", "filename", ",", "session", ")", ":", "print", "(", "'Downloading file %s'", "%", "filename", ")", "infilesource", "=", "os", ".", "path", ".", "join", "(", "'sftp://'", "+", "ADDRESS", "+", "WORKING_DIR", ",", "filename", ")", "infiletarget", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "filename", ")", "incoming", "=", "saga", ".", "filesystem", ".", "File", "(", "infilesource", ",", "session", "=", "session", ",", "flags", "=", "OVERWRITE", ")", "incoming", ".", "copy", "(", "infiletarget", ")", "print", "(", "'Transfer of `%s` to `%s` successful'", "%", "(", "filename", ",", "infiletarget", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
create_session
Creates and returns a new SAGA session
examples/example_22_saga_python/start_saga.py
def create_session(): """ Creates and returns a new SAGA session """ ctx = saga.Context("UserPass") ctx.user_id = USER ctx.user_pass = PASSWORD session = saga.Session() session.add_context(ctx) return session
def create_session(): """ Creates and returns a new SAGA session """ ctx = saga.Context("UserPass") ctx.user_id = USER ctx.user_pass = PASSWORD session = saga.Session() session.add_context(ctx) return session
[ "Creates", "and", "returns", "a", "new", "SAGA", "session" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/start_saga.py#L46-L55
[ "def", "create_session", "(", ")", ":", "ctx", "=", "saga", ".", "Context", "(", "\"UserPass\"", ")", "ctx", ".", "user_id", "=", "USER", "ctx", ".", "user_pass", "=", "PASSWORD", "session", "=", "saga", ".", "Session", "(", ")", "session", ".", "add_context", "(", "ctx", ")", "return", "session" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
merge_trajectories
Merges all trajectories found in the working directory
examples/example_22_saga_python/start_saga.py
def merge_trajectories(session): """ Merges all trajectories found in the working directory """ jd = saga.job.Description() jd.executable = 'python' jd.arguments = ['merge_trajs.py'] jd.output = "mysagajob_merge.stdout" jd.error = "mysagajob_merge.stderr" jd.working_directory = WORKING_DIR js = saga.job.Service('ssh://' + ADDRESS, session=session) myjob = js.create_job(jd) print("\n...starting job...\n") # Now we can start our job. myjob.run() print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...waiting for job...\n") # wait for the job to either finish or fail myjob.wait() print("Job State : %s" % (myjob.state)) print("Exitcode : %s" % (myjob.exit_code))
def merge_trajectories(session): """ Merges all trajectories found in the working directory """ jd = saga.job.Description() jd.executable = 'python' jd.arguments = ['merge_trajs.py'] jd.output = "mysagajob_merge.stdout" jd.error = "mysagajob_merge.stderr" jd.working_directory = WORKING_DIR js = saga.job.Service('ssh://' + ADDRESS, session=session) myjob = js.create_job(jd) print("\n...starting job...\n") # Now we can start our job. myjob.run() print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...waiting for job...\n") # wait for the job to either finish or fail myjob.wait() print("Job State : %s" % (myjob.state)) print("Exitcode : %s" % (myjob.exit_code))
[ "Merges", "all", "trajectories", "found", "in", "the", "working", "directory" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/start_saga.py#L58-L82
[ "def", "merge_trajectories", "(", "session", ")", ":", "jd", "=", "saga", ".", "job", ".", "Description", "(", ")", "jd", ".", "executable", "=", "'python'", "jd", ".", "arguments", "=", "[", "'merge_trajs.py'", "]", "jd", ".", "output", "=", "\"mysagajob_merge.stdout\"", "jd", ".", "error", "=", "\"mysagajob_merge.stderr\"", "jd", ".", "working_directory", "=", "WORKING_DIR", "js", "=", "saga", ".", "job", ".", "Service", "(", "'ssh://'", "+", "ADDRESS", ",", "session", "=", "session", ")", "myjob", "=", "js", ".", "create_job", "(", "jd", ")", "print", "(", "\"\\n...starting job...\\n\"", ")", "# Now we can start our job.", "myjob", ".", "run", "(", ")", "print", "(", "\"Job ID : %s\"", "%", "(", "myjob", ".", "id", ")", ")", "print", "(", "\"Job State : %s\"", "%", "(", "myjob", ".", "state", ")", ")", "print", "(", "\"\\n...waiting for job...\\n\"", ")", "# wait for the job to either finish or fail", "myjob", ".", "wait", "(", ")", "print", "(", "\"Job State : %s\"", "%", "(", "myjob", ".", "state", ")", ")", "print", "(", "\"Exitcode : %s\"", "%", "(", "myjob", ".", "exit_code", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
start_jobs
Starts all jobs and runs `the_task.py` in batches.
examples/example_22_saga_python/start_saga.py
def start_jobs(session): """ Starts all jobs and runs `the_task.py` in batches. """ js = saga.job.Service('ssh://' + ADDRESS, session=session) batches = range(3) jobs = [] for batch in batches: print('Starting batch %d' % batch) jd = saga.job.Description() jd.executable = 'python' jd.arguments = ['the_task.py --batch=' + str(batch)] jd.output = "mysagajob.stdout" + str(batch) jd.error = "mysagajob.stderr" + str(batch) jd.working_directory = WORKING_DIR myjob = js.create_job(jd) print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...starting job...\n") myjob.run() jobs.append(myjob) for myjob in jobs: print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...waiting for job...\n") # wait for the job to either finish or fail myjob.wait() print("Job State : %s" % (myjob.state)) print("Exitcode : %s" % (myjob.exit_code))
def start_jobs(session): """ Starts all jobs and runs `the_task.py` in batches. """ js = saga.job.Service('ssh://' + ADDRESS, session=session) batches = range(3) jobs = [] for batch in batches: print('Starting batch %d' % batch) jd = saga.job.Description() jd.executable = 'python' jd.arguments = ['the_task.py --batch=' + str(batch)] jd.output = "mysagajob.stdout" + str(batch) jd.error = "mysagajob.stderr" + str(batch) jd.working_directory = WORKING_DIR myjob = js.create_job(jd) print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...starting job...\n") myjob.run() jobs.append(myjob) for myjob in jobs: print("Job ID : %s" % (myjob.id)) print("Job State : %s" % (myjob.state)) print("\n...waiting for job...\n") # wait for the job to either finish or fail myjob.wait() print("Job State : %s" % (myjob.state)) print("Exitcode : %s" % (myjob.exit_code))
[ "Starts", "all", "jobs", "and", "runs", "the_task", ".", "py", "in", "batches", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_22_saga_python/start_saga.py#L85-L123
[ "def", "start_jobs", "(", "session", ")", ":", "js", "=", "saga", ".", "job", ".", "Service", "(", "'ssh://'", "+", "ADDRESS", ",", "session", "=", "session", ")", "batches", "=", "range", "(", "3", ")", "jobs", "=", "[", "]", "for", "batch", "in", "batches", ":", "print", "(", "'Starting batch %d'", "%", "batch", ")", "jd", "=", "saga", ".", "job", ".", "Description", "(", ")", "jd", ".", "executable", "=", "'python'", "jd", ".", "arguments", "=", "[", "'the_task.py --batch='", "+", "str", "(", "batch", ")", "]", "jd", ".", "output", "=", "\"mysagajob.stdout\"", "+", "str", "(", "batch", ")", "jd", ".", "error", "=", "\"mysagajob.stderr\"", "+", "str", "(", "batch", ")", "jd", ".", "working_directory", "=", "WORKING_DIR", "myjob", "=", "js", ".", "create_job", "(", "jd", ")", "print", "(", "\"Job ID : %s\"", "%", "(", "myjob", ".", "id", ")", ")", "print", "(", "\"Job State : %s\"", "%", "(", "myjob", ".", "state", ")", ")", "print", "(", "\"\\n...starting job...\\n\"", ")", "myjob", ".", "run", "(", ")", "jobs", ".", "append", "(", "myjob", ")", "for", "myjob", "in", "jobs", ":", "print", "(", "\"Job ID : %s\"", "%", "(", "myjob", ".", "id", ")", ")", "print", "(", "\"Job State : %s\"", "%", "(", "myjob", ".", "state", ")", ")", "print", "(", "\"\\n...waiting for job...\\n\"", ")", "# wait for the job to either finish or fail", "myjob", ".", "wait", "(", ")", "print", "(", "\"Job State : %s\"", "%", "(", "myjob", ".", "state", ")", ")", "print", "(", "\"Exitcode : %s\"", "%", "(", "myjob", ".", "exit_code", ")", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
multiply
Sophisticated simulation of multiplication
examples/example_21_scoop_multiprocessing.py
def multiply(traj): """Sophisticated simulation of multiplication""" z=traj.x*traj.y traj.f_add_result('z',z=z, comment='I am the product of two reals!')
def multiply(traj): """Sophisticated simulation of multiplication""" z=traj.x*traj.y traj.f_add_result('z',z=z, comment='I am the product of two reals!')
[ "Sophisticated", "simulation", "of", "multiplication" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_21_scoop_multiprocessing.py#L16-L19
[ "def", "multiply", "(", "traj", ")", ":", "z", "=", "traj", ".", "x", "*", "traj", ".", "y", "traj", ".", "f_add_result", "(", "'z'", ",", "z", "=", "z", ",", "comment", "=", "'I am the product of two reals!'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
main
Main function to protect the *entry point* of the program. If you want to use multiprocessing with SCOOP you need to wrap your main code creating an environment into a function. Otherwise the newly started child processes will re-execute the code and throw errors (also see http://scoop.readthedocs.org/en/latest/usage.html#pitfalls).
examples/example_21_scoop_multiprocessing.py
def main(): """Main function to protect the *entry point* of the program. If you want to use multiprocessing with SCOOP you need to wrap your main code creating an environment into a function. Otherwise the newly started child processes will re-execute the code and throw errors (also see http://scoop.readthedocs.org/en/latest/usage.html#pitfalls). """ # Create an environment that handles running. # Let's enable multiprocessing with scoop: filename = os.path.join('hdf5', 'example_21.hdf5') env = Environment(trajectory='Example_21_SCOOP', filename=filename, file_title='Example_21_SCOOP', log_stdout=True, comment='Multiprocessing example using SCOOP!', multiproc=True, freeze_input=True, # We want to save overhead and freeze input use_scoop=True, # Yes we want SCOOP! wrap_mode=pypetconstants.WRAP_MODE_LOCAL, # SCOOP only works with 'LOCAL' # or 'NETLOCK' wrapping overwrite_file=True) # Get the trajectory from the environment traj = env.trajectory # Add both parameters traj.f_add_parameter('x', 1.0, comment='I am the first dimension!') traj.f_add_parameter('y', 1.0, comment='I am the second dimension!') # Explore the parameters with a cartesian product, but we want to explore a bit more traj.f_explore(cartesian_product({'x':[float(x) for x in range(20)], 'y':[float(y) for y in range(20)]})) # Run the simulation env.run(multiply) # Let's check that all runs are completed! assert traj.f_is_completed() # Finally disable logging and close all log-files env.disable_logging()
def main(): """Main function to protect the *entry point* of the program. If you want to use multiprocessing with SCOOP you need to wrap your main code creating an environment into a function. Otherwise the newly started child processes will re-execute the code and throw errors (also see http://scoop.readthedocs.org/en/latest/usage.html#pitfalls). """ # Create an environment that handles running. # Let's enable multiprocessing with scoop: filename = os.path.join('hdf5', 'example_21.hdf5') env = Environment(trajectory='Example_21_SCOOP', filename=filename, file_title='Example_21_SCOOP', log_stdout=True, comment='Multiprocessing example using SCOOP!', multiproc=True, freeze_input=True, # We want to save overhead and freeze input use_scoop=True, # Yes we want SCOOP! wrap_mode=pypetconstants.WRAP_MODE_LOCAL, # SCOOP only works with 'LOCAL' # or 'NETLOCK' wrapping overwrite_file=True) # Get the trajectory from the environment traj = env.trajectory # Add both parameters traj.f_add_parameter('x', 1.0, comment='I am the first dimension!') traj.f_add_parameter('y', 1.0, comment='I am the second dimension!') # Explore the parameters with a cartesian product, but we want to explore a bit more traj.f_explore(cartesian_product({'x':[float(x) for x in range(20)], 'y':[float(y) for y in range(20)]})) # Run the simulation env.run(multiply) # Let's check that all runs are completed! assert traj.f_is_completed() # Finally disable logging and close all log-files env.disable_logging()
[ "Main", "function", "to", "protect", "the", "*", "entry", "point", "*", "of", "the", "program", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_21_scoop_multiprocessing.py#L22-L64
[ "def", "main", "(", ")", ":", "# Create an environment that handles running.", "# Let's enable multiprocessing with scoop:", "filename", "=", "os", ".", "path", ".", "join", "(", "'hdf5'", ",", "'example_21.hdf5'", ")", "env", "=", "Environment", "(", "trajectory", "=", "'Example_21_SCOOP'", ",", "filename", "=", "filename", ",", "file_title", "=", "'Example_21_SCOOP'", ",", "log_stdout", "=", "True", ",", "comment", "=", "'Multiprocessing example using SCOOP!'", ",", "multiproc", "=", "True", ",", "freeze_input", "=", "True", ",", "# We want to save overhead and freeze input", "use_scoop", "=", "True", ",", "# Yes we want SCOOP!", "wrap_mode", "=", "pypetconstants", ".", "WRAP_MODE_LOCAL", ",", "# SCOOP only works with 'LOCAL'", "# or 'NETLOCK' wrapping", "overwrite_file", "=", "True", ")", "# Get the trajectory from the environment", "traj", "=", "env", ".", "trajectory", "# Add both parameters", "traj", ".", "f_add_parameter", "(", "'x'", ",", "1.0", ",", "comment", "=", "'I am the first dimension!'", ")", "traj", ".", "f_add_parameter", "(", "'y'", ",", "1.0", ",", "comment", "=", "'I am the second dimension!'", ")", "# Explore the parameters with a cartesian product, but we want to explore a bit more", "traj", ".", "f_explore", "(", "cartesian_product", "(", "{", "'x'", ":", "[", "float", "(", "x", ")", "for", "x", "in", "range", "(", "20", ")", "]", ",", "'y'", ":", "[", "float", "(", "y", ")", "for", "y", "in", "range", "(", "20", ")", "]", "}", ")", ")", "# Run the simulation", "env", ".", "run", "(", "multiply", ")", "# Let's check that all runs are completed!", "assert", "traj", ".", "f_is_completed", "(", ")", "# Finally disable logging and close all log-files", "env", ".", "disable_logging", "(", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
run_neuron
Runs a simulation of a model neuron. :param traj: Container with all parameters. :return: An estimate of the firing rate of the neuron
examples/example_13_post_processing/main.py
def run_neuron(traj): """Runs a simulation of a model neuron. :param traj: Container with all parameters. :return: An estimate of the firing rate of the neuron """ # Extract all parameters from `traj` V_init = traj.par.neuron.V_init I = traj.par.neuron.I tau_V = traj.par.neuron.tau_V tau_ref = traj.par.neuron.tau_ref dt = traj.par.simulation.dt duration = traj.par.simulation.duration steps = int(duration / float(dt)) # Create some containers for the Euler integration V_array = np.zeros(steps) V_array[0] = V_init spiketimes = [] # List to collect all times of action potentials # Do the Euler integration: print('Starting Euler Integration') for step in range(1, steps): if V_array[step-1] >= 1: # The membrane potential crossed the threshold and we mark this as # an action potential V_array[step] = 0 spiketimes.append((step-1)*dt) elif spiketimes and step * dt - spiketimes[-1] <= tau_ref: # We are in the refractory period, so we simply clamp the voltage # to 0 V_array[step] = 0 else: # Euler Integration step: dV = -1/tau_V * V_array[step-1] + I V_array[step] = V_array[step-1] + dV*dt print('Finished Euler Integration') # Add the voltage trace and spike times traj.f_add_result('neuron.$', V=V_array, nspikes=len(spiketimes), comment='Contains the development of the membrane potential over time ' 'as well as the number of spikes.') # This result will be renamed to `traj.results.neuron.run_XXXXXXXX`. # And finally we return the estimate of the firing rate return len(spiketimes) / float(traj.par.simulation.duration) *1000
def run_neuron(traj): """Runs a simulation of a model neuron. :param traj: Container with all parameters. :return: An estimate of the firing rate of the neuron """ # Extract all parameters from `traj` V_init = traj.par.neuron.V_init I = traj.par.neuron.I tau_V = traj.par.neuron.tau_V tau_ref = traj.par.neuron.tau_ref dt = traj.par.simulation.dt duration = traj.par.simulation.duration steps = int(duration / float(dt)) # Create some containers for the Euler integration V_array = np.zeros(steps) V_array[0] = V_init spiketimes = [] # List to collect all times of action potentials # Do the Euler integration: print('Starting Euler Integration') for step in range(1, steps): if V_array[step-1] >= 1: # The membrane potential crossed the threshold and we mark this as # an action potential V_array[step] = 0 spiketimes.append((step-1)*dt) elif spiketimes and step * dt - spiketimes[-1] <= tau_ref: # We are in the refractory period, so we simply clamp the voltage # to 0 V_array[step] = 0 else: # Euler Integration step: dV = -1/tau_V * V_array[step-1] + I V_array[step] = V_array[step-1] + dV*dt print('Finished Euler Integration') # Add the voltage trace and spike times traj.f_add_result('neuron.$', V=V_array, nspikes=len(spiketimes), comment='Contains the development of the membrane potential over time ' 'as well as the number of spikes.') # This result will be renamed to `traj.results.neuron.run_XXXXXXXX`. # And finally we return the estimate of the firing rate return len(spiketimes) / float(traj.par.simulation.duration) *1000
[ "Runs", "a", "simulation", "of", "a", "model", "neuron", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_13_post_processing/main.py#L11-L64
[ "def", "run_neuron", "(", "traj", ")", ":", "# Extract all parameters from `traj`", "V_init", "=", "traj", ".", "par", ".", "neuron", ".", "V_init", "I", "=", "traj", ".", "par", ".", "neuron", ".", "I", "tau_V", "=", "traj", ".", "par", ".", "neuron", ".", "tau_V", "tau_ref", "=", "traj", ".", "par", ".", "neuron", ".", "tau_ref", "dt", "=", "traj", ".", "par", ".", "simulation", ".", "dt", "duration", "=", "traj", ".", "par", ".", "simulation", ".", "duration", "steps", "=", "int", "(", "duration", "/", "float", "(", "dt", ")", ")", "# Create some containers for the Euler integration", "V_array", "=", "np", ".", "zeros", "(", "steps", ")", "V_array", "[", "0", "]", "=", "V_init", "spiketimes", "=", "[", "]", "# List to collect all times of action potentials", "# Do the Euler integration:", "print", "(", "'Starting Euler Integration'", ")", "for", "step", "in", "range", "(", "1", ",", "steps", ")", ":", "if", "V_array", "[", "step", "-", "1", "]", ">=", "1", ":", "# The membrane potential crossed the threshold and we mark this as", "# an action potential", "V_array", "[", "step", "]", "=", "0", "spiketimes", ".", "append", "(", "(", "step", "-", "1", ")", "*", "dt", ")", "elif", "spiketimes", "and", "step", "*", "dt", "-", "spiketimes", "[", "-", "1", "]", "<=", "tau_ref", ":", "# We are in the refractory period, so we simply clamp the voltage", "# to 0", "V_array", "[", "step", "]", "=", "0", "else", ":", "# Euler Integration step:", "dV", "=", "-", "1", "/", "tau_V", "*", "V_array", "[", "step", "-", "1", "]", "+", "I", "V_array", "[", "step", "]", "=", "V_array", "[", "step", "-", "1", "]", "+", "dV", "*", "dt", "print", "(", "'Finished Euler Integration'", ")", "# Add the voltage trace and spike times", "traj", ".", "f_add_result", "(", "'neuron.$'", ",", "V", "=", "V_array", ",", "nspikes", "=", "len", "(", "spiketimes", ")", ",", "comment", "=", "'Contains the development of the membrane potential over time '", "'as well as the number of spikes.'", ")", "# 
This result will be renamed to `traj.results.neuron.run_XXXXXXXX`.", "# And finally we return the estimate of the firing rate", "return", "len", "(", "spiketimes", ")", "/", "float", "(", "traj", ".", "par", ".", "simulation", ".", "duration", ")", "*", "1000" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
neuron_postproc
Postprocessing, sorts computed firing rates into a table :param traj: Container for results and parameters :param result_list: List of tuples, where first entry is the run index and second is the actual result of the corresponding run. :return:
examples/example_13_post_processing/main.py
def neuron_postproc(traj, result_list): """Postprocessing, sorts computed firing rates into a table :param traj: Container for results and parameters :param result_list: List of tuples, where first entry is the run index and second is the actual result of the corresponding run. :return: """ # Let's create a pandas DataFrame to sort the computed firing rate according to the # parameters. We could have also used a 2D numpy array. # But a pandas DataFrame has the advantage that we can index into directly with # the parameter values without translating these into integer indices. I_range = traj.par.neuron.f_get('I').f_get_range() ref_range = traj.par.neuron.f_get('tau_ref').f_get_range() I_index = sorted(set(I_range)) ref_index = sorted(set(ref_range)) rates_frame = pd.DataFrame(columns=ref_index, index=I_index) # This frame is basically a two dimensional table that we can index with our # parameters # Now iterate over the results. The result list is a list of tuples, with the # run index at first position and our result at the second for result_tuple in result_list: run_idx = result_tuple[0] firing_rates = result_tuple[1] I_val = I_range[run_idx] ref_val = ref_range[run_idx] rates_frame.loc[I_val, ref_val] = firing_rates # Put the firing rate into the # data frame # Finally we going to store our new firing rate table into the trajectory traj.f_add_result('summary.firing_rates', rates_frame=rates_frame, comment='Contains a pandas data frame with all firing rates.')
def neuron_postproc(traj, result_list): """Postprocessing, sorts computed firing rates into a table :param traj: Container for results and parameters :param result_list: List of tuples, where first entry is the run index and second is the actual result of the corresponding run. :return: """ # Let's create a pandas DataFrame to sort the computed firing rate according to the # parameters. We could have also used a 2D numpy array. # But a pandas DataFrame has the advantage that we can index into directly with # the parameter values without translating these into integer indices. I_range = traj.par.neuron.f_get('I').f_get_range() ref_range = traj.par.neuron.f_get('tau_ref').f_get_range() I_index = sorted(set(I_range)) ref_index = sorted(set(ref_range)) rates_frame = pd.DataFrame(columns=ref_index, index=I_index) # This frame is basically a two dimensional table that we can index with our # parameters # Now iterate over the results. The result list is a list of tuples, with the # run index at first position and our result at the second for result_tuple in result_list: run_idx = result_tuple[0] firing_rates = result_tuple[1] I_val = I_range[run_idx] ref_val = ref_range[run_idx] rates_frame.loc[I_val, ref_val] = firing_rates # Put the firing rate into the # data frame # Finally we going to store our new firing rate table into the trajectory traj.f_add_result('summary.firing_rates', rates_frame=rates_frame, comment='Contains a pandas data frame with all firing rates.')
[ "Postprocessing", "sorts", "computed", "firing", "rates", "into", "a", "table" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_13_post_processing/main.py#L68-L108
[ "def", "neuron_postproc", "(", "traj", ",", "result_list", ")", ":", "# Let's create a pandas DataFrame to sort the computed firing rate according to the", "# parameters. We could have also used a 2D numpy array.", "# But a pandas DataFrame has the advantage that we can index into directly with", "# the parameter values without translating these into integer indices.", "I_range", "=", "traj", ".", "par", ".", "neuron", ".", "f_get", "(", "'I'", ")", ".", "f_get_range", "(", ")", "ref_range", "=", "traj", ".", "par", ".", "neuron", ".", "f_get", "(", "'tau_ref'", ")", ".", "f_get_range", "(", ")", "I_index", "=", "sorted", "(", "set", "(", "I_range", ")", ")", "ref_index", "=", "sorted", "(", "set", "(", "ref_range", ")", ")", "rates_frame", "=", "pd", ".", "DataFrame", "(", "columns", "=", "ref_index", ",", "index", "=", "I_index", ")", "# This frame is basically a two dimensional table that we can index with our", "# parameters", "# Now iterate over the results. The result list is a list of tuples, with the", "# run index at first position and our result at the second", "for", "result_tuple", "in", "result_list", ":", "run_idx", "=", "result_tuple", "[", "0", "]", "firing_rates", "=", "result_tuple", "[", "1", "]", "I_val", "=", "I_range", "[", "run_idx", "]", "ref_val", "=", "ref_range", "[", "run_idx", "]", "rates_frame", ".", "loc", "[", "I_val", ",", "ref_val", "]", "=", "firing_rates", "# Put the firing rate into the", "# data frame", "# Finally we going to store our new firing rate table into the trajectory", "traj", ".", "f_add_result", "(", "'summary.firing_rates'", ",", "rates_frame", "=", "rates_frame", ",", "comment", "=", "'Contains a pandas data frame with all firing rates.'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
add_parameters
Adds all parameters to `traj`
examples/example_13_post_processing/main.py
def add_parameters(traj): """Adds all parameters to `traj`""" print('Adding Parameters') traj.f_add_parameter('neuron.V_init', 0.0, comment='The initial condition for the ' 'membrane potential') traj.f_add_parameter('neuron.I', 0.0, comment='The externally applied current.') traj.f_add_parameter('neuron.tau_V', 10.0, comment='The membrane time constant in milliseconds') traj.f_add_parameter('neuron.tau_ref', 5.0, comment='The refractory period in milliseconds ' 'where the membrane potnetial ' 'is clamped.') traj.f_add_parameter('simulation.duration', 1000.0, comment='The duration of the experiment in ' 'milliseconds.') traj.f_add_parameter('simulation.dt', 0.1, comment='The step size of an Euler integration step.')
def add_parameters(traj): """Adds all parameters to `traj`""" print('Adding Parameters') traj.f_add_parameter('neuron.V_init', 0.0, comment='The initial condition for the ' 'membrane potential') traj.f_add_parameter('neuron.I', 0.0, comment='The externally applied current.') traj.f_add_parameter('neuron.tau_V', 10.0, comment='The membrane time constant in milliseconds') traj.f_add_parameter('neuron.tau_ref', 5.0, comment='The refractory period in milliseconds ' 'where the membrane potnetial ' 'is clamped.') traj.f_add_parameter('simulation.duration', 1000.0, comment='The duration of the experiment in ' 'milliseconds.') traj.f_add_parameter('simulation.dt', 0.1, comment='The step size of an Euler integration step.')
[ "Adds", "all", "parameters", "to", "traj" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_13_post_processing/main.py#L111-L131
[ "def", "add_parameters", "(", "traj", ")", ":", "print", "(", "'Adding Parameters'", ")", "traj", ".", "f_add_parameter", "(", "'neuron.V_init'", ",", "0.0", ",", "comment", "=", "'The initial condition for the '", "'membrane potential'", ")", "traj", ".", "f_add_parameter", "(", "'neuron.I'", ",", "0.0", ",", "comment", "=", "'The externally applied current.'", ")", "traj", ".", "f_add_parameter", "(", "'neuron.tau_V'", ",", "10.0", ",", "comment", "=", "'The membrane time constant in milliseconds'", ")", "traj", ".", "f_add_parameter", "(", "'neuron.tau_ref'", ",", "5.0", ",", "comment", "=", "'The refractory period in milliseconds '", "'where the membrane potnetial '", "'is clamped.'", ")", "traj", ".", "f_add_parameter", "(", "'simulation.duration'", ",", "1000.0", ",", "comment", "=", "'The duration of the experiment in '", "'milliseconds.'", ")", "traj", ".", "f_add_parameter", "(", "'simulation.dt'", ",", "0.1", ",", "comment", "=", "'The step size of an Euler integration step.'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
add_exploration
Explores different values of `I` and `tau_ref`.
examples/example_13_post_processing/main.py
def add_exploration(traj): """Explores different values of `I` and `tau_ref`.""" print('Adding exploration of I and tau_ref') explore_dict = {'neuron.I': np.arange(0, 1.01, 0.01).tolist(), 'neuron.tau_ref': [5.0, 7.5, 10.0]} explore_dict = cartesian_product(explore_dict, ('neuron.tau_ref', 'neuron.I')) # The second argument, the tuple, specifies the order of the cartesian product, # The variable on the right most side changes fastest and defines the # 'inner for-loop' of the cartesian product traj.f_explore(explore_dict)
def add_exploration(traj): """Explores different values of `I` and `tau_ref`.""" print('Adding exploration of I and tau_ref') explore_dict = {'neuron.I': np.arange(0, 1.01, 0.01).tolist(), 'neuron.tau_ref': [5.0, 7.5, 10.0]} explore_dict = cartesian_product(explore_dict, ('neuron.tau_ref', 'neuron.I')) # The second argument, the tuple, specifies the order of the cartesian product, # The variable on the right most side changes fastest and defines the # 'inner for-loop' of the cartesian product traj.f_explore(explore_dict)
[ "Explores", "different", "values", "of", "I", "and", "tau_ref", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_13_post_processing/main.py#L134-L147
[ "def", "add_exploration", "(", "traj", ")", ":", "print", "(", "'Adding exploration of I and tau_ref'", ")", "explore_dict", "=", "{", "'neuron.I'", ":", "np", ".", "arange", "(", "0", ",", "1.01", ",", "0.01", ")", ".", "tolist", "(", ")", ",", "'neuron.tau_ref'", ":", "[", "5.0", ",", "7.5", ",", "10.0", "]", "}", "explore_dict", "=", "cartesian_product", "(", "explore_dict", ",", "(", "'neuron.tau_ref'", ",", "'neuron.I'", ")", ")", "# The second argument, the tuple, specifies the order of the cartesian product,", "# The variable on the right most side changes fastest and defines the", "# 'inner for-loop' of the cartesian product", "traj", ".", "f_explore", "(", "explore_dict", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkRunner.execute_network_pre_run
Runs a network before the actual experiment. Called by a :class:`~pypet.brian2.network.NetworkManager`. Similar to :func:`~pypet.brian2.network.NetworkRunner.run_network`. Subruns and their durations are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.pre_durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects
pypet/brian2/network.py
def execute_network_pre_run(self, traj, network, network_dict, component_list, analyser_list): """Runs a network before the actual experiment. Called by a :class:`~pypet.brian2.network.NetworkManager`. Similar to :func:`~pypet.brian2.network.NetworkRunner.run_network`. Subruns and their durations are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.pre_durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects """ self._execute_network_run(traj, network, network_dict, component_list, analyser_list, pre_run=True)
def execute_network_pre_run(self, traj, network, network_dict, component_list, analyser_list): """Runs a network before the actual experiment. Called by a :class:`~pypet.brian2.network.NetworkManager`. Similar to :func:`~pypet.brian2.network.NetworkRunner.run_network`. Subruns and their durations are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.pre_durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects """ self._execute_network_run(traj, network, network_dict, component_list, analyser_list, pre_run=True)
[ "Runs", "a", "network", "before", "the", "actual", "experiment", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L276-L303
[ "def", "execute_network_pre_run", "(", "self", ",", "traj", ",", "network", ",", "network_dict", ",", "component_list", ",", "analyser_list", ")", ":", "self", ".", "_execute_network_run", "(", "traj", ",", "network", ",", "network_dict", ",", "component_list", ",", "analyser_list", ",", "pre_run", "=", "True", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkRunner.execute_network_run
Runs a network in an experimental run. Called by a :class:`~pypet.brian2.network.NetworkManager`. A network run is divided into several subruns which are defined as :class:`~pypet.brian2.parameter.Brian2Parameter` instances. These subruns are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. An error is thrown if no orders attribute can be found or if two parameters have the same order. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. For every subrun the following steps are executed: 1. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 2. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 3. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 4. Running the BRIAN2 network for the duration of the current subrun by calling the network's `run` function. 5. Calling :func:`~pypet.brian2.network.NetworkAnalyser.analyse` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 6. 
Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 7. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager` 8. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. These 8 steps are repeated for every subrun in the `subrun_list`. The `subrun_list` passed to all `add_to_network`, `analyse` and `remove_from_network` methods can be modified within these functions to potentially alter the order of execution or even erase or add upcoming subruns if necessary. For example, a NetworkAnalyser checks for epileptic pathological activity and cancels all coming subruns in case of undesired network dynamics. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects
pypet/brian2/network.py
def execute_network_run(self, traj, network, network_dict, component_list, analyser_list): """Runs a network in an experimental run. Called by a :class:`~pypet.brian2.network.NetworkManager`. A network run is divided into several subruns which are defined as :class:`~pypet.brian2.parameter.Brian2Parameter` instances. These subruns are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. An error is thrown if no orders attribute can be found or if two parameters have the same order. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. For every subrun the following steps are executed: 1. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 2. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 3. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 4. Running the BRIAN2 network for the duration of the current subrun by calling the network's `run` function. 5. Calling :func:`~pypet.brian2.network.NetworkAnalyser.analyse` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 6. 
Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 7. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager` 8. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. These 8 steps are repeated for every subrun in the `subrun_list`. The `subrun_list` passed to all `add_to_network`, `analyse` and `remove_from_network` methods can be modified within these functions to potentially alter the order of execution or even erase or add upcoming subruns if necessary. For example, a NetworkAnalyser checks for epileptic pathological activity and cancels all coming subruns in case of undesired network dynamics. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects """ self._execute_network_run(traj, network, network_dict, component_list, analyser_list, pre_run=False)
def execute_network_run(self, traj, network, network_dict, component_list, analyser_list): """Runs a network in an experimental run. Called by a :class:`~pypet.brian2.network.NetworkManager`. A network run is divided into several subruns which are defined as :class:`~pypet.brian2.parameter.Brian2Parameter` instances. These subruns are extracted from the trajectory. All :class:`~pypet.brian2.parameter.Brian2Parameter` instances found under `traj.parameters.simulation.durations` (default, you can change the name of the group where to search for durations at runner initialisation). The order is determined from the `v_annotations.order` attributes. An error is thrown if no orders attribute can be found or if two parameters have the same order. There must be at least one subrun in the trajectory, otherwise an AttributeError is thrown. If two subruns equal in their order property a RuntimeError is thrown. For every subrun the following steps are executed: 1. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 2. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 3. Calling :func:`~pypet.brian2.network.NetworkComponent.add_to_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 4. Running the BRIAN2 network for the duration of the current subrun by calling the network's `run` function. 5. Calling :func:`~pypet.brian2.network.NetworkAnalyser.analyse` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. 6. 
Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` of the NetworkRunner itself (usually the network runner should not add or remove anything from the network, but this step is executed for completeness). 7. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkAnalyser` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager` 8. Calling :func:`~pypet.brian2.network.NetworkComponent.remove_from_network` for every every :class:`~pypet.brian2.network.NetworkComponent` in the order as they were passed to the :class:`~pypet.brian2.network.NetworkManager`. These 8 steps are repeated for every subrun in the `subrun_list`. The `subrun_list` passed to all `add_to_network`, `analyse` and `remove_from_network` methods can be modified within these functions to potentially alter the order of execution or even erase or add upcoming subruns if necessary. For example, a NetworkAnalyser checks for epileptic pathological activity and cancels all coming subruns in case of undesired network dynamics. :param traj: Trajectory container :param network: BRIAN2 network :param network_dict: Dictionary of items shared among all components :param component_list: List of :class:`~pypet.brian2.network.NetworkComponent` objects :param analyser_list: List of :class:`~pypet.brian2.network.NetworkAnalyser` objects """ self._execute_network_run(traj, network, network_dict, component_list, analyser_list, pre_run=False)
[ "Runs", "a", "network", "in", "an", "experimental", "run", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L305-L381
[ "def", "execute_network_run", "(", "self", ",", "traj", ",", "network", ",", "network_dict", ",", "component_list", ",", "analyser_list", ")", ":", "self", ".", "_execute_network_run", "(", "traj", ",", "network", ",", "network_dict", ",", "component_list", ",", "analyser_list", ",", "pre_run", "=", "False", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkRunner._extract_subruns
Extracts subruns from the trajectory. :param traj: Trajectory container :param pre_run: Boolean whether current run is regular or a pre-run :raises: RuntimeError if orders are duplicates or even missing
pypet/brian2/network.py
def _extract_subruns(self, traj, pre_run=False): """Extracts subruns from the trajectory. :param traj: Trajectory container :param pre_run: Boolean whether current run is regular or a pre-run :raises: RuntimeError if orders are duplicates or even missing """ if pre_run: durations_list = traj.f_get_all(self._pre_durations_group_name) else: durations_list = traj.f_get_all(self._durations_group_name) subruns = {} orders = [] for durations in durations_list: for duration_param in durations.f_iter_leaves(with_links=False): if 'order' in duration_param.v_annotations: order = duration_param.v_annotations.order else: raise RuntimeError('Your duration parameter %s has no order. Please add ' 'an order in `v_annotations.order`.' % duration_param.v_full_name) if order in subruns: raise RuntimeError('Your durations must differ in their order, there are two ' 'with order %d.' % order) else: subruns[order] = duration_param orders.append(order) return [subruns[order] for order in sorted(orders)]
def _extract_subruns(self, traj, pre_run=False): """Extracts subruns from the trajectory. :param traj: Trajectory container :param pre_run: Boolean whether current run is regular or a pre-run :raises: RuntimeError if orders are duplicates or even missing """ if pre_run: durations_list = traj.f_get_all(self._pre_durations_group_name) else: durations_list = traj.f_get_all(self._durations_group_name) subruns = {} orders = [] for durations in durations_list: for duration_param in durations.f_iter_leaves(with_links=False): if 'order' in duration_param.v_annotations: order = duration_param.v_annotations.order else: raise RuntimeError('Your duration parameter %s has no order. Please add ' 'an order in `v_annotations.order`.' % duration_param.v_full_name) if order in subruns: raise RuntimeError('Your durations must differ in their order, there are two ' 'with order %d.' % order) else: subruns[order] = duration_param orders.append(order) return [subruns[order] for order in sorted(orders)]
[ "Extracts", "subruns", "from", "the", "trajectory", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L383-L420
[ "def", "_extract_subruns", "(", "self", ",", "traj", ",", "pre_run", "=", "False", ")", ":", "if", "pre_run", ":", "durations_list", "=", "traj", ".", "f_get_all", "(", "self", ".", "_pre_durations_group_name", ")", "else", ":", "durations_list", "=", "traj", ".", "f_get_all", "(", "self", ".", "_durations_group_name", ")", "subruns", "=", "{", "}", "orders", "=", "[", "]", "for", "durations", "in", "durations_list", ":", "for", "duration_param", "in", "durations", ".", "f_iter_leaves", "(", "with_links", "=", "False", ")", ":", "if", "'order'", "in", "duration_param", ".", "v_annotations", ":", "order", "=", "duration_param", ".", "v_annotations", ".", "order", "else", ":", "raise", "RuntimeError", "(", "'Your duration parameter %s has no order. Please add '", "'an order in `v_annotations.order`.'", "%", "duration_param", ".", "v_full_name", ")", "if", "order", "in", "subruns", ":", "raise", "RuntimeError", "(", "'Your durations must differ in their order, there are two '", "'with order %d.'", "%", "order", ")", "else", ":", "subruns", "[", "order", "]", "=", "duration_param", "orders", ".", "append", "(", "order", ")", "return", "[", "subruns", "[", "order", "]", "for", "order", "in", "sorted", "(", "orders", ")", "]" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkRunner._execute_network_run
Generic `execute_network_run` function, handles experimental runs as well as pre-runs. See also :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` and :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`.
pypet/brian2/network.py
def _execute_network_run(self, traj, network, network_dict, component_list, analyser_list, pre_run=False): """Generic `execute_network_run` function, handles experimental runs as well as pre-runs. See also :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` and :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`. """ # Initially extract the `subrun_list` subrun_list = self._extract_subruns(traj, pre_run=pre_run) # counter for subruns subrun_number = 0 # Execute all subruns in order while len(subrun_list) > 0: # Get the next subrun current_subrun = subrun_list.pop(0) # 1. Call `add` of all normal components for component in component_list: component.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 2. Call `add` of all analyser components for analyser in analyser_list: analyser.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 3. Call `add` of the network runner itself self.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 4. Run the network self._logger.info('STARTING subrun `%s` (#%d) lasting %s.' % (current_subrun.v_name, subrun_number, str(current_subrun.f_get()))) network.run(duration=current_subrun.f_get(), report=self._report, report_period=self._report_period) # 5. Call `analyse` of all analyser components for analyser in analyser_list: analyser.analyse(traj, network, current_subrun, subrun_list, network_dict) # 6. Call `remove` of the network runner itself self.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) # 7. Call `remove` for all analyser components for analyser in analyser_list: analyser.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) # 8. Call `remove` for all normal components for component in component_list: component.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) subrun_number += 1
def _execute_network_run(self, traj, network, network_dict, component_list, analyser_list, pre_run=False): """Generic `execute_network_run` function, handles experimental runs as well as pre-runs. See also :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` and :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`. """ # Initially extract the `subrun_list` subrun_list = self._extract_subruns(traj, pre_run=pre_run) # counter for subruns subrun_number = 0 # Execute all subruns in order while len(subrun_list) > 0: # Get the next subrun current_subrun = subrun_list.pop(0) # 1. Call `add` of all normal components for component in component_list: component.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 2. Call `add` of all analyser components for analyser in analyser_list: analyser.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 3. Call `add` of the network runner itself self.add_to_network(traj, network, current_subrun, subrun_list, network_dict) # 4. Run the network self._logger.info('STARTING subrun `%s` (#%d) lasting %s.' % (current_subrun.v_name, subrun_number, str(current_subrun.f_get()))) network.run(duration=current_subrun.f_get(), report=self._report, report_period=self._report_period) # 5. Call `analyse` of all analyser components for analyser in analyser_list: analyser.analyse(traj, network, current_subrun, subrun_list, network_dict) # 6. Call `remove` of the network runner itself self.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) # 7. Call `remove` for all analyser components for analyser in analyser_list: analyser.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) # 8. Call `remove` for all normal components for component in component_list: component.remove_from_network(traj, network, current_subrun, subrun_list, network_dict) subrun_number += 1
[ "Generic", "execute_network_run", "function", "handles", "experimental", "runs", "as", "well", "as", "pre", "-", "runs", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L422-L483
[ "def", "_execute_network_run", "(", "self", ",", "traj", ",", "network", ",", "network_dict", ",", "component_list", ",", "analyser_list", ",", "pre_run", "=", "False", ")", ":", "# Initially extract the `subrun_list`", "subrun_list", "=", "self", ".", "_extract_subruns", "(", "traj", ",", "pre_run", "=", "pre_run", ")", "# counter for subruns", "subrun_number", "=", "0", "# Execute all subruns in order", "while", "len", "(", "subrun_list", ")", ">", "0", ":", "# Get the next subrun", "current_subrun", "=", "subrun_list", ".", "pop", "(", "0", ")", "# 1. Call `add` of all normal components", "for", "component", "in", "component_list", ":", "component", ".", "add_to_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 2. Call `add` of all analyser components", "for", "analyser", "in", "analyser_list", ":", "analyser", ".", "add_to_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 3. Call `add` of the network runner itself", "self", ".", "add_to_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 4. Run the network", "self", ".", "_logger", ".", "info", "(", "'STARTING subrun `%s` (#%d) lasting %s.'", "%", "(", "current_subrun", ".", "v_name", ",", "subrun_number", ",", "str", "(", "current_subrun", ".", "f_get", "(", ")", ")", ")", ")", "network", ".", "run", "(", "duration", "=", "current_subrun", ".", "f_get", "(", ")", ",", "report", "=", "self", ".", "_report", ",", "report_period", "=", "self", ".", "_report_period", ")", "# 5. Call `analyse` of all analyser components", "for", "analyser", "in", "analyser_list", ":", "analyser", ".", "analyse", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 6. 
Call `remove` of the network runner itself", "self", ".", "remove_from_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 7. Call `remove` for all analyser components", "for", "analyser", "in", "analyser_list", ":", "analyser", ".", "remove_from_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "# 8. Call `remove` for all normal components", "for", "component", "in", "component_list", ":", "component", ".", "remove_from_network", "(", "traj", ",", "network", ",", "current_subrun", ",", "subrun_list", ",", "network_dict", ")", "subrun_number", "+=", "1" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager.add_parameters
Adds parameters for a network simulation. Calls :func:`~pypet.brian2.network.NetworkComponent.add_parameters` for all components, analyser, and the network runner (in this order). :param traj: Trajectory container
pypet/brian2/network.py
def add_parameters(self, traj): """Adds parameters for a network simulation. Calls :func:`~pypet.brian2.network.NetworkComponent.add_parameters` for all components, analyser, and the network runner (in this order). :param traj: Trajectory container """ self._logger.info('Adding Parameters of Components') for component in self.components: component.add_parameters(traj) if self.analysers: self._logger.info('Adding Parameters of Analysers') for analyser in self.analysers: analyser.add_parameters(traj) self._logger.info('Adding Parameters of Runner') self.network_runner.add_parameters(traj)
def add_parameters(self, traj): """Adds parameters for a network simulation. Calls :func:`~pypet.brian2.network.NetworkComponent.add_parameters` for all components, analyser, and the network runner (in this order). :param traj: Trajectory container """ self._logger.info('Adding Parameters of Components') for component in self.components: component.add_parameters(traj) if self.analysers: self._logger.info('Adding Parameters of Analysers') for analyser in self.analysers: analyser.add_parameters(traj) self._logger.info('Adding Parameters of Runner') self.network_runner.add_parameters(traj)
[ "Adds", "parameters", "for", "a", "network", "simulation", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L541-L563
[ "def", "add_parameters", "(", "self", ",", "traj", ")", ":", "self", ".", "_logger", ".", "info", "(", "'Adding Parameters of Components'", ")", "for", "component", "in", "self", ".", "components", ":", "component", ".", "add_parameters", "(", "traj", ")", "if", "self", ".", "analysers", ":", "self", ".", "_logger", ".", "info", "(", "'Adding Parameters of Analysers'", ")", "for", "analyser", "in", "self", ".", "analysers", ":", "analyser", ".", "add_parameters", "(", "traj", ")", "self", ".", "_logger", ".", "info", "(", "'Adding Parameters of Runner'", ")", "self", ".", "network_runner", ".", "add_parameters", "(", "traj", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager.pre_build
Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.pre_build` for all components, analysers, and the network runner. `pre_build` is not automatically called but either needs to be executed manually by the user, either calling it directly or by using :func:`~pypet.brian2.network.NetworkManager.pre_run`. This function does not create a `BRIAN2 network`, but only it's components. :param traj: Trajectory container
pypet/brian2/network.py
def pre_build(self, traj): """Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.pre_build` for all components, analysers, and the network runner. `pre_build` is not automatically called but either needs to be executed manually by the user, either calling it directly or by using :func:`~pypet.brian2.network.NetworkManager.pre_run`. This function does not create a `BRIAN2 network`, but only it's components. :param traj: Trajectory container """ self._logger.info('Pre-Building Components') for component in self.components: component.pre_build(traj, self._brian_list, self._network_dict) if self.analysers: self._logger.info('Pre-Building Analysers') for analyser in self.analysers: analyser.pre_build(traj, self._brian_list, self._network_dict) self._logger.info('Pre-Building NetworkRunner') self.network_runner.pre_build(traj, self._brian_list, self._network_dict) self._pre_built = True
def pre_build(self, traj): """Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.pre_build` for all components, analysers, and the network runner. `pre_build` is not automatically called but either needs to be executed manually by the user, either calling it directly or by using :func:`~pypet.brian2.network.NetworkManager.pre_run`. This function does not create a `BRIAN2 network`, but only it's components. :param traj: Trajectory container """ self._logger.info('Pre-Building Components') for component in self.components: component.pre_build(traj, self._brian_list, self._network_dict) if self.analysers: self._logger.info('Pre-Building Analysers') for analyser in self.analysers: analyser.pre_build(traj, self._brian_list, self._network_dict) self._logger.info('Pre-Building NetworkRunner') self.network_runner.pre_build(traj, self._brian_list, self._network_dict) self._pre_built = True
[ "Pre", "-", "builds", "network", "components", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L565-L595
[ "def", "pre_build", "(", "self", ",", "traj", ")", ":", "self", ".", "_logger", ".", "info", "(", "'Pre-Building Components'", ")", "for", "component", "in", "self", ".", "components", ":", "component", ".", "pre_build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")", "if", "self", ".", "analysers", ":", "self", ".", "_logger", ".", "info", "(", "'Pre-Building Analysers'", ")", "for", "analyser", "in", "self", ".", "analysers", ":", "analyser", ".", "pre_build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")", "self", ".", "_logger", ".", "info", "(", "'Pre-Building NetworkRunner'", ")", "self", ".", "network_runner", ".", "pre_build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")", "self", ".", "_pre_built", "=", "True" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager.build
Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.build` for all components, analysers and the network runner. `build` does not need to be called by the user. If `~pypet.brian2.network.run_network` is passed to an :class:`~pypet.environment.Environment` with this Network manager, `build` is automatically called for each individual experimental run. :param traj: Trajectory container
pypet/brian2/network.py
def build(self, traj): """Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.build` for all components, analysers and the network runner. `build` does not need to be called by the user. If `~pypet.brian2.network.run_network` is passed to an :class:`~pypet.environment.Environment` with this Network manager, `build` is automatically called for each individual experimental run. :param traj: Trajectory container """ self._logger.info('Building Components') for component in self.components: component.build(traj, self._brian_list, self._network_dict) if self.analysers: self._logger.info('Building Analysers') for analyser in self.analysers: analyser.build(traj, self._brian_list, self._network_dict) self._logger.info('Building NetworkRunner') self.network_runner.build(traj, self._brian_list, self._network_dict)
def build(self, traj): """Pre-builds network components. Calls :func:`~pypet.brian2.network.NetworkComponent.build` for all components, analysers and the network runner. `build` does not need to be called by the user. If `~pypet.brian2.network.run_network` is passed to an :class:`~pypet.environment.Environment` with this Network manager, `build` is automatically called for each individual experimental run. :param traj: Trajectory container """ self._logger.info('Building Components') for component in self.components: component.build(traj, self._brian_list, self._network_dict) if self.analysers: self._logger.info('Building Analysers') for analyser in self.analysers: analyser.build(traj, self._brian_list, self._network_dict) self._logger.info('Building NetworkRunner') self.network_runner.build(traj, self._brian_list, self._network_dict)
[ "Pre", "-", "builds", "network", "components", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L598-L624
[ "def", "build", "(", "self", ",", "traj", ")", ":", "self", ".", "_logger", ".", "info", "(", "'Building Components'", ")", "for", "component", "in", "self", ".", "components", ":", "component", ".", "build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")", "if", "self", ".", "analysers", ":", "self", ".", "_logger", ".", "info", "(", "'Building Analysers'", ")", "for", "analyser", "in", "self", ".", "analysers", ":", "analyser", ".", "build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")", "self", ".", "_logger", ".", "info", "(", "'Building NetworkRunner'", ")", "self", ".", "network_runner", ".", "build", "(", "traj", ",", "self", ".", "_brian_list", ",", "self", ".", "_network_dict", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager.pre_run_network
Starts a network run before the individual run. Useful if a network needs an initial run that can be shared by all individual experimental runs during parameter exploration. Needs to be called by the user. If `pre_run_network` is started by the user, :func:`~pypet.brian2.network.NetworkManager.pre_build` will be automatically called from this function. This function will create a new BRIAN2 network which is run by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`. To see how a network run is structured also take a look at :func:`~pypet.brian2.network.NetworkRunner.run_network`. :param traj: Trajectory container
pypet/brian2/network.py
def pre_run_network(self, traj): """Starts a network run before the individual run. Useful if a network needs an initial run that can be shared by all individual experimental runs during parameter exploration. Needs to be called by the user. If `pre_run_network` is started by the user, :func:`~pypet.brian2.network.NetworkManager.pre_build` will be automatically called from this function. This function will create a new BRIAN2 network which is run by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`. To see how a network run is structured also take a look at :func:`~pypet.brian2.network.NetworkRunner.run_network`. :param traj: Trajectory container """ self.pre_build(traj) self._logger.info('\n------------------------\n' 'Pre-Running the Network\n' '------------------------') self._network = self._network_constructor(*self._brian_list) self.network_runner.execute_network_pre_run(traj, self._network, self._network_dict, self.components, self.analysers) self._logger.info('\n-----------------------------\n' 'Network Simulation successful\n' '-----------------------------') self._pre_run = True if hasattr(self._network, 'store'): self._network.store('pre_run')
def pre_run_network(self, traj): """Starts a network run before the individual run. Useful if a network needs an initial run that can be shared by all individual experimental runs during parameter exploration. Needs to be called by the user. If `pre_run_network` is started by the user, :func:`~pypet.brian2.network.NetworkManager.pre_build` will be automatically called from this function. This function will create a new BRIAN2 network which is run by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_pre_run`. To see how a network run is structured also take a look at :func:`~pypet.brian2.network.NetworkRunner.run_network`. :param traj: Trajectory container """ self.pre_build(traj) self._logger.info('\n------------------------\n' 'Pre-Running the Network\n' '------------------------') self._network = self._network_constructor(*self._brian_list) self.network_runner.execute_network_pre_run(traj, self._network, self._network_dict, self.components, self.analysers) self._logger.info('\n-----------------------------\n' 'Network Simulation successful\n' '-----------------------------') self._pre_run = True if hasattr(self._network, 'store'): self._network.store('pre_run')
[ "Starts", "a", "network", "run", "before", "the", "individual", "run", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L627-L664
[ "def", "pre_run_network", "(", "self", ",", "traj", ")", ":", "self", ".", "pre_build", "(", "traj", ")", "self", ".", "_logger", ".", "info", "(", "'\\n------------------------\\n'", "'Pre-Running the Network\\n'", "'------------------------'", ")", "self", ".", "_network", "=", "self", ".", "_network_constructor", "(", "*", "self", ".", "_brian_list", ")", "self", ".", "network_runner", ".", "execute_network_pre_run", "(", "traj", ",", "self", ".", "_network", ",", "self", ".", "_network_dict", ",", "self", ".", "components", ",", "self", ".", "analysers", ")", "self", ".", "_logger", ".", "info", "(", "'\\n-----------------------------\\n'", "'Network Simulation successful\\n'", "'-----------------------------'", ")", "self", ".", "_pre_run", "=", "True", "if", "hasattr", "(", "self", ".", "_network", ",", "'store'", ")", ":", "self", ".", "_network", ".", "store", "(", "'pre_run'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager.run_network
Top-level simulation function, pass this to the environment Performs an individual network run during parameter exploration. `run_network` does not need to be called by the user. If this method (not this one of the NetworkManager) is passed to an :class:`~pypet.environment.Environment` with this NetworkManager, `run_network` and :func:`~pypet.brian2.network.NetworkManager.build` are automatically called for each individual experimental run. This function will create a new BRIAN2 network in case one was not pre-run. The execution of the network run is carried out by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` (also take a look at this function's documentation to see the structure of a network run). :param traj: Trajectory container
pypet/brian2/network.py
def run_network(self, traj): """Top-level simulation function, pass this to the environment Performs an individual network run during parameter exploration. `run_network` does not need to be called by the user. If this method (not this one of the NetworkManager) is passed to an :class:`~pypet.environment.Environment` with this NetworkManager, `run_network` and :func:`~pypet.brian2.network.NetworkManager.build` are automatically called for each individual experimental run. This function will create a new BRIAN2 network in case one was not pre-run. The execution of the network run is carried out by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` (also take a look at this function's documentation to see the structure of a network run). :param traj: Trajectory container """ # Check if the network was pre-built if self._pre_built: if self._pre_run and hasattr(self._network, 'restore'): self._network.restore('pre_run') # Temprorary fix for https://github.com/brian-team/brian2/issues/681 self._network.store('pre_run') self._run_network(traj) else: self._run_network(traj)
def run_network(self, traj): """Top-level simulation function, pass this to the environment Performs an individual network run during parameter exploration. `run_network` does not need to be called by the user. If this method (not this one of the NetworkManager) is passed to an :class:`~pypet.environment.Environment` with this NetworkManager, `run_network` and :func:`~pypet.brian2.network.NetworkManager.build` are automatically called for each individual experimental run. This function will create a new BRIAN2 network in case one was not pre-run. The execution of the network run is carried out by the :class:`~pypet.brian2.network.NetworkRunner` and it's :func:`~pypet.brian2.network.NetworkRunner.execute_network_run` (also take a look at this function's documentation to see the structure of a network run). :param traj: Trajectory container """ # Check if the network was pre-built if self._pre_built: if self._pre_run and hasattr(self._network, 'restore'): self._network.restore('pre_run') # Temprorary fix for https://github.com/brian-team/brian2/issues/681 self._network.store('pre_run') self._run_network(traj) else: self._run_network(traj)
[ "Top", "-", "level", "simulation", "function", "pass", "this", "to", "the", "environment" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L667-L696
[ "def", "run_network", "(", "self", ",", "traj", ")", ":", "# Check if the network was pre-built", "if", "self", ".", "_pre_built", ":", "if", "self", ".", "_pre_run", "and", "hasattr", "(", "self", ".", "_network", ",", "'restore'", ")", ":", "self", ".", "_network", ".", "restore", "(", "'pre_run'", ")", "# Temprorary fix for https://github.com/brian-team/brian2/issues/681", "self", ".", "_network", ".", "store", "(", "'pre_run'", ")", "self", ".", "_run_network", "(", "traj", ")", "else", ":", "self", ".", "_run_network", "(", "traj", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
NetworkManager._run_network
Starts a single run carried out by a NetworkRunner. Called from the public function :func:`~pypet.brian2.network.NetworkManger.run_network`. :param traj: Trajectory container
pypet/brian2/network.py
def _run_network(self, traj): """Starts a single run carried out by a NetworkRunner. Called from the public function :func:`~pypet.brian2.network.NetworkManger.run_network`. :param traj: Trajectory container """ self.build(traj) self._pretty_print_explored_parameters(traj) # We need to construct a network object in case one was not pre-run if not self._pre_run: self._network = self._network_constructor(*self._brian_list) # Start the experimental run self.network_runner.execute_network_run(traj, self._network, self._network_dict, self.components, self.analysers) self._logger.info('\n-----------------------------\n' 'Network Simulation successful\n' '-----------------------------')
def _run_network(self, traj): """Starts a single run carried out by a NetworkRunner. Called from the public function :func:`~pypet.brian2.network.NetworkManger.run_network`. :param traj: Trajectory container """ self.build(traj) self._pretty_print_explored_parameters(traj) # We need to construct a network object in case one was not pre-run if not self._pre_run: self._network = self._network_constructor(*self._brian_list) # Start the experimental run self.network_runner.execute_network_run(traj, self._network, self._network_dict, self.components, self.analysers) self._logger.info('\n-----------------------------\n' 'Network Simulation successful\n' '-----------------------------')
[ "Starts", "a", "single", "run", "carried", "out", "by", "a", "NetworkRunner", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/brian2/network.py#L714-L737
[ "def", "_run_network", "(", "self", ",", "traj", ")", ":", "self", ".", "build", "(", "traj", ")", "self", ".", "_pretty_print_explored_parameters", "(", "traj", ")", "# We need to construct a network object in case one was not pre-run", "if", "not", "self", ".", "_pre_run", ":", "self", ".", "_network", "=", "self", ".", "_network_constructor", "(", "*", "self", ".", "_brian_list", ")", "# Start the experimental run", "self", ".", "network_runner", ".", "execute_network_run", "(", "traj", ",", "self", ".", "_network", ",", "self", ".", "_network_dict", ",", "self", ".", "components", ",", "self", ".", "analysers", ")", "self", ".", "_logger", ".", "info", "(", "'\\n-----------------------------\\n'", "'Network Simulation successful\\n'", "'-----------------------------'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
make_filename
Function to create generic filenames based on what has been explored
examples/example_17_wrapping_an_existing_project/pypetwrap.py
def make_filename(traj): """ Function to create generic filenames based on what has been explored """ explored_parameters = traj.f_get_explored_parameters() filename = '' for param in explored_parameters.values(): short_name = param.v_name val = param.f_get() filename += '%s_%s__' % (short_name, str(val)) return filename[:-2] + '.png'
def make_filename(traj): """ Function to create generic filenames based on what has been explored """ explored_parameters = traj.f_get_explored_parameters() filename = '' for param in explored_parameters.values(): short_name = param.v_name val = param.f_get() filename += '%s_%s__' % (short_name, str(val)) return filename[:-2] + '.png'
[ "Function", "to", "create", "generic", "filenames", "based", "on", "what", "has", "been", "explored" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_17_wrapping_an_existing_project/pypetwrap.py#L23-L32
[ "def", "make_filename", "(", "traj", ")", ":", "explored_parameters", "=", "traj", ".", "f_get_explored_parameters", "(", ")", "filename", "=", "''", "for", "param", "in", "explored_parameters", ".", "values", "(", ")", ":", "short_name", "=", "param", ".", "v_name", "val", "=", "param", ".", "f_get", "(", ")", "filename", "+=", "'%s_%s__'", "%", "(", "short_name", ",", "str", "(", "val", ")", ")", "return", "filename", "[", ":", "-", "2", "]", "+", "'.png'" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
wrap_automaton
Simple wrapper function for compatibility with *pypet*. We will call the original simulation functions with data extracted from ``traj``. The resulting automaton patterns wil also be stored into the trajectory. :param traj: Trajectory container for data
examples/example_17_wrapping_an_existing_project/pypetwrap.py
def wrap_automaton(traj): """ Simple wrapper function for compatibility with *pypet*. We will call the original simulation functions with data extracted from ``traj``. The resulting automaton patterns wil also be stored into the trajectory. :param traj: Trajectory container for data """ # Make initial state initial_state = make_initial_state(traj.initial_name, traj.ncells, traj.seed) # Run simulation pattern = cellular_automaton_1D(initial_state, traj.rule_number, traj.steps) # Store the computed pattern traj.f_add_result('pattern', pattern, comment='Development of CA over time')
def wrap_automaton(traj): """ Simple wrapper function for compatibility with *pypet*. We will call the original simulation functions with data extracted from ``traj``. The resulting automaton patterns wil also be stored into the trajectory. :param traj: Trajectory container for data """ # Make initial state initial_state = make_initial_state(traj.initial_name, traj.ncells, traj.seed) # Run simulation pattern = cellular_automaton_1D(initial_state, traj.rule_number, traj.steps) # Store the computed pattern traj.f_add_result('pattern', pattern, comment='Development of CA over time')
[ "Simple", "wrapper", "function", "for", "compatibility", "with", "*", "pypet", "*", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_17_wrapping_an_existing_project/pypetwrap.py#L34-L49
[ "def", "wrap_automaton", "(", "traj", ")", ":", "# Make initial state", "initial_state", "=", "make_initial_state", "(", "traj", ".", "initial_name", ",", "traj", ".", "ncells", ",", "traj", ".", "seed", ")", "# Run simulation", "pattern", "=", "cellular_automaton_1D", "(", "initial_state", ",", "traj", ".", "rule_number", ",", "traj", ".", "steps", ")", "# Store the computed pattern", "traj", ".", "f_add_result", "(", "'pattern'", ",", "pattern", ",", "comment", "=", "'Development of CA over time'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
main
Main *boilerplate* function to start simulation
examples/example_17_wrapping_an_existing_project/pypetwrap.py
def main(): """ Main *boilerplate* function to start simulation """ # Now let's make use of logging logger = logging.getLogger() # Create folders for data and plots folder = os.path.join(os.getcwd(), 'experiments', 'ca_patterns_pypet') if not os.path.isdir(folder): os.makedirs(folder) filename = os.path.join(folder, 'all_patterns.hdf5') # Create an environment env = Environment(trajectory='cellular_automata', multiproc=True, ncores=4, wrap_mode='QUEUE', filename=filename, overwrite_file=True) # extract the trajectory traj = env.traj traj.par.ncells = Parameter('ncells', 400, 'Number of cells') traj.par.steps = Parameter('steps', 250, 'Number of timesteps') traj.par.rule_number = Parameter('rule_number', 30, 'The ca rule') traj.par.initial_name = Parameter('initial_name', 'random', 'The type of initial state') traj.par.seed = Parameter('seed', 100042, 'RNG Seed') # Explore exp_dict = {'rule_number' : [10, 30, 90, 110, 184], 'initial_name' : ['single', 'random'],} # # You can uncomment the ``exp_dict`` below to see that changing the # # exploration scheme is now really easy: # exp_dict = {'rule_number' : [10, 30, 90, 110, 184], # 'ncells' : [100, 200, 300], # 'seed': [333444555, 123456]} exp_dict = cartesian_product(exp_dict) traj.f_explore(exp_dict) # Run the simulation logger.info('Starting Simulation') env.run(wrap_automaton) # Load all data traj.f_load(load_data=2) logger.info('Printing data') for idx, run_name in enumerate(traj.f_iter_runs()): # Plot all patterns filename = os.path.join(folder, make_filename(traj)) plot_pattern(traj.crun.pattern, traj.rule_number, filename) progressbar(idx, len(traj), logger=logger) # Finally disable logging and close all log-files env.disable_logging()
def main(): """ Main *boilerplate* function to start simulation """ # Now let's make use of logging logger = logging.getLogger() # Create folders for data and plots folder = os.path.join(os.getcwd(), 'experiments', 'ca_patterns_pypet') if not os.path.isdir(folder): os.makedirs(folder) filename = os.path.join(folder, 'all_patterns.hdf5') # Create an environment env = Environment(trajectory='cellular_automata', multiproc=True, ncores=4, wrap_mode='QUEUE', filename=filename, overwrite_file=True) # extract the trajectory traj = env.traj traj.par.ncells = Parameter('ncells', 400, 'Number of cells') traj.par.steps = Parameter('steps', 250, 'Number of timesteps') traj.par.rule_number = Parameter('rule_number', 30, 'The ca rule') traj.par.initial_name = Parameter('initial_name', 'random', 'The type of initial state') traj.par.seed = Parameter('seed', 100042, 'RNG Seed') # Explore exp_dict = {'rule_number' : [10, 30, 90, 110, 184], 'initial_name' : ['single', 'random'],} # # You can uncomment the ``exp_dict`` below to see that changing the # # exploration scheme is now really easy: # exp_dict = {'rule_number' : [10, 30, 90, 110, 184], # 'ncells' : [100, 200, 300], # 'seed': [333444555, 123456]} exp_dict = cartesian_product(exp_dict) traj.f_explore(exp_dict) # Run the simulation logger.info('Starting Simulation') env.run(wrap_automaton) # Load all data traj.f_load(load_data=2) logger.info('Printing data') for idx, run_name in enumerate(traj.f_iter_runs()): # Plot all patterns filename = os.path.join(folder, make_filename(traj)) plot_pattern(traj.crun.pattern, traj.rule_number, filename) progressbar(idx, len(traj), logger=logger) # Finally disable logging and close all log-files env.disable_logging()
[ "Main", "*", "boilerplate", "*", "function", "to", "start", "simulation" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/examples/example_17_wrapping_an_existing_project/pypetwrap.py#L52-L106
[ "def", "main", "(", ")", ":", "# Now let's make use of logging", "logger", "=", "logging", ".", "getLogger", "(", ")", "# Create folders for data and plots", "folder", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'experiments'", ",", "'ca_patterns_pypet'", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "folder", ")", ":", "os", ".", "makedirs", "(", "folder", ")", "filename", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'all_patterns.hdf5'", ")", "# Create an environment", "env", "=", "Environment", "(", "trajectory", "=", "'cellular_automata'", ",", "multiproc", "=", "True", ",", "ncores", "=", "4", ",", "wrap_mode", "=", "'QUEUE'", ",", "filename", "=", "filename", ",", "overwrite_file", "=", "True", ")", "# extract the trajectory", "traj", "=", "env", ".", "traj", "traj", ".", "par", ".", "ncells", "=", "Parameter", "(", "'ncells'", ",", "400", ",", "'Number of cells'", ")", "traj", ".", "par", ".", "steps", "=", "Parameter", "(", "'steps'", ",", "250", ",", "'Number of timesteps'", ")", "traj", ".", "par", ".", "rule_number", "=", "Parameter", "(", "'rule_number'", ",", "30", ",", "'The ca rule'", ")", "traj", ".", "par", ".", "initial_name", "=", "Parameter", "(", "'initial_name'", ",", "'random'", ",", "'The type of initial state'", ")", "traj", ".", "par", ".", "seed", "=", "Parameter", "(", "'seed'", ",", "100042", ",", "'RNG Seed'", ")", "# Explore", "exp_dict", "=", "{", "'rule_number'", ":", "[", "10", ",", "30", ",", "90", ",", "110", ",", "184", "]", ",", "'initial_name'", ":", "[", "'single'", ",", "'random'", "]", ",", "}", "# # You can uncomment the ``exp_dict`` below to see that changing the", "# # exploration scheme is now really easy:", "# exp_dict = {'rule_number' : [10, 30, 90, 110, 184],", "# 'ncells' : [100, 200, 300],", "# 'seed': [333444555, 123456]}", "exp_dict", "=", "cartesian_product", "(", "exp_dict", ")", "traj", ".", "f_explore", "(", "exp_dict", ")", "# Run the simulation", 
"logger", ".", "info", "(", "'Starting Simulation'", ")", "env", ".", "run", "(", "wrap_automaton", ")", "# Load all data", "traj", ".", "f_load", "(", "load_data", "=", "2", ")", "logger", ".", "info", "(", "'Printing data'", ")", "for", "idx", ",", "run_name", "in", "enumerate", "(", "traj", ".", "f_iter_runs", "(", ")", ")", ":", "# Plot all patterns", "filename", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "make_filename", "(", "traj", ")", ")", "plot_pattern", "(", "traj", ".", "crun", ".", "pattern", ",", "traj", ".", "rule_number", ",", "filename", ")", "progressbar", "(", "idx", ",", "len", "(", "traj", ")", ",", "logger", "=", "logger", ")", "# Finally disable logging and close all log-files", "env", ".", "disable_logging", "(", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
IteratorChain.next
Returns next element from chain. More precisely, it returns the next element of the foremost iterator. If this iterator is empty it moves iteratively along the chain of available iterators to pick the new foremost one. Raises StopIteration if there are no elements left.
pypet/utils/helpful_classes.py
def next(self): """Returns next element from chain. More precisely, it returns the next element of the foremost iterator. If this iterator is empty it moves iteratively along the chain of available iterators to pick the new foremost one. Raises StopIteration if there are no elements left. """ while True: # We need this loop because some iterators may already be empty. # We keep on popping from the left until next succeeds and as long # as there are iterators available try: return next(self._current) except StopIteration: try: self._current = iter(self._chain.popleft()) except IndexError: # If we run out of iterators we are sure that # there can be no more element raise StopIteration('Reached end of iterator chain')
def next(self): """Returns next element from chain. More precisely, it returns the next element of the foremost iterator. If this iterator is empty it moves iteratively along the chain of available iterators to pick the new foremost one. Raises StopIteration if there are no elements left. """ while True: # We need this loop because some iterators may already be empty. # We keep on popping from the left until next succeeds and as long # as there are iterators available try: return next(self._current) except StopIteration: try: self._current = iter(self._chain.popleft()) except IndexError: # If we run out of iterators we are sure that # there can be no more element raise StopIteration('Reached end of iterator chain')
[ "Returns", "next", "element", "from", "chain", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/utils/helpful_classes.py#L35-L57
[ "def", "next", "(", "self", ")", ":", "while", "True", ":", "# We need this loop because some iterators may already be empty.", "# We keep on popping from the left until next succeeds and as long", "# as there are iterators available", "try", ":", "return", "next", "(", "self", ".", "_current", ")", "except", "StopIteration", ":", "try", ":", "self", ".", "_current", "=", "iter", "(", "self", ".", "_chain", ".", "popleft", "(", ")", ")", "except", "IndexError", ":", "# If we run out of iterators we are sure that", "# there can be no more element", "raise", "StopIteration", "(", "'Reached end of iterator chain'", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
merge_all_in_folder
Merges all files in a given folder. IMPORTANT: Does not check if there are more than 1 trajectory in a file. Always uses the last trajectory in file and ignores the other ones. Trajectories are merged according to the alphabetical order of the files, i.e. the resulting merged trajectory is found in the first file (according to lexicographic ordering). :param folder: folder (not recursive) where to look for files :param ext: only files with the given extension are used :param dynamic_imports: Dynamic imports for loading :param storage_service: storage service to use, leave `None` to use the default one :param force: If loading should be forced. :param delete_other_files: Deletes files of merged trajectories All other parameters as in `f_merge_many` of the trajectory. :return: The merged traj
pypet/utils/trajectory_utils.py
def merge_all_in_folder(folder, ext='.hdf5', dynamic_imports=None, storage_service=None, force=False, ignore_data=(), move_data=False, delete_other_files=False, keep_info=True, keep_other_trajectory_info=True, merge_config=True, backup=True): """Merges all files in a given folder. IMPORTANT: Does not check if there are more than 1 trajectory in a file. Always uses the last trajectory in file and ignores the other ones. Trajectories are merged according to the alphabetical order of the files, i.e. the resulting merged trajectory is found in the first file (according to lexicographic ordering). :param folder: folder (not recursive) where to look for files :param ext: only files with the given extension are used :param dynamic_imports: Dynamic imports for loading :param storage_service: storage service to use, leave `None` to use the default one :param force: If loading should be forced. :param delete_other_files: Deletes files of merged trajectories All other parameters as in `f_merge_many` of the trajectory. :return: The merged traj """ in_dir = os.listdir(folder) all_files = [] # Find all files with matching extension for file in in_dir: full_file = os.path.join(folder, file) if os.path.isfile(full_file): _, extension = os.path.splitext(full_file) if extension == ext: all_files.append(full_file) all_files = sorted(all_files) # Open all trajectories trajs = [] for full_file in all_files: traj = load_trajectory(index=-1, storage_service=storage_service, filename=full_file, load_data=0, force=force, dynamic_imports=dynamic_imports) trajs.append(traj) # Merge all trajectories first_traj = trajs.pop(0) first_traj.f_merge_many(trajs, ignore_data=ignore_data, move_data=move_data, delete_other_trajectory=False, keep_info=keep_info, keep_other_trajectory_info=keep_other_trajectory_info, merge_config=merge_config, backup=backup) if delete_other_files: # Delete all but the first file for file in all_files[1:]: os.remove(file) return first_traj
def merge_all_in_folder(folder, ext='.hdf5', dynamic_imports=None, storage_service=None, force=False, ignore_data=(), move_data=False, delete_other_files=False, keep_info=True, keep_other_trajectory_info=True, merge_config=True, backup=True): """Merges all files in a given folder. IMPORTANT: Does not check if there are more than 1 trajectory in a file. Always uses the last trajectory in file and ignores the other ones. Trajectories are merged according to the alphabetical order of the files, i.e. the resulting merged trajectory is found in the first file (according to lexicographic ordering). :param folder: folder (not recursive) where to look for files :param ext: only files with the given extension are used :param dynamic_imports: Dynamic imports for loading :param storage_service: storage service to use, leave `None` to use the default one :param force: If loading should be forced. :param delete_other_files: Deletes files of merged trajectories All other parameters as in `f_merge_many` of the trajectory. :return: The merged traj """ in_dir = os.listdir(folder) all_files = [] # Find all files with matching extension for file in in_dir: full_file = os.path.join(folder, file) if os.path.isfile(full_file): _, extension = os.path.splitext(full_file) if extension == ext: all_files.append(full_file) all_files = sorted(all_files) # Open all trajectories trajs = [] for full_file in all_files: traj = load_trajectory(index=-1, storage_service=storage_service, filename=full_file, load_data=0, force=force, dynamic_imports=dynamic_imports) trajs.append(traj) # Merge all trajectories first_traj = trajs.pop(0) first_traj.f_merge_many(trajs, ignore_data=ignore_data, move_data=move_data, delete_other_trajectory=False, keep_info=keep_info, keep_other_trajectory_info=keep_other_trajectory_info, merge_config=merge_config, backup=backup) if delete_other_files: # Delete all but the first file for file in all_files[1:]: os.remove(file) return first_traj
[ "Merges", "all", "files", "in", "a", "given", "folder", "." ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/utils/trajectory_utils.py#L7-L77
[ "def", "merge_all_in_folder", "(", "folder", ",", "ext", "=", "'.hdf5'", ",", "dynamic_imports", "=", "None", ",", "storage_service", "=", "None", ",", "force", "=", "False", ",", "ignore_data", "=", "(", ")", ",", "move_data", "=", "False", ",", "delete_other_files", "=", "False", ",", "keep_info", "=", "True", ",", "keep_other_trajectory_info", "=", "True", ",", "merge_config", "=", "True", ",", "backup", "=", "True", ")", ":", "in_dir", "=", "os", ".", "listdir", "(", "folder", ")", "all_files", "=", "[", "]", "# Find all files with matching extension", "for", "file", "in", "in_dir", ":", "full_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "file", ")", "if", "os", ".", "path", ".", "isfile", "(", "full_file", ")", ":", "_", ",", "extension", "=", "os", ".", "path", ".", "splitext", "(", "full_file", ")", "if", "extension", "==", "ext", ":", "all_files", ".", "append", "(", "full_file", ")", "all_files", "=", "sorted", "(", "all_files", ")", "# Open all trajectories", "trajs", "=", "[", "]", "for", "full_file", "in", "all_files", ":", "traj", "=", "load_trajectory", "(", "index", "=", "-", "1", ",", "storage_service", "=", "storage_service", ",", "filename", "=", "full_file", ",", "load_data", "=", "0", ",", "force", "=", "force", ",", "dynamic_imports", "=", "dynamic_imports", ")", "trajs", ".", "append", "(", "traj", ")", "# Merge all trajectories", "first_traj", "=", "trajs", ".", "pop", "(", "0", ")", "first_traj", ".", "f_merge_many", "(", "trajs", ",", "ignore_data", "=", "ignore_data", ",", "move_data", "=", "move_data", ",", "delete_other_trajectory", "=", "False", ",", "keep_info", "=", "keep_info", ",", "keep_other_trajectory_info", "=", "keep_other_trajectory_info", ",", "merge_config", "=", "merge_config", ",", "backup", "=", "backup", ")", "if", "delete_other_files", ":", "# Delete all but the first file", "for", "file", "in", "all_files", "[", "1", ":", "]", ":", "os", ".", "remove", "(", "file", ")", "return", "first_traj" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
_SigintHandler._handle_sigint
Handler of SIGINT Does nothing if SIGINT is encountered once but raises a KeyboardInterrupt in case it is encountered twice. immediatly.
pypet/utils/siginthandling.py
def _handle_sigint(self, signum, frame): """Handler of SIGINT Does nothing if SIGINT is encountered once but raises a KeyboardInterrupt in case it is encountered twice. immediatly. """ if self.hit: prompt = 'Exiting immediately!' raise KeyboardInterrupt(prompt) else: self.hit = True prompt = ('\nYou killed the process(es) via `SIGINT` (`CTRL+C`). ' 'I am trying to exit ' 'gracefully. Using `SIGINT` (`CTRL+C`) ' 'again will cause an immediate exit.\n') sys.stderr.write(prompt)
def _handle_sigint(self, signum, frame): """Handler of SIGINT Does nothing if SIGINT is encountered once but raises a KeyboardInterrupt in case it is encountered twice. immediatly. """ if self.hit: prompt = 'Exiting immediately!' raise KeyboardInterrupt(prompt) else: self.hit = True prompt = ('\nYou killed the process(es) via `SIGINT` (`CTRL+C`). ' 'I am trying to exit ' 'gracefully. Using `SIGINT` (`CTRL+C`) ' 'again will cause an immediate exit.\n') sys.stderr.write(prompt)
[ "Handler", "of", "SIGINT" ]
SmokinCaterpillar/pypet
python
https://github.com/SmokinCaterpillar/pypet/blob/97ad3e80d46dbdea02deeb98ea41f05a19565826/pypet/utils/siginthandling.py#L28-L45
[ "def", "_handle_sigint", "(", "self", ",", "signum", ",", "frame", ")", ":", "if", "self", ".", "hit", ":", "prompt", "=", "'Exiting immediately!'", "raise", "KeyboardInterrupt", "(", "prompt", ")", "else", ":", "self", ".", "hit", "=", "True", "prompt", "=", "(", "'\\nYou killed the process(es) via `SIGINT` (`CTRL+C`). '", "'I am trying to exit '", "'gracefully. Using `SIGINT` (`CTRL+C`) '", "'again will cause an immediate exit.\\n'", ")", "sys", ".", "stderr", ".", "write", "(", "prompt", ")" ]
97ad3e80d46dbdea02deeb98ea41f05a19565826
test
config_from_file
Small configuration file management function
pyecobee/__init__.py
def config_from_file(filename, config=None): ''' Small configuration file management function''' if config: # We're writing configuration try: with open(filename, 'w') as fdesc: fdesc.write(json.dumps(config)) except IOError as error: logger.exception(error) return False return True else: # We're reading config if os.path.isfile(filename): try: with open(filename, 'r') as fdesc: return json.loads(fdesc.read()) except IOError as error: return False else: return {}
def config_from_file(filename, config=None): ''' Small configuration file management function''' if config: # We're writing configuration try: with open(filename, 'w') as fdesc: fdesc.write(json.dumps(config)) except IOError as error: logger.exception(error) return False return True else: # We're reading config if os.path.isfile(filename): try: with open(filename, 'r') as fdesc: return json.loads(fdesc.read()) except IOError as error: return False else: return {}
[ "Small", "configuration", "file", "management", "function" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L12-L32
[ "def", "config_from_file", "(", "filename", ",", "config", "=", "None", ")", ":", "if", "config", ":", "# We're writing configuration", "try", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fdesc", ":", "fdesc", ".", "write", "(", "json", ".", "dumps", "(", "config", ")", ")", "except", "IOError", "as", "error", ":", "logger", ".", "exception", "(", "error", ")", "return", "False", "return", "True", "else", ":", "# We're reading config", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fdesc", ":", "return", "json", ".", "loads", "(", "fdesc", ".", "read", "(", ")", ")", "except", "IOError", "as", "error", ":", "return", "False", "else", ":", "return", "{", "}" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.request_pin
Method to request a PIN from ecobee for authorization
pyecobee/__init__.py
def request_pin(self): ''' Method to request a PIN from ecobee for authorization ''' url = 'https://api.ecobee.com/authorize' params = {'response_type': 'ecobeePin', 'client_id': self.api_key, 'scope': 'smartWrite'} try: request = requests.get(url, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage." "Could not request pin.") return self.authorization_code = request.json()['code'] self.pin = request.json()['ecobeePin'] logger.error('Please authorize your ecobee developer app with PIN code ' + self.pin + '\nGoto https://www.ecobee.com/consumerportal' '/index.html, click\nMy Apps, Add application, Enter Pin' ' and click Authorize.\nAfter authorizing, call request_' 'tokens() method.')
def request_pin(self): ''' Method to request a PIN from ecobee for authorization ''' url = 'https://api.ecobee.com/authorize' params = {'response_type': 'ecobeePin', 'client_id': self.api_key, 'scope': 'smartWrite'} try: request = requests.get(url, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage." "Could not request pin.") return self.authorization_code = request.json()['code'] self.pin = request.json()['ecobeePin'] logger.error('Please authorize your ecobee developer app with PIN code ' + self.pin + '\nGoto https://www.ecobee.com/consumerportal' '/index.html, click\nMy Apps, Add application, Enter Pin' ' and click Authorize.\nAfter authorizing, call request_' 'tokens() method.')
[ "Method", "to", "request", "a", "PIN", "from", "ecobee", "for", "authorization" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L77-L94
[ "def", "request_pin", "(", "self", ")", ":", "url", "=", "'https://api.ecobee.com/authorize'", "params", "=", "{", "'response_type'", ":", "'ecobeePin'", ",", "'client_id'", ":", "self", ".", "api_key", ",", "'scope'", ":", "'smartWrite'", "}", "try", ":", "request", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", "except", "RequestException", ":", "logger", ".", "warn", "(", "\"Error connecting to Ecobee. Possible connectivity outage.\"", "\"Could not request pin.\"", ")", "return", "self", ".", "authorization_code", "=", "request", ".", "json", "(", ")", "[", "'code'", "]", "self", ".", "pin", "=", "request", ".", "json", "(", ")", "[", "'ecobeePin'", "]", "logger", ".", "error", "(", "'Please authorize your ecobee developer app with PIN code '", "+", "self", ".", "pin", "+", "'\\nGoto https://www.ecobee.com/consumerportal'", "'/index.html, click\\nMy Apps, Add application, Enter Pin'", "' and click Authorize.\\nAfter authorizing, call request_'", "'tokens() method.'", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.request_tokens
Method to request API tokens from ecobee
pyecobee/__init__.py
def request_tokens(self): ''' Method to request API tokens from ecobee ''' url = 'https://api.ecobee.com/token' params = {'grant_type': 'ecobeePin', 'code': self.authorization_code, 'client_id': self.api_key} try: request = requests.post(url, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage." "Could not request token.") return if request.status_code == requests.codes.ok: self.access_token = request.json()['access_token'] self.refresh_token = request.json()['refresh_token'] self.write_tokens_to_file() self.pin = None else: logger.warn('Error while requesting tokens from ecobee.com.' ' Status code: ' + str(request.status_code)) return
def request_tokens(self): ''' Method to request API tokens from ecobee ''' url = 'https://api.ecobee.com/token' params = {'grant_type': 'ecobeePin', 'code': self.authorization_code, 'client_id': self.api_key} try: request = requests.post(url, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage." "Could not request token.") return if request.status_code == requests.codes.ok: self.access_token = request.json()['access_token'] self.refresh_token = request.json()['refresh_token'] self.write_tokens_to_file() self.pin = None else: logger.warn('Error while requesting tokens from ecobee.com.' ' Status code: ' + str(request.status_code)) return
[ "Method", "to", "request", "API", "tokens", "from", "ecobee" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L96-L115
[ "def", "request_tokens", "(", "self", ")", ":", "url", "=", "'https://api.ecobee.com/token'", "params", "=", "{", "'grant_type'", ":", "'ecobeePin'", ",", "'code'", ":", "self", ".", "authorization_code", ",", "'client_id'", ":", "self", ".", "api_key", "}", "try", ":", "request", "=", "requests", ".", "post", "(", "url", ",", "params", "=", "params", ")", "except", "RequestException", ":", "logger", ".", "warn", "(", "\"Error connecting to Ecobee. Possible connectivity outage.\"", "\"Could not request token.\"", ")", "return", "if", "request", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "self", ".", "access_token", "=", "request", ".", "json", "(", ")", "[", "'access_token'", "]", "self", ".", "refresh_token", "=", "request", ".", "json", "(", ")", "[", "'refresh_token'", "]", "self", ".", "write_tokens_to_file", "(", ")", "self", ".", "pin", "=", "None", "else", ":", "logger", ".", "warn", "(", "'Error while requesting tokens from ecobee.com.'", "' Status code: '", "+", "str", "(", "request", ".", "status_code", ")", ")", "return" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.refresh_tokens
Method to refresh API tokens from ecobee
pyecobee/__init__.py
def refresh_tokens(self): ''' Method to refresh API tokens from ecobee ''' url = 'https://api.ecobee.com/token' params = {'grant_type': 'refresh_token', 'refresh_token': self.refresh_token, 'client_id': self.api_key} request = requests.post(url, params=params) if request.status_code == requests.codes.ok: self.access_token = request.json()['access_token'] self.refresh_token = request.json()['refresh_token'] self.write_tokens_to_file() return True else: self.request_pin()
def refresh_tokens(self): ''' Method to refresh API tokens from ecobee ''' url = 'https://api.ecobee.com/token' params = {'grant_type': 'refresh_token', 'refresh_token': self.refresh_token, 'client_id': self.api_key} request = requests.post(url, params=params) if request.status_code == requests.codes.ok: self.access_token = request.json()['access_token'] self.refresh_token = request.json()['refresh_token'] self.write_tokens_to_file() return True else: self.request_pin()
[ "Method", "to", "refresh", "API", "tokens", "from", "ecobee" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L117-L130
[ "def", "refresh_tokens", "(", "self", ")", ":", "url", "=", "'https://api.ecobee.com/token'", "params", "=", "{", "'grant_type'", ":", "'refresh_token'", ",", "'refresh_token'", ":", "self", ".", "refresh_token", ",", "'client_id'", ":", "self", ".", "api_key", "}", "request", "=", "requests", ".", "post", "(", "url", ",", "params", "=", "params", ")", "if", "request", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "self", ".", "access_token", "=", "request", ".", "json", "(", ")", "[", "'access_token'", "]", "self", ".", "refresh_token", "=", "request", ".", "json", "(", ")", "[", "'refresh_token'", "]", "self", ".", "write_tokens_to_file", "(", ")", "return", "True", "else", ":", "self", ".", "request_pin", "(", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.get_thermostats
Set self.thermostats to a json list of thermostats from ecobee
pyecobee/__init__.py
def get_thermostats(self): ''' Set self.thermostats to a json list of thermostats from ecobee ''' url = 'https://api.ecobee.com/1/thermostat' header = {'Content-Type': 'application/json;charset=UTF-8', 'Authorization': 'Bearer ' + self.access_token} params = {'json': ('{"selection":{"selectionType":"registered",' '"includeRuntime":"true",' '"includeSensors":"true",' '"includeProgram":"true",' '"includeEquipmentStatus":"true",' '"includeEvents":"true",' '"includeWeather":"true",' '"includeSettings":"true"}}')} try: request = requests.get(url, headers=header, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage.") return None if request.status_code == requests.codes.ok: self.authenticated = True self.thermostats = request.json()['thermostatList'] return self.thermostats else: self.authenticated = False logger.info("Error connecting to Ecobee while attempting to get " "thermostat data. Refreshing tokens and trying again.") if self.refresh_tokens(): return self.get_thermostats() else: return None
def get_thermostats(self): ''' Set self.thermostats to a json list of thermostats from ecobee ''' url = 'https://api.ecobee.com/1/thermostat' header = {'Content-Type': 'application/json;charset=UTF-8', 'Authorization': 'Bearer ' + self.access_token} params = {'json': ('{"selection":{"selectionType":"registered",' '"includeRuntime":"true",' '"includeSensors":"true",' '"includeProgram":"true",' '"includeEquipmentStatus":"true",' '"includeEvents":"true",' '"includeWeather":"true",' '"includeSettings":"true"}}')} try: request = requests.get(url, headers=header, params=params) except RequestException: logger.warn("Error connecting to Ecobee. Possible connectivity outage.") return None if request.status_code == requests.codes.ok: self.authenticated = True self.thermostats = request.json()['thermostatList'] return self.thermostats else: self.authenticated = False logger.info("Error connecting to Ecobee while attempting to get " "thermostat data. Refreshing tokens and trying again.") if self.refresh_tokens(): return self.get_thermostats() else: return None
[ "Set", "self", ".", "thermostats", "to", "a", "json", "list", "of", "thermostats", "from", "ecobee" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L132-L161
[ "def", "get_thermostats", "(", "self", ")", ":", "url", "=", "'https://api.ecobee.com/1/thermostat'", "header", "=", "{", "'Content-Type'", ":", "'application/json;charset=UTF-8'", ",", "'Authorization'", ":", "'Bearer '", "+", "self", ".", "access_token", "}", "params", "=", "{", "'json'", ":", "(", "'{\"selection\":{\"selectionType\":\"registered\",'", "'\"includeRuntime\":\"true\",'", "'\"includeSensors\":\"true\",'", "'\"includeProgram\":\"true\",'", "'\"includeEquipmentStatus\":\"true\",'", "'\"includeEvents\":\"true\",'", "'\"includeWeather\":\"true\",'", "'\"includeSettings\":\"true\"}}'", ")", "}", "try", ":", "request", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "header", ",", "params", "=", "params", ")", "except", "RequestException", ":", "logger", ".", "warn", "(", "\"Error connecting to Ecobee. Possible connectivity outage.\"", ")", "return", "None", "if", "request", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "self", ".", "authenticated", "=", "True", "self", ".", "thermostats", "=", "request", ".", "json", "(", ")", "[", "'thermostatList'", "]", "return", "self", ".", "thermostats", "else", ":", "self", ".", "authenticated", "=", "False", "logger", ".", "info", "(", "\"Error connecting to Ecobee while attempting to get \"", "\"thermostat data. Refreshing tokens and trying again.\"", ")", "if", "self", ".", "refresh_tokens", "(", ")", ":", "return", "self", ".", "get_thermostats", "(", ")", "else", ":", "return", "None" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.write_tokens_to_file
Write api tokens to a file
pyecobee/__init__.py
def write_tokens_to_file(self): ''' Write api tokens to a file ''' config = dict() config['API_KEY'] = self.api_key config['ACCESS_TOKEN'] = self.access_token config['REFRESH_TOKEN'] = self.refresh_token config['AUTHORIZATION_CODE'] = self.authorization_code if self.file_based_config: config_from_file(self.config_filename, config) else: self.config = config
def write_tokens_to_file(self): ''' Write api tokens to a file ''' config = dict() config['API_KEY'] = self.api_key config['ACCESS_TOKEN'] = self.access_token config['REFRESH_TOKEN'] = self.refresh_token config['AUTHORIZATION_CODE'] = self.authorization_code if self.file_based_config: config_from_file(self.config_filename, config) else: self.config = config
[ "Write", "api", "tokens", "to", "a", "file" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L171-L181
[ "def", "write_tokens_to_file", "(", "self", ")", ":", "config", "=", "dict", "(", ")", "config", "[", "'API_KEY'", "]", "=", "self", ".", "api_key", "config", "[", "'ACCESS_TOKEN'", "]", "=", "self", ".", "access_token", "config", "[", "'REFRESH_TOKEN'", "]", "=", "self", ".", "refresh_token", "config", "[", "'AUTHORIZATION_CODE'", "]", "=", "self", ".", "authorization_code", "if", "self", ".", "file_based_config", ":", "config_from_file", "(", "self", ".", "config_filename", ",", "config", ")", "else", ":", "self", ".", "config", "=", "config" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_hvac_mode
possible hvac modes are auto, auxHeatOnly, cool, heat, off
pyecobee/__init__.py
def set_hvac_mode(self, index, hvac_mode): ''' possible hvac modes are auto, auxHeatOnly, cool, heat, off ''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "hvacMode": hvac_mode } }} log_msg_action = "set HVAC mode" return self.make_request(body, log_msg_action)
def set_hvac_mode(self, index, hvac_mode): ''' possible hvac modes are auto, auxHeatOnly, cool, heat, off ''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "hvacMode": hvac_mode } }} log_msg_action = "set HVAC mode" return self.make_request(body, log_msg_action)
[ "possible", "hvac", "modes", "are", "auto", "auxHeatOnly", "cool", "heat", "off" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L207-L217
[ "def", "set_hvac_mode", "(", "self", ",", "index", ",", "hvac_mode", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"thermostat\"", ":", "{", "\"settings\"", ":", "{", "\"hvacMode\"", ":", "hvac_mode", "}", "}", "}", "log_msg_action", "=", "\"set HVAC mode\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_fan_min_on_time
The minimum time, in minutes, to run the fan each hour. Value from 1 to 60
pyecobee/__init__.py
def set_fan_min_on_time(self, index, fan_min_on_time): ''' The minimum time, in minutes, to run the fan each hour. Value from 1 to 60 ''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "fanMinOnTime": fan_min_on_time } }} log_msg_action = "set fan minimum on time." return self.make_request(body, log_msg_action)
def set_fan_min_on_time(self, index, fan_min_on_time): ''' The minimum time, in minutes, to run the fan each hour. Value from 1 to 60 ''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "fanMinOnTime": fan_min_on_time } }} log_msg_action = "set fan minimum on time." return self.make_request(body, log_msg_action)
[ "The", "minimum", "time", "in", "minutes", "to", "run", "the", "fan", "each", "hour", ".", "Value", "from", "1", "to", "60" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L219-L229
[ "def", "set_fan_min_on_time", "(", "self", ",", "index", ",", "fan_min_on_time", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"thermostat\"", ":", "{", "\"settings\"", ":", "{", "\"fanMinOnTime\"", ":", "fan_min_on_time", "}", "}", "}", "log_msg_action", "=", "\"set fan minimum on time.\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_fan_mode
Set fan mode. Values: auto, minontime, on
pyecobee/__init__.py
def set_fan_mode(self, index, fan_mode, cool_temp, heat_temp, hold_type="nextTransition"): ''' Set fan mode. Values: auto, minontime, on ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "coolHoldTemp": int(cool_temp * 10), "heatHoldTemp": int(heat_temp * 10), "fan": fan_mode }}]} log_msg_action = "set fan mode" return self.make_request(body, log_msg_action)
def set_fan_mode(self, index, fan_mode, cool_temp, heat_temp, hold_type="nextTransition"): ''' Set fan mode. Values: auto, minontime, on ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "coolHoldTemp": int(cool_temp * 10), "heatHoldTemp": int(heat_temp * 10), "fan": fan_mode }}]} log_msg_action = "set fan mode" return self.make_request(body, log_msg_action)
[ "Set", "fan", "mode", ".", "Values", ":", "auto", "minontime", "on" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L231-L243
[ "def", "set_fan_mode", "(", "self", ",", "index", ",", "fan_mode", ",", "cool_temp", ",", "heat_temp", ",", "hold_type", "=", "\"nextTransition\"", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"setHold\"", ",", "\"params\"", ":", "{", "\"holdType\"", ":", "hold_type", ",", "\"coolHoldTemp\"", ":", "int", "(", "cool_temp", "*", "10", ")", ",", "\"heatHoldTemp\"", ":", "int", "(", "heat_temp", "*", "10", ")", ",", "\"fan\"", ":", "fan_mode", "}", "}", "]", "}", "log_msg_action", "=", "\"set fan mode\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_hold_temp
Set a hold
pyecobee/__init__.py
def set_hold_temp(self, index, cool_temp, heat_temp, hold_type="nextTransition"): ''' Set a hold ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "coolHoldTemp": int(cool_temp * 10), "heatHoldTemp": int(heat_temp * 10) }}]} log_msg_action = "set hold temp" return self.make_request(body, log_msg_action)
def set_hold_temp(self, index, cool_temp, heat_temp, hold_type="nextTransition"): ''' Set a hold ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "coolHoldTemp": int(cool_temp * 10), "heatHoldTemp": int(heat_temp * 10) }}]} log_msg_action = "set hold temp" return self.make_request(body, log_msg_action)
[ "Set", "a", "hold" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L245-L257
[ "def", "set_hold_temp", "(", "self", ",", "index", ",", "cool_temp", ",", "heat_temp", ",", "hold_type", "=", "\"nextTransition\"", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"setHold\"", ",", "\"params\"", ":", "{", "\"holdType\"", ":", "hold_type", ",", "\"coolHoldTemp\"", ":", "int", "(", "cool_temp", "*", "10", ")", ",", "\"heatHoldTemp\"", ":", "int", "(", "heat_temp", "*", "10", ")", "}", "}", "]", "}", "log_msg_action", "=", "\"set hold temp\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_climate_hold
Set a climate hold - ie away, home, sleep
pyecobee/__init__.py
def set_climate_hold(self, index, climate, hold_type="nextTransition"): ''' Set a climate hold - ie away, home, sleep ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "holdClimateRef": climate }}]} log_msg_action = "set climate hold" return self.make_request(body, log_msg_action)
def set_climate_hold(self, index, climate, hold_type="nextTransition"): ''' Set a climate hold - ie away, home, sleep ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "setHold", "params": { "holdType": hold_type, "holdClimateRef": climate }}]} log_msg_action = "set climate hold" return self.make_request(body, log_msg_action)
[ "Set", "a", "climate", "hold", "-", "ie", "away", "home", "sleep" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L259-L269
[ "def", "set_climate_hold", "(", "self", ",", "index", ",", "climate", ",", "hold_type", "=", "\"nextTransition\"", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"setHold\"", ",", "\"params\"", ":", "{", "\"holdType\"", ":", "hold_type", ",", "\"holdClimateRef\"", ":", "climate", "}", "}", "]", "}", "log_msg_action", "=", "\"set climate hold\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.delete_vacation
Delete the vacation with name vacation
pyecobee/__init__.py
def delete_vacation(self, index, vacation): ''' Delete the vacation with name vacation ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "deleteVacation", "params": { "name": vacation }}]} log_msg_action = "delete a vacation" return self.make_request(body, log_msg_action)
def delete_vacation(self, index, vacation): ''' Delete the vacation with name vacation ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "deleteVacation", "params": { "name": vacation }}]} log_msg_action = "delete a vacation" return self.make_request(body, log_msg_action)
[ "Delete", "the", "vacation", "with", "name", "vacation" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L271-L281
[ "def", "delete_vacation", "(", "self", ",", "index", ",", "vacation", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"deleteVacation\"", ",", "\"params\"", ":", "{", "\"name\"", ":", "vacation", "}", "}", "]", "}", "log_msg_action", "=", "\"delete a vacation\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.resume_program
Resume currently scheduled program
pyecobee/__init__.py
def resume_program(self, index, resume_all=False): ''' Resume currently scheduled program ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "resumeProgram", "params": { "resumeAll": resume_all }}]} log_msg_action = "resume program" return self.make_request(body, log_msg_action)
def resume_program(self, index, resume_all=False): ''' Resume currently scheduled program ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "resumeProgram", "params": { "resumeAll": resume_all }}]} log_msg_action = "resume program" return self.make_request(body, log_msg_action)
[ "Resume", "currently", "scheduled", "program" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L283-L293
[ "def", "resume_program", "(", "self", ",", "index", ",", "resume_all", "=", "False", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"resumeProgram\"", ",", "\"params\"", ":", "{", "\"resumeAll\"", ":", "resume_all", "}", "}", "]", "}", "log_msg_action", "=", "\"resume program\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.send_message
Send a message to the thermostat
pyecobee/__init__.py
def send_message(self, index, message="Hello from python-ecobee!"): ''' Send a message to the thermostat ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "sendMessage", "params": { "text": message[0:500] }}]} log_msg_action = "send message" return self.make_request(body, log_msg_action)
def send_message(self, index, message="Hello from python-ecobee!"): ''' Send a message to the thermostat ''' body = {"selection": { "selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "functions": [{"type": "sendMessage", "params": { "text": message[0:500] }}]} log_msg_action = "send message" return self.make_request(body, log_msg_action)
[ "Send", "a", "message", "to", "the", "thermostat" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L295-L305
[ "def", "send_message", "(", "self", ",", "index", ",", "message", "=", "\"Hello from python-ecobee!\"", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"functions\"", ":", "[", "{", "\"type\"", ":", "\"sendMessage\"", ",", "\"params\"", ":", "{", "\"text\"", ":", "message", "[", "0", ":", "500", "]", "}", "}", "]", "}", "log_msg_action", "=", "\"send message\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_humidity
Set humidity level
pyecobee/__init__.py
def set_humidity(self, index, humidity): ''' Set humidity level''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "humidity": int(humidity) } }} log_msg_action = "set humidity level" return self.make_request(body, log_msg_action)
def set_humidity(self, index, humidity): ''' Set humidity level''' body = {"selection": {"selectionType": "thermostats", "selectionMatch": self.thermostats[index]['identifier']}, "thermostat": { "settings": { "humidity": int(humidity) } }} log_msg_action = "set humidity level" return self.make_request(body, log_msg_action)
[ "Set", "humidity", "level" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L307-L318
[ "def", "set_humidity", "(", "self", ",", "index", ",", "humidity", ")", ":", "body", "=", "{", "\"selection\"", ":", "{", "\"selectionType\"", ":", "\"thermostats\"", ",", "\"selectionMatch\"", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "\"thermostat\"", ":", "{", "\"settings\"", ":", "{", "\"humidity\"", ":", "int", "(", "humidity", ")", "}", "}", "}", "log_msg_action", "=", "\"set humidity level\"", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_mic_mode
Enable/disable Alexa mic (only for Ecobee 4) Values: True, False
pyecobee/__init__.py
def set_mic_mode(self, index, mic_enabled): '''Enable/disable Alexa mic (only for Ecobee 4) Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'audio': { 'microphoneEnabled': mic_enabled}}} log_msg_action = 'set mic mode' return self.make_request(body, log_msg_action)
def set_mic_mode(self, index, mic_enabled): '''Enable/disable Alexa mic (only for Ecobee 4) Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'audio': { 'microphoneEnabled': mic_enabled}}} log_msg_action = 'set mic mode' return self.make_request(body, log_msg_action)
[ "Enable", "/", "disable", "Alexa", "mic", "(", "only", "for", "Ecobee", "4", ")", "Values", ":", "True", "False" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L320-L334
[ "def", "set_mic_mode", "(", "self", ",", "index", ",", "mic_enabled", ")", ":", "body", "=", "{", "'selection'", ":", "{", "'selectionType'", ":", "'thermostats'", ",", "'selectionMatch'", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "'thermostat'", ":", "{", "'audio'", ":", "{", "'microphoneEnabled'", ":", "mic_enabled", "}", "}", "}", "log_msg_action", "=", "'set mic mode'", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_occupancy_modes
Enable/disable Smart Home/Away and Follow Me modes Values: True, False
pyecobee/__init__.py
def set_occupancy_modes(self, index, auto_away=None, follow_me=None): '''Enable/disable Smart Home/Away and Follow Me modes Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'settings': { 'autoAway': auto_away, 'followMeComfort': follow_me}}} log_msg_action = 'set occupancy modes' return self.make_request(body, log_msg_action)
def set_occupancy_modes(self, index, auto_away=None, follow_me=None): '''Enable/disable Smart Home/Away and Follow Me modes Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'settings': { 'autoAway': auto_away, 'followMeComfort': follow_me}}} log_msg_action = 'set occupancy modes' return self.make_request(body, log_msg_action)
[ "Enable", "/", "disable", "Smart", "Home", "/", "Away", "and", "Follow", "Me", "modes", "Values", ":", "True", "False" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L336-L351
[ "def", "set_occupancy_modes", "(", "self", ",", "index", ",", "auto_away", "=", "None", ",", "follow_me", "=", "None", ")", ":", "body", "=", "{", "'selection'", ":", "{", "'selectionType'", ":", "'thermostats'", ",", "'selectionMatch'", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "'thermostat'", ":", "{", "'settings'", ":", "{", "'autoAway'", ":", "auto_away", ",", "'followMeComfort'", ":", "follow_me", "}", "}", "}", "log_msg_action", "=", "'set occupancy modes'", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
Ecobee.set_dst_mode
Enable/disable daylight savings Values: True, False
pyecobee/__init__.py
def set_dst_mode(self, index, dst): '''Enable/disable daylight savings Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'location': { 'isDaylightSaving': dst}}} log_msg_action = 'set dst mode' return self.make_request(body, log_msg_action)
def set_dst_mode(self, index, dst): '''Enable/disable daylight savings Values: True, False ''' body = { 'selection': { 'selectionType': 'thermostats', 'selectionMatch': self.thermostats[index]['identifier']}, 'thermostat': { 'location': { 'isDaylightSaving': dst}}} log_msg_action = 'set dst mode' return self.make_request(body, log_msg_action)
[ "Enable", "/", "disable", "daylight", "savings", "Values", ":", "True", "False" ]
nkgilley/python-ecobee-api
python
https://github.com/nkgilley/python-ecobee-api/blob/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174/pyecobee/__init__.py#L353-L367
[ "def", "set_dst_mode", "(", "self", ",", "index", ",", "dst", ")", ":", "body", "=", "{", "'selection'", ":", "{", "'selectionType'", ":", "'thermostats'", ",", "'selectionMatch'", ":", "self", ".", "thermostats", "[", "index", "]", "[", "'identifier'", "]", "}", ",", "'thermostat'", ":", "{", "'location'", ":", "{", "'isDaylightSaving'", ":", "dst", "}", "}", "}", "log_msg_action", "=", "'set dst mode'", "return", "self", ".", "make_request", "(", "body", ",", "log_msg_action", ")" ]
cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174
test
future_dt_str
.
dhcpcanon/timers.py
def future_dt_str(dt, td): """.""" if isinstance(td, str): td = float(td) td = timedelta(seconds=td) future_dt = dt + td return future_dt.strftime(DT_PRINT_FORMAT)
def future_dt_str(dt, td): """.""" if isinstance(td, str): td = float(td) td = timedelta(seconds=td) future_dt = dt + td return future_dt.strftime(DT_PRINT_FORMAT)
[ "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L18-L24
[ "def", "future_dt_str", "(", "dt", ",", "td", ")", ":", "if", "isinstance", "(", "td", ",", "str", ")", ":", "td", "=", "float", "(", "td", ")", "td", "=", "timedelta", "(", "seconds", "=", "td", ")", "future_dt", "=", "dt", "+", "td", "return", "future_dt", ".", "strftime", "(", "DT_PRINT_FORMAT", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_delay_selecting
Generate the delay in seconds in which the DISCOVER will be sent. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup.
dhcpcanon/timers.py
def gen_delay_selecting(): """Generate the delay in seconds in which the DISCOVER will be sent. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup. """ delay = float(random.randint(0, MAX_DELAY_SELECTING)) logger.debug('Delay to enter in SELECTING %s.', delay) logger.debug('SELECTING will happen on %s', future_dt_str(nowutc(), delay)) return delay
def gen_delay_selecting(): """Generate the delay in seconds in which the DISCOVER will be sent. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup. """ delay = float(random.randint(0, MAX_DELAY_SELECTING)) logger.debug('Delay to enter in SELECTING %s.', delay) logger.debug('SELECTING will happen on %s', future_dt_str(nowutc(), delay)) return delay
[ "Generate", "the", "delay", "in", "seconds", "in", "which", "the", "DISCOVER", "will", "be", "sent", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L35-L48
[ "def", "gen_delay_selecting", "(", ")", ":", "delay", "=", "float", "(", "random", ".", "randint", "(", "0", ",", "MAX_DELAY_SELECTING", ")", ")", "logger", ".", "debug", "(", "'Delay to enter in SELECTING %s.'", ",", "delay", ")", "logger", ".", "debug", "(", "'SELECTING will happen on %s'", ",", "future_dt_str", "(", "nowutc", "(", ")", ",", "delay", ")", ")", "return", "delay" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_timeout_resend
Generate the time in seconds in which DHCPDISCOVER wil be retransmited. [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds [:rfc:`2131#section-4.1`]:: For example, in a 10Mb/sec Ethernet internetwork, the delay before the first retransmission SHOULD be 4 seconds randomized by the value of a uniform random number chosen from the range -1 to +1. Clients with clocks that provide resolution granularity of less than one second may choose a non-integer randomization value. The delay before the next retransmission SHOULD be 8 seconds randomized by the value of a uniform number chosen from the range -1 to +1. The retransmission delay SHOULD be doubled with subsequent retransmissions up to a maximum of 64 seconds.
dhcpcanon/timers.py
def gen_timeout_resend(attempts): """Generate the time in seconds in which DHCPDISCOVER wil be retransmited. [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds [:rfc:`2131#section-4.1`]:: For example, in a 10Mb/sec Ethernet internetwork, the delay before the first retransmission SHOULD be 4 seconds randomized by the value of a uniform random number chosen from the range -1 to +1. Clients with clocks that provide resolution granularity of less than one second may choose a non-integer randomization value. The delay before the next retransmission SHOULD be 8 seconds randomized by the value of a uniform number chosen from the range -1 to +1. The retransmission delay SHOULD be doubled with subsequent retransmissions up to a maximum of 64 seconds. """ timeout = 2 ** (attempts + 1) + random.uniform(-1, +1) logger.debug('next timeout resending will happen on %s', future_dt_str(nowutc(), timeout)) return timeout
def gen_timeout_resend(attempts): """Generate the time in seconds in which DHCPDISCOVER wil be retransmited. [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds [:rfc:`2131#section-4.1`]:: For example, in a 10Mb/sec Ethernet internetwork, the delay before the first retransmission SHOULD be 4 seconds randomized by the value of a uniform random number chosen from the range -1 to +1. Clients with clocks that provide resolution granularity of less than one second may choose a non-integer randomization value. The delay before the next retransmission SHOULD be 8 seconds randomized by the value of a uniform number chosen from the range -1 to +1. The retransmission delay SHOULD be doubled with subsequent retransmissions up to a maximum of 64 seconds. """ timeout = 2 ** (attempts + 1) + random.uniform(-1, +1) logger.debug('next timeout resending will happen on %s', future_dt_str(nowutc(), timeout)) return timeout
[ "Generate", "the", "time", "in", "seconds", "in", "which", "DHCPDISCOVER", "wil", "be", "retransmited", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L51-L75
[ "def", "gen_timeout_resend", "(", "attempts", ")", ":", "timeout", "=", "2", "**", "(", "attempts", "+", "1", ")", "+", "random", ".", "uniform", "(", "-", "1", ",", "+", "1", ")", "logger", ".", "debug", "(", "'next timeout resending will happen on %s'", ",", "future_dt_str", "(", "nowutc", "(", ")", ",", "timeout", ")", ")", "return", "timeout" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_timeout_request_renew
Generate time in seconds to retransmit DHCPREQUEST. [:rfc:`2131#section-4..4.5`]:: In both RENEWING and REBINDING states, if the client receives no response to its DHCPREQUEST message, the client SHOULD wait one-half of the remaining time until T2 (in RENEWING state) and one-half of the remaining lease time (in REBINDING state), down to a minimum of 60 seconds, before retransmitting the DHCPREQUEST message.
dhcpcanon/timers.py
def gen_timeout_request_renew(lease): """Generate time in seconds to retransmit DHCPREQUEST. [:rfc:`2131#section-4..4.5`]:: In both RENEWING and REBINDING states, if the client receives no response to its DHCPREQUEST message, the client SHOULD wait one-half of the remaining time until T2 (in RENEWING state) and one-half of the remaining lease time (in REBINDING state), down to a minimum of 60 seconds, before retransmitting the DHCPREQUEST message. """ time_left = (lease.rebinding_time - lease.renewing_time) * RENEW_PERC if time_left < 60: time_left = 60 logger.debug('Next request in renew will happen on %s', future_dt_str(nowutc(), time_left)) return time_left
def gen_timeout_request_renew(lease): """Generate time in seconds to retransmit DHCPREQUEST. [:rfc:`2131#section-4..4.5`]:: In both RENEWING and REBINDING states, if the client receives no response to its DHCPREQUEST message, the client SHOULD wait one-half of the remaining time until T2 (in RENEWING state) and one-half of the remaining lease time (in REBINDING state), down to a minimum of 60 seconds, before retransmitting the DHCPREQUEST message. """ time_left = (lease.rebinding_time - lease.renewing_time) * RENEW_PERC if time_left < 60: time_left = 60 logger.debug('Next request in renew will happen on %s', future_dt_str(nowutc(), time_left)) return time_left
[ "Generate", "time", "in", "seconds", "to", "retransmit", "DHCPREQUEST", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L78-L97
[ "def", "gen_timeout_request_renew", "(", "lease", ")", ":", "time_left", "=", "(", "lease", ".", "rebinding_time", "-", "lease", ".", "renewing_time", ")", "*", "RENEW_PERC", "if", "time_left", "<", "60", ":", "time_left", "=", "60", "logger", ".", "debug", "(", "'Next request in renew will happen on %s'", ",", "future_dt_str", "(", "nowutc", "(", ")", ",", "time_left", ")", ")", "return", "time_left" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_timeout_request_rebind
.
dhcpcanon/timers.py
def gen_timeout_request_rebind(lease): """.""" time_left = (lease.lease_time - lease.rebinding_time) * RENEW_PERC if time_left < 60: time_left = 60 logger.debug('Next request on rebinding will happen on %s', future_dt_str(nowutc(), time_left)) return time_left
def gen_timeout_request_rebind(lease): """.""" time_left = (lease.lease_time - lease.rebinding_time) * RENEW_PERC if time_left < 60: time_left = 60 logger.debug('Next request on rebinding will happen on %s', future_dt_str(nowutc(), time_left)) return time_left
[ "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L100-L107
[ "def", "gen_timeout_request_rebind", "(", "lease", ")", ":", "time_left", "=", "(", "lease", ".", "lease_time", "-", "lease", ".", "rebinding_time", ")", "*", "RENEW_PERC", "if", "time_left", "<", "60", ":", "time_left", "=", "60", "logger", ".", "debug", "(", "'Next request on rebinding will happen on %s'", ",", "future_dt_str", "(", "nowutc", "(", ")", ",", "time_left", ")", ")", "return", "time_left" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_renewing_time
Generate RENEWING time. [:rfc:`2131#section-4.4.5`]:: T1 defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 * duration_of_lease). Times T1 and T2 SHOULD be chosen with some random "fuzz" around a fixed value, to avoid synchronization of client reacquisition.
dhcpcanon/timers.py
def gen_renewing_time(lease_time, elapsed=0): """Generate RENEWING time. [:rfc:`2131#section-4.4.5`]:: T1 defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 * duration_of_lease). Times T1 and T2 SHOULD be chosen with some random "fuzz" around a fixed value, to avoid synchronization of client reacquisition. """ renewing_time = int(lease_time) * RENEW_PERC - elapsed # FIXME:80 [:rfc:`2131#section-4.4.5`]: the chosen "fuzz" could fingerprint # the implementation # NOTE: here using same "fuzz" as systemd? range_fuzz = int(lease_time) * REBIND_PERC - renewing_time logger.debug('rebinding fuzz range %s', range_fuzz) fuzz = random.uniform(-(range_fuzz), +(range_fuzz)) renewing_time += fuzz logger.debug('Renewing time %s.', renewing_time) return renewing_time
def gen_renewing_time(lease_time, elapsed=0): """Generate RENEWING time. [:rfc:`2131#section-4.4.5`]:: T1 defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 * duration_of_lease). Times T1 and T2 SHOULD be chosen with some random "fuzz" around a fixed value, to avoid synchronization of client reacquisition. """ renewing_time = int(lease_time) * RENEW_PERC - elapsed # FIXME:80 [:rfc:`2131#section-4.4.5`]: the chosen "fuzz" could fingerprint # the implementation # NOTE: here using same "fuzz" as systemd? range_fuzz = int(lease_time) * REBIND_PERC - renewing_time logger.debug('rebinding fuzz range %s', range_fuzz) fuzz = random.uniform(-(range_fuzz), +(range_fuzz)) renewing_time += fuzz logger.debug('Renewing time %s.', renewing_time) return renewing_time
[ "Generate", "RENEWING", "time", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L110-L132
[ "def", "gen_renewing_time", "(", "lease_time", ",", "elapsed", "=", "0", ")", ":", "renewing_time", "=", "int", "(", "lease_time", ")", "*", "RENEW_PERC", "-", "elapsed", "# FIXME:80 [:rfc:`2131#section-4.4.5`]: the chosen \"fuzz\" could fingerprint", "# the implementation", "# NOTE: here using same \"fuzz\" as systemd?", "range_fuzz", "=", "int", "(", "lease_time", ")", "*", "REBIND_PERC", "-", "renewing_time", "logger", ".", "debug", "(", "'rebinding fuzz range %s'", ",", "range_fuzz", ")", "fuzz", "=", "random", ".", "uniform", "(", "-", "(", "range_fuzz", ")", ",", "+", "(", "range_fuzz", ")", ")", "renewing_time", "+=", "fuzz", "logger", ".", "debug", "(", "'Renewing time %s.'", ",", "renewing_time", ")", "return", "renewing_time" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
gen_rebinding_time
.
dhcpcanon/timers.py
def gen_rebinding_time(lease_time, elapsed=0): """.""" rebinding_time = int(lease_time) * REBIND_PERC - elapsed # FIXME:90 [:rfc:`2131#section-4.4.5`]: the chosen "fuzz" could fingerprint # the implementation # NOTE: here using same "fuzz" as systemd? range_fuzz = int(lease_time) - rebinding_time logger.debug('rebinding fuzz range %s', range_fuzz) fuzz = random.uniform(-(range_fuzz), +(range_fuzz)) rebinding_time += fuzz logger.debug('Rebinding time %s.', rebinding_time) return rebinding_time
def gen_rebinding_time(lease_time, elapsed=0): """.""" rebinding_time = int(lease_time) * REBIND_PERC - elapsed # FIXME:90 [:rfc:`2131#section-4.4.5`]: the chosen "fuzz" could fingerprint # the implementation # NOTE: here using same "fuzz" as systemd? range_fuzz = int(lease_time) - rebinding_time logger.debug('rebinding fuzz range %s', range_fuzz) fuzz = random.uniform(-(range_fuzz), +(range_fuzz)) rebinding_time += fuzz logger.debug('Rebinding time %s.', rebinding_time) return rebinding_time
[ "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/timers.py#L135-L147
[ "def", "gen_rebinding_time", "(", "lease_time", ",", "elapsed", "=", "0", ")", ":", "rebinding_time", "=", "int", "(", "lease_time", ")", "*", "REBIND_PERC", "-", "elapsed", "# FIXME:90 [:rfc:`2131#section-4.4.5`]: the chosen \"fuzz\" could fingerprint", "# the implementation", "# NOTE: here using same \"fuzz\" as systemd?", "range_fuzz", "=", "int", "(", "lease_time", ")", "-", "rebinding_time", "logger", ".", "debug", "(", "'rebinding fuzz range %s'", ",", "range_fuzz", ")", "fuzz", "=", "random", ".", "uniform", "(", "-", "(", "range_fuzz", ")", ",", "+", "(", "range_fuzz", ")", ")", "rebinding_time", "+=", "fuzz", "logger", ".", "debug", "(", "'Rebinding time %s.'", ",", "rebinding_time", ")", "return", "rebinding_time" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.dict_self
Return the self object attributes not inherited as dict.
dhcpcanon/dhcpcapfsm.py
def dict_self(self): """Return the self object attributes not inherited as dict.""" return {k: v for k, v in self.__dict__.items() if k in FSM_ATTRS}
def dict_self(self): """Return the self object attributes not inherited as dict.""" return {k: v for k, v in self.__dict__.items() if k in FSM_ATTRS}
[ "Return", "the", "self", "object", "attributes", "not", "inherited", "as", "dict", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L38-L40
[ "def", "dict_self", "(", "self", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "__dict__", ".", "items", "(", ")", "if", "k", "in", "FSM_ATTRS", "}" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.reset
Reset object attributes when state is INIT.
dhcpcanon/dhcpcapfsm.py
def reset(self, iface=None, client_mac=None, xid=None, scriptfile=None): """Reset object attributes when state is INIT.""" logger.debug('Reseting attributes.') if iface is None: iface = conf.iface if client_mac is None: # scapy for python 3 returns byte, not tuple tempmac = get_if_raw_hwaddr(iface) if isinstance(tempmac, tuple) and len(tempmac) == 2: mac = tempmac[1] else: mac = tempmac client_mac = str2mac(mac) self.client = DHCPCAP(iface=iface, client_mac=client_mac, xid=xid) if scriptfile is not None: self.script = ClientScript(scriptfile) else: self.script = None self.time_sent_request = None self.discover_attempts = 0 self.request_attempts = 0 self.current_state = STATE_PREINIT self.offers = list()
def reset(self, iface=None, client_mac=None, xid=None, scriptfile=None): """Reset object attributes when state is INIT.""" logger.debug('Reseting attributes.') if iface is None: iface = conf.iface if client_mac is None: # scapy for python 3 returns byte, not tuple tempmac = get_if_raw_hwaddr(iface) if isinstance(tempmac, tuple) and len(tempmac) == 2: mac = tempmac[1] else: mac = tempmac client_mac = str2mac(mac) self.client = DHCPCAP(iface=iface, client_mac=client_mac, xid=xid) if scriptfile is not None: self.script = ClientScript(scriptfile) else: self.script = None self.time_sent_request = None self.discover_attempts = 0 self.request_attempts = 0 self.current_state = STATE_PREINIT self.offers = list()
[ "Reset", "object", "attributes", "when", "state", "is", "INIT", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L48-L70
[ "def", "reset", "(", "self", ",", "iface", "=", "None", ",", "client_mac", "=", "None", ",", "xid", "=", "None", ",", "scriptfile", "=", "None", ")", ":", "logger", ".", "debug", "(", "'Reseting attributes.'", ")", "if", "iface", "is", "None", ":", "iface", "=", "conf", ".", "iface", "if", "client_mac", "is", "None", ":", "# scapy for python 3 returns byte, not tuple", "tempmac", "=", "get_if_raw_hwaddr", "(", "iface", ")", "if", "isinstance", "(", "tempmac", ",", "tuple", ")", "and", "len", "(", "tempmac", ")", "==", "2", ":", "mac", "=", "tempmac", "[", "1", "]", "else", ":", "mac", "=", "tempmac", "client_mac", "=", "str2mac", "(", "mac", ")", "self", ".", "client", "=", "DHCPCAP", "(", "iface", "=", "iface", ",", "client_mac", "=", "client_mac", ",", "xid", "=", "xid", ")", "if", "scriptfile", "is", "not", "None", ":", "self", ".", "script", "=", "ClientScript", "(", "scriptfile", ")", "else", ":", "self", ".", "script", "=", "None", "self", ".", "time_sent_request", "=", "None", "self", ".", "discover_attempts", "=", "0", "self", ".", "request_attempts", "=", "0", "self", ".", "current_state", "=", "STATE_PREINIT", "self", ".", "offers", "=", "list", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.get_timeout
Workaround to get timeout in the ATMT.timeout class method.
dhcpcanon/dhcpcapfsm.py
def get_timeout(self, state, function): """Workaround to get timeout in the ATMT.timeout class method.""" state = STATES2NAMES[state] for timeout_fn_t in self.timeout[state]: # access the function name if timeout_fn_t[1] is not None and \ timeout_fn_t[1].atmt_condname == function.atmt_condname: logger.debug('Timeout for state %s, function %s, is %s', state, function.atmt_condname, timeout_fn_t[0]) return timeout_fn_t[0] return None
def get_timeout(self, state, function): """Workaround to get timeout in the ATMT.timeout class method.""" state = STATES2NAMES[state] for timeout_fn_t in self.timeout[state]: # access the function name if timeout_fn_t[1] is not None and \ timeout_fn_t[1].atmt_condname == function.atmt_condname: logger.debug('Timeout for state %s, function %s, is %s', state, function.atmt_condname, timeout_fn_t[0]) return timeout_fn_t[0] return None
[ "Workaround", "to", "get", "timeout", "in", "the", "ATMT", ".", "timeout", "class", "method", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L106-L116
[ "def", "get_timeout", "(", "self", ",", "state", ",", "function", ")", ":", "state", "=", "STATES2NAMES", "[", "state", "]", "for", "timeout_fn_t", "in", "self", ".", "timeout", "[", "state", "]", ":", "# access the function name", "if", "timeout_fn_t", "[", "1", "]", "is", "not", "None", "and", "timeout_fn_t", "[", "1", "]", ".", "atmt_condname", "==", "function", ".", "atmt_condname", ":", "logger", ".", "debug", "(", "'Timeout for state %s, function %s, is %s'", ",", "state", ",", "function", ".", "atmt_condname", ",", "timeout_fn_t", "[", "0", "]", ")", "return", "timeout_fn_t", "[", "0", "]", "return", "None" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.set_timeout
Workaround to change timeout values in the ATMT.timeout class method. self.timeout format is:: {'STATE': [ (TIMEOUT0, <function foo>), (TIMEOUT1, <function bar>)), (None, None) ], }
dhcpcanon/dhcpcapfsm.py
def set_timeout(self, state, function, newtimeout): """ Workaround to change timeout values in the ATMT.timeout class method. self.timeout format is:: {'STATE': [ (TIMEOUT0, <function foo>), (TIMEOUT1, <function bar>)), (None, None) ], } """ state = STATES2NAMES[state] for timeout_fn_t in self.timeout[state]: # access the function name if timeout_fn_t[1] is not None and \ timeout_fn_t[1].atmt_condname == function.atmt_condname: # convert list to tuple to make it mutable timeout_l = list(timeout_fn_t) # modify the timeout timeout_l[0] = newtimeout # set the new timeoute to self.timeout i = self.timeout[state].index(timeout_fn_t) self.timeout[state][i] = tuple(timeout_l) logger.debug('Set state %s, function %s, to timeout %s', state, function.atmt_condname, newtimeout)
def set_timeout(self, state, function, newtimeout): """ Workaround to change timeout values in the ATMT.timeout class method. self.timeout format is:: {'STATE': [ (TIMEOUT0, <function foo>), (TIMEOUT1, <function bar>)), (None, None) ], } """ state = STATES2NAMES[state] for timeout_fn_t in self.timeout[state]: # access the function name if timeout_fn_t[1] is not None and \ timeout_fn_t[1].atmt_condname == function.atmt_condname: # convert list to tuple to make it mutable timeout_l = list(timeout_fn_t) # modify the timeout timeout_l[0] = newtimeout # set the new timeoute to self.timeout i = self.timeout[state].index(timeout_fn_t) self.timeout[state][i] = tuple(timeout_l) logger.debug('Set state %s, function %s, to timeout %s', state, function.atmt_condname, newtimeout)
[ "Workaround", "to", "change", "timeout", "values", "in", "the", "ATMT", ".", "timeout", "class", "method", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L118-L146
[ "def", "set_timeout", "(", "self", ",", "state", ",", "function", ",", "newtimeout", ")", ":", "state", "=", "STATES2NAMES", "[", "state", "]", "for", "timeout_fn_t", "in", "self", ".", "timeout", "[", "state", "]", ":", "# access the function name", "if", "timeout_fn_t", "[", "1", "]", "is", "not", "None", "and", "timeout_fn_t", "[", "1", "]", ".", "atmt_condname", "==", "function", ".", "atmt_condname", ":", "# convert list to tuple to make it mutable", "timeout_l", "=", "list", "(", "timeout_fn_t", ")", "# modify the timeout", "timeout_l", "[", "0", "]", "=", "newtimeout", "# set the new timeoute to self.timeout", "i", "=", "self", ".", "timeout", "[", "state", "]", ".", "index", "(", "timeout_fn_t", ")", "self", ".", "timeout", "[", "state", "]", "[", "i", "]", "=", "tuple", "(", "timeout_l", ")", "logger", ".", "debug", "(", "'Set state %s, function %s, to timeout %s'", ",", "state", ",", "function", ".", "atmt_condname", ",", "newtimeout", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.send_discover
Send discover.
dhcpcanon/dhcpcapfsm.py
def send_discover(self): """Send discover.""" assert self.client assert self.current_state == STATE_INIT or \ self.current_state == STATE_SELECTING pkt = self.client.gen_discover() sendp(pkt) # FIXME:20 check that this is correct,: all or only discover? if self.discover_attempts < MAX_ATTEMPTS_DISCOVER: self.discover_attempts += 1 timeout = gen_timeout_resend(self.discover_attempts) self.set_timeout(self.current_state, self.timeout_selecting, timeout)
def send_discover(self): """Send discover.""" assert self.client assert self.current_state == STATE_INIT or \ self.current_state == STATE_SELECTING pkt = self.client.gen_discover() sendp(pkt) # FIXME:20 check that this is correct,: all or only discover? if self.discover_attempts < MAX_ATTEMPTS_DISCOVER: self.discover_attempts += 1 timeout = gen_timeout_resend(self.discover_attempts) self.set_timeout(self.current_state, self.timeout_selecting, timeout)
[ "Send", "discover", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L148-L161
[ "def", "send_discover", "(", "self", ")", ":", "assert", "self", ".", "client", "assert", "self", ".", "current_state", "==", "STATE_INIT", "or", "self", ".", "current_state", "==", "STATE_SELECTING", "pkt", "=", "self", ".", "client", ".", "gen_discover", "(", ")", "sendp", "(", "pkt", ")", "# FIXME:20 check that this is correct,: all or only discover?", "if", "self", ".", "discover_attempts", "<", "MAX_ATTEMPTS_DISCOVER", ":", "self", ".", "discover_attempts", "+=", "1", "timeout", "=", "gen_timeout_resend", "(", "self", ".", "discover_attempts", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "timeout_selecting", ",", "timeout", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.select_offer
Select an offer from the offers received. [:rfc:`2131#section-4.2`]:: DHCP clients are free to use any strategy in selecting a DHCP server among those from which the client receives a DHCPOFFER. [:rfc:`2131#section-4.4.1`]:: The time over which the client collects messages and the mechanism used to select one DHCPOFFER are implementation dependent. Nor [:rfc:`7844`] nor [:rfc:`2131`] specify the algorithm. Here, currently the first offer is selected. .. todo:: - Check other implementations algorithm to select offer.
dhcpcanon/dhcpcapfsm.py
def select_offer(self): """Select an offer from the offers received. [:rfc:`2131#section-4.2`]:: DHCP clients are free to use any strategy in selecting a DHCP server among those from which the client receives a DHCPOFFER. [:rfc:`2131#section-4.4.1`]:: The time over which the client collects messages and the mechanism used to select one DHCPOFFER are implementation dependent. Nor [:rfc:`7844`] nor [:rfc:`2131`] specify the algorithm. Here, currently the first offer is selected. .. todo:: - Check other implementations algorithm to select offer. """ logger.debug('Selecting offer.') pkt = self.offers[0] self.client.handle_offer(pkt)
def select_offer(self): """Select an offer from the offers received. [:rfc:`2131#section-4.2`]:: DHCP clients are free to use any strategy in selecting a DHCP server among those from which the client receives a DHCPOFFER. [:rfc:`2131#section-4.4.1`]:: The time over which the client collects messages and the mechanism used to select one DHCPOFFER are implementation dependent. Nor [:rfc:`7844`] nor [:rfc:`2131`] specify the algorithm. Here, currently the first offer is selected. .. todo:: - Check other implementations algorithm to select offer. """ logger.debug('Selecting offer.') pkt = self.offers[0] self.client.handle_offer(pkt)
[ "Select", "an", "offer", "from", "the", "offers", "received", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L166-L190
[ "def", "select_offer", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Selecting offer.'", ")", "pkt", "=", "self", ".", "offers", "[", "0", "]", "self", ".", "client", ".", "handle_offer", "(", "pkt", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.send_request
Send request. [:rfc:`2131#section-3.1`]:: a client retransmitting as described in section 4.1 might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds .. todo:: - The maximum number of retransmitted REQUESTs is per state or in total? - Are the retransmitted REQUESTs independent to the retransmitted DISCOVERs?
dhcpcanon/dhcpcapfsm.py
def send_request(self): """Send request. [:rfc:`2131#section-3.1`]:: a client retransmitting as described in section 4.1 might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds .. todo:: - The maximum number of retransmitted REQUESTs is per state or in total? - Are the retransmitted REQUESTs independent to the retransmitted DISCOVERs? """ assert self.client if self.current_state == STATE_BOUND: pkt = self.client.gen_request_unicast() else: pkt = self.client.gen_request() sendp(pkt) logger.debug('Modifying FSM obj, setting time_sent_request.') self.time_sent_request = nowutc() logger.info('DHCPREQUEST of %s on %s to %s port %s', self.client.iface, self.client.client_ip, self.client.server_ip, self.client.server_port) # NOTE: see previous TODO, maybe the MAX_ATTEMPTS_REQUEST needs to be # calculated per state. if self.request_attempts < MAX_ATTEMPTS_REQUEST: self.request_attempts *= 2 logger.debug('Increased request attempts to %s', self.request_attempts) if self.current_state == STATE_RENEWING: timeout_renewing = gen_timeout_request_renew(self.client.lease) self.set_timeout(self.current_state, self.timeout_request_renewing, timeout_renewing) elif self.current_state == STATE_REBINDING: timeout_rebinding = gen_timeout_request_rebind(self.client.lease) self.set_timeout(self.current_state, self.timeout_request_rebinding, timeout_rebinding) else: timeout_requesting = \ gen_timeout_resend(self.request_attempts) self.set_timeout(self.current_state, self.timeout_requesting, timeout_requesting)
def send_request(self): """Send request. [:rfc:`2131#section-3.1`]:: a client retransmitting as described in section 4.1 might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds .. todo:: - The maximum number of retransmitted REQUESTs is per state or in total? - Are the retransmitted REQUESTs independent to the retransmitted DISCOVERs? """ assert self.client if self.current_state == STATE_BOUND: pkt = self.client.gen_request_unicast() else: pkt = self.client.gen_request() sendp(pkt) logger.debug('Modifying FSM obj, setting time_sent_request.') self.time_sent_request = nowutc() logger.info('DHCPREQUEST of %s on %s to %s port %s', self.client.iface, self.client.client_ip, self.client.server_ip, self.client.server_port) # NOTE: see previous TODO, maybe the MAX_ATTEMPTS_REQUEST needs to be # calculated per state. if self.request_attempts < MAX_ATTEMPTS_REQUEST: self.request_attempts *= 2 logger.debug('Increased request attempts to %s', self.request_attempts) if self.current_state == STATE_RENEWING: timeout_renewing = gen_timeout_request_renew(self.client.lease) self.set_timeout(self.current_state, self.timeout_request_renewing, timeout_renewing) elif self.current_state == STATE_REBINDING: timeout_rebinding = gen_timeout_request_rebind(self.client.lease) self.set_timeout(self.current_state, self.timeout_request_rebinding, timeout_rebinding) else: timeout_requesting = \ gen_timeout_resend(self.request_attempts) self.set_timeout(self.current_state, self.timeout_requesting, timeout_requesting)
[ "Send", "request", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L192-L241
[ "def", "send_request", "(", "self", ")", ":", "assert", "self", ".", "client", "if", "self", ".", "current_state", "==", "STATE_BOUND", ":", "pkt", "=", "self", ".", "client", ".", "gen_request_unicast", "(", ")", "else", ":", "pkt", "=", "self", ".", "client", ".", "gen_request", "(", ")", "sendp", "(", "pkt", ")", "logger", ".", "debug", "(", "'Modifying FSM obj, setting time_sent_request.'", ")", "self", ".", "time_sent_request", "=", "nowutc", "(", ")", "logger", ".", "info", "(", "'DHCPREQUEST of %s on %s to %s port %s'", ",", "self", ".", "client", ".", "iface", ",", "self", ".", "client", ".", "client_ip", ",", "self", ".", "client", ".", "server_ip", ",", "self", ".", "client", ".", "server_port", ")", "# NOTE: see previous TODO, maybe the MAX_ATTEMPTS_REQUEST needs to be", "# calculated per state.", "if", "self", ".", "request_attempts", "<", "MAX_ATTEMPTS_REQUEST", ":", "self", ".", "request_attempts", "*=", "2", "logger", ".", "debug", "(", "'Increased request attempts to %s'", ",", "self", ".", "request_attempts", ")", "if", "self", ".", "current_state", "==", "STATE_RENEWING", ":", "timeout_renewing", "=", "gen_timeout_request_renew", "(", "self", ".", "client", ".", "lease", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "timeout_request_renewing", ",", "timeout_renewing", ")", "elif", "self", ".", "current_state", "==", "STATE_REBINDING", ":", "timeout_rebinding", "=", "gen_timeout_request_rebind", "(", "self", ".", "client", ".", "lease", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "timeout_request_rebinding", ",", "timeout_rebinding", ")", "else", ":", "timeout_requesting", "=", "gen_timeout_resend", "(", "self", ".", "request_attempts", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "timeout_requesting", ",", "timeout_requesting", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.set_timers
Set renewal, rebinding times.
dhcpcanon/dhcpcapfsm.py
def set_timers(self): """Set renewal, rebinding times.""" logger.debug('setting timeouts') self.set_timeout(self.current_state, self.renewing_time_expires, self.client.lease.renewal_time) self.set_timeout(self.current_state, self.rebinding_time_expires, self.client.lease.rebinding_time)
def set_timers(self): """Set renewal, rebinding times.""" logger.debug('setting timeouts') self.set_timeout(self.current_state, self.renewing_time_expires, self.client.lease.renewal_time) self.set_timeout(self.current_state, self.rebinding_time_expires, self.client.lease.rebinding_time)
[ "Set", "renewal", "rebinding", "times", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L243-L251
[ "def", "set_timers", "(", "self", ")", ":", "logger", ".", "debug", "(", "'setting timeouts'", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "renewing_time_expires", ",", "self", ".", "client", ".", "lease", ".", "renewal_time", ")", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "rebinding_time_expires", ",", "self", ".", "client", ".", "lease", ".", "rebinding_time", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.process_received_ack
Process a received ACK packet. Not specifiyed in [:rfc:`7844`]. Probe the offered IP in [:rfc:`2131#section-2.2.`]:: the allocating server SHOULD probe the reused address before allocating the address, e.g., with an ICMP echo request, and the client SHOULD probe the newly received address, e.g., with ARP. The client SHOULD broadcast an ARP reply to announce the client's new IP address and clear any outdated ARP cache entries in hosts on the client's subnet. It is also not specifiyed in [:rfc:`7844`] nor [:rfc:`2131`] how to check that the offered IP is valid. .. todo:: - Check that nor ``dhclient`` nor ``systemd-networkd`` send an ARP. - Check how other implementations check that the ACK paremeters are valid, ie, if the ACK fields match the fields in the OFFER. - Check to which state the client should go back to when the offered parameters are not valid.
dhcpcanon/dhcpcapfsm.py
def process_received_ack(self, pkt): """Process a received ACK packet. Not specifiyed in [:rfc:`7844`]. Probe the offered IP in [:rfc:`2131#section-2.2.`]:: the allocating server SHOULD probe the reused address before allocating the address, e.g., with an ICMP echo request, and the client SHOULD probe the newly received address, e.g., with ARP. The client SHOULD broadcast an ARP reply to announce the client's new IP address and clear any outdated ARP cache entries in hosts on the client's subnet. It is also not specifiyed in [:rfc:`7844`] nor [:rfc:`2131`] how to check that the offered IP is valid. .. todo:: - Check that nor ``dhclient`` nor ``systemd-networkd`` send an ARP. - Check how other implementations check that the ACK paremeters are valid, ie, if the ACK fields match the fields in the OFFER. - Check to which state the client should go back to when the offered parameters are not valid. """ if isack(pkt): try: self.event = self.client.handle_ack(pkt, self.time_sent_request) except AddrFormatError as err: logger.error(err) # NOTE: see previous TODO, maybe should go back to other state. raise self.SELECTING() # NOTE: see previous TODO, not checking address with ARP. logger.info('DHCPACK of %s from %s' % (self.client.client_ip, self.client.server_ip)) return True return False
def process_received_ack(self, pkt): """Process a received ACK packet. Not specifiyed in [:rfc:`7844`]. Probe the offered IP in [:rfc:`2131#section-2.2.`]:: the allocating server SHOULD probe the reused address before allocating the address, e.g., with an ICMP echo request, and the client SHOULD probe the newly received address, e.g., with ARP. The client SHOULD broadcast an ARP reply to announce the client's new IP address and clear any outdated ARP cache entries in hosts on the client's subnet. It is also not specifiyed in [:rfc:`7844`] nor [:rfc:`2131`] how to check that the offered IP is valid. .. todo:: - Check that nor ``dhclient`` nor ``systemd-networkd`` send an ARP. - Check how other implementations check that the ACK paremeters are valid, ie, if the ACK fields match the fields in the OFFER. - Check to which state the client should go back to when the offered parameters are not valid. """ if isack(pkt): try: self.event = self.client.handle_ack(pkt, self.time_sent_request) except AddrFormatError as err: logger.error(err) # NOTE: see previous TODO, maybe should go back to other state. raise self.SELECTING() # NOTE: see previous TODO, not checking address with ARP. logger.info('DHCPACK of %s from %s' % (self.client.client_ip, self.client.server_ip)) return True return False
[ "Process", "a", "received", "ACK", "packet", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L253-L291
[ "def", "process_received_ack", "(", "self", ",", "pkt", ")", ":", "if", "isack", "(", "pkt", ")", ":", "try", ":", "self", ".", "event", "=", "self", ".", "client", ".", "handle_ack", "(", "pkt", ",", "self", ".", "time_sent_request", ")", "except", "AddrFormatError", "as", "err", ":", "logger", ".", "error", "(", "err", ")", "# NOTE: see previous TODO, maybe should go back to other state.", "raise", "self", ".", "SELECTING", "(", ")", "# NOTE: see previous TODO, not checking address with ARP.", "logger", ".", "info", "(", "'DHCPACK of %s from %s'", "%", "(", "self", ".", "client", ".", "client_ip", ",", "self", ".", "client", ".", "server_ip", ")", ")", "return", "True", "return", "False" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.process_received_nak
Process a received NAK packet.
dhcpcanon/dhcpcapfsm.py
def process_received_nak(self, pkt): """Process a received NAK packet.""" if isnak(pkt): logger.info('DHCPNAK of %s from %s', self.client.client_ip, self.client.server_ip) return True return False
def process_received_nak(self, pkt): """Process a received NAK packet.""" if isnak(pkt): logger.info('DHCPNAK of %s from %s', self.client.client_ip, self.client.server_ip) return True return False
[ "Process", "a", "received", "NAK", "packet", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L293-L299
[ "def", "process_received_nak", "(", "self", ",", "pkt", ")", ":", "if", "isnak", "(", "pkt", ")", ":", "logger", ".", "info", "(", "'DHCPNAK of %s from %s'", ",", "self", ".", "client", ".", "client_ip", ",", "self", ".", "client", ".", "server_ip", ")", "return", "True", "return", "False" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.INIT
INIT state. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup .. todo:: - The initial delay is implemented, but probably is not in other implementations. Check what other implementations do.
dhcpcanon/dhcpcapfsm.py
def INIT(self): """INIT state. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup .. todo:: - The initial delay is implemented, but probably is not in other implementations. Check what other implementations do. """ # NOTE: in case INIT is reached from other state, initialize attributes # reset all variables. logger.debug('In state: INIT') if self.current_state is not STATE_PREINIT: self.reset() self.current_state = STATE_INIT # NOTE: see previous TODO, maybe this is not needed. if self.delay_selecting: if self.delay_before_selecting is None: delay_before_selecting = gen_delay_selecting() else: delay_before_selecting = self.delay_before_selecting else: delay_before_selecting = 0 self.set_timeout(self.current_state, self.timeout_delay_before_selecting, delay_before_selecting) if self.timeout_select is not None: self.set_timeout(STATE_SELECTING, self.timeout_selecting, self.timeout_select)
def INIT(self): """INIT state. [:rfc:`2131#section-4.4.1`]:: The client SHOULD wait a random time between one and ten seconds to desynchronize the use of DHCP at startup .. todo:: - The initial delay is implemented, but probably is not in other implementations. Check what other implementations do. """ # NOTE: in case INIT is reached from other state, initialize attributes # reset all variables. logger.debug('In state: INIT') if self.current_state is not STATE_PREINIT: self.reset() self.current_state = STATE_INIT # NOTE: see previous TODO, maybe this is not needed. if self.delay_selecting: if self.delay_before_selecting is None: delay_before_selecting = gen_delay_selecting() else: delay_before_selecting = self.delay_before_selecting else: delay_before_selecting = 0 self.set_timeout(self.current_state, self.timeout_delay_before_selecting, delay_before_selecting) if self.timeout_select is not None: self.set_timeout(STATE_SELECTING, self.timeout_selecting, self.timeout_select)
[ "INIT", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L309-L341
[ "def", "INIT", "(", "self", ")", ":", "# NOTE: in case INIT is reached from other state, initialize attributes", "# reset all variables.", "logger", ".", "debug", "(", "'In state: INIT'", ")", "if", "self", ".", "current_state", "is", "not", "STATE_PREINIT", ":", "self", ".", "reset", "(", ")", "self", ".", "current_state", "=", "STATE_INIT", "# NOTE: see previous TODO, maybe this is not needed.", "if", "self", ".", "delay_selecting", ":", "if", "self", ".", "delay_before_selecting", "is", "None", ":", "delay_before_selecting", "=", "gen_delay_selecting", "(", ")", "else", ":", "delay_before_selecting", "=", "self", ".", "delay_before_selecting", "else", ":", "delay_before_selecting", "=", "0", "self", ".", "set_timeout", "(", "self", ".", "current_state", ",", "self", ".", "timeout_delay_before_selecting", ",", "delay_before_selecting", ")", "if", "self", ".", "timeout_select", "is", "not", "None", ":", "self", ".", "set_timeout", "(", "STATE_SELECTING", ",", "self", ".", "timeout_selecting", ",", "self", ".", "timeout_select", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.BOUND
BOUND state.
dhcpcanon/dhcpcapfsm.py
def BOUND(self): """BOUND state.""" logger.debug('In state: BOUND') logger.info('(%s) state changed %s -> bound', self.client.iface, STATES2NAMES[self.current_state]) self.current_state = STATE_BOUND self.client.lease.info_lease() if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: try: set_net(self.client.lease) except Exception as e: logger.error('Can not set IP', exc_info=True)
def BOUND(self): """BOUND state.""" logger.debug('In state: BOUND') logger.info('(%s) state changed %s -> bound', self.client.iface, STATES2NAMES[self.current_state]) self.current_state = STATE_BOUND self.client.lease.info_lease() if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: try: set_net(self.client.lease) except Exception as e: logger.error('Can not set IP', exc_info=True)
[ "BOUND", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L356-L370
[ "def", "BOUND", "(", "self", ")", ":", "logger", ".", "debug", "(", "'In state: BOUND'", ")", "logger", ".", "info", "(", "'(%s) state changed %s -> bound'", ",", "self", ".", "client", ".", "iface", ",", "STATES2NAMES", "[", "self", ".", "current_state", "]", ")", "self", ".", "current_state", "=", "STATE_BOUND", "self", ".", "client", ".", "lease", ".", "info_lease", "(", ")", "if", "self", ".", "script", "is", "not", "None", ":", "self", ".", "script", ".", "script_init", "(", "self", ".", "client", ".", "lease", ",", "self", ".", "current_state", ")", "self", ".", "script", ".", "script_go", "(", ")", "else", ":", "try", ":", "set_net", "(", "self", ".", "client", ".", "lease", ")", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "'Can not set IP'", ",", "exc_info", "=", "True", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.RENEWING
RENEWING state.
dhcpcanon/dhcpcapfsm.py
def RENEWING(self): """RENEWING state.""" logger.debug('In state: RENEWING') self.current_state = STATE_RENEWING if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease)
def RENEWING(self): """RENEWING state.""" logger.debug('In state: RENEWING') self.current_state = STATE_RENEWING if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease)
[ "RENEWING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L375-L383
[ "def", "RENEWING", "(", "self", ")", ":", "logger", ".", "debug", "(", "'In state: RENEWING'", ")", "self", ".", "current_state", "=", "STATE_RENEWING", "if", "self", ".", "script", "is", "not", "None", ":", "self", ".", "script", ".", "script_init", "(", "self", ".", "client", ".", "lease", ",", "self", ".", "current_state", ")", "self", ".", "script", ".", "script_go", "(", ")", "else", ":", "set_net", "(", "self", ".", "client", ".", "lease", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.REBINDING
REBINDING state.
dhcpcanon/dhcpcapfsm.py
def REBINDING(self): """REBINDING state.""" logger.debug('In state: REBINDING') self.current_state = STATE_REBINDING if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease)
def REBINDING(self): """REBINDING state.""" logger.debug('In state: REBINDING') self.current_state = STATE_REBINDING if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease)
[ "REBINDING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L386-L394
[ "def", "REBINDING", "(", "self", ")", ":", "logger", ".", "debug", "(", "'In state: REBINDING'", ")", "self", ".", "current_state", "=", "STATE_REBINDING", "if", "self", ".", "script", "is", "not", "None", ":", "self", ".", "script", ".", "script_init", "(", "self", ".", "client", ".", "lease", ",", "self", ".", "current_state", ")", "self", ".", "script", ".", "script_go", "(", ")", "else", ":", "set_net", "(", "self", ".", "client", ".", "lease", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.END
END state.
dhcpcanon/dhcpcapfsm.py
def END(self): """END state.""" logger.debug('In state: END') self.current_state = STATE_END if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease) return
def END(self): """END state.""" logger.debug('In state: END') self.current_state = STATE_END if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() else: set_net(self.client.lease) return
[ "END", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L397-L406
[ "def", "END", "(", "self", ")", ":", "logger", ".", "debug", "(", "'In state: END'", ")", "self", ".", "current_state", "=", "STATE_END", "if", "self", ".", "script", "is", "not", "None", ":", "self", ".", "script", ".", "script_init", "(", "self", ".", "client", ".", "lease", ",", "self", ".", "current_state", ")", "self", ".", "script", ".", "script_go", "(", ")", "else", ":", "set_net", "(", "self", ".", "client", ".", "lease", ")", "return" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.ERROR
ERROR state.
dhcpcanon/dhcpcapfsm.py
def ERROR(self): """ERROR state.""" logger.debug('In state: ERROR') self.current_state = STATE_ERROR if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() set_net(self.client.lease) raise self.INIT()
def ERROR(self): """ERROR state.""" logger.debug('In state: ERROR') self.current_state = STATE_ERROR if self.script is not None: self.script.script_init(self.client.lease, self.current_state) self.script.script_go() set_net(self.client.lease) raise self.INIT()
[ "ERROR", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L409-L417
[ "def", "ERROR", "(", "self", ")", ":", "logger", ".", "debug", "(", "'In state: ERROR'", ")", "self", ".", "current_state", "=", "STATE_ERROR", "if", "self", ".", "script", "is", "not", "None", ":", "self", ".", "script", ".", "script_init", "(", "self", ".", "client", ".", "lease", ",", "self", ".", "current_state", ")", "self", ".", "script", ".", "script_go", "(", ")", "set_net", "(", "self", ".", "client", ".", "lease", ")", "raise", "self", ".", "INIT", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.timeout_selecting
Timeout of selecting on SELECTING state. Not specifiyed in [:rfc:`7844`]. See comments in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_request`.
dhcpcanon/dhcpcapfsm.py
def timeout_selecting(self): """Timeout of selecting on SELECTING state. Not specifiyed in [:rfc:`7844`]. See comments in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_request`. """ logger.debug('C2.1: T In %s, timeout receiving response to select.', self.current_state) if len(self.offers) >= MAX_OFFERS_COLLECTED: logger.debug('C2.2: T Maximum number of offers reached, ' 'raise REQUESTING.') raise self.REQUESTING() if self.discover_attempts >= MAX_ATTEMPTS_DISCOVER: logger.debug('C2.3: T Maximum number of discover retries is %s' ' and already sent %s.', MAX_ATTEMPTS_DISCOVER, self.discover_attempts) if len(self.offers) <= 0: logger.debug('C2.4: T. But no OFFERS where received, ' 'raise ERROR.') raise self.ERROR() logger.debug('C2.4: F. But there is some OFFERS, ' 'raise REQUESTING.') raise self.REQUESTING() logger.debug('C2.2: F. Still not received all OFFERS, but not ' 'max # attemps reached, raise SELECTING.') raise self.SELECTING()
def timeout_selecting(self): """Timeout of selecting on SELECTING state. Not specifiyed in [:rfc:`7844`]. See comments in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_request`. """ logger.debug('C2.1: T In %s, timeout receiving response to select.', self.current_state) if len(self.offers) >= MAX_OFFERS_COLLECTED: logger.debug('C2.2: T Maximum number of offers reached, ' 'raise REQUESTING.') raise self.REQUESTING() if self.discover_attempts >= MAX_ATTEMPTS_DISCOVER: logger.debug('C2.3: T Maximum number of discover retries is %s' ' and already sent %s.', MAX_ATTEMPTS_DISCOVER, self.discover_attempts) if len(self.offers) <= 0: logger.debug('C2.4: T. But no OFFERS where received, ' 'raise ERROR.') raise self.ERROR() logger.debug('C2.4: F. But there is some OFFERS, ' 'raise REQUESTING.') raise self.REQUESTING() logger.debug('C2.2: F. Still not received all OFFERS, but not ' 'max # attemps reached, raise SELECTING.') raise self.SELECTING()
[ "Timeout", "of", "selecting", "on", "SELECTING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L432-L461
[ "def", "timeout_selecting", "(", "self", ")", ":", "logger", ".", "debug", "(", "'C2.1: T In %s, timeout receiving response to select.'", ",", "self", ".", "current_state", ")", "if", "len", "(", "self", ".", "offers", ")", ">=", "MAX_OFFERS_COLLECTED", ":", "logger", ".", "debug", "(", "'C2.2: T Maximum number of offers reached, '", "'raise REQUESTING.'", ")", "raise", "self", ".", "REQUESTING", "(", ")", "if", "self", ".", "discover_attempts", ">=", "MAX_ATTEMPTS_DISCOVER", ":", "logger", ".", "debug", "(", "'C2.3: T Maximum number of discover retries is %s'", "' and already sent %s.'", ",", "MAX_ATTEMPTS_DISCOVER", ",", "self", ".", "discover_attempts", ")", "if", "len", "(", "self", ".", "offers", ")", "<=", "0", ":", "logger", ".", "debug", "(", "'C2.4: T. But no OFFERS where received, '", "'raise ERROR.'", ")", "raise", "self", ".", "ERROR", "(", ")", "logger", ".", "debug", "(", "'C2.4: F. But there is some OFFERS, '", "'raise REQUESTING.'", ")", "raise", "self", ".", "REQUESTING", "(", ")", "logger", ".", "debug", "(", "'C2.2: F. Still not received all OFFERS, but not '", "'max # attemps reached, raise SELECTING.'", ")", "raise", "self", ".", "SELECTING", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.timeout_requesting
Timeout requesting in REQUESTING state. Not specifiyed in [:rfc:`7844`] [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds
dhcpcanon/dhcpcapfsm.py
def timeout_requesting(self): """Timeout requesting in REQUESTING state. Not specifiyed in [:rfc:`7844`] [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds """ logger.debug("C3.2: T. In %s, timeout receiving response to request, ", self.current_state) if self.discover_requests >= MAX_ATTEMPTS_REQUEST: logger.debug('C2.3: T. Maximum number %s of REQUESTs ' 'reached, already sent %s, raise ERROR.', MAX_ATTEMPTS_REQUEST, self.disover_requests) raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise REQUESTING.") raise self.REQUESTING()
def timeout_requesting(self): """Timeout requesting in REQUESTING state. Not specifiyed in [:rfc:`7844`] [:rfc:`2131#section-3.1`]:: might retransmit the DHCPREQUEST message four times, for a total delay of 60 seconds """ logger.debug("C3.2: T. In %s, timeout receiving response to request, ", self.current_state) if self.discover_requests >= MAX_ATTEMPTS_REQUEST: logger.debug('C2.3: T. Maximum number %s of REQUESTs ' 'reached, already sent %s, raise ERROR.', MAX_ATTEMPTS_REQUEST, self.disover_requests) raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise REQUESTING.") raise self.REQUESTING()
[ "Timeout", "requesting", "in", "REQUESTING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L464-L484
[ "def", "timeout_requesting", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"C3.2: T. In %s, timeout receiving response to request, \"", ",", "self", ".", "current_state", ")", "if", "self", ".", "discover_requests", ">=", "MAX_ATTEMPTS_REQUEST", ":", "logger", ".", "debug", "(", "'C2.3: T. Maximum number %s of REQUESTs '", "'reached, already sent %s, raise ERROR.'", ",", "MAX_ATTEMPTS_REQUEST", ",", "self", ".", "disover_requests", ")", "raise", "self", ".", "ERROR", "(", ")", "logger", ".", "debug", "(", "\"C2.3: F. Maximum number of REQUESTs retries not reached,\"", "\"raise REQUESTING.\"", ")", "raise", "self", ".", "REQUESTING", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.timeout_request_renewing
Timeout of renewing on RENEWING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`.
dhcpcanon/dhcpcapfsm.py
def timeout_request_renewing(self): """Timeout of renewing on RENEWING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`. """ logger.debug("C5.2:T In %s, timeout receiving response to request.", self.current_state) if self.request_attempts >= MAX_ATTEMPTS_REQUEST: logger.debug('C2.3: T Maximum number %s of REQUESTs ' 'reached, already sent %s, wait to rebinding time.', MAX_ATTEMPTS_REQUEST, self.disover_requests) # raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise RENEWING.") raise self.RENEWING()
def timeout_request_renewing(self): """Timeout of renewing on RENEWING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`. """ logger.debug("C5.2:T In %s, timeout receiving response to request.", self.current_state) if self.request_attempts >= MAX_ATTEMPTS_REQUEST: logger.debug('C2.3: T Maximum number %s of REQUESTs ' 'reached, already sent %s, wait to rebinding time.', MAX_ATTEMPTS_REQUEST, self.disover_requests) # raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise RENEWING.") raise self.RENEWING()
[ "Timeout", "of", "renewing", "on", "RENEWING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L487-L503
[ "def", "timeout_request_renewing", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"C5.2:T In %s, timeout receiving response to request.\"", ",", "self", ".", "current_state", ")", "if", "self", ".", "request_attempts", ">=", "MAX_ATTEMPTS_REQUEST", ":", "logger", ".", "debug", "(", "'C2.3: T Maximum number %s of REQUESTs '", "'reached, already sent %s, wait to rebinding time.'", ",", "MAX_ATTEMPTS_REQUEST", ",", "self", ".", "disover_requests", ")", "# raise self.ERROR()", "logger", ".", "debug", "(", "\"C2.3: F. Maximum number of REQUESTs retries not reached,\"", "\"raise RENEWING.\"", ")", "raise", "self", ".", "RENEWING", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.timeout_request_rebinding
Timeout of request rebinding on REBINDING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`.
dhcpcanon/dhcpcapfsm.py
def timeout_request_rebinding(self): """Timeout of request rebinding on REBINDING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`. """ logger.debug("C6.2:T In %s, timeout receiving response to request.", self.current_state) if self.request_attempts >= MAX_ATTEMPTS_REQUEST: logger.debug('C.2.3: T. Maximum number %s of REQUESTs ' 'reached, already sent %s, wait lease time expires.', MAX_ATTEMPTS_REQUEST, self.disover_requests) # raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise REBINDING.") raise self.REBINDING()
def timeout_request_rebinding(self): """Timeout of request rebinding on REBINDING state. Same comments as in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_requesting`. """ logger.debug("C6.2:T In %s, timeout receiving response to request.", self.current_state) if self.request_attempts >= MAX_ATTEMPTS_REQUEST: logger.debug('C.2.3: T. Maximum number %s of REQUESTs ' 'reached, already sent %s, wait lease time expires.', MAX_ATTEMPTS_REQUEST, self.disover_requests) # raise self.ERROR() logger.debug("C2.3: F. Maximum number of REQUESTs retries not reached," "raise REBINDING.") raise self.REBINDING()
[ "Timeout", "of", "request", "rebinding", "on", "REBINDING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L506-L522
[ "def", "timeout_request_rebinding", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"C6.2:T In %s, timeout receiving response to request.\"", ",", "self", ".", "current_state", ")", "if", "self", ".", "request_attempts", ">=", "MAX_ATTEMPTS_REQUEST", ":", "logger", ".", "debug", "(", "'C.2.3: T. Maximum number %s of REQUESTs '", "'reached, already sent %s, wait lease time expires.'", ",", "MAX_ATTEMPTS_REQUEST", ",", "self", ".", "disover_requests", ")", "# raise self.ERROR()", "logger", ".", "debug", "(", "\"C2.3: F. Maximum number of REQUESTs retries not reached,\"", "\"raise REBINDING.\"", ")", "raise", "self", ".", "REBINDING", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_offer
Receive offer on SELECTING state.
dhcpcanon/dhcpcapfsm.py
def receive_offer(self, pkt): """Receive offer on SELECTING state.""" logger.debug("C2. Received OFFER?, in SELECTING state.") if isoffer(pkt): logger.debug("C2: T, OFFER received") self.offers.append(pkt) if len(self.offers) >= MAX_OFFERS_COLLECTED: logger.debug("C2.5: T, raise REQUESTING.") self.select_offer() raise self.REQUESTING() logger.debug("C2.5: F, raise SELECTING.") raise self.SELECTING()
def receive_offer(self, pkt): """Receive offer on SELECTING state.""" logger.debug("C2. Received OFFER?, in SELECTING state.") if isoffer(pkt): logger.debug("C2: T, OFFER received") self.offers.append(pkt) if len(self.offers) >= MAX_OFFERS_COLLECTED: logger.debug("C2.5: T, raise REQUESTING.") self.select_offer() raise self.REQUESTING() logger.debug("C2.5: F, raise SELECTING.") raise self.SELECTING()
[ "Receive", "offer", "on", "SELECTING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L561-L572
[ "def", "receive_offer", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C2. Received OFFER?, in SELECTING state.\"", ")", "if", "isoffer", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C2: T, OFFER received\"", ")", "self", ".", "offers", ".", "append", "(", "pkt", ")", "if", "len", "(", "self", ".", "offers", ")", ">=", "MAX_OFFERS_COLLECTED", ":", "logger", ".", "debug", "(", "\"C2.5: T, raise REQUESTING.\"", ")", "self", ".", "select_offer", "(", ")", "raise", "self", ".", "REQUESTING", "(", ")", "logger", ".", "debug", "(", "\"C2.5: F, raise SELECTING.\"", ")", "raise", "self", ".", "SELECTING", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_ack_requesting
Receive ACK in REQUESTING state.
dhcpcanon/dhcpcapfsm.py
def receive_ack_requesting(self, pkt): """Receive ACK in REQUESTING state.""" logger.debug("C3. Received ACK?, in REQUESTING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in REQUESTING state, " "raise BOUND.") raise self.BOUND()
def receive_ack_requesting(self, pkt): """Receive ACK in REQUESTING state.""" logger.debug("C3. Received ACK?, in REQUESTING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in REQUESTING state, " "raise BOUND.") raise self.BOUND()
[ "Receive", "ACK", "in", "REQUESTING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L578-L584
[ "def", "receive_ack_requesting", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3. Received ACK?, in REQUESTING state.\"", ")", "if", "self", ".", "process_received_ack", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3: T. Received ACK, in REQUESTING state, \"", "\"raise BOUND.\"", ")", "raise", "self", ".", "BOUND", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_nak_requesting
Receive NAK in REQUESTING state.
dhcpcanon/dhcpcapfsm.py
def receive_nak_requesting(self, pkt): """Receive NAK in REQUESTING state.""" logger.debug("C3.1. Received NAK?, in REQUESTING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in REQUESTING state, " "raise INIT.") raise self.INIT()
def receive_nak_requesting(self, pkt): """Receive NAK in REQUESTING state.""" logger.debug("C3.1. Received NAK?, in REQUESTING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in REQUESTING state, " "raise INIT.") raise self.INIT()
[ "Receive", "NAK", "in", "REQUESTING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L590-L596
[ "def", "receive_nak_requesting", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1. Received NAK?, in REQUESTING state.\"", ")", "if", "self", ".", "process_received_nak", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1: T. Received NAK, in REQUESTING state, \"", "\"raise INIT.\"", ")", "raise", "self", ".", "INIT", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_ack_renewing
Receive ACK in RENEWING state.
dhcpcanon/dhcpcapfsm.py
def receive_ack_renewing(self, pkt): """Receive ACK in RENEWING state.""" logger.debug("C3. Received ACK?, in RENEWING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in RENEWING state, " "raise BOUND.") raise self.BOUND()
def receive_ack_renewing(self, pkt): """Receive ACK in RENEWING state.""" logger.debug("C3. Received ACK?, in RENEWING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in RENEWING state, " "raise BOUND.") raise self.BOUND()
[ "Receive", "ACK", "in", "RENEWING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L599-L605
[ "def", "receive_ack_renewing", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3. Received ACK?, in RENEWING state.\"", ")", "if", "self", ".", "process_received_ack", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3: T. Received ACK, in RENEWING state, \"", "\"raise BOUND.\"", ")", "raise", "self", ".", "BOUND", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_nak_renewing
Receive NAK in RENEWING state.
dhcpcanon/dhcpcapfsm.py
def receive_nak_renewing(self, pkt): """Receive NAK in RENEWING state.""" logger.debug("C3.1. Received NAK?, in RENEWING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in RENEWING state, " " raise INIT.") raise self.INIT()
def receive_nak_renewing(self, pkt): """Receive NAK in RENEWING state.""" logger.debug("C3.1. Received NAK?, in RENEWING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in RENEWING state, " " raise INIT.") raise self.INIT()
[ "Receive", "NAK", "in", "RENEWING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L608-L614
[ "def", "receive_nak_renewing", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1. Received NAK?, in RENEWING state.\"", ")", "if", "self", ".", "process_received_nak", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1: T. Received NAK, in RENEWING state, \"", "\" raise INIT.\"", ")", "raise", "self", ".", "INIT", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_ack_rebinding
Receive ACK in REBINDING state.
dhcpcanon/dhcpcapfsm.py
def receive_ack_rebinding(self, pkt): """Receive ACK in REBINDING state.""" logger.debug("C3. Received ACK?, in REBINDING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in REBINDING state, " "raise BOUND.") raise self.BOUND()
def receive_ack_rebinding(self, pkt): """Receive ACK in REBINDING state.""" logger.debug("C3. Received ACK?, in REBINDING state.") if self.process_received_ack(pkt): logger.debug("C3: T. Received ACK, in REBINDING state, " "raise BOUND.") raise self.BOUND()
[ "Receive", "ACK", "in", "REBINDING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L617-L623
[ "def", "receive_ack_rebinding", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3. Received ACK?, in REBINDING state.\"", ")", "if", "self", ".", "process_received_ack", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3: T. Received ACK, in REBINDING state, \"", "\"raise BOUND.\"", ")", "raise", "self", ".", "BOUND", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.receive_nak_rebinding
Receive NAK in REBINDING state.
dhcpcanon/dhcpcapfsm.py
def receive_nak_rebinding(self, pkt): """Receive NAK in REBINDING state.""" logger.debug("C3.1. Received NAK?, in RENEWING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in RENEWING state, " "raise INIT.") raise self.INIT()
def receive_nak_rebinding(self, pkt): """Receive NAK in REBINDING state.""" logger.debug("C3.1. Received NAK?, in RENEWING state.") if self.process_received_nak(pkt): logger.debug("C3.1: T. Received NAK, in RENEWING state, " "raise INIT.") raise self.INIT()
[ "Receive", "NAK", "in", "REBINDING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L626-L632
[ "def", "receive_nak_rebinding", "(", "self", ",", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1. Received NAK?, in RENEWING state.\"", ")", "if", "self", ".", "process_received_nak", "(", "pkt", ")", ":", "logger", ".", "debug", "(", "\"C3.1: T. Received NAK, in RENEWING state, \"", "\"raise INIT.\"", ")", "raise", "self", ".", "INIT", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
DHCPCAPFSM.on_renewing
Action on renewing on RENEWING state. Not recording lease, but restarting timers.
dhcpcanon/dhcpcapfsm.py
def on_renewing(self): """Action on renewing on RENEWING state. Not recording lease, but restarting timers. """ self.client.lease.sanitize_net_values() self.client.lease.set_times(self.time_sent_request) self.set_timers()
def on_renewing(self): """Action on renewing on RENEWING state. Not recording lease, but restarting timers. """ self.client.lease.sanitize_net_values() self.client.lease.set_times(self.time_sent_request) self.set_timers()
[ "Action", "on", "renewing", "on", "RENEWING", "state", "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcapfsm.py#L672-L680
[ "def", "on_renewing", "(", "self", ")", ":", "self", ".", "client", ".", "lease", ".", "sanitize_net_values", "(", ")", "self", ".", "client", ".", "lease", ".", "set_times", "(", "self", ".", "time_sent_request", ")", "self", ".", "set_timers", "(", ")" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
isoffer
.
dhcpcanon/dhcpcaputils.py
def isoffer(packet): """.""" if DHCP in packet and (DHCPTypes.get(packet[DHCP].options[0][1]) == 'offer' or packet[DHCP].options[0][1] == "offer"): logger.debug('Packet is Offer.') return True return False
def isoffer(packet): """.""" if DHCP in packet and (DHCPTypes.get(packet[DHCP].options[0][1]) == 'offer' or packet[DHCP].options[0][1] == "offer"): logger.debug('Packet is Offer.') return True return False
[ "." ]
juga0/dhcpcanon
python
https://github.com/juga0/dhcpcanon/blob/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59/dhcpcanon/dhcpcaputils.py#L19-L25
[ "def", "isoffer", "(", "packet", ")", ":", "if", "DHCP", "in", "packet", "and", "(", "DHCPTypes", ".", "get", "(", "packet", "[", "DHCP", "]", ".", "options", "[", "0", "]", "[", "1", "]", ")", "==", "'offer'", "or", "packet", "[", "DHCP", "]", ".", "options", "[", "0", "]", "[", "1", "]", "==", "\"offer\"", ")", ":", "logger", ".", "debug", "(", "'Packet is Offer.'", ")", "return", "True", "return", "False" ]
9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59
test
Qurl.set
Assign a value, remove if it's None
qurl_templatetag/qurl.py
def set(self, name, value): """ Assign a value, remove if it's None """ clone = self._clone() if django.VERSION[0] <= 1 and django.VERSION[1] <= 4: value = value or None clone._qsl = [(q, v) for (q, v) in self._qsl if q != name] if value is not None: clone._qsl.append((name, value)) return clone
def set(self, name, value): """ Assign a value, remove if it's None """ clone = self._clone() if django.VERSION[0] <= 1 and django.VERSION[1] <= 4: value = value or None clone._qsl = [(q, v) for (q, v) in self._qsl if q != name] if value is not None: clone._qsl.append((name, value)) return clone
[ "Assign", "a", "value", "remove", "if", "it", "s", "None" ]
sophilabs/django-qurl-templatetag
python
https://github.com/sophilabs/django-qurl-templatetag/blob/8a785b112437d05cb54846b79012967fee1cb534/qurl_templatetag/qurl.py#L29-L37
[ "def", "set", "(", "self", ",", "name", ",", "value", ")", ":", "clone", "=", "self", ".", "_clone", "(", ")", "if", "django", ".", "VERSION", "[", "0", "]", "<=", "1", "and", "django", ".", "VERSION", "[", "1", "]", "<=", "4", ":", "value", "=", "value", "or", "None", "clone", ".", "_qsl", "=", "[", "(", "q", ",", "v", ")", "for", "(", "q", ",", "v", ")", "in", "self", ".", "_qsl", "if", "q", "!=", "name", "]", "if", "value", "is", "not", "None", ":", "clone", ".", "_qsl", ".", "append", "(", "(", "name", ",", "value", ")", ")", "return", "clone" ]
8a785b112437d05cb54846b79012967fee1cb534
test
Qurl.add
Append a value to multiple value parameter.
qurl_templatetag/qurl.py
def add(self, name, value): """ Append a value to multiple value parameter. """ clone = self._clone() clone._qsl = [p for p in self._qsl if not(p[0] == name and p[1] == value)] clone._qsl.append((name, value,)) return clone
def add(self, name, value): """ Append a value to multiple value parameter. """ clone = self._clone() clone._qsl = [p for p in self._qsl if not(p[0] == name and p[1] == value)] clone._qsl.append((name, value,)) return clone
[ "Append", "a", "value", "to", "multiple", "value", "parameter", "." ]
sophilabs/django-qurl-templatetag
python
https://github.com/sophilabs/django-qurl-templatetag/blob/8a785b112437d05cb54846b79012967fee1cb534/qurl_templatetag/qurl.py#L39-L45
[ "def", "add", "(", "self", ",", "name", ",", "value", ")", ":", "clone", "=", "self", ".", "_clone", "(", ")", "clone", ".", "_qsl", "=", "[", "p", "for", "p", "in", "self", ".", "_qsl", "if", "not", "(", "p", "[", "0", "]", "==", "name", "and", "p", "[", "1", "]", "==", "value", ")", "]", "clone", ".", "_qsl", ".", "append", "(", "(", "name", ",", "value", ",", ")", ")", "return", "clone" ]
8a785b112437d05cb54846b79012967fee1cb534
test
Qurl.remove
Remove a value from multiple value parameter.
qurl_templatetag/qurl.py
def remove(self, name, value): """ Remove a value from multiple value parameter. """ clone = self._clone() clone._qsl = [qb for qb in self._qsl if qb != (name, str(value))] return clone
def remove(self, name, value): """ Remove a value from multiple value parameter. """ clone = self._clone() clone._qsl = [qb for qb in self._qsl if qb != (name, str(value))] return clone
[ "Remove", "a", "value", "from", "multiple", "value", "parameter", "." ]
sophilabs/django-qurl-templatetag
python
https://github.com/sophilabs/django-qurl-templatetag/blob/8a785b112437d05cb54846b79012967fee1cb534/qurl_templatetag/qurl.py#L47-L51
[ "def", "remove", "(", "self", ",", "name", ",", "value", ")", ":", "clone", "=", "self", ".", "_clone", "(", ")", "clone", ".", "_qsl", "=", "[", "qb", "for", "qb", "in", "self", ".", "_qsl", "if", "qb", "!=", "(", "name", ",", "str", "(", "value", ")", ")", "]", "return", "clone" ]
8a785b112437d05cb54846b79012967fee1cb534