Dataset columns and observed value ranges:

repository_name: string, 7 to 55 characters
func_path_in_repository: string, 4 to 223 characters
func_name: string, 1 to 134 characters
whole_func_string: string, 75 to 104k characters
language: string, 1 distinct value
func_code_string: string, 75 to 104k characters
func_code_tokens: list, 19 to 28.4k tokens
func_documentation_string: string, 1 to 46.9k characters
func_documentation_tokens: list, 1 to 1.97k tokens
split_name: string, 1 distinct value
func_code_url: string, 87 to 315 characters
rstoneback/pysat
pysat/instruments/omni_hro.py
calculate_clock_angle
def calculate_clock_angle(inst):
    """ Calculate IMF clock angle and magnitude of IMF in GSM Y-Z plane

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data

    """

    # Calculate clock angle in degrees
    clock_angle = np.degrees(np.arctan2(inst['BY_GSM'], inst['BZ_GSM']))
    clock_angle[clock_angle < 0.0] += 360.0
    inst['clock_angle'] = pds.Series(clock_angle, index=inst.data.index)

    # Calculate magnitude of IMF in Y-Z plane
    inst['BYZ_GSM'] = pds.Series(np.sqrt(inst['BY_GSM']**2 +
                                         inst['BZ_GSM']**2),
                                 index=inst.data.index)

    return
python
def calculate_clock_angle(inst):
    """ Calculate IMF clock angle and magnitude of IMF in GSM Y-Z plane

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data

    """

    # Calculate clock angle in degrees
    clock_angle = np.degrees(np.arctan2(inst['BY_GSM'], inst['BZ_GSM']))
    clock_angle[clock_angle < 0.0] += 360.0
    inst['clock_angle'] = pds.Series(clock_angle, index=inst.data.index)

    # Calculate magnitude of IMF in Y-Z plane
    inst['BYZ_GSM'] = pds.Series(np.sqrt(inst['BY_GSM']**2 +
                                         inst['BZ_GSM']**2),
                                 index=inst.data.index)

    return
[ "def", "calculate_clock_angle", "(", "inst", ")", ":", "# Calculate clock angle in degrees", "clock_angle", "=", "np", ".", "degrees", "(", "np", ".", "arctan2", "(", "inst", "[", "'BY_GSM'", "]", ",", "inst", "[", "'BZ_GSM'", "]", ")", ")", "clock_angle", "[", "clock_angle", "<", "0.0", "]", "+=", "360.0", "inst", "[", "'clock_angle'", "]", "=", "pds", ".", "Series", "(", "clock_angle", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "# Calculate magnitude of IMF in Y-Z plane", "inst", "[", "'BYZ_GSM'", "]", "=", "pds", ".", "Series", "(", "np", ".", "sqrt", "(", "inst", "[", "'BY_GSM'", "]", "**", "2", "+", "inst", "[", "'BZ_GSM'", "]", "**", "2", ")", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "return" ]
Calculate IMF clock angle and magnitude of IMF in GSM Y-Z plane Parameters ----------- inst : pysat.Instrument Instrument with OMNI HRO data
[ "Calculate", "IMF", "clock", "angle", "and", "magnitude", "of", "IMF", "in", "GSM", "Y", "-", "Z", "plane" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/instruments/omni_hro.py#L240-L259
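The arithmetic above is easy to check in isolation. A minimal sketch on synthetic data, with plain numpy/pandas standing in for the pysat.Instrument plumbing (column names follow the function above; the values are made up):

import numpy as np
import pandas as pd

# Synthetic IMF components in GSM coordinates (nT)
df = pd.DataFrame({'BY_GSM': [3.0, -2.0, 0.0],
                   'BZ_GSM': [-1.0, 4.0, -5.0]})

# arctan2(BY, BZ) measures the angle from +Z toward +Y in the Y-Z plane
clock_angle = np.degrees(np.arctan2(df['BY_GSM'], df['BZ_GSM']))
clock_angle[clock_angle < 0.0] += 360.0  # wrap negatives into [0, 360)

# Magnitude of the IMF in the Y-Z plane
byz_gsm = np.sqrt(df['BY_GSM']**2 + df['BZ_GSM']**2)

print(clock_angle.round(1).tolist())  # [108.4, 333.4, 180.0]
print(byz_gsm.round(2).tolist())      # [3.16, 4.47, 5.0]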
rstoneback/pysat
pysat/instruments/omni_hro.py
calculate_imf_steadiness
def calculate_imf_steadiness(inst, steady_window=15, min_window_frac=0.75,
                             max_clock_angle_std=90.0/np.pi, max_bmag_cv=0.5):
    """ Calculate IMF steadiness using clock angle standard deviation and
    the coefficient of variation of the IMF magnitude in the GSM Y-Z plane

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data
    steady_window : int
        Window for calculating running statistical moments in min (default=15)
    min_window_frac : float
        Minimum fraction of points in a window for steadiness to be
        calculated (default=0.75)
    max_clock_angle_std : float
        Maximum standard deviation of the clock angle in degrees
        (default=90.0/np.pi)
    max_bmag_cv : float
        Maximum coefficient of variation of the IMF magnitude in the GSM
        Y-Z plane (default=0.5)

    """

    # We are not going to interpolate through missing values
    sample_rate = int(inst.tag[0])
    max_wnum = np.floor(steady_window / sample_rate)
    if max_wnum != steady_window / sample_rate:
        steady_window = max_wnum * sample_rate
        print("WARNING: sample rate is not a factor of the statistical window")
        print("new statistical window is {:.1f}".format(steady_window))

    min_wnum = int(np.ceil(max_wnum * min_window_frac))

    # Calculate the running coefficient of variation of the BYZ magnitude
    byz_mean = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True,
                                       window=steady_window).mean()
    byz_std = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True,
                                      window=steady_window).std()
    inst['BYZ_CV'] = pds.Series(byz_std / byz_mean, index=inst.data.index)

    # Calculate the running circular standard deviation of the clock angle
    circ_kwargs = {'high': 360.0, 'low': 0.0}
    ca = inst['clock_angle'][~np.isnan(inst['clock_angle'])]
    ca_std = inst['clock_angle'].rolling(min_periods=min_wnum,
                                         window=steady_window,
                                         center=True).apply(
        pysat.utils.nan_circstd, kwargs=circ_kwargs)
    inst['clock_angle_std'] = pds.Series(ca_std, index=inst.data.index)

    # Determine how long the clock angle and IMF magnitude are steady
    imf_steady = np.zeros(shape=inst.data.index.shape)
    steady = False
    for i, cv in enumerate(inst.data['BYZ_CV']):
        if steady:
            del_min = int((inst.data.index[i] -
                           inst.data.index[i-1]).total_seconds() / 60.0)
            if np.isnan(cv) or np.isnan(ca_std[i]) or del_min > sample_rate:
                # Reset the steadiness flag if fill values are encountered,
                # or if an entry is missing
                steady = False

        if cv <= max_bmag_cv and ca_std[i] <= max_clock_angle_std:
            # Steadiness conditions have been met
            if steady:
                imf_steady[i] = imf_steady[i-1]

            imf_steady[i] += sample_rate
            steady = True

    inst['IMF_Steady'] = pds.Series(imf_steady, index=inst.data.index)
    return
python
def calculate_imf_steadiness(inst, steady_window=15, min_window_frac=0.75,
                             max_clock_angle_std=90.0/np.pi, max_bmag_cv=0.5):
    """ Calculate IMF steadiness using clock angle standard deviation and
    the coefficient of variation of the IMF magnitude in the GSM Y-Z plane

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data
    steady_window : int
        Window for calculating running statistical moments in min (default=15)
    min_window_frac : float
        Minimum fraction of points in a window for steadiness to be
        calculated (default=0.75)
    max_clock_angle_std : float
        Maximum standard deviation of the clock angle in degrees
        (default=90.0/np.pi)
    max_bmag_cv : float
        Maximum coefficient of variation of the IMF magnitude in the GSM
        Y-Z plane (default=0.5)

    """

    # We are not going to interpolate through missing values
    sample_rate = int(inst.tag[0])
    max_wnum = np.floor(steady_window / sample_rate)
    if max_wnum != steady_window / sample_rate:
        steady_window = max_wnum * sample_rate
        print("WARNING: sample rate is not a factor of the statistical window")
        print("new statistical window is {:.1f}".format(steady_window))

    min_wnum = int(np.ceil(max_wnum * min_window_frac))

    # Calculate the running coefficient of variation of the BYZ magnitude
    byz_mean = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True,
                                       window=steady_window).mean()
    byz_std = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True,
                                      window=steady_window).std()
    inst['BYZ_CV'] = pds.Series(byz_std / byz_mean, index=inst.data.index)

    # Calculate the running circular standard deviation of the clock angle
    circ_kwargs = {'high': 360.0, 'low': 0.0}
    ca = inst['clock_angle'][~np.isnan(inst['clock_angle'])]
    ca_std = inst['clock_angle'].rolling(min_periods=min_wnum,
                                         window=steady_window,
                                         center=True).apply(
        pysat.utils.nan_circstd, kwargs=circ_kwargs)
    inst['clock_angle_std'] = pds.Series(ca_std, index=inst.data.index)

    # Determine how long the clock angle and IMF magnitude are steady
    imf_steady = np.zeros(shape=inst.data.index.shape)
    steady = False
    for i, cv in enumerate(inst.data['BYZ_CV']):
        if steady:
            del_min = int((inst.data.index[i] -
                           inst.data.index[i-1]).total_seconds() / 60.0)
            if np.isnan(cv) or np.isnan(ca_std[i]) or del_min > sample_rate:
                # Reset the steadiness flag if fill values are encountered,
                # or if an entry is missing
                steady = False

        if cv <= max_bmag_cv and ca_std[i] <= max_clock_angle_std:
            # Steadiness conditions have been met
            if steady:
                imf_steady[i] = imf_steady[i-1]

            imf_steady[i] += sample_rate
            steady = True

    inst['IMF_Steady'] = pds.Series(imf_steady, index=inst.data.index)
    return
[ "def", "calculate_imf_steadiness", "(", "inst", ",", "steady_window", "=", "15", ",", "min_window_frac", "=", "0.75", ",", "max_clock_angle_std", "=", "90.0", "/", "np", ".", "pi", ",", "max_bmag_cv", "=", "0.5", ")", ":", "# We are not going to interpolate through missing values", "sample_rate", "=", "int", "(", "inst", ".", "tag", "[", "0", "]", ")", "max_wnum", "=", "np", ".", "floor", "(", "steady_window", "/", "sample_rate", ")", "if", "max_wnum", "!=", "steady_window", "/", "sample_rate", ":", "steady_window", "=", "max_wnum", "*", "sample_rate", "print", "(", "\"WARNING: sample rate is not a factor of the statistical window\"", ")", "print", "(", "\"new statistical window is {:.1f}\"", ".", "format", "(", "steady_window", ")", ")", "min_wnum", "=", "int", "(", "np", ".", "ceil", "(", "max_wnum", "*", "min_window_frac", ")", ")", "# Calculate the running coefficient of variation of the BYZ magnitude", "byz_mean", "=", "inst", "[", "'BYZ_GSM'", "]", ".", "rolling", "(", "min_periods", "=", "min_wnum", ",", "center", "=", "True", ",", "window", "=", "steady_window", ")", ".", "mean", "(", ")", "byz_std", "=", "inst", "[", "'BYZ_GSM'", "]", ".", "rolling", "(", "min_periods", "=", "min_wnum", ",", "center", "=", "True", ",", "window", "=", "steady_window", ")", ".", "std", "(", ")", "inst", "[", "'BYZ_CV'", "]", "=", "pds", ".", "Series", "(", "byz_std", "/", "byz_mean", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "# Calculate the running circular standard deviation of the clock angle", "circ_kwargs", "=", "{", "'high'", ":", "360.0", ",", "'low'", ":", "0.0", "}", "ca", "=", "inst", "[", "'clock_angle'", "]", "[", "~", "np", ".", "isnan", "(", "inst", "[", "'clock_angle'", "]", ")", "]", "ca_std", "=", "inst", "[", "'clock_angle'", "]", ".", "rolling", "(", "min_periods", "=", "min_wnum", ",", "window", "=", "steady_window", ",", "center", "=", "True", ")", ".", "apply", "(", "pysat", ".", "utils", ".", "nan_circstd", ",", "kwargs", "=", "circ_kwargs", ")", "inst", "[", "'clock_angle_std'", "]", "=", "pds", ".", "Series", "(", "ca_std", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "# Determine how long the clock angle and IMF magnitude are steady", "imf_steady", "=", "np", ".", "zeros", "(", "shape", "=", "inst", ".", "data", ".", "index", ".", "shape", ")", "steady", "=", "False", "for", "i", ",", "cv", "in", "enumerate", "(", "inst", ".", "data", "[", "'BYZ_CV'", "]", ")", ":", "if", "steady", ":", "del_min", "=", "int", "(", "(", "inst", ".", "data", ".", "index", "[", "i", "]", "-", "inst", ".", "data", ".", "index", "[", "i", "-", "1", "]", ")", ".", "total_seconds", "(", ")", "/", "60.0", ")", "if", "np", ".", "isnan", "(", "cv", ")", "or", "np", ".", "isnan", "(", "ca_std", "[", "i", "]", ")", "or", "del_min", ">", "sample_rate", ":", "# Reset the steadiness flag if fill values are encountered, or", "# if an entry is missing", "steady", "=", "False", "if", "cv", "<=", "max_bmag_cv", "and", "ca_std", "[", "i", "]", "<=", "max_clock_angle_std", ":", "# Steadiness conditions have been met", "if", "steady", ":", "imf_steady", "[", "i", "]", "=", "imf_steady", "[", "i", "-", "1", "]", "imf_steady", "[", "i", "]", "+=", "sample_rate", "steady", "=", "True", "inst", "[", "'IMF_Steady'", "]", "=", "pds", ".", "Series", "(", "imf_steady", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "return" ]
Calculate IMF steadiness using clock angle standard deviation and the coefficient of variation of the IMF magnitude in the GSM Y-Z plane Parameters ----------- inst : pysat.Instrument Instrument with OMNI HRO data steady_window : int Window for calculating running statistical moments in min (default=15) min_window_frac : float Minimum fraction of points in a window for steadiness to be calculated (default=0.75) max_clock_angle_std : float Maximum standard deviation of the clock angle in degrees (default=90.0/np.pi) max_bmag_cv : float Maximum coefficient of variation of the IMF magnitude in the GSM Y-Z plane (default=0.5)
[ "Calculate", "IMF", "steadiness", "using", "clock", "angle", "standard", "deviation", "and", "the", "coefficient", "of", "variation", "of", "the", "IMF", "magnitude", "in", "the", "GSM", "Y", "-", "Z", "plane" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/instruments/omni_hro.py#L261-L329
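The non-obvious piece above is the circular standard deviation: clock angles wrap at 360 degrees, so an ordinary standard deviation badly overstates the spread of a distribution straddling the wrap point. pysat.utils.nan_circstd is a NaN-aware version of that statistic; a sketch of the idea using scipy's circstd with the same high/low convention as circ_kwargs above:

import pandas as pd
from scipy.stats import circstd

# Angles hugging the 0/360 boundary
angles = pd.Series([350.0, 5.0, 10.0, 355.0, 2.0])

print(angles.std())                          # ~190 degrees, misleading
print(circstd(angles, high=360.0, low=0.0))  # ~8 degrees, the real spread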
rstoneback/pysat
pysat/instruments/omni_hro.py
calculate_dayside_reconnection
def calculate_dayside_reconnection(inst):
    """ Calculate the dayside reconnection rate (Milan et al. 2014)

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data, requires BYZ_GSM and clock_angle

    Notes
    --------
    recon_day = 3.8 Re (Vx / 4e5 m/s)^1/3 Vx B_yz (sin(theta/2))^9/2

    """
    rearth = 6371008.8
    sin_htheta = np.power(np.sin(np.radians(0.5 * inst['clock_angle'])), 4.5)
    byz = inst['BYZ_GSM'] * 1.0e-9
    vx = inst['flow_speed'] * 1000.0

    recon_day = (3.8 * rearth * vx * byz * sin_htheta *
                 np.power((vx / 4.0e5), 1.0/3.0))
    inst['recon_day'] = pds.Series(recon_day, index=inst.data.index)
    return
python
def calculate_dayside_reconnection(inst):
    """ Calculate the dayside reconnection rate (Milan et al. 2014)

    Parameters
    -----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data, requires BYZ_GSM and clock_angle

    Notes
    --------
    recon_day = 3.8 Re (Vx / 4e5 m/s)^1/3 Vx B_yz (sin(theta/2))^9/2

    """
    rearth = 6371008.8
    sin_htheta = np.power(np.sin(np.radians(0.5 * inst['clock_angle'])), 4.5)
    byz = inst['BYZ_GSM'] * 1.0e-9
    vx = inst['flow_speed'] * 1000.0

    recon_day = (3.8 * rearth * vx * byz * sin_htheta *
                 np.power((vx / 4.0e5), 1.0/3.0))
    inst['recon_day'] = pds.Series(recon_day, index=inst.data.index)
    return
[ "def", "calculate_dayside_reconnection", "(", "inst", ")", ":", "rearth", "=", "6371008.8", "sin_htheta", "=", "np", ".", "power", "(", "np", ".", "sin", "(", "np", ".", "radians", "(", "0.5", "*", "inst", "[", "'clock_angle'", "]", ")", ")", ",", "4.5", ")", "byz", "=", "inst", "[", "'BYZ_GSM'", "]", "*", "1.0e-9", "vx", "=", "inst", "[", "'flow_speed'", "]", "*", "1000.0", "recon_day", "=", "3.8", "*", "rearth", "*", "vx", "*", "byz", "*", "sin_htheta", "*", "np", ".", "power", "(", "(", "vx", "/", "4.0e5", ")", ",", "1.0", "/", "3.0", ")", "inst", "[", "'recon_day'", "]", "=", "pds", ".", "Series", "(", "recon_day", ",", "index", "=", "inst", ".", "data", ".", "index", ")", "return" ]
Calculate the dayside reconnection rate (Milan et al. 2014) Parameters ----------- inst : pysat.Instrument Instrument with OMNI HRO data, requires BYZ_GSM and clock_angle Notes -------- recon_day = 3.8 Re (Vx / 4e5 m/s)^1/3 Vx B_yz (sin(theta/2))^9/2
[ "Calculate", "the", "dayside", "reconnection", "rate", "(", "Milan", "et", "al", ".", "2014", ")" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/instruments/omni_hro.py#L331-L351
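Plugging representative solar wind numbers into the Milan et al. (2014) expression above gives a feel for the output scale. A worked sketch (the values are illustrative; with vx in m/s and B in tesla, the result is a reconnection voltage in volts):

import numpy as np

r_earth = 6371008.8        # Earth radius, m
vx = 4.0e5                 # solar wind speed, m/s (400 km/s)
byz = 5.0e-9               # IMF magnitude in the GSM Y-Z plane, T (5 nT)
theta = np.radians(180.0)  # clock angle; 180 deg is due-southward IMF

recon_day = (3.8 * r_earth * vx * byz * np.sin(theta / 2.0)**4.5
             * (vx / 4.0e5)**(1.0 / 3.0))
print(recon_day)  # ~4.8e4, i.e. a dayside reconnection voltage near 50 kV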
emc-openstack/storops
storops/unity/resource/cifs_share.py
UnityCifsShare.clear_access
def clear_access(self, white_list=None):
    """ clear all ace entries of the share

    :param white_list: list of usernames whose access entries won't be
        cleared
    :return: sid list of ace entries removed successfully
    """
    access_entries = self.get_ace_list()
    sid_list = access_entries.sid_list
    if white_list:
        sid_white_list = [UnityAclUser.get_sid(self._cli,
                                               user,
                                               self.cifs_server.domain)
                          for user in white_list]
        sid_list = list(set(sid_list) - set(sid_white_list))
    resp = self.delete_ace(sid=sid_list)
    resp.raise_if_err()
    return sid_list
python
def clear_access(self, white_list=None):
    """ clear all ace entries of the share

    :param white_list: list of usernames whose access entries won't be
        cleared
    :return: sid list of ace entries removed successfully
    """
    access_entries = self.get_ace_list()
    sid_list = access_entries.sid_list
    if white_list:
        sid_white_list = [UnityAclUser.get_sid(self._cli,
                                               user,
                                               self.cifs_server.domain)
                          for user in white_list]
        sid_list = list(set(sid_list) - set(sid_white_list))
    resp = self.delete_ace(sid=sid_list)
    resp.raise_if_err()
    return sid_list
[ "def", "clear_access", "(", "self", ",", "white_list", "=", "None", ")", ":", "access_entries", "=", "self", ".", "get_ace_list", "(", ")", "sid_list", "=", "access_entries", ".", "sid_list", "if", "white_list", ":", "sid_white_list", "=", "[", "UnityAclUser", ".", "get_sid", "(", "self", ".", "_cli", ",", "user", ",", "self", ".", "cifs_server", ".", "domain", ")", "for", "user", "in", "white_list", "]", "sid_list", "=", "list", "(", "set", "(", "sid_list", ")", "-", "set", "(", "sid_white_list", ")", ")", "resp", "=", "self", ".", "delete_ace", "(", "sid", "=", "sid_list", ")", "resp", ".", "raise_if_err", "(", ")", "return", "sid_list" ]
clear all ace entries of the share :param white_list: list of usernames whose access entries won't be cleared :return: sid list of ace entries removed successfully
[ "clear", "all", "ace", "entries", "of", "the", "share" ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/unity/resource/cifs_share.py#L114-L132
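A hypothetical call pattern for clear_access (the system address, credentials, share name, and user names below are placeholders, not values from the source):

from storops import UnitySystem

unity = UnitySystem('10.0.0.1', 'admin', 'password')
share = unity.get_cifs_share(name='my_share')

# Drop every ACE except those belonging to the white-listed users
removed = share.clear_access(white_list=['administrator', 'svc_backup'])
print(removed)  # SIDs whose entries were removed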
emc-openstack/storops
storops/unity/resource/cifs_share.py
UnityCifsShare.delete_ace
def delete_ace(self, domain=None, user=None, sid=None):
    """ delete ACE for the share

    delete ACE for the share.  User could either supply the domain and
    username or the sid of the user.

    :param domain: domain of the user
    :param user: username
    :param sid: sid of the user or sid list of the user
    :return: REST API response
    """
    if sid is None:
        if domain is None:
            domain = self.cifs_server.domain
        sid = UnityAclUser.get_sid(self._cli, user=user, domain=domain)
    if isinstance(sid, six.string_types):
        sid = [sid]
    ace_list = [self._make_remove_ace_entry(s) for s in sid]

    resp = self.action("setACEs", cifsShareACEs=ace_list)
    resp.raise_if_err()
    return resp
python
def delete_ace(self, domain=None, user=None, sid=None):
    """ delete ACE for the share

    delete ACE for the share.  User could either supply the domain and
    username or the sid of the user.

    :param domain: domain of the user
    :param user: username
    :param sid: sid of the user or sid list of the user
    :return: REST API response
    """
    if sid is None:
        if domain is None:
            domain = self.cifs_server.domain
        sid = UnityAclUser.get_sid(self._cli, user=user, domain=domain)
    if isinstance(sid, six.string_types):
        sid = [sid]
    ace_list = [self._make_remove_ace_entry(s) for s in sid]

    resp = self.action("setACEs", cifsShareACEs=ace_list)
    resp.raise_if_err()
    return resp
[ "def", "delete_ace", "(", "self", ",", "domain", "=", "None", ",", "user", "=", "None", ",", "sid", "=", "None", ")", ":", "if", "sid", "is", "None", ":", "if", "domain", "is", "None", ":", "domain", "=", "self", ".", "cifs_server", ".", "domain", "sid", "=", "UnityAclUser", ".", "get_sid", "(", "self", ".", "_cli", ",", "user", "=", "user", ",", "domain", "=", "domain", ")", "if", "isinstance", "(", "sid", ",", "six", ".", "string_types", ")", ":", "sid", "=", "[", "sid", "]", "ace_list", "=", "[", "self", ".", "_make_remove_ace_entry", "(", "s", ")", "for", "s", "in", "sid", "]", "resp", "=", "self", ".", "action", "(", "\"setACEs\"", ",", "cifsShareACEs", "=", "ace_list", ")", "resp", ".", "raise_if_err", "(", ")", "return", "resp" ]
delete ACE for the share delete ACE for the share. User could either supply the domain and username or the sid of the user. :param domain: domain of the user :param user: username :param sid: sid of the user or sid list of the user :return: REST API response
[ "delete", "ACE", "for", "the", "share" ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/unity/resource/cifs_share.py#L153-L175
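The isinstance check against six.string_types is what lets delete_ace accept either a single SID or a list of them. A standalone sketch of that scalar-or-list normalization (six is the py2/py3 compatibility shim; on Python 3 alone a plain str check would do; the helper name is illustrative):

import six

def normalize_sids(sid):
    # Promote a bare SID string to a one-element list
    if isinstance(sid, six.string_types):
        sid = [sid]
    return list(sid)

print(normalize_sids('S-1-5-32-544'))             # ['S-1-5-32-544']
print(normalize_sids(['S-1-5-21-1', 'S-1-5-21-2']))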
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOClass.luns
def luns(self):
    """Aggregator for ioclass_luns and ioclass_snapshots."""
    lun_list, smp_list = [], []
    if self.ioclass_luns:
        lun_list = map(lambda l: VNXLun(lun_id=l.lun_id, name=l.name,
                                        cli=self._cli), self.ioclass_luns)
    if self.ioclass_snapshots:
        smp_list = map(lambda smp: VNXLun(name=smp.name, cli=self._cli),
                       self.ioclass_snapshots)
    return list(lun_list) + list(smp_list)
python
def luns(self):
    """Aggregator for ioclass_luns and ioclass_snapshots."""
    lun_list, smp_list = [], []
    if self.ioclass_luns:
        lun_list = map(lambda l: VNXLun(lun_id=l.lun_id, name=l.name,
                                        cli=self._cli), self.ioclass_luns)
    if self.ioclass_snapshots:
        smp_list = map(lambda smp: VNXLun(name=smp.name, cli=self._cli),
                       self.ioclass_snapshots)
    return list(lun_list) + list(smp_list)
[ "def", "luns", "(", "self", ")", ":", "lun_list", ",", "smp_list", "=", "[", "]", ",", "[", "]", "if", "self", ".", "ioclass_luns", ":", "lun_list", "=", "map", "(", "lambda", "l", ":", "VNXLun", "(", "lun_id", "=", "l", ".", "lun_id", ",", "name", "=", "l", ".", "name", ",", "cli", "=", "self", ".", "_cli", ")", ",", "self", ".", "ioclass_luns", ")", "if", "self", ".", "ioclass_snapshots", ":", "smp_list", "=", "map", "(", "lambda", "smp", ":", "VNXLun", "(", "name", "=", "smp", ".", "name", ",", "cli", "=", "self", ".", "_cli", ")", ",", "self", ".", "ioclass_snapshots", ")", "return", "list", "(", "lun_list", ")", "+", "list", "(", "smp_list", ")" ]
Aggregator for ioclass_luns and ioclass_snapshots.
[ "Aggregator", "for", "ioclass_luns", "and", "ioclass_snapshots", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L107-L116
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOClass.policy
def policy(self):
    """Returns policy which contains this ioclass."""
    policies = VNXIOPolicy.get(cli=self._cli)
    ret = None

    for policy in policies:
        contained = policy.ioclasses.name
        if self._get_name() in contained:
            ret = VNXIOPolicy.get(name=policy.name, cli=self._cli)
            break
    return ret
python
def policy(self):
    """Returns policy which contains this ioclass."""
    policies = VNXIOPolicy.get(cli=self._cli)
    ret = None

    for policy in policies:
        contained = policy.ioclasses.name
        if self._get_name() in contained:
            ret = VNXIOPolicy.get(name=policy.name, cli=self._cli)
            break
    return ret
[ "def", "policy", "(", "self", ")", ":", "policies", "=", "VNXIOPolicy", ".", "get", "(", "cli", "=", "self", ".", "_cli", ")", "ret", "=", "None", "for", "policy", "in", "policies", ":", "contained", "=", "policy", ".", "ioclasses", ".", "name", "if", "self", ".", "_get_name", "(", ")", "in", "contained", ":", "ret", "=", "VNXIOPolicy", ".", "get", "(", "name", "=", "policy", ".", "name", ",", "cli", "=", "self", ".", "_cli", ")", "break", "return", "ret" ]
Returns policy which contains this ioclass.
[ "Returns", "policy", "which", "contains", "this", "ioclass", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L119-L128
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOClass.modify
def modify(self, new_name=None, iotype=None, lun_ids=None, smp_names=None,
           ctrlmethod=None, minsize=None, maxsize=None):
    """Overwrite the current properties for a VNX ioclass.

    :param new_name: new name for the ioclass
    :param iotype: can be 'rw', 'r' or 'w'
    :param lun_ids: list of LUN IDs
    :param smp_names: list of Snapshot Mount Point names
    :param ctrlmethod: the new CtrlMethod
    :param minsize: minimal size in kb
    :param maxsize: maximum size in kb
    """
    if not any([new_name, iotype, lun_ids, smp_names, ctrlmethod]):
        raise ValueError('Cannot apply modification, please specify '
                         'parameters to modify.')

    def _do_modify():
        out = self._cli.modify_ioclass(
            self._get_name(), new_name, iotype, lun_ids, smp_names,
            ctrlmethod, minsize, maxsize)
        ex.raise_if_err(out, default=ex.VNXIOClassError)

    try:
        _do_modify()
    except ex.VNXIOCLassRunningError:
        with restart_policy(self.policy):
            _do_modify()

    return VNXIOClass(new_name if new_name else self._get_name(), self._cli)
python
def modify(self, new_name=None, iotype=None, lun_ids=None, smp_names=None,
           ctrlmethod=None, minsize=None, maxsize=None):
    """Overwrite the current properties for a VNX ioclass.

    :param new_name: new name for the ioclass
    :param iotype: can be 'rw', 'r' or 'w'
    :param lun_ids: list of LUN IDs
    :param smp_names: list of Snapshot Mount Point names
    :param ctrlmethod: the new CtrlMethod
    :param minsize: minimal size in kb
    :param maxsize: maximum size in kb
    """
    if not any([new_name, iotype, lun_ids, smp_names, ctrlmethod]):
        raise ValueError('Cannot apply modification, please specify '
                         'parameters to modify.')

    def _do_modify():
        out = self._cli.modify_ioclass(
            self._get_name(), new_name, iotype, lun_ids, smp_names,
            ctrlmethod, minsize, maxsize)
        ex.raise_if_err(out, default=ex.VNXIOClassError)

    try:
        _do_modify()
    except ex.VNXIOCLassRunningError:
        with restart_policy(self.policy):
            _do_modify()

    return VNXIOClass(new_name if new_name else self._get_name(), self._cli)
[ "def", "modify", "(", "self", ",", "new_name", "=", "None", ",", "iotype", "=", "None", ",", "lun_ids", "=", "None", ",", "smp_names", "=", "None", ",", "ctrlmethod", "=", "None", ",", "minsize", "=", "None", ",", "maxsize", "=", "None", ")", ":", "if", "not", "any", "(", "[", "new_name", ",", "iotype", ",", "lun_ids", ",", "smp_names", ",", "ctrlmethod", "]", ")", ":", "raise", "ValueError", "(", "'Cannot apply modification, please specify '", "'parameters to modify.'", ")", "def", "_do_modify", "(", ")", ":", "out", "=", "self", ".", "_cli", ".", "modify_ioclass", "(", "self", ".", "_get_name", "(", ")", ",", "new_name", ",", "iotype", ",", "lun_ids", ",", "smp_names", ",", "ctrlmethod", ",", "minsize", ",", "maxsize", ")", "ex", ".", "raise_if_err", "(", "out", ",", "default", "=", "ex", ".", "VNXIOClassError", ")", "try", ":", "_do_modify", "(", ")", "except", "ex", ".", "VNXIOCLassRunningError", ":", "with", "restart_policy", "(", "self", ".", "policy", ")", ":", "_do_modify", "(", ")", "return", "VNXIOClass", "(", "new_name", "if", "new_name", "else", "self", ".", "_get_name", "(", ")", ",", "self", ".", "_cli", ")" ]
Overwrite the current properties for a VNX ioclass. :param new_name: new name for the ioclass :param iotype: can be 'rw', 'r' or 'w' :param lun_ids: list of LUN IDs :param smp_names: list of Snapshot Mount Point names :param ctrlmethod: the new CtrlMethod :param minsize: minimal size in kb :param maxsize: maximum size in kb
[ "Overwrite", "the", "current", "properties", "for", "a", "VNX", "ioclass", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L143-L172
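The interesting control flow in modify is the retry: attempt the change, and if it fails because the policy is running, retry inside a context manager that stops the policy and restarts it afterwards. A generic sketch of that pattern (the names here are illustrative, not the storops API):

from contextlib import contextmanager

class ResourceBusyError(Exception):
    """Raised when a resource cannot be modified while running."""

@contextmanager
def stopped(resource):
    resource.stop()         # quiesce before modifying
    try:
        yield resource
    finally:
        resource.restart()  # bring it back even if the body raises

def modify_with_retry(resource, **changes):
    try:
        resource.apply(**changes)
    except ResourceBusyError:
        with stopped(resource):
            resource.apply(**changes)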
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOClass.add_lun
def add_lun(self, luns):
    """A wrapper for modify method.

    .. note:: This API only appends luns to existing luns.
    """
    curr_lun_ids, curr_smp_names = self._get_current_names()
    luns = normalize_lun(luns, self._cli)
    new_ids, new_smps = convert_lun(luns)
    if new_ids:
        curr_lun_ids.extend(new_ids)
    if new_smps:
        curr_smp_names.extend(new_smps)
    return self.modify(lun_ids=curr_lun_ids, smp_names=curr_smp_names)
python
def add_lun(self, luns):
    """A wrapper for modify method.

    .. note:: This API only appends luns to existing luns.
    """
    curr_lun_ids, curr_smp_names = self._get_current_names()
    luns = normalize_lun(luns, self._cli)
    new_ids, new_smps = convert_lun(luns)
    if new_ids:
        curr_lun_ids.extend(new_ids)
    if new_smps:
        curr_smp_names.extend(new_smps)
    return self.modify(lun_ids=curr_lun_ids, smp_names=curr_smp_names)
[ "def", "add_lun", "(", "self", ",", "luns", ")", ":", "curr_lun_ids", ",", "curr_smp_names", "=", "self", ".", "_get_current_names", "(", ")", "luns", "=", "normalize_lun", "(", "luns", ",", "self", ".", "_cli", ")", "new_ids", ",", "new_smps", "=", "convert_lun", "(", "luns", ")", "if", "new_ids", ":", "curr_lun_ids", ".", "extend", "(", "new_ids", ")", "if", "new_smps", ":", "curr_smp_names", ".", "extend", "(", "new_smps", ")", "return", "self", ".", "modify", "(", "lun_ids", "=", "curr_lun_ids", ",", "smp_names", "=", "curr_smp_names", ")" ]
A wrapper for modify method. .. note:: This API only appends luns to existing luns.
[ "A", "wrapper", "for", "modify", "method", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L174-L186
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOPolicy.add_class
def add_class(self, ioclass):
    """Add one VNXIOClass instance to policy.

    .. note: due to the limitation of VNX, need to stop the policy first.
    """
    current_ioclasses = self.ioclasses
    if ioclass.name in current_ioclasses.name:
        return

    current_ioclasses.append(ioclass)
    self.modify(new_ioclasses=current_ioclasses)
python
def add_class(self, ioclass):
    """Add one VNXIOClass instance to policy.

    .. note: due to the limitation of VNX, need to stop the policy first.
    """
    current_ioclasses = self.ioclasses
    if ioclass.name in current_ioclasses.name:
        return

    current_ioclasses.append(ioclass)
    self.modify(new_ioclasses=current_ioclasses)
[ "def", "add_class", "(", "self", ",", "ioclass", ")", ":", "current_ioclasses", "=", "self", ".", "ioclasses", "if", "ioclass", ".", "name", "in", "current_ioclasses", ".", "name", ":", "return", "current_ioclasses", ".", "append", "(", "ioclass", ")", "self", ".", "modify", "(", "new_ioclasses", "=", "current_ioclasses", ")" ]
Add one VNXIOClass instance to policy. .. note: due to the limitation of VNX, need to stop the policy first.
[ "Add", "one", "VNXIOClass", "instance", "to", "policy", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L280-L289
emc-openstack/storops
storops/vnx/resource/nqm.py
VNXIOPolicy.remove_class
def remove_class(self, ioclass):
    """Remove VNXIOClass instance from policy."""
    current_ioclasses = self.ioclasses
    new_ioclasses = filter(lambda x: x.name != ioclass.name,
                           current_ioclasses)
    self.modify(new_ioclasses=new_ioclasses)
python
def remove_class(self, ioclass):
    """Remove VNXIOClass instance from policy."""
    current_ioclasses = self.ioclasses
    new_ioclasses = filter(lambda x: x.name != ioclass.name,
                           current_ioclasses)
    self.modify(new_ioclasses=new_ioclasses)
[ "def", "remove_class", "(", "self", ",", "ioclass", ")", ":", "current_ioclasses", "=", "self", ".", "ioclasses", "new_ioclasses", "=", "filter", "(", "lambda", "x", ":", "x", ".", "name", "!=", "ioclass", ".", "name", ",", "current_ioclasses", ")", "self", ".", "modify", "(", "new_ioclasses", "=", "new_ioclasses", ")" ]
Remove VNXIOClass instance from policy.
[ "Remove", "VNXIOClass", "instance", "from", "policy", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/vnx/resource/nqm.py#L291-L296
emc-openstack/storops
storops/unity/resource/cg.py
UnityConsistencyGroup.replace_lun
def replace_lun(self, *lun_list):
    """Replaces the existing LUNs with lun_list."""
    lun_add = self._prepare_luns_add(lun_list)
    lun_remove = self._prepare_luns_remove(lun_list, False)
    return self.modify(lun_add=lun_add, lun_remove=lun_remove)
python
def replace_lun(self, *lun_list):
    """Replaces the existing LUNs with lun_list."""
    lun_add = self._prepare_luns_add(lun_list)
    lun_remove = self._prepare_luns_remove(lun_list, False)
    return self.modify(lun_add=lun_add, lun_remove=lun_remove)
[ "def", "replace_lun", "(", "self", ",", "*", "lun_list", ")", ":", "lun_add", "=", "self", ".", "_prepare_luns_add", "(", "lun_list", ")", "lun_remove", "=", "self", ".", "_prepare_luns_remove", "(", "lun_list", ",", "False", ")", "return", "self", ".", "modify", "(", "lun_add", "=", "lun_add", ",", "lun_remove", "=", "lun_remove", ")" ]
Replaces the existing LUNs with lun_list.
[ "Replaces", "the", "exiting", "LUNs", "to", "lun_list", "." ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/unity/resource/cg.py#L208-L212
emc-openstack/storops
storops/unity/resource/cg.py
UnityConsistencyGroup.update_lun
def update_lun(self, add_luns=None, remove_luns=None):
    """Updates the LUNs in CG, adding the ones in `add_luns` and removing
    the ones in `remove_luns`"""
    if not add_luns and not remove_luns:
        log.debug("Empty add_luns and remove_luns passed in, "
                  "skip update_lun.")
        return RESP_OK
    lun_add = self._prepare_luns_add(add_luns)
    lun_remove = self._prepare_luns_remove(remove_luns, True)
    return self.modify(lun_add=lun_add, lun_remove=lun_remove)
python
def update_lun(self, add_luns=None, remove_luns=None):
    """Updates the LUNs in CG, adding the ones in `add_luns` and removing
    the ones in `remove_luns`"""
    if not add_luns and not remove_luns:
        log.debug("Empty add_luns and remove_luns passed in, "
                  "skip update_lun.")
        return RESP_OK
    lun_add = self._prepare_luns_add(add_luns)
    lun_remove = self._prepare_luns_remove(remove_luns, True)
    return self.modify(lun_add=lun_add, lun_remove=lun_remove)
[ "def", "update_lun", "(", "self", ",", "add_luns", "=", "None", ",", "remove_luns", "=", "None", ")", ":", "if", "not", "add_luns", "and", "not", "remove_luns", ":", "log", ".", "debug", "(", "\"Empty add_luns and remove_luns passed in, \"", "\"skip update_lun.\"", ")", "return", "RESP_OK", "lun_add", "=", "self", ".", "_prepare_luns_add", "(", "add_luns", ")", "lun_remove", "=", "self", ".", "_prepare_luns_remove", "(", "remove_luns", ",", "True", ")", "return", "self", ".", "modify", "(", "lun_add", "=", "lun_add", ",", "lun_remove", "=", "lun_remove", ")" ]
Updates the LUNs in CG, adding the ones in `add_luns` and removing the ones in `remove_luns`
[ "Updates", "the", "LUNs", "in", "CG", "adding", "the", "ones", "in", "add_luns", "and", "removing", "the", "ones", "in", "remove_luns" ]
train
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/unity/resource/cg.py#L214-L223
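A hypothetical usage of the two consistency-group helpers above (the system address, credentials, and resource names are placeholders; this assumes the usual storops UnitySystem entry point):

from storops import UnitySystem

unity = UnitySystem('10.0.0.1', 'admin', 'password')
cg = unity.get_cg(name='cg_prod')
lun_a = unity.get_lun(name='lun_a')
lun_b = unity.get_lun(name='lun_b')

cg.update_lun(add_luns=[lun_a], remove_luns=[lun_b])  # incremental change
cg.replace_lun(lun_a)  # membership becomes exactly the LUNs passed in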
rstoneback/pysat
pysat/instruments/iss_fpmu.py
clean
def clean(inst):
    """Routine to return FPMU data cleaned to the specified level

    Parameters
    -----------
    inst : (pysat.Instrument)
        Instrument class object, whose attribute clean_level is used to return
        the desired level of data selectivity.

    Returns
    --------
    Void : (NoneType)
        data in inst is modified in-place.

    Notes
    --------
    No cleaning currently available for FPMU

    """

    inst.data.replace(-999., np.nan, inplace=True)           # Te
    inst.data.replace(-9.9999998e+30, np.nan, inplace=True)  # Ni

    return None
python
def clean(inst):
    """Routine to return FPMU data cleaned to the specified level

    Parameters
    -----------
    inst : (pysat.Instrument)
        Instrument class object, whose attribute clean_level is used to return
        the desired level of data selectivity.

    Returns
    --------
    Void : (NoneType)
        data in inst is modified in-place.

    Notes
    --------
    No cleaning currently available for FPMU

    """

    inst.data.replace(-999., np.nan, inplace=True)           # Te
    inst.data.replace(-9.9999998e+30, np.nan, inplace=True)  # Ni

    return None
[ "def", "clean", "(", "inst", ")", ":", "inst", ".", "data", ".", "replace", "(", "-", "999.", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "# Te", "inst", ".", "data", ".", "replace", "(", "-", "9.9999998e+30", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "#Ni", "return", "None" ]
Routine to return FPMU data cleaned to the specified level Parameters ----------- inst : (pysat.Instrument) Instrument class object, whose attribute clean_level is used to return the desired level of data selectivity. Returns -------- Void : (NoneType) data in inst is modified in-place. Notes -------- No cleaning currently available for FPMU
[ "Routine", "to", "return", "FPMU", "data", "cleaned", "to", "the", "specified", "level" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/instruments/iss_fpmu.py#L59-L81
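The cleaning step is just a fill-value-to-NaN substitution; once the sentinels become NaN, pandas statistics skip them automatically. A minimal sketch on synthetic data using the same two fill values:

import numpy as np
import pandas as pd

df = pd.DataFrame({'Te': [1200.0, -999.0, 1350.0],
                   'Ni': [1.0e11, -9.9999998e+30, 9.0e10]})

# Replace both instrument fill values with NaN in one pass
df.replace([-999.0, -9.9999998e+30], np.nan, inplace=True)
print(df.mean())  # NaNs are excluded from the means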
rstoneback/pysat
pysat/_files.py
Files._attach_files
def _attach_files(self, files_info):
    """Attaches info returned by instrument list_files routine to
    Instrument object.

    """
    if not files_info.empty:
        if (len(files_info.index.unique()) != len(files_info)):
            estr = 'WARNING! Duplicate datetimes in provided file '
            estr = '{:s}information.\nKeeping one of each '.format(estr)
            estr = '{:s}of the duplicates, dropping the rest.'.format(estr)
            print(estr)
            print(files_info.index.get_duplicates())

            idx = np.unique(files_info.index, return_index=True)
            files_info = files_info.ix[idx[1]]
            # raise ValueError('List of files must have unique datetimes.')

        self.files = files_info.sort_index()
        date = files_info.index[0]
        self.start_date = pds.datetime(date.year, date.month, date.day)
        date = files_info.index[-1]
        self.stop_date = pds.datetime(date.year, date.month, date.day)
    else:
        self.start_date = None
        self.stop_date = None
        # convert to object type
        # necessary if Series is empty, enables == checks with strings
        self.files = files_info.astype(np.dtype('O'))
python
def _attach_files(self, files_info):
    """Attaches info returned by instrument list_files routine to
    Instrument object.

    """
    if not files_info.empty:
        if (len(files_info.index.unique()) != len(files_info)):
            estr = 'WARNING! Duplicate datetimes in provided file '
            estr = '{:s}information.\nKeeping one of each '.format(estr)
            estr = '{:s}of the duplicates, dropping the rest.'.format(estr)
            print(estr)
            print(files_info.index.get_duplicates())

            idx = np.unique(files_info.index, return_index=True)
            files_info = files_info.ix[idx[1]]
            # raise ValueError('List of files must have unique datetimes.')

        self.files = files_info.sort_index()
        date = files_info.index[0]
        self.start_date = pds.datetime(date.year, date.month, date.day)
        date = files_info.index[-1]
        self.stop_date = pds.datetime(date.year, date.month, date.day)
    else:
        self.start_date = None
        self.stop_date = None
        # convert to object type
        # necessary if Series is empty, enables == checks with strings
        self.files = files_info.astype(np.dtype('O'))
[ "def", "_attach_files", "(", "self", ",", "files_info", ")", ":", "if", "not", "files_info", ".", "empty", ":", "if", "(", "len", "(", "files_info", ".", "index", ".", "unique", "(", ")", ")", "!=", "len", "(", "files_info", ")", ")", ":", "estr", "=", "'WARNING! Duplicate datetimes in provided file '", "estr", "=", "'{:s}information.\\nKeeping one of each '", ".", "format", "(", "estr", ")", "estr", "=", "'{:s}of the duplicates, dropping the rest.'", ".", "format", "(", "estr", ")", "print", "(", "estr", ")", "print", "(", "files_info", ".", "index", ".", "get_duplicates", "(", ")", ")", "idx", "=", "np", ".", "unique", "(", "files_info", ".", "index", ",", "return_index", "=", "True", ")", "files_info", "=", "files_info", ".", "ix", "[", "idx", "[", "1", "]", "]", "#raise ValueError('List of files must have unique datetimes.')", "self", ".", "files", "=", "files_info", ".", "sort_index", "(", ")", "date", "=", "files_info", ".", "index", "[", "0", "]", "self", ".", "start_date", "=", "pds", ".", "datetime", "(", "date", ".", "year", ",", "date", ".", "month", ",", "date", ".", "day", ")", "date", "=", "files_info", ".", "index", "[", "-", "1", "]", "self", ".", "stop_date", "=", "pds", ".", "datetime", "(", "date", ".", "year", ",", "date", ".", "month", ",", "date", ".", "day", ")", "else", ":", "self", ".", "start_date", "=", "None", "self", ".", "stop_date", "=", "None", "# convert to object type", "# necessary if Series is empty, enables == checks with strings", "self", ".", "files", "=", "files_info", ".", "astype", "(", "np", ".", "dtype", "(", "'O'", ")", ")" ]
Attaches info returned by instrument list_files routine to Instrument object.
[ "Attaches", "info", "returned", "by", "instrument", "list_files", "routine", "to", "Instrument", "object", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L141-L168
rstoneback/pysat
pysat/_files.py
Files._store
def _store(self):
    """Store currently loaded filelist for instrument onto filesystem"""
    name = self.stored_file_name
    # check if current file data is different than stored file list
    # if so, move file list to previous file list, store current to file
    # if not, do nothing
    stored_files = self._load()
    if len(stored_files) != len(self.files):
        # # of items is different, things are new
        new_flag = True
    elif len(stored_files) == len(self.files):
        # # of items equal, check specifically for equality
        if stored_files.eq(self.files).all():
            new_flag = False
        else:
            # not equal, there are new files
            new_flag = True

    if new_flag:
        if self.write_to_disk:
            stored_files.to_csv(os.path.join(self.home_path,
                                             'previous_' + name),
                                date_format='%Y-%m-%d %H:%M:%S.%f')
            self.files.to_csv(os.path.join(self.home_path, name),
                              date_format='%Y-%m-%d %H:%M:%S.%f')
        else:
            self._previous_file_list = stored_files
            self._current_file_list = self.files.copy()
    return
python
def _store(self):
    """Store currently loaded filelist for instrument onto filesystem"""
    name = self.stored_file_name
    # check if current file data is different than stored file list
    # if so, move file list to previous file list, store current to file
    # if not, do nothing
    stored_files = self._load()
    if len(stored_files) != len(self.files):
        # # of items is different, things are new
        new_flag = True
    elif len(stored_files) == len(self.files):
        # # of items equal, check specifically for equality
        if stored_files.eq(self.files).all():
            new_flag = False
        else:
            # not equal, there are new files
            new_flag = True

    if new_flag:
        if self.write_to_disk:
            stored_files.to_csv(os.path.join(self.home_path,
                                             'previous_' + name),
                                date_format='%Y-%m-%d %H:%M:%S.%f')
            self.files.to_csv(os.path.join(self.home_path, name),
                              date_format='%Y-%m-%d %H:%M:%S.%f')
        else:
            self._previous_file_list = stored_files
            self._current_file_list = self.files.copy()
    return
[ "def", "_store", "(", "self", ")", ":", "name", "=", "self", ".", "stored_file_name", "# check if current file data is different than stored file list", "# if so, move file list to previous file list, store current to file", "# if not, do nothing", "stored_files", "=", "self", ".", "_load", "(", ")", "if", "len", "(", "stored_files", ")", "!=", "len", "(", "self", ".", "files", ")", ":", "# # of items is different, things are new", "new_flag", "=", "True", "elif", "len", "(", "stored_files", ")", "==", "len", "(", "self", ".", "files", ")", ":", "# # of items equal, check specifically for equality", "if", "stored_files", ".", "eq", "(", "self", ".", "files", ")", ".", "all", "(", ")", ":", "new_flag", "=", "False", "else", ":", "# not equal, there are new files", "new_flag", "=", "True", "if", "new_flag", ":", "if", "self", ".", "write_to_disk", ":", "stored_files", ".", "to_csv", "(", "os", ".", "path", ".", "join", "(", "self", ".", "home_path", ",", "'previous_'", "+", "name", ")", ",", "date_format", "=", "'%Y-%m-%d %H:%M:%S.%f'", ")", "self", ".", "files", ".", "to_csv", "(", "os", ".", "path", ".", "join", "(", "self", ".", "home_path", ",", "name", ")", ",", "date_format", "=", "'%Y-%m-%d %H:%M:%S.%f'", ")", "else", ":", "self", ".", "_previous_file_list", "=", "stored_files", "self", ".", "_current_file_list", "=", "self", ".", "files", ".", "copy", "(", ")", "return" ]
Store currently loaded filelist for instrument onto filesystem
[ "Store", "currently", "loaded", "filelist", "for", "instrument", "onto", "filesystem" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L170-L200
rstoneback/pysat
pysat/_files.py
Files._load
def _load(self, prev_version=False):
    """Load stored filelist and return as Pandas Series

    Parameters
    ----------
    prev_version : boolean
        if True, will load previous version of file list

    Returns
    -------
    pandas.Series
        Full path file names are indexed by datetime
        Series is empty if there is no file list to load
    """
    fname = self.stored_file_name
    if prev_version:
        fname = os.path.join(self.home_path, 'previous_' + fname)
    else:
        fname = os.path.join(self.home_path, fname)

    if os.path.isfile(fname) and (os.path.getsize(fname) > 0):
        if self.write_to_disk:
            return pds.read_csv(fname, index_col=0, parse_dates=True,
                                squeeze=True, header=None)
        else:
            # grab files from memory
            if prev_version:
                return self._previous_file_list
            else:
                return self._current_file_list
    else:
        return pds.Series([], dtype='a')
python
def _load(self, prev_version=False):
    """Load stored filelist and return as Pandas Series

    Parameters
    ----------
    prev_version : boolean
        if True, will load previous version of file list

    Returns
    -------
    pandas.Series
        Full path file names are indexed by datetime
        Series is empty if there is no file list to load
    """
    fname = self.stored_file_name
    if prev_version:
        fname = os.path.join(self.home_path, 'previous_' + fname)
    else:
        fname = os.path.join(self.home_path, fname)

    if os.path.isfile(fname) and (os.path.getsize(fname) > 0):
        if self.write_to_disk:
            return pds.read_csv(fname, index_col=0, parse_dates=True,
                                squeeze=True, header=None)
        else:
            # grab files from memory
            if prev_version:
                return self._previous_file_list
            else:
                return self._current_file_list
    else:
        return pds.Series([], dtype='a')
[ "def", "_load", "(", "self", ",", "prev_version", "=", "False", ")", ":", "fname", "=", "self", ".", "stored_file_name", "if", "prev_version", ":", "fname", "=", "os", ".", "path", ".", "join", "(", "self", ".", "home_path", ",", "'previous_'", "+", "fname", ")", "else", ":", "fname", "=", "os", ".", "path", ".", "join", "(", "self", ".", "home_path", ",", "fname", ")", "if", "os", ".", "path", ".", "isfile", "(", "fname", ")", "and", "(", "os", ".", "path", ".", "getsize", "(", "fname", ")", ">", "0", ")", ":", "if", "self", ".", "write_to_disk", ":", "return", "pds", ".", "read_csv", "(", "fname", ",", "index_col", "=", "0", ",", "parse_dates", "=", "True", ",", "squeeze", "=", "True", ",", "header", "=", "None", ")", "else", ":", "# grab files from memory", "if", "prev_version", ":", "return", "self", ".", "_previous_file_list", "else", ":", "return", "self", ".", "_current_file_list", "else", ":", "return", "pds", ".", "Series", "(", "[", "]", ",", "dtype", "=", "'a'", ")" ]
Load stored filelist and return as Pandas Series Parameters ---------- prev_version : boolean if True, will load previous version of file list Returns ------- pandas.Series Full path file names are indexed by datetime Series is empty if there is no file list to load
[ "Load", "stored", "filelist", "and", "return", "as", "Pandas", "Series" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L202-L234
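_store and _load together implement a two-generation snapshot: the latest file list plus one 'previous_' copy, so that new files can be diffed later (see get_new below). A generic sketch of that pattern, with paths and helper names chosen for illustration:

import os
import pandas as pd

def store_snapshot(series, home, name='file_list.csv'):
    current = os.path.join(home, name)
    previous = os.path.join(home, 'previous_' + name)
    if os.path.isfile(current):
        os.replace(current, previous)  # demote the old snapshot
    series.to_csv(current)

def load_snapshot(home, name='file_list.csv', prev_version=False):
    path = os.path.join(home, ('previous_' + name) if prev_version else name)
    if os.path.isfile(path) and os.path.getsize(path) > 0:
        return pd.read_csv(path, index_col=0,
                           parse_dates=True).squeeze('columns')
    return pd.Series(dtype=object)  # empty list if nothing stored yet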
rstoneback/pysat
pysat/_files.py
Files.refresh
def refresh(self):
    """Update list of files, if there are changes.

    Calls underlying list_rtn for the particular science instrument.
    Typically, these routines search in the pysat provided path,
    pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by
    pysat.utils.set_data_dir(path=path).

    """
    output_str = '{platform} {name} {tag} {sat_id}'
    output_str = output_str.format(platform=self._sat.platform,
                                   name=self._sat.name, tag=self._sat.tag,
                                   sat_id=self._sat.sat_id)
    output_str = " ".join(("pysat is searching for", output_str, "files."))
    output_str = " ".join(output_str.split())
    print(output_str)

    info = self._sat._list_rtn(tag=self._sat.tag, sat_id=self._sat.sat_id,
                               data_path=self.data_path,
                               format_str=self.file_format)

    if not info.empty:
        print('Found {ll:d} of them.'.format(ll=len(info)))
    else:
        estr = "Unable to find any files that match the supplied template. "
        estr += "If you have the necessary files please check pysat "
        estr += "settings and file locations (e.g. pysat.pysat_dir)."
        print(estr)
    info = self._remove_data_dir_path(info)
    self._attach_files(info)
    self._store()
python
def refresh(self):
    """Update list of files, if there are changes.

    Calls underlying list_rtn for the particular science instrument.
    Typically, these routines search in the pysat provided path,
    pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by
    pysat.utils.set_data_dir(path=path).

    """
    output_str = '{platform} {name} {tag} {sat_id}'
    output_str = output_str.format(platform=self._sat.platform,
                                   name=self._sat.name, tag=self._sat.tag,
                                   sat_id=self._sat.sat_id)
    output_str = " ".join(("pysat is searching for", output_str, "files."))
    output_str = " ".join(output_str.split())
    print(output_str)

    info = self._sat._list_rtn(tag=self._sat.tag, sat_id=self._sat.sat_id,
                               data_path=self.data_path,
                               format_str=self.file_format)

    if not info.empty:
        print('Found {ll:d} of them.'.format(ll=len(info)))
    else:
        estr = "Unable to find any files that match the supplied template. "
        estr += "If you have the necessary files please check pysat "
        estr += "settings and file locations (e.g. pysat.pysat_dir)."
        print(estr)
    info = self._remove_data_dir_path(info)
    self._attach_files(info)
    self._store()
[ "def", "refresh", "(", "self", ")", ":", "output_str", "=", "'{platform} {name} {tag} {sat_id}'", "output_str", "=", "output_str", ".", "format", "(", "platform", "=", "self", ".", "_sat", ".", "platform", ",", "name", "=", "self", ".", "_sat", ".", "name", ",", "tag", "=", "self", ".", "_sat", ".", "tag", ",", "sat_id", "=", "self", ".", "_sat", ".", "sat_id", ")", "output_str", "=", "\" \"", ".", "join", "(", "(", "\"pysat is searching for\"", ",", "output_str", ",", "\"files.\"", ")", ")", "output_str", "=", "\" \"", ".", "join", "(", "output_str", ".", "split", "(", ")", ")", "print", "(", "output_str", ")", "info", "=", "self", ".", "_sat", ".", "_list_rtn", "(", "tag", "=", "self", ".", "_sat", ".", "tag", ",", "sat_id", "=", "self", ".", "_sat", ".", "sat_id", ",", "data_path", "=", "self", ".", "data_path", ",", "format_str", "=", "self", ".", "file_format", ")", "if", "not", "info", ".", "empty", ":", "print", "(", "'Found {ll:d} of them.'", ".", "format", "(", "ll", "=", "len", "(", "info", ")", ")", ")", "else", ":", "estr", "=", "\"Unable to find any files that match the supplied template. \"", "estr", "+=", "\"If you have the necessary files please check pysat \"", "estr", "+=", "\"settings and file locations (e.g. pysat.pysat_dir).\"", "print", "(", "estr", ")", "info", "=", "self", ".", "_remove_data_dir_path", "(", "info", ")", "self", ".", "_attach_files", "(", "info", ")", "self", ".", "_store", "(", ")" ]
Update list of files, if there are changes. Calls underlying list_rtn for the particular science instrument. Typically, these routines search in the pysat provided path, pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by pysat.utils.set_data_dir(path=path).
[ "Update", "list", "of", "files", "if", "there", "are", "changes", ".", "Calls", "underlying", "list_rtn", "for", "the", "particular", "science", "instrument", ".", "Typically", "these", "routines", "search", "in", "the", "pysat", "provided", "path", "pysat_data_dir", "/", "platform", "/", "name", "/", "tag", "/", "where", "pysat_data_dir", "is", "set", "by", "pysat", ".", "utils", ".", "set_data_dir", "(", "path", "=", "path", ")", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L237-L269
rstoneback/pysat
pysat/_files.py
Files.get_new
def get_new(self):
    """List new files since last recorded file state.

    pysat stores filenames in the user_home/.pysat directory. Returns
    a list of all new filenames since the last known change to files.
    Filenames are stored if there is a change and either update_files
    is True at instrument object level or files.refresh() is called.

    Returns
    -------
    pandas.Series
        files are indexed by datetime

    """
    # refresh files
    self.refresh()
    # current files
    new_info = self._load()
    # previous set of files
    old_info = self._load(prev_version=True)
    new_files = new_info[-new_info.isin(old_info)]
    return new_files
python
def get_new(self):
    """List new files since last recorded file state.

    pysat stores filenames in the user_home/.pysat directory. Returns
    a list of all new filenames since the last known change to files.
    Filenames are stored if there is a change and either update_files
    is True at instrument object level or files.refresh() is called.

    Returns
    -------
    pandas.Series
        files are indexed by datetime

    """
    # refresh files
    self.refresh()
    # current files
    new_info = self._load()
    # previous set of files
    old_info = self._load(prev_version=True)
    new_files = new_info[-new_info.isin(old_info)]
    return new_files
[ "def", "get_new", "(", "self", ")", ":", "# refresh files", "self", ".", "refresh", "(", ")", "# current files", "new_info", "=", "self", ".", "_load", "(", ")", "# previous set of files", "old_info", "=", "self", ".", "_load", "(", "prev_version", "=", "True", ")", "new_files", "=", "new_info", "[", "-", "new_info", ".", "isin", "(", "old_info", ")", "]", "return", "new_files" ]
List new files since last recorded file state. pysat stores filenames in the user_home/.pysat directory. Returns a list of all new filenames since the last known change to files. Filenames are stored if there is a change and either update_files is True at instrument object level or files.refresh() is called. Returns ------- pandas.Series files are indexed by datetime
[ "List", "new", "files", "since", "last", "recorded", "file", "state", ".", "pysat", "stores", "filenames", "in", "the", "user_home", "/", ".", "pysat", "directory", ".", "Returns", "a", "list", "of", "all", "new", "fileanmes", "since", "the", "last", "known", "change", "to", "files", ".", "Filenames", "are", "stored", "if", "there", "is", "a", "change", "and", "either", "update_files", "is", "True", "at", "instrument", "object", "level", "or", "files", ".", "refresh", "()", "is", "called", ".", "Returns", "-------", "pandas", ".", "Series", "files", "are", "indexed", "by", "datetime" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L271-L293
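The diff itself is a one-liner: keep the entries of the new list that are absent from the old one. A minimal pandas sketch (the modern spelling uses ~ for boolean negation, which is what the - in the function above amounts to):

import pandas as pd

old = pd.Series(['a.cdf', 'b.cdf'])
new = pd.Series(['a.cdf', 'b.cdf', 'c.cdf'])

fresh = new[~new.isin(old)]  # present now, absent before
print(fresh.tolist())        # ['c.cdf']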
rstoneback/pysat
pysat/_files.py
Files.get_index
def get_index(self, fname):
    """Return index for a given filename.

    Parameters
    ----------
    fname : string
        filename

    Note
    ----
    If fname not found in the file information already attached
    to the instrument.files instance, then a files.refresh() call
    is made.

    """
    idx, = np.where(fname == self.files)
    if len(idx) == 0:
        # filename not in index, try reloading files from disk
        self.refresh()
        #print("DEBUG get_index:", fname, self.files)
        idx, = np.where(fname == np.array(self.files))

        if len(idx) == 0:
            raise ValueError('Could not find "' + fname +
                             '" in available file list. Valid Example: ' +
                             self.files.iloc[0])
    # return a scalar rather than array - otherwise introduces array to
    # index warnings.
    return idx[0]
python
def get_index(self, fname):
    """Return index for a given filename.

    Parameters
    ----------
    fname : string
        filename

    Note
    ----
    If fname not found in the file information already attached
    to the instrument.files instance, then a files.refresh() call
    is made.

    """
    idx, = np.where(fname == self.files)
    if len(idx) == 0:
        # filename not in index, try reloading files from disk
        self.refresh()
        #print("DEBUG get_index:", fname, self.files)
        idx, = np.where(fname == np.array(self.files))

        if len(idx) == 0:
            raise ValueError('Could not find "' + fname +
                             '" in available file list. Valid Example: ' +
                             self.files.iloc[0])
    # return a scalar rather than array - otherwise introduces array to
    # index warnings.
    return idx[0]
[ "def", "get_index", "(", "self", ",", "fname", ")", ":", "idx", ",", "=", "np", ".", "where", "(", "fname", "==", "self", ".", "files", ")", "if", "len", "(", "idx", ")", "==", "0", ":", "# filename not in index, try reloading files from disk", "self", ".", "refresh", "(", ")", "#print(\"DEBUG get_index:\", fname, self.files)", "idx", ",", "=", "np", ".", "where", "(", "fname", "==", "np", ".", "array", "(", "self", ".", "files", ")", ")", "if", "len", "(", "idx", ")", "==", "0", ":", "raise", "ValueError", "(", "'Could not find \"'", "+", "fname", "+", "'\" in available file list. Valid Example: '", "+", "self", ".", "files", ".", "iloc", "[", "0", "]", ")", "# return a scalar rather than array - otherwise introduces array to", "# index warnings.", "return", "idx", "[", "0", "]" ]
Return index for a given filename. Parameters ---------- fname : string filename Note ---- If fname not found in the file information already attached to the instrument.files instance, then a files.refresh() call is made.
[ "Return", "index", "for", "a", "given", "filename", ".", "Parameters", "----------", "fname", ":", "string", "filename", "Note", "----", "If", "fname", "not", "found", "in", "the", "file", "information", "already", "attached", "to", "the", "instrument", ".", "files", "instance", "then", "a", "files", ".", "refresh", "()", "call", "is", "made", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L314-L343
rstoneback/pysat
pysat/_files.py
Files.get_file_array
def get_file_array(self, start, end):
    """Return a list of filenames between and including start and end.

    Parameters
    ----------
    start : array_like or single string
        filenames for start of returned filelist
    end : array_like or single string
        filenames inclusive end of list

    Returns
    -------
    list of filenames between and including start and end over all
    intervals.

    """
    if hasattr(start, '__iter__') & hasattr(end, '__iter__'):
        files = []
        for (sta, stp) in zip(start, end):
            id1 = self.get_index(sta)
            id2 = self.get_index(stp)
            files.extend(self.files.iloc[id1:id2 + 1])
    elif hasattr(start, '__iter__') | hasattr(end, '__iter__'):
        estr = 'Either both or none of the inputs need to be iterable'
        raise ValueError(estr)
    else:
        id1 = self.get_index(start)
        id2 = self.get_index(end)
        files = self.files[id1:id2 + 1].to_list()

    return files
python
def get_file_array(self, start, end): """Return a list of filenames between and including start and end. Parameters ---------- start : array_like or single string filenames for start of returned filelist end : array_like or single string filenames for inclusive end of returned filelist Returns ------- list of filenames between and including start and end over all intervals. """ if hasattr(start, '__iter__') & hasattr(end, '__iter__'): files = [] for (sta,stp) in zip(start, end): id1 = self.get_index(sta) id2 = self.get_index(stp) files.extend(self.files.iloc[id1 : id2+1]) elif hasattr(start, '__iter__') | hasattr(end, '__iter__'): estr = 'Either both or none of the inputs need to be iterable' raise ValueError(estr) else: id1 = self.get_index(start) id2 = self.get_index(end) files = self.files[id1:id2+1].to_list() return files
[ "def", "get_file_array", "(", "self", ",", "start", ",", "end", ")", ":", "if", "hasattr", "(", "start", ",", "'__iter__'", ")", "&", "hasattr", "(", "end", ",", "'__iter__'", ")", ":", "files", "=", "[", "]", "for", "(", "sta", ",", "stp", ")", "in", "zip", "(", "start", ",", "end", ")", ":", "id1", "=", "self", ".", "get_index", "(", "sta", ")", "id2", "=", "self", ".", "get_index", "(", "stp", ")", "files", ".", "extend", "(", "self", ".", "files", ".", "iloc", "[", "id1", ":", "id2", "+", "1", "]", ")", "elif", "hasattr", "(", "start", ",", "'__iter__'", ")", "|", "hasattr", "(", "end", ",", "'__iter__'", ")", ":", "estr", "=", "'Either both or none of the inputs need to be iterable'", "raise", "ValueError", "(", "estr", ")", "else", ":", "id1", "=", "self", ".", "get_index", "(", "start", ")", "id2", "=", "self", ".", "get_index", "(", "end", ")", "files", "=", "self", ".", "files", "[", "id1", ":", "id2", "+", "1", "]", ".", "to_list", "(", ")", "return", "files" ]
Return a list of filenames between and including start and end. Parameters ---------- start : array_like or single string filenames for start of returned filelist end : array_like or single string filenames for inclusive end of returned filelist Returns ------- list of filenames between and including start and end over all intervals.
[ "Return", "a", "list", "of", "filenames", "between", "and", "including", "start", "and", "end", ".", "Parameters", "----------", "start", ":", "array_like", "or", "single", "string", "filenames", "for", "start", "of", "returned", "filelist", "end", ":", "array_like", "or", "single", "string", "filenames", "for", "inclusive", "end", "of", "returned", "filelist", "Returns", "-------", "list", "of", "filenames", "between", "and", "including", "start", "and", "end", "over", "all", "intervals", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L382-L411
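get_file_array takes either two single filenames or two equal-length iterables of interval bounds; a sketch continuing the hypothetical inst above.

# one interval, inclusive of both endpoints
flist = inst.files.get_file_array(inst.files.files.iloc[0], inst.files.files.iloc[4])

# several intervals at once; both arguments must be iterable or ValueError is raised
starts = [inst.files.files.iloc[0], inst.files.files.iloc[10]]
ends = [inst.files.files.iloc[4], inst.files.files.iloc[14]]
flist = inst.files.get_file_array(starts, ends)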
rstoneback/pysat
pysat/_files.py
Files._remove_data_dir_path
def _remove_data_dir_path(self, inp=None): """Remove the data directory path from filenames""" # need to add a check in here to make sure data_dir path is actually in # the filename if inp is not None: split_str = os.path.join(self.data_path, '') return inp.apply(lambda x: x.split(split_str)[-1])
python
def _remove_data_dir_path(self, inp=None): """Remove the data directory path from filenames""" # need to add a check in here to make sure data_dir path is actually in # the filename if inp is not None: split_str = os.path.join(self.data_path, '') return inp.apply(lambda x: x.split(split_str)[-1])
[ "def", "_remove_data_dir_path", "(", "self", ",", "inp", "=", "None", ")", ":", "# need to add a check in here to make sure data_dir path is actually in", "# the filename", "if", "inp", "is", "not", "None", ":", "split_str", "=", "os", ".", "path", ".", "join", "(", "self", ".", "data_path", ",", "''", ")", "return", "inp", ".", "apply", "(", "lambda", "x", ":", "x", ".", "split", "(", "split_str", ")", "[", "-", "1", "]", ")" ]
Remove the data directory path from filenames
[ "Remove", "the", "data", "directory", "path", "from", "filenames" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L413-L420
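The helper is a vectorized string split on the data path with a trailing separator appended; a self-contained illustration of the same logic with made-up paths.

import os
import pandas as pds

data_path = os.path.join(os.sep, 'home', 'user', 'data')
files = pds.Series([os.path.join(data_path, 'inst_20090101.cdf')])
split_str = os.path.join(data_path, '')   # appends the trailing os.sep
print(files.apply(lambda x: x.split(split_str)[-1])[0])   # inst_20090101.cdf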
rstoneback/pysat
pysat/_files.py
Files.from_os
def from_os(cls, data_path=None, format_str=None, two_digit_year_break=None): """ Produces a list of files and formats it for Files class. Requires fixed_width filename Parameters ---------- data_path : string Top level directory to search files for. This directory is provided by pysat to the instrument_module.list_files functions as data_path. format_str : string with python format codes Provides the naming pattern of the instrument files and the locations of date information so an ordered list may be produced. Supports 'year', 'month', 'day', 'hour', 'min', 'sec', 'version', and 'revision' Ex: 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v01.cdf' two_digit_year_break : int If filenames only store two digits for the year, then '1900' will be added for years >= two_digit_year_break and '2000' will be added for years < two_digit_year_break. Note ---- Does not produce a Files instance, but the proper output from instrument_module.list_files method. The '?' may be used to indicate a set number of spaces for a variable part of the name that need not be extracted. 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v??.cdf' """ import collections from pysat.utils import create_datetime_index if format_str is None: raise ValueError("Must supply a filename template (format_str).") if data_path is None: raise ValueError("Must supply instrument directory path (data_path)") # parse format string to figure out the search string to use # to identify files in the filesystem search_str = '' form = string.Formatter() # stores the keywords extracted from format_string keys = [] #, and length of string snips = [] length = [] stored = collections.OrderedDict() stored['year'] = []; stored['month'] = []; stored['day'] = []; stored['hour'] = []; stored['min'] = []; stored['sec'] = []; stored['version'] = []; stored['revision'] = []; for snip in form.parse(format_str): # collect all of the format keywords # replace them in the string with the '*' wildcard # then try and get width from format keywords so we know # later on where to parse information out from search_str += snip[0] snips.append(snip[0]) if snip[1] is not None: keys.append(snip[1]) search_str += '*' # try and determine formatting width temp = re.findall(r'\d+', snip[2]) if temp: # there are items, try and grab width for i in temp: if int(i) != 0: length.append(int(i)) break else: raise ValueError("Couldn't determine formatting width") abs_search_str = os.path.join(data_path, search_str) files = glob.glob(abs_search_str) # we have a list of files, now we need to extract the date information # code below works, but only if the size of file string # remains unchanged # determine the locations the date information in a filename is stored # use these indices to slice out date from loaded filenames # test_str = format_str.format(**periods) if len(files) > 0: idx = 0 begin_key = [] end_key = [] for i,snip in enumerate(snips): idx += len(snip) if i < (len(length)): begin_key.append(idx) idx += length[i] end_key.append(idx) max_len = idx # setting up negative indexing to pick out filenames key_str_idx = [np.array(begin_key, dtype=int) - max_len, np.array(end_key, dtype=int) - max_len] # need to parse out dates for datetime index for i,temp in enumerate(files): for j,key in enumerate(keys): val = temp[key_str_idx[0][j]:key_str_idx[1][j]] stored[key].append(val) # convert to numpy arrays for key in stored.keys(): stored[key] = np.array(stored[key]).astype(int) if len(stored[key]) == 0: stored[key]=None # deal with the possibility of two digit years # years above or equal to break are considered to be 1900+ # years below break are considered to be 2000+ if two_digit_year_break is not None: idx, = np.where(np.array(stored['year']) >= two_digit_year_break) stored['year'][idx] = stored['year'][idx] + 1900 idx, = np.where(np.array(stored['year']) < two_digit_year_break) stored['year'][idx] = stored['year'][idx] + 2000 # need to sort the information for things to work rec_arr = [stored[key] for key in keys] rec_arr.append(files) # sort all arrays val_keys = keys + ['files'] rec_arr = np.rec.fromarrays(rec_arr, names=val_keys) rec_arr.sort(order=val_keys, axis=0) # pull out sorted info for key in keys: stored[key] = rec_arr[key] files = rec_arr['files'] # add hour and minute information to 'sec' if stored['sec'] is None: stored['sec'] = np.zeros(len(files)) if stored['hour'] is not None: stored['sec'] += 3600 * stored['hour'] if stored['min'] is not None: stored['sec'] += 60 * stored['min'] # if stored['version'] is None: # stored['version'] = np.zeros(len(files)) if stored['revision'] is None: stored['revision'] = np.zeros(len(files)) index = create_datetime_index(year=stored['year'], month=stored['month'], day=stored['day'], uts=stored['sec']) # if version and revision are supplied # use these parameters to weed out files that have been replaced # with updated versions # first, check for duplicate index times dups = index.get_duplicates() if (len(dups) > 0) and (stored['version'] is not None): # we have duplicates # keep the highest version/revision combo version = pds.Series(stored['version'], index=index) revision = pds.Series(stored['revision'], index=index) revive = version*100000. + revision frame = pds.DataFrame({'files':files, 'revive':revive, 'time':index}, index=index) frame = frame.sort_values(by=['time', 'revive'], ascending=[True, False]) frame = frame.drop_duplicates(subset='time', keep='first') return frame['files'] else: return pds.Series(files, index=index) else: return pds.Series(None)
python
def from_os(cls, data_path=None, format_str=None, two_digit_year_break=None): """ Produces a list of files and formats it for Files class. Requires fixed_width filename Parameters ---------- data_path : string Top level directory to search files for. This directory is provided by pysat to the instrument_module.list_files functions as data_path. format_str : string with python format codes Provides the naming pattern of the instrument files and the locations of date information so an ordered list may be produced. Supports 'year', 'month', 'day', 'hour', 'min', 'sec', 'version', and 'revision' Ex: 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v01.cdf' two_digit_year_break : int If filenames only store two digits for the year, then '1900' will be added for years >= two_digit_year_break and '2000' will be added for years < two_digit_year_break. Note ---- Does not produce a Files instance, but the proper output from instrument_module.list_files method. The '?' may be used to indicate a set number of spaces for a variable part of the name that need not be extracted. 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v??.cdf' """ import collections from pysat.utils import create_datetime_index if format_str is None: raise ValueError("Must supply a filename template (format_str).") if data_path is None: raise ValueError("Must supply instrument directory path (data_path)") # parse format string to figure out the search string to use # to identify files in the filesystem search_str = '' form = string.Formatter() # stores the keywords extracted from format_string keys = [] #, and length of string snips = [] length = [] stored = collections.OrderedDict() stored['year'] = []; stored['month'] = []; stored['day'] = []; stored['hour'] = []; stored['min'] = []; stored['sec'] = []; stored['version'] = []; stored['revision'] = []; for snip in form.parse(format_str): # collect all of the format keywords # replace them in the string with the '*' wildcard # then try and get width from format keywords so we know # later on where to parse information out from search_str += snip[0] snips.append(snip[0]) if snip[1] is not None: keys.append(snip[1]) search_str += '*' # try and determine formatting width temp = re.findall(r'\d+', snip[2]) if temp: # there are items, try and grab width for i in temp: if int(i) != 0: length.append(int(i)) break else: raise ValueError("Couldn't determine formatting width") abs_search_str = os.path.join(data_path, search_str) files = glob.glob(abs_search_str) # we have a list of files, now we need to extract the date information # code below works, but only if the size of file string # remains unchanged # determine the locations the date information in a filename is stored # use these indices to slice out date from loaded filenames # test_str = format_str.format(**periods) if len(files) > 0: idx = 0 begin_key = [] end_key = [] for i,snip in enumerate(snips): idx += len(snip) if i < (len(length)): begin_key.append(idx) idx += length[i] end_key.append(idx) max_len = idx # setting up negative indexing to pick out filenames key_str_idx = [np.array(begin_key, dtype=int) - max_len, np.array(end_key, dtype=int) - max_len] # need to parse out dates for datetime index for i,temp in enumerate(files): for j,key in enumerate(keys): val = temp[key_str_idx[0][j]:key_str_idx[1][j]] stored[key].append(val) # convert to numpy arrays for key in stored.keys(): stored[key] = np.array(stored[key]).astype(int) if len(stored[key]) == 0: stored[key]=None # deal with the possibility of two digit years # years above or equal to break are considered to be 1900+ # years below break are considered to be 2000+ if two_digit_year_break is not None: idx, = np.where(np.array(stored['year']) >= two_digit_year_break) stored['year'][idx] = stored['year'][idx] + 1900 idx, = np.where(np.array(stored['year']) < two_digit_year_break) stored['year'][idx] = stored['year'][idx] + 2000 # need to sort the information for things to work rec_arr = [stored[key] for key in keys] rec_arr.append(files) # sort all arrays val_keys = keys + ['files'] rec_arr = np.rec.fromarrays(rec_arr, names=val_keys) rec_arr.sort(order=val_keys, axis=0) # pull out sorted info for key in keys: stored[key] = rec_arr[key] files = rec_arr['files'] # add hour and minute information to 'sec' if stored['sec'] is None: stored['sec'] = np.zeros(len(files)) if stored['hour'] is not None: stored['sec'] += 3600 * stored['hour'] if stored['min'] is not None: stored['sec'] += 60 * stored['min'] # if stored['version'] is None: # stored['version'] = np.zeros(len(files)) if stored['revision'] is None: stored['revision'] = np.zeros(len(files)) index = create_datetime_index(year=stored['year'], month=stored['month'], day=stored['day'], uts=stored['sec']) # if version and revision are supplied # use these parameters to weed out files that have been replaced # with updated versions # first, check for duplicate index times dups = index.get_duplicates() if (len(dups) > 0) and (stored['version'] is not None): # we have duplicates # keep the highest version/revision combo version = pds.Series(stored['version'], index=index) revision = pds.Series(stored['revision'], index=index) revive = version*100000. + revision frame = pds.DataFrame({'files':files, 'revive':revive, 'time':index}, index=index) frame = frame.sort_values(by=['time', 'revive'], ascending=[True, False]) frame = frame.drop_duplicates(subset='time', keep='first') return frame['files'] else: return pds.Series(files, index=index) else: return pds.Series(None)
[ "def", "from_os", "(", "cls", ",", "data_path", "=", "None", ",", "format_str", "=", "None", ",", "two_digit_year_break", "=", "None", ")", ":", "import", "collections", "from", "pysat", ".", "utils", "import", "create_datetime_index", "if", "format_str", "is", "None", ":", "raise", "ValueError", "(", "\"Must supply a filename template (format_str).\"", ")", "if", "data_path", "is", "None", ":", "raise", "ValueError", "(", "\"Must supply instrument directory path (data_path)\"", ")", "# parse format string to figure out the search string to use", "# to identify files in the filesystem", "search_str", "=", "''", "form", "=", "string", ".", "Formatter", "(", ")", "# stores the keywords extracted from format_string", "keys", "=", "[", "]", "#, and length of string", "snips", "=", "[", "]", "length", "=", "[", "]", "stored", "=", "collections", ".", "OrderedDict", "(", ")", "stored", "[", "'year'", "]", "=", "[", "]", "stored", "[", "'month'", "]", "=", "[", "]", "stored", "[", "'day'", "]", "=", "[", "]", "stored", "[", "'hour'", "]", "=", "[", "]", "stored", "[", "'min'", "]", "=", "[", "]", "stored", "[", "'sec'", "]", "=", "[", "]", "stored", "[", "'version'", "]", "=", "[", "]", "stored", "[", "'revision'", "]", "=", "[", "]", "for", "snip", "in", "form", ".", "parse", "(", "format_str", ")", ":", "# collect all of the format keywords", "# replace them in the string with the '*' wildcard", "# then try and get width from format keywords so we know", "# later on where to parse information out from", "search_str", "+=", "snip", "[", "0", "]", "snips", ".", "append", "(", "snip", "[", "0", "]", ")", "if", "snip", "[", "1", "]", "is", "not", "None", ":", "keys", ".", "append", "(", "snip", "[", "1", "]", ")", "search_str", "+=", "'*'", "# try and determine formatting width", "temp", "=", "re", ".", "findall", "(", "r'\\d+'", ",", "snip", "[", "2", "]", ")", "if", "temp", ":", "# there are items, try and grab width", "for", "i", "in", "temp", ":", "if", "int", "(", "i", ")", "!=", "0", ":", "length", ".", "append", "(", "int", "(", "i", ")", ")", "break", "else", ":", "raise", "ValueError", "(", "\"Couldn't determine formatting width\"", ")", "abs_search_str", "=", "os", ".", "path", ".", "join", "(", "data_path", ",", "search_str", ")", "files", "=", "glob", ".", "glob", "(", "abs_search_str", ")", "# we have a list of files, now we need to extract the date information", "# code below works, but only if the size of file string ", "# remains unchanged", "# determine the locations the date information in a filename is stored", "# use these indices to slice out date from loaded filenames", "# test_str = format_str.format(**periods)", "if", "len", "(", "files", ")", ">", "0", ":", "idx", "=", "0", "begin_key", "=", "[", "]", "end_key", "=", "[", "]", "for", "i", ",", "snip", "in", "enumerate", "(", "snips", ")", ":", "idx", "+=", "len", "(", "snip", ")", "if", "i", "<", "(", "len", "(", "length", ")", ")", ":", "begin_key", ".", "append", "(", "idx", ")", "idx", "+=", "length", "[", "i", "]", "end_key", ".", "append", "(", "idx", ")", "max_len", "=", "idx", "# setting up negative indexing to pick out filenames", "key_str_idx", "=", "[", "np", ".", "array", "(", "begin_key", ",", "dtype", "=", "int", ")", "-", "max_len", ",", "np", ".", "array", "(", "end_key", ",", "dtype", "=", "int", ")", "-", "max_len", "]", "# need to parse out dates for datetime index", "for", "i", ",", "temp", "in", "enumerate", "(", "files", ")", ":", "for", "j", ",", "key", "in", "enumerate", "(", "keys", ")", ":", "val", "=", "temp", "[", "key_str_idx", "[", "0", "]", "[", "j", "]", ":", "key_str_idx", "[", "1", "]", "[", "j", "]", "]", "stored", "[", "key", "]", ".", "append", "(", "val", ")", "# convert to numpy arrays", "for", "key", "in", "stored", ".", "keys", "(", ")", ":", "stored", "[", "key", "]", "=", "np", ".", "array", "(", "stored", "[", "key", "]", ")", ".", "astype", "(", "int", ")", "if", "len", "(", "stored", "[", "key", "]", ")", "==", "0", ":", "stored", "[", "key", "]", "=", "None", "# deal with the possibility of two digit years", "# years above or equal to break are considered to be 1900+", "# years below break are considered to be 2000+", "if", "two_digit_year_break", "is", "not", "None", ":", "idx", ",", "=", "np", ".", "where", "(", "np", ".", "array", "(", "stored", "[", "'year'", "]", ")", ">=", "two_digit_year_break", ")", "stored", "[", "'year'", "]", "[", "idx", "]", "=", "stored", "[", "'year'", "]", "[", "idx", "]", "+", "1900", "idx", ",", "=", "np", ".", "where", "(", "np", ".", "array", "(", "stored", "[", "'year'", "]", ")", "<", "two_digit_year_break", ")", "stored", "[", "'year'", "]", "[", "idx", "]", "=", "stored", "[", "'year'", "]", "[", "idx", "]", "+", "2000", "# need to sort the information for things to work", "rec_arr", "=", "[", "stored", "[", "key", "]", "for", "key", "in", "keys", "]", "rec_arr", ".", "append", "(", "files", ")", "# sort all arrays", "val_keys", "=", "keys", "+", "[", "'files'", "]", "rec_arr", "=", "np", ".", "rec", ".", "fromarrays", "(", "rec_arr", ",", "names", "=", "val_keys", ")", "rec_arr", ".", "sort", "(", "order", "=", "val_keys", ",", "axis", "=", "0", ")", "# pull out sorted info", "for", "key", "in", "keys", ":", "stored", "[", "key", "]", "=", "rec_arr", "[", "key", "]", "files", "=", "rec_arr", "[", "'files'", "]", "# add hour and minute information to 'sec'", "if", "stored", "[", "'sec'", "]", "is", "None", ":", "stored", "[", "'sec'", "]", "=", "np", ".", "zeros", "(", "len", "(", "files", ")", ")", "if", "stored", "[", "'hour'", "]", "is", "not", "None", ":", "stored", "[", "'sec'", "]", "+=", "3600", "*", "stored", "[", "'hour'", "]", "if", "stored", "[", "'min'", "]", "is", "not", "None", ":", "stored", "[", "'sec'", "]", "+=", "60", "*", "stored", "[", "'min'", "]", "# if stored['version'] is None:", "# stored['version'] = np.zeros(len(files))", "if", "stored", "[", "'revision'", "]", "is", "None", ":", "stored", "[", "'revision'", "]", "=", "np", ".", "zeros", "(", "len", "(", "files", ")", ")", "index", "=", "create_datetime_index", "(", "year", "=", "stored", "[", "'year'", "]", ",", "month", "=", "stored", "[", "'month'", "]", ",", "day", "=", "stored", "[", "'day'", "]", ",", "uts", "=", "stored", "[", "'sec'", "]", ")", "# if version and revision are supplied", "# use these parameters to weed out files that have been replaced", "# with updated versions", "# first, check for duplicate index times", "dups", "=", "index", ".", "get_duplicates", "(", ")", "if", "(", "len", "(", "dups", ")", ">", "0", ")", "and", "(", "stored", "[", "'version'", "]", "is", "not", "None", ")", ":", "# we have duplicates", "# keep the highest version/revision combo", "version", "=", "pds", ".", "Series", "(", "stored", "[", "'version'", "]", ",", "index", "=", "index", ")", "revision", "=", "pds", ".", "Series", "(", "stored", "[", "'revision'", "]", ",", "index", "=", "index", ")", "revive", "=", "version", "*", "100000.", "+", "revision", "frame", "=", "pds", ".", "DataFrame", "(", "{", "'files'", ":", "files", ",", "'revive'", ":", "revive", ",", "'time'", ":", "index", "}", ",", "index", "=", "index", ")", "frame", "=", "frame", ".", "sort_values", "(", "by", "=", "[", "'time'", ",", "'revive'", "]", ",", "ascending", "=", "[", "True", ",", "False", "]", ")", "frame", "=", "frame", ".", "drop_duplicates", "(", "subset", "=", "'time'", ",", "keep", "=", "'first'", ")", "return", "frame", "[", "'files'", "]", "else", ":", "return", "pds", ".", "Series", "(", "files", ",", "index", "=", "index", ")", "else", ":", "return", "pds", ".", "Series", "(", "None", ")" ]
Produces a list of files and formats it for Files class. Requires fixed_width filename Parameters ---------- data_path : string Top level directory to search files for. This directory is provided by pysat to the instrument_module.list_files functions as data_path. format_str : string with python format codes Provides the naming pattern of the instrument files and the locations of date information so an ordered list may be produced. Supports 'year', 'month', 'day', 'hour', 'min', 'sec', 'version', and 'revision' Ex: 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v01.cdf' two_digit_year_break : int If filenames only store two digits for the year, then '1900' will be added for years >= two_digit_year_break and '2000' will be added for years < two_digit_year_break. Note ---- Does not produce a Files instance, but the proper output from instrument_module.list_files method. The '?' may be used to indicate a set number of spaces for a variable part of the name that need not be extracted. 'cnofs_cindi_ivm_500ms_{year:4d}{month:02d}{day:02d}_v??.cdf'
[ "Produces", "a", "list", "of", "files", "and", "formats", "it", "for", "Files", "class", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_files.py#L431-L599
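from_os is normally called from an instrument module's list_files function rather than directly; a sketch with an illustrative fixed-width template (explicit field widths are required so the dates can be sliced back out of the filenames).

import pysat

def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
    if format_str is None:
        # hypothetical filename template; every field carries a width
        format_str = 'inst_{year:04d}{month:02d}{day:02d}_v{version:02d}.cdf'
    return pysat.Files.from_os(data_path=data_path, format_str=format_str)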
rstoneback/pysat
pysat/_meta.py
Meta.merge
def merge(self, other): """Adds metadata variables to self that are in other but not in self. Parameters ---------- other : pysat.Meta """ for key in other.keys(): if key not in self: # copies over both lower and higher dimensional data self[key] = other[key]
python
def merge(self, other): """Adds metadata variables to self that are in other but not in self. Parameters ---------- other : pysat.Meta """ for key in other.keys(): if key not in self: # copies over both lower and higher dimensional data self[key] = other[key]
[ "def", "merge", "(", "self", ",", "other", ")", ":", "for", "key", "in", "other", ".", "keys", "(", ")", ":", "if", "key", "not", "in", "self", ":", "# copies over both lower and higher dimensional data", "self", "[", "key", "]", "=", "other", "[", "key", "]" ]
Adds metadata variables to self that are in other but not in self. Parameters ---------- other : pysat.Meta
[ "Adds", "metadata", "variables", "to", "self", "that", "are", "in", "other", "but", "not", "in", "self", ".", "Parameters", "----------", "other", ":", "pysat", ".", "Meta" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L240-L252
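merge is one-directional and never overwrites: only variable names absent from self are copied over. A minimal sketch using dict-style Meta assignment.

import pysat

first = pysat.Meta()
first['dummy1'] = {'units': 'm', 'long_name': 'first variable'}
second = pysat.Meta()
second['dummy1'] = {'units': 'km'}   # already present in first, ignored
second['dummy2'] = {'units': 's'}    # absent from first, copied
first.merge(second)
print('dummy2' in first)             # True
print(first['dummy1'].units)         # 'm' - existing metadata wins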
rstoneback/pysat
pysat/_meta.py
Meta.drop
def drop(self, names): """Drops variables (names) from metadata.""" # drop lower dimension data self._data = self._data.drop(names, axis=0) # drop higher dimension data for name in names: if name in self._ho_data: _ = self._ho_data.pop(name)
python
def drop(self, names): """Drops variables (names) from metadata.""" # drop lower dimension data self._data = self._data.drop(names, axis=0) # drop higher dimension data for name in names: if name in self._ho_data: _ = self._ho_data.pop(name)
[ "def", "drop", "(", "self", ",", "names", ")", ":", "# drop lower dimension data", "self", ".", "_data", "=", "self", ".", "_data", ".", "drop", "(", "names", ",", "axis", "=", "0", ")", "# drop higher dimension data", "for", "name", "in", "names", ":", "if", "name", "in", "self", ".", "_ho_data", ":", "_", "=", "self", ".", "_ho_data", ".", "pop", "(", "name", ")" ]
Drops variables (names) from metadata.
[ "Drops", "variables", "(", "names", ")", "from", "metadata", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L254-L262
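drop expects a collection of names; a sketch (a list is passed, since a bare string would be iterated character by character in the higher-order cleanup loop).

import pysat

meta = pysat.Meta()
meta['dummy1'] = {'units': 'm'}
meta['dummy2'] = {'units': 's'}
meta.drop(['dummy2'])
print('dummy2' in meta)   # False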
rstoneback/pysat
pysat/_meta.py
Meta.keep
def keep(self, keep_names): """Keeps variables (keep_names) while dropping other parameters""" current_names = self._data.columns drop_names = [] for name in current_names: if name not in keep_names: drop_names.append(name) self.drop(drop_names)
python
def keep(self, keep_names): """Keeps variables (keep_names) while dropping other parameters""" current_names = self._data.columns drop_names = [] for name in current_names: if name not in keep_names: drop_names.append(name) self.drop(drop_names)
[ "def", "keep", "(", "self", ",", "keep_names", ")", ":", "current_names", "=", "self", ".", "_data", ".", "columns", "drop_names", "=", "[", "]", "for", "name", "in", "current_names", ":", "if", "name", "not", "in", "keep_names", ":", "drop_names", ".", "append", "(", "name", ")", "self", ".", "drop", "(", "drop_names", ")" ]
Keeps variables (keep_names) while dropping other parameters
[ "Keeps", "variables", "(", "keep_names", ")", "while", "dropping", "other", "parameters" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L264-L272
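keep is the complement of drop and is implemented in terms of it; continuing the sketch above.

meta.keep(['dummy1'])      # everything not listed is dropped
print(list(meta.keys()))   # ['dummy1']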
rstoneback/pysat
pysat/_meta.py
Meta.apply_default_labels
def apply_default_labels(self, other): """Applies labels for default meta labels from self onto other. Parameters ---------- other : Meta Meta object to have default labels applied Returns ------- Meta """ other_updated = other.copy() other_updated.units_label = self.units_label other_updated.name_label = self.name_label other_updated.notes_label = self.notes_label other_updated.desc_label = self.desc_label other_updated.plot_label = self.plot_label other_updated.axis_label = self.axis_label other_updated.scale_label = self.scale_label other_updated.min_label = self.min_label other_updated.max_label = self.max_label other_updated.fill_label = self.fill_label return other_updated
python
def apply_default_labels(self, other): """Applies labels for default meta labels from self onto other. Parameters ---------- other : Meta Meta object to have default labels applied Returns ------- Meta """ other_updated = other.copy() other_updated.units_label = self.units_label other_updated.name_label = self.name_label other_updated.notes_label = self.notes_label other_updated.desc_label = self.desc_label other_updated.plot_label = self.plot_label other_updated.axis_label = self.axis_label other_updated.scale_label = self.scale_label other_updated.min_label = self.min_label other_updated.max_label = self.max_label other_updated.fill_label = self.fill_label return other_updated
[ "def", "apply_default_labels", "(", "self", ",", "other", ")", ":", "other_updated", "=", "other", ".", "copy", "(", ")", "other_updated", ".", "units_label", "=", "self", ".", "units_label", "other_updated", ".", "name_label", "=", "self", ".", "name_label", "other_updated", ".", "notes_label", "=", "self", ".", "notes_label", "other_updated", ".", "desc_label", "=", "self", ".", "desc_label", "other_updated", ".", "plot_label", "=", "self", ".", "plot_label", "other_updated", ".", "axis_label", "=", "self", ".", "axis_label", "other_updated", ".", "scale_label", "=", "self", ".", "scale_label", "other_updated", ".", "min_label", "=", "self", ".", "min_label", "other_updated", ".", "max_label", "=", "self", ".", "max_label", "other_updated", ".", "fill_label", "=", "self", ".", "fill_label", "return", "other_updated" ]
Applies labels for default meta labels from self onto other. Parameters ---------- other : Meta Meta object to have default labels applied Returns ------- Meta
[ "Applies", "labels", "for", "default", "meta", "labels", "from", "self", "onto", "other", ".", "Parameters", "----------", "other", ":", "Meta", "Meta", "object", "to", "have", "default", "labels", "applied", "Returns", "-------", "Meta" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L307-L331
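apply_default_labels pushes self's label names onto a copy of other; a sketch assuming the Meta constructor's label keyword arguments.

import pysat

source = pysat.Meta(units_label='Units')
target = pysat.Meta()                         # default units label is 'units'
updated = source.apply_default_labels(target)
print(updated.units_label)                    # 'Units'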
rstoneback/pysat
pysat/_meta.py
Meta.accept_default_labels
def accept_default_labels(self, other): """Applies labels for default meta labels from other onto self. Parameters ---------- other : Meta Meta object to take default labels from Returns ------- None """ self.units_label = other.units_label self.name_label = other.name_label self.notes_label = other.notes_label self.desc_label = other.desc_label self.plot_label = other.plot_label self.axis_label = other.axis_label self.scale_label = other.scale_label self.min_label = other.min_label self.max_label = other.max_label self.fill_label = other.fill_label return
python
def accept_default_labels(self, other): """Applies labels for default meta labels from other onto self. Parameters ---------- other : Meta Meta object to take default labels from Returns ------- None """ self.units_label = other.units_label self.name_label = other.name_label self.notes_label = other.notes_label self.desc_label = other.desc_label self.plot_label = other.plot_label self.axis_label = other.axis_label self.scale_label = other.scale_label self.min_label = other.min_label self.max_label = other.max_label self.fill_label = other.fill_label return
[ "def", "accept_default_labels", "(", "self", ",", "other", ")", ":", "self", ".", "units_label", "=", "other", ".", "units_label", "self", ".", "name_label", "=", "other", ".", "name_label", "self", ".", "notes_label", "=", "other", ".", "notes_label", "self", ".", "desc_label", "=", "other", ".", "desc_label", "self", ".", "plot_label", "=", "other", ".", "plot_label", "self", ".", "axis_label", "=", "other", ".", "axis_label", "self", ".", "scale_label", "=", "other", ".", "scale_label", "self", ".", "min_label", "=", "other", ".", "min_label", "self", ".", "max_label", "=", "other", ".", "max_label", "self", ".", "fill_label", "=", "other", ".", "fill_label", "return" ]
Applies labels for default meta labels from other onto self. Parameters ---------- other : Meta Meta object to take default labels from Returns ------- None
[ "Applies", "labels", "for", "default", "meta", "labels", "from", "other", "onto", "self", ".", "Parameters", "----------", "other", ":", "Meta", "Meta", "object", "to", "take", "default", "labels", "from", "Returns", "-------", "None" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L333-L357
rstoneback/pysat
pysat/_meta.py
Meta._label_setter
def _label_setter(self, new_label, current_label, attr_label, default=np.NaN, use_names_default=False): """Generalized setter of default meta attributes Parameters ---------- new_label : str New label to use in the Meta object current_label : str Label currently in use that will be replaced attr_label : str Name of the hidden attribute that actually stores the label default : Default setting to use for label if there is no attribute value use_names_default : bool if True, MetaData variable names are used as the default value for the specified Meta attributes settings Examples -------- : @name_label.setter def name_label(self, new_label): self._label_setter(new_label, self._name_label, 'name_label', use_names_default=True) Notes ----- Not intended for end user """ if new_label not in self.attrs(): # new label not in metadata, including case # update existing label, if present if current_label in self.attrs(): # old label exists and has expected case self.data.loc[:, new_label] = self.data.loc[:, current_label] self.data.drop(current_label, axis=1, inplace=True) else: if self.has_attr(current_label): # there is something like label, wrong case though current_label = self.attr_case_name(current_label) self.data.loc[:, new_label] = self.data.loc[:, current_label] self.data.drop(current_label, axis=1, inplace=True) else: # there is no existing label # setting for the first time if use_names_default: self.data[new_label] = self.data.index else: self.data[new_label] = default # check higher order structures as well # recursively change labels here for key in self.keys_nD(): setattr(self.ho_data[key], attr_label, new_label) # now update 'hidden' attribute value # current_label = new_label setattr(self, ''.join(('_',attr_label)), new_label)
python
def _label_setter(self, new_label, current_label, attr_label, default=np.NaN, use_names_default=False): """Generalized setter of default meta attributes Parameters ---------- new_label : str New label to use in the Meta object current_label : str Label currently in use that will be replaced attr_label : str Name of the hidden attribute that actually stores the label default : Default setting to use for label if there is no attribute value use_names_default : bool if True, MetaData variable names are used as the default value for the specified Meta attributes settings Examples -------- : @name_label.setter def name_label(self, new_label): self._label_setter(new_label, self._name_label, 'name_label', use_names_default=True) Notes ----- Not intended for end user """ if new_label not in self.attrs(): # new label not in metadata, including case # update existing label, if present if current_label in self.attrs(): # old label exists and has expected case self.data.loc[:, new_label] = self.data.loc[:, current_label] self.data.drop(current_label, axis=1, inplace=True) else: if self.has_attr(current_label): # there is something like label, wrong case though current_label = self.attr_case_name(current_label) self.data.loc[:, new_label] = self.data.loc[:, current_label] self.data.drop(current_label, axis=1, inplace=True) else: # there is no existing label # setting for the first time if use_names_default: self.data[new_label] = self.data.index else: self.data[new_label] = default # check higher order structures as well # recursively change labels here for key in self.keys_nD(): setattr(self.ho_data[key], attr_label, new_label) # now update 'hidden' attribute value # current_label = new_label setattr(self, ''.join(('_',attr_label)), new_label)
[ "def", "_label_setter", "(", "self", ",", "new_label", ",", "current_label", ",", "attr_label", ",", "default", "=", "np", ".", "NaN", ",", "use_names_default", "=", "False", ")", ":", "if", "new_label", "not", "in", "self", ".", "attrs", "(", ")", ":", "# new label not in metadata, including case", "# update existing label, if present", "if", "current_label", "in", "self", ".", "attrs", "(", ")", ":", "# old label exists and has expected case", "self", ".", "data", ".", "loc", "[", ":", ",", "new_label", "]", "=", "self", ".", "data", ".", "loc", "[", ":", ",", "current_label", "]", "self", ".", "data", ".", "drop", "(", "current_label", ",", "axis", "=", "1", ",", "inplace", "=", "True", ")", "else", ":", "if", "self", ".", "has_attr", "(", "current_label", ")", ":", "# there is something like label, wrong case though", "current_label", "=", "self", ".", "attr_case_name", "(", "current_label", ")", "self", ".", "data", ".", "loc", "[", ":", ",", "new_label", "]", "=", "self", ".", "data", ".", "loc", "[", ":", ",", "current_label", "]", "self", ".", "data", ".", "drop", "(", "current_label", ",", "axis", "=", "1", ",", "inplace", "=", "True", ")", "else", ":", "# there is no existing label", "# setting for the first time", "if", "use_names_default", ":", "self", ".", "data", "[", "new_label", "]", "=", "self", ".", "data", ".", "index", "else", ":", "self", ".", "data", "[", "new_label", "]", "=", "default", "# check higher order structures as well", "# recursively change labels here", "for", "key", "in", "self", ".", "keys_nD", "(", ")", ":", "setattr", "(", "self", ".", "ho_data", "[", "key", "]", ",", "attr_label", ",", "new_label", ")", "# now update 'hidden' attribute value", "# current_label = new_label", "setattr", "(", "self", ",", "''", ".", "join", "(", "(", "'_'", ",", "attr_label", ")", ")", ",", "new_label", ")" ]
Generalized setter of default meta attributes Parameters ---------- new_label : str New label to use in the Meta object current_label : str Label currently in use that will be replaced attr_label : str Name of the hidden attribute that actually stores the label default : Default setting to use for label if there is no attribute value use_names_default : bool if True, MetaData variable names are used as the default value for the specified Meta attributes settings Examples -------- : @name_label.setter def name_label(self, new_label): self._label_setter(new_label, self._name_label, 'name_label', use_names_default=True) Notes ----- Not intended for end user
[ "Generalized", "setter", "of", "default", "meta", "attributes", "Parameters", "----------", "new_label", ":", "str", "New", "label", "to", "use", "in", "the", "Meta", "object", "current_label", ":", "str", "Label", "currently", "in", "use", "that", "will", "be", "replaced", "attr_label", ":", "str", "Name", "of", "the", "hidden", "attribute", "that", "actually", "stores", "the", "label", "default", ":", "Default", "setting", "to", "use", "for", "label", "if", "there", "is", "no", "attribute", "value", "use_names_default", ":", "bool", "if", "True", "MetaData", "variable", "names", "are", "used", "as", "the", "default", "value", "for", "the", "specified", "Meta", "attributes", "settings", "Examples", "--------", ":" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L574-L631
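_label_setter is internal: it runs whenever one of the public label properties is assigned, renaming the underlying metadata column on the fly. A sketch of the user-facing effect.

import pysat

meta = pysat.Meta()
meta['dummy1'] = {'units': 'm'}
meta.units_label = 'Units'        # routed through _label_setter
print(meta['dummy1'].Units)       # 'm' - same value under the renamed label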
rstoneback/pysat
pysat/_meta.py
Meta.var_case_name
def var_case_name(self, name): """Provides stored name (case preserved) for case insensitive input If name is not found (case-insensitive check) then name is returned, as input. This function is intended to be used to help ensure the case of a given variable name is the same across the Meta object. Parameters ---------- name : str variable name in any case Returns ------- str string with case preserved as in Meta object """ lower_name = name.lower() if name in self: for i in self.keys(): if lower_name == i.lower(): return i for i in self.keys_nD(): if lower_name == i.lower(): return i return name
python
def var_case_name(self, name): """Provides stored name (case preserved) for case insensitive input If name is not found (case-insensitive check) then name is returned, as input. This function is intended to be used to help ensure the case of a given variable name is the same across the Meta object. Parameters ---------- name : str variable name in any case Returns ------- str string with case preserved as in Meta object """ lower_name = name.lower() if name in self: for i in self.keys(): if lower_name == i.lower(): return i for i in self.keys_nD(): if lower_name == i.lower(): return i return name
[ "def", "var_case_name", "(", "self", ",", "name", ")", ":", "lower_name", "=", "name", ".", "lower", "(", ")", "if", "name", "in", "self", ":", "for", "i", "in", "self", ".", "keys", "(", ")", ":", "if", "lower_name", "==", "i", ".", "lower", "(", ")", ":", "return", "i", "for", "i", "in", "self", ".", "keys_nD", "(", ")", ":", "if", "lower_name", "==", "i", ".", "lower", "(", ")", ":", "return", "i", "return", "name" ]
Provides stored name (case preserved) for case insensitive input If name is not found (case-insensitive check) then name is returned, as input. This function is intended to be used to help ensure the case of a given variable name is the same across the Meta object. Parameters ---------- name : str variable name in any case Returns ------- str string with case preserved as in Meta object
[ "Provides", "stored", "name", "(", "case", "preserved", ")", "for", "case", "insensitive", "input", "If", "name", "is", "not", "found", "(", "case", "-", "insensitive", "check", ")", "then", "name", "is", "returned", "as", "input", ".", "This", "function", "is", "intended", "to", "be", "used", "to", "help", "ensure", "the", "case", "of", "a", "given", "variable", "name", "is", "the", "same", "across", "the", "Meta", "object", ".", "Parameters", "----------", "name", ":", "str", "variable", "name", "in", "any", "case", "Returns", "-------", "str", "string", "with", "case", "preserved", "as", "in", "Meta", "object" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L704-L731
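A short sketch of the case-preserving lookup.

import pysat

meta = pysat.Meta()
meta['Dummy1'] = {'units': 'm'}
print(meta.var_case_name('DUMMY1'))    # 'Dummy1' - stored case is returned
print(meta.var_case_name('unknown'))   # 'unknown' - unknown names pass through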
rstoneback/pysat
pysat/_meta.py
Meta.has_attr
def has_attr(self, name): """Returns boolean indicating presence of given attribute name Case-insensitive check Notes ----- Does not check higher order meta objects Parameters ---------- name : str name of variable to get stored case form Returns ------- bool True if case-insensitive check for attribute name is True """ if name.lower() in [i.lower() for i in self.data.columns]: return True return False
python
def has_attr(self, name): """Returns boolean indicating presence of given attribute name Case-insensitive check Notes ----- Does not check higher order meta objects Parameters ---------- name : str name of variable to get stored case form Returns ------- bool True if case-insensitive check for attribute name is True """ if name.lower() in [i.lower() for i in self.data.columns]: return True return False
[ "def", "has_attr", "(", "self", ",", "name", ")", ":", "if", "name", ".", "lower", "(", ")", "in", "[", "i", ".", "lower", "(", ")", "for", "i", "in", "self", ".", "data", ".", "columns", "]", ":", "return", "True", "return", "False" ]
Returns boolean indicating presence of given attribute name Case-insensitive check Notes ----- Does not check higher order meta objects Parameters ---------- name : str name of variable to get stored case form Returns ------- bool True if case-insensitive check for attribute name is True
[ "Returns", "boolean", "indicating", "presence", "of", "given", "attribute", "name", "Case", "-", "insensitive", "check", "Notes", "-----", "Does", "not", "check", "higher", "order", "meta", "objects", "Parameters", "----------", "name", ":", "str", "name", "of", "variable", "to", "get", "stored", "case", "form", "Returns", "-------", "bool", "True", "if", "case", "-", "insensitive", "check", "for", "attribute", "name", "is", "True" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L751-L774
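has_attr checks metadata attribute names (the columns), not variable names; a sketch with a custom attribute.

import pysat

meta = pysat.Meta()
meta['dummy1'] = {'units': 'm', 'data_source': 'model'}
print(meta.has_attr('DATA_SOURCE'))   # True - match is case-insensitive
print(meta.has_attr('absent'))        # False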
rstoneback/pysat
pysat/_meta.py
Meta.attr_case_name
def attr_case_name(self, name): """Returns preserved case name for case insensitive value of name. Checks first within standard attributes. If not found there, checks attributes for higher order data structures. If not found, returns supplied name as it is available for use. Intended to be used to help ensure that the same case is applied to all repetitions of a given variable name. Parameters ---------- name : str name of variable to get stored case form Returns ------- str name in proper case """ lower_name = name.lower() for i in self.attrs(): if lower_name == i.lower(): return i # check if attribute present in higher order structures for key in self.keys_nD(): for i in self[key].children.attrs(): if lower_name == i.lower(): return i # nothing was found if still here # pass name back, free to be whatever return name
python
def attr_case_name(self, name): """Returns preserved case name for case insensitive value of name. Checks first within standard attributes. If not found there, checks attributes for higher order data structures. If not found, returns supplied name as it is available for use. Intended to be used to help ensure that the same case is applied to all repetitions of a given variable name. Parameters ---------- name : str name of variable to get stored case form Returns ------- str name in proper case """ lower_name = name.lower() for i in self.attrs(): if lower_name == i.lower(): return i # check if attribute present in higher order structures for key in self.keys_nD(): for i in self[key].children.attrs(): if lower_name == i.lower(): return i # nothing was found if still here # pass name back, free to be whatever return name
[ "def", "attr_case_name", "(", "self", ",", "name", ")", ":", "lower_name", "=", "name", ".", "lower", "(", ")", "for", "i", "in", "self", ".", "attrs", "(", ")", ":", "if", "lower_name", "==", "i", ".", "lower", "(", ")", ":", "return", "i", "# check if attribute present in higher order structures", "for", "key", "in", "self", ".", "keys_nD", "(", ")", ":", "for", "i", "in", "self", "[", "key", "]", ".", "children", ".", "attrs", "(", ")", ":", "if", "lower_name", "==", "i", ".", "lower", "(", ")", ":", "return", "i", "# nothing was found if still here", "# pass name back, free to be whatever", "return", "name" ]
Returns preserved case name for case insensitive value of name. Checks first within standard attributes. If not found there, checks attributes for higher order data structures. If not found, returns supplied name as it is available for use. Intended to be used to help ensure that the same case is applied to all repetitions of a given variable name. Parameters ---------- name : str name of variable to get stored case form Returns ------- str name in proper case
[ "Returns", "preserved", "case", "name", "for", "case", "insensitive", "value", "of", "name", ".", "Checks", "first", "within", "standard", "attributes", ".", "If", "not", "found", "there", "checks", "attributes", "for", "higher", "order", "data", "structures", ".", "If", "not", "found", "returns", "supplied", "name", "as", "it", "is", "available", "for", "use", ".", "Intended", "to", "be", "used", "to", "help", "ensure", "that", "the", "same", "case", "is", "applied", "to", "all", "repetitions", "of", "a", "given", "variable", "name", ".", "Parameters", "----------", "name", ":", "str", "name", "of", "variable", "to", "get", "stored", "case", "form" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L776-L807
rstoneback/pysat
pysat/_meta.py
Meta.concat
def concat(self, other, strict=False): """Concatenates two metadata objects together. Parameters ---------- other : Meta Meta object to be concatenated strict : bool if True, ensure there are no duplicate variable names Notes ----- Uses units and name label of self if other is different Returns ------- Meta Concatenated object """ mdata = self.copy() # checks if strict: for key in other.keys(): if key in mdata: raise RuntimeError('Duplicated keys (variable names) ' + 'across Meta objects in keys().') for key in other.keys_nD(): if key in mdata: raise RuntimeError('Duplicated keys (variable names) across ' 'Meta objects in keys_nD().') # make sure labels between the two objects are the same other_updated = self.apply_default_labels(other) # concat 1D metadata in data frames to copy of # current metadata for key in other_updated.keys(): mdata.data.loc[key] = other_updated.data.loc[key] # add together higher order data for key in other_updated.keys_nD(): mdata.ho_data[key] = other_updated.ho_data[key] return mdata
python
def concat(self, other, strict=False): """Concatenates two metadata objects together. Parameters ---------- other : Meta Meta object to be concatenated strict : bool if True, ensure there are no duplicate variable names Notes ----- Uses units and name label of self if other is different Returns ------- Meta Concatenated object """ mdata = self.copy() # checks if strict: for key in other.keys(): if key in mdata: raise RuntimeError('Duplicated keys (variable names) ' + 'across Meta objects in keys().') for key in other.keys_nD(): if key in mdata: raise RuntimeError('Duplicated keys (variable names) across ' 'Meta objects in keys_nD().') # make sure labels between the two objects are the same other_updated = self.apply_default_labels(other) # concat 1D metadata in data frames to copy of # current metadata for key in other_updated.keys(): mdata.data.loc[key] = other_updated.data.loc[key] # add together higher order data for key in other_updated.keys_nD(): mdata.ho_data[key] = other_updated.ho_data[key] return mdata
[ "def", "concat", "(", "self", ",", "other", ",", "strict", "=", "False", ")", ":", "mdata", "=", "self", ".", "copy", "(", ")", "# checks", "if", "strict", ":", "for", "key", "in", "other", ".", "keys", "(", ")", ":", "if", "key", "in", "mdata", ":", "raise", "RuntimeError", "(", "'Duplicated keys (variable names) '", "+", "'across Meta objects in keys().'", ")", "for", "key", "in", "other", ".", "keys_nD", "(", ")", ":", "if", "key", "in", "mdata", ":", "raise", "RuntimeError", "(", "'Duplicated keys (variable names) across '", "'Meta objects in keys_nD().'", ")", "# make sure labels between the two objects are the same", "other_updated", "=", "self", ".", "apply_default_labels", "(", "other", ")", "# concat 1D metadata in data frames to copy of", "# current metadata", "for", "key", "in", "other_updated", ".", "keys", "(", ")", ":", "mdata", ".", "data", ".", "loc", "[", "key", "]", "=", "other_updated", ".", "data", ".", "loc", "[", "key", "]", "# add together higher order data", "for", "key", "in", "other_updated", ".", "keys_nD", "(", ")", ":", "mdata", ".", "ho_data", "[", "key", "]", "=", "other_updated", ".", "ho_data", "[", "key", "]", "return", "mdata" ]
Concatenates two metadata objects together. Parameters ---------- other : Meta Meta object to be concatenated strict : bool if True, ensure there are no duplicate variable names Notes ----- Uses units and name label of self if other is different Returns ------- Meta Concatenated object
[ "Concatenates", "two", "metadata", "objects", "together", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L809-L859
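A minimal concat sketch; strict=True only matters when variable names collide.

import pysat

first = pysat.Meta()
first['dummy1'] = {'units': 'm'}
second = pysat.Meta()
second['dummy2'] = {'units': 's'}
combined = first.concat(second)
print('dummy1' in combined, 'dummy2' in combined)   # True True
# first.concat(second, strict=True) would raise only on duplicated names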
rstoneback/pysat
pysat/_meta.py
Meta.pop
def pop(self, name): """Remove and return metadata about variable Parameters ---------- name : str variable name Returns ------- pandas.Series Series of metadata for variable """ # check if present if name in self: # get case preserved name for variable new_name = self.var_case_name(name) # check if 1D or nD if new_name in self.keys(): output = self[new_name] self.data.drop(new_name, inplace=True, axis=0) else: output = self.ho_data.pop(new_name) return output else: raise KeyError('Key not present in metadata variables')
python
def pop(self, name): """Remove and return metadata about variable Parameters ---------- name : str variable name Returns ------- pandas.Series Series of metadata for variable """ # check if present if name in self: # get case preserved name for variable new_name = self.var_case_name(name) # check if 1D or nD if new_name in self.keys(): output = self[new_name] self.data.drop(new_name, inplace=True, axis=0) else: output = self.ho_data.pop(new_name) return output else: raise KeyError('Key not present in metadata variables')
[ "def", "pop", "(", "self", ",", "name", ")", ":", "# check if present", "if", "name", "in", "self", ":", "# get case preserved name for variable", "new_name", "=", "self", ".", "var_case_name", "(", "name", ")", "# check if 1D or nD", "if", "new_name", "in", "self", ".", "keys", "(", ")", ":", "output", "=", "self", "[", "new_name", "]", "self", ".", "data", ".", "drop", "(", "new_name", ",", "inplace", "=", "True", ",", "axis", "=", "0", ")", "else", ":", "output", "=", "self", ".", "ho_data", ".", "pop", "(", "new_name", ")", "return", "output", "else", ":", "raise", "KeyError", "(", "'Key not present in metadata variables'", ")" ]
Remove and return metadata about variable Parameters ---------- name : str variable name Returns ------- pandas.Series Series of metadata for variable
[ "Remove", "and", "return", "metadata", "about", "variable" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L866-L892
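pop mirrors dict.pop with case-insensitive lookup; a sketch.

import pysat

meta = pysat.Meta()
meta['dummy1'] = {'units': 'm'}
info = meta.pop('DUMMY1')    # returns the metadata Series for the variable
print(info['units'])         # 'm'
print('dummy1' in meta)      # False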
rstoneback/pysat
pysat/_meta.py
Meta.transfer_attributes_to_instrument
def transfer_attributes_to_instrument(self, inst, strict_names=False): """Transfer non-standard attributes in Meta to Instrument object. Pysat's load_netCDF and similar routines are only able to attach netCDF4 attributes to a Meta object. This routine identifies these attributes and removes them from the Meta object. Intent is to support simple transfers to the pysat.Instrument object. Will not transfer names that conflict with pysat default attributes. Parameters ---------- inst : pysat.Instrument Instrument object to transfer attributes to strict_names : boolean (False) If True, produces an error if the Instrument object already has an attribute with the same name to be copied. Returns ------- None pysat.Instrument object modified in place with new attributes """ # base Instrument attributes banned = inst._base_attr # get base attribute set, and attributes attached to instance base_attrb = self._base_attr this_attrb = dir(self) # collect these attributes into a dict adict = {} transfer_key = [] for key in this_attrb: if key not in banned: if key not in base_attrb: # don't store _ leading attributes if key[0] != '_': adict[key] = self.__getattribute__(key) transfer_key.append(key) # store any non-standard attributes in Instrument # get list of instrument objects attributes first # to check if a duplicate inst_attr = dir(inst) for key in transfer_key: if key not in banned: if key not in inst_attr: inst.__setattr__(key, adict[key]) else: if not strict_names: inst.__setattr__(key, adict[key]) else: raise RuntimeError('Attribute ' + key + ' attached to Meta object can not be ' + 'transferred as it already exists' + ' in the Instrument object.')
python
def transfer_attributes_to_instrument(self, inst, strict_names=False): """Transfer non-standard attributes in Meta to Instrument object. Pysat's load_netCDF and similar routines are only able to attach netCDF4 attributes to a Meta object. This routine identifies these attributes and removes them from the Meta object. Intent is to support simple transfers to the pysat.Instrument object. Will not transfer names that conflict with pysat default attributes. Parameters ---------- inst : pysat.Instrument Instrument object to transfer attributes to strict_names : boolean (False) If True, produces an error if the Instrument object already has an attribute with the same name to be copied. Returns ------- None pysat.Instrument object modified in place with new attributes """ # base Instrument attributes banned = inst._base_attr # get base attribute set, and attributes attached to instance base_attrb = self._base_attr this_attrb = dir(self) # collect these attributes into a dict adict = {} transfer_key = [] for key in this_attrb: if key not in banned: if key not in base_attrb: # don't store _ leading attributes if key[0] != '_': adict[key] = self.__getattribute__(key) transfer_key.append(key) # store any non-standard attributes in Instrument # get list of instrument objects attributes first # to check if a duplicate inst_attr = dir(inst) for key in transfer_key: if key not in banned: if key not in inst_attr: inst.__setattr__(key, adict[key]) else: if not strict_names: inst.__setattr__(key, adict[key]) else: raise RuntimeError('Attribute ' + key + ' attached to Meta object can not be ' + 'transferred as it already exists' + ' in the Instrument object.')
[ "def", "transfer_attributes_to_instrument", "(", "self", ",", "inst", ",", "strict_names", "=", "False", ")", ":", "# base Instrument attributes", "banned", "=", "inst", ".", "_base_attr", "# get base attribute set, and attributes attached to instance", "base_attrb", "=", "self", ".", "_base_attr", "this_attrb", "=", "dir", "(", "self", ")", "# collect these attributes into a dict", "adict", "=", "{", "}", "transfer_key", "=", "[", "]", "for", "key", "in", "this_attrb", ":", "if", "key", "not", "in", "banned", ":", "if", "key", "not", "in", "base_attrb", ":", "# don't store _ leading attributes", "if", "key", "[", "0", "]", "!=", "'_'", ":", "adict", "[", "key", "]", "=", "self", ".", "__getattribute__", "(", "key", ")", "transfer_key", ".", "append", "(", "key", ")", "# store any non-standard attributes in Instrument", "# get list of instrument objects attributes first", "# to check if a duplicate", "inst_attr", "=", "dir", "(", "inst", ")", "for", "key", "in", "transfer_key", ":", "if", "key", "not", "in", "banned", ":", "if", "key", "not", "in", "inst_attr", ":", "inst", ".", "__setattr__", "(", "key", ",", "adict", "[", "key", "]", ")", "else", ":", "if", "not", "strict_names", ":", "# new_name = 'pysat_attr_'+key", "inst", ".", "__setattr__", "(", "key", ",", "adict", "[", "key", "]", ")", "else", ":", "raise", "RuntimeError", "(", "'Attribute '", "+", "key", "+", "'attached to Meta object can not be '", "+", "'transferred as it already exists'", "+", "' in the Instrument object.'", ")" ]
Transfer non-standard attributes in Meta to Instrument object. Pysat's load_netCDF and similar routines are only able to attach netCDF4 attributes to a Meta object. This routine identifies these attributes and removes them from the Meta object. Intent is to support simple transfers to the pysat.Instrument object. Will not transfer names that conflict with pysat default attributes. Parameters ---------- inst : pysat.Instrument Instrument object to transfer attributes to strict_names : boolean (False) If True, produces an error if the Instrument object already has an attribute with the same name to be copied. Returns ------- None pysat.Instrument object modified in place with new attributes
[ "Transfer", "non", "-", "standard", "attributes", "in", "Meta", "to", "Instrument", "object", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L895-L951
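A short usage sketch for transfer_attributes_to_instrument, under the assumption that custom attributes (the `project` name here is hypothetical) can be assigned directly to a Meta instance, e.g. after a netCDF load:

import pysat

meta = pysat.Meta()
meta.project = 'Demo Mission'  # hypothetical non-standard attribute
inst = pysat.Instrument()
meta.transfer_attributes_to_instrument(inst)
print(inst.project)  # the attribute now lives on the Instrument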
rstoneback/pysat
pysat/_meta.py
Meta.from_csv
def from_csv(cls, name=None, col_names=None, sep=None, **kwargs): """Create instrument metadata object from csv. Parameters ---------- name : string absolute filename for csv file or name of file stored in pysat instruments location col_names : list-like collection of strings column names in csv and resultant meta object sep : string column separator for supplied csv filename Note ---- column names must include at least ['name', 'long_name', 'units'], assumed if col_names is None. """ import pysat req_names = ['name','long_name','units'] if col_names is None: col_names = req_names elif not all([i in col_names for i in req_names]): raise ValueError('col_names must include name, long_name, units.') if sep is None: sep = ',' if name is None: raise ValueError('Must supply an instrument name or file path.') elif not isinstance(name, str): raise ValueError('keyword name must be related to a string') elif not os.path.isfile(name): # Not a real file, assume input is a pysat instrument name # and look in the standard pysat location. test = os.path.join(pysat.__path__[0],'instruments',name) if os.path.isfile(test): name = test else: #trying to form an absolute path for success test = os.path.abspath(name) if not os.path.isfile(test): raise ValueError("Unable to create valid file path.") else: #success name = test mdata = pds.read_csv(name, names=col_names, sep=sep, **kwargs) if not mdata.empty: # make sure the data name is the index mdata.index = mdata['name'] del mdata['name'] return cls(metadata=mdata) else: raise ValueError('Unable to retrieve information from ' + name)
python
def from_csv(cls, name=None, col_names=None, sep=None, **kwargs): """Create instrument metadata object from csv. Parameters ---------- name : string absolute filename for csv file or name of file stored in pysat instruments location col_names : list-like collection of strings column names in csv and resultant meta object sep : string column separator for supplied csv filename Note ---- column names must include at least ['name', 'long_name', 'units'], assumed if col_names is None. """ import pysat req_names = ['name','long_name','units'] if col_names is None: col_names = req_names elif not all([i in col_names for i in req_names]): raise ValueError('col_names must include name, long_name, units.') if sep is None: sep = ',' if name is None: raise ValueError('Must supply an instrument name or file path.') elif not isinstance(name, str): raise ValueError('keyword name must be related to a string') elif not os.path.isfile(name): # Not a real file, assume input is a pysat instrument name # and look in the standard pysat location. test = os.path.join(pysat.__path__[0],'instruments',name) if os.path.isfile(test): name = test else: #trying to form an absolute path for success test = os.path.abspath(name) if not os.path.isfile(test): raise ValueError("Unable to create valid file path.") else: #success name = test mdata = pds.read_csv(name, names=col_names, sep=sep, **kwargs) if not mdata.empty: # make sure the data name is the index mdata.index = mdata['name'] del mdata['name'] return cls(metadata=mdata) else: raise ValueError('Unable to retrieve information from ' + name)
[ "def", "from_csv", "(", "cls", ",", "name", "=", "None", ",", "col_names", "=", "None", ",", "sep", "=", "None", ",", "*", "*", "kwargs", ")", ":", "import", "pysat", "req_names", "=", "[", "'name'", ",", "'long_name'", ",", "'units'", "]", "if", "col_names", "is", "None", ":", "col_names", "=", "req_names", "elif", "not", "all", "(", "[", "i", "in", "col_names", "for", "i", "in", "req_names", "]", ")", ":", "raise", "ValueError", "(", "'col_names must include name, long_name, units.'", ")", "if", "sep", "is", "None", ":", "sep", "=", "','", "if", "name", "is", "None", ":", "raise", "ValueError", "(", "'Must supply an instrument name or file path.'", ")", "elif", "not", "isinstance", "(", "name", ",", "str", ")", ":", "raise", "ValueError", "(", "'keyword name must be related to a string'", ")", "elif", "not", "os", ".", "path", ".", "isfile", "(", "name", ")", ":", "# Not a real file, assume input is a pysat instrument name", "# and look in the standard pysat location.", "test", "=", "os", ".", "path", ".", "join", "(", "pysat", ".", "__path__", "[", "0", "]", ",", "'instruments'", ",", "name", ")", "if", "os", ".", "path", ".", "isfile", "(", "test", ")", ":", "name", "=", "test", "else", ":", "#trying to form an absolute path for success", "test", "=", "os", ".", "path", ".", "abspath", "(", "name", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "test", ")", ":", "raise", "ValueError", "(", "\"Unable to create valid file path.\"", ")", "else", ":", "#success", "name", "=", "test", "mdata", "=", "pds", ".", "read_csv", "(", "name", ",", "names", "=", "col_names", ",", "sep", "=", "sep", ",", "*", "*", "kwargs", ")", "if", "not", "mdata", ".", "empty", ":", "# make sure the data name is the index", "mdata", ".", "index", "=", "mdata", "[", "'name'", "]", "del", "mdata", "[", "'name'", "]", "return", "cls", "(", "metadata", "=", "mdata", ")", "else", ":", "raise", "ValueError", "(", "'Unable to retrieve information from '", "+", "name", ")" ]
Create instrument metadata object from csv. Parameters ---------- name : string absolute filename for csv file or name of file stored in pysat instruments location col_names : list-like collection of strings column names in csv and resultant meta object sep : string column separator for supplied csv filename Note ---- column names must include at least ['name', 'long_name', 'units'], assumed if col_names is None.
[ "Create", "instrument", "metadata", "object", "from", "csv", "." ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/_meta.py#L1047-L1102
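With the default col_names, from_csv expects a headerless name/long_name/units CSV; a hedged sketch with an invented file name and contents:

# ivm_meta.csv (no header row) contains lines such as:
#   ionDensity,Ion Number Density,N/cc
#   ionTemperature,Ion Temperature,K
meta = pysat.Meta.from_csv(name='ivm_meta.csv')
# rows are indexed by the 'name' column, so metadata is looked up per variable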
rstoneback/pysat
pysat/instruments/cnofs_ivm.py
clean
def clean(self): """Routine to return C/NOFS IVM data cleaned to the specified level Parameters ----------- inst : (pysat.Instrument) Instrument class object, whose attribute clean_level is used to return the desired level of data selectivity. Returns -------- Void : (NoneType) data in inst is modified in-place. Notes -------- Supports 'clean', 'dusty', 'dirty' """ # cleans cindi data if self.clean_level == 'clean': # choose areas below 550km # self.data = self.data[self.data.alt <= 550] idx, = np.where(self.data.altitude <= 550) self.data = self[idx,:] # make sure all -999999 values are NaN self.data.replace(-999999., np.nan, inplace=True) if (self.clean_level == 'clean') | (self.clean_level == 'dusty'): try: idx, = np.where(np.abs(self.data.ionVelmeridional) < 10000.) self.data = self[idx,:] except AttributeError: pass if self.clean_level == 'dusty': # take out all values where RPA data quality is > 1 idx, = np.where(self.data.RPAflag <= 1) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 3) ] else: # take out all values where RPA data quality is > 0 idx, = np.where(self.data.RPAflag <= 0) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 0) ] if self.clean_level == 'dirty': # take out all values where RPA data quality is > 4 idx, = np.where(self.data.RPAflag <= 4) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 6) ] # basic quality check on drifts and don't let UTS go above 86400. idx, = np.where(self.data.time <= 86400.) self.data = self[idx,:] # make sure MLT is between 0 and 24 idx, = np.where((self.data.mlt >= 0) & (self.data.mlt <= 24.)) self.data = self[idx,:] return
python
def clean(self): """Routine to return C/NOFS IVM data cleaned to the specified level Parameters ----------- inst : (pysat.Instrument) Instrument class object, whose attribute clean_level is used to return the desired level of data selectivity. Returns -------- Void : (NoneType) data in inst is modified in-place. Notes -------- Supports 'clean', 'dusty', 'dirty' """ # cleans cindi data if self.clean_level == 'clean': # choose areas below 550km # self.data = self.data[self.data.alt <= 550] idx, = np.where(self.data.altitude <= 550) self.data = self[idx,:] # make sure all -999999 values are NaN self.data.replace(-999999., np.nan, inplace=True) if (self.clean_level == 'clean') | (self.clean_level == 'dusty'): try: idx, = np.where(np.abs(self.data.ionVelmeridional) < 10000.) self.data = self[idx,:] except AttributeError: pass if self.clean_level == 'dusty': # take out all values where RPA data quality is > 1 idx, = np.where(self.data.RPAflag <= 1) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 3) ] else: # take out all values where RPA data quality is > 0 idx, = np.where(self.data.RPAflag <= 0) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 0) ] if self.clean_level == 'dirty': # take out all values where RPA data quality is > 4 idx, = np.where(self.data.RPAflag <= 4) self.data = self[idx,:] # IDM quality flags self.data = self.data[ (self.data.driftMeterflag<= 6) ] # basic quality check on drifts and don't let UTS go above 86400. idx, = np.where(self.data.time <= 86400.) self.data = self[idx,:] # make sure MLT is between 0 and 24 idx, = np.where((self.data.mlt >= 0) & (self.data.mlt <= 24.)) self.data = self[idx,:] return
[ "def", "clean", "(", "self", ")", ":", "# cleans cindi data", "if", "self", ".", "clean_level", "==", "'clean'", ":", "# choose areas below 550km", "# self.data = self.data[self.data.alt <= 550]", "idx", ",", "=", "np", ".", "where", "(", "self", ".", "data", ".", "altitude", "<=", "550", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "# make sure all -999999 values are NaN", "self", ".", "data", ".", "replace", "(", "-", "999999.", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "if", "(", "self", ".", "clean_level", "==", "'clean'", ")", "|", "(", "self", ".", "clean_level", "==", "'dusty'", ")", ":", "try", ":", "idx", ",", "=", "np", ".", "where", "(", "np", ".", "abs", "(", "self", ".", "data", ".", "ionVelmeridional", ")", "<", "10000.", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "except", "AttributeError", ":", "pass", "if", "self", ".", "clean_level", "==", "'dusty'", ":", "# take out all values where RPA data quality is > 1", "idx", ",", "=", "np", ".", "where", "(", "self", ".", "data", ".", "RPAflag", "<=", "1", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "# IDM quality flags", "self", ".", "data", "=", "self", ".", "data", "[", "(", "self", ".", "data", ".", "driftMeterflag", "<=", "3", ")", "]", "else", ":", "# take out all values where RPA data quality is > 0", "idx", ",", "=", "np", ".", "where", "(", "self", ".", "data", ".", "RPAflag", "<=", "0", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "# IDM quality flags", "self", ".", "data", "=", "self", ".", "data", "[", "(", "self", ".", "data", ".", "driftMeterflag", "<=", "0", ")", "]", "if", "self", ".", "clean_level", "==", "'dirty'", ":", "# take out all values where RPA data quality is > 4", "idx", ",", "=", "np", ".", "where", "(", "self", ".", "data", ".", "RPAflag", "<=", "4", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "# IDM quality flags", "self", ".", "data", "=", "self", ".", "data", "[", "(", "self", ".", "data", ".", "driftMeterflag", "<=", "6", ")", "]", "# basic quality check on drifts and don't let UTS go above 86400.", "idx", ",", "=", "np", ".", "where", "(", "self", ".", "data", ".", "time", "<=", "86400.", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "# make sure MLT is between 0 and 24", "idx", ",", "=", "np", ".", "where", "(", "(", "self", ".", "data", ".", "mlt", ">=", "0", ")", "&", "(", "self", ".", "data", ".", "mlt", "<=", "24.", ")", ")", "self", ".", "data", "=", "self", "[", "idx", ",", ":", "]", "return" ]
Routine to return C/NOFS IVM data cleaned to the specified level Parameters ----------- inst : (pysat.Instrument) Instrument class object, whose attribute clean_level is used to return the desired level of data selectivity. Returns -------- Void : (NoneType) data in inst is modified in-place. Notes -------- Supports 'clean', 'dusty', 'dirty'
[ "Routine", "to", "return", "C", "/", "NOFS", "IVM", "data", "cleaned", "to", "the", "specified", "level" ]
train
https://github.com/rstoneback/pysat/blob/4ae1afd80e15e4449397d39dce8c3e969c32c422/pysat/instruments/cnofs_ivm.py#L67-L130
charettes/django-mutant
mutant/management/__init__.py
nonraw_instance
def nonraw_instance(receiver): """ A signal receiver decorator that fetches the complete instance from the db when it's passed as raw """ @wraps(receiver) def wrapper(sender, instance, raw, using, **kwargs): if raw: instance = sender._default_manager.using(using).get(pk=instance.pk) return receiver(sender=sender, raw=raw, instance=instance, using=using, **kwargs) return wrapper
python
def nonraw_instance(receiver): """ A signal receiver decorator that fetches the complete instance from the db when it's passed as raw """ @wraps(receiver) def wrapper(sender, instance, raw, using, **kwargs): if raw: instance = sender._default_manager.using(using).get(pk=instance.pk) return receiver(sender=sender, raw=raw, instance=instance, using=using, **kwargs) return wrapper
[ "def", "nonraw_instance", "(", "receiver", ")", ":", "@", "wraps", "(", "receiver", ")", "def", "wrapper", "(", "sender", ",", "instance", ",", "raw", ",", "using", ",", "*", "*", "kwargs", ")", ":", "if", "raw", ":", "instance", "=", "sender", ".", "_default_manager", ".", "using", "(", "using", ")", ".", "get", "(", "pk", "=", "instance", ".", "pk", ")", "return", "receiver", "(", "sender", "=", "sender", ",", "raw", "=", "raw", ",", "instance", "=", "instance", ",", "using", "=", "using", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
A signal receiver decorator that fetches the complete instance from the db when it's passed as raw
[ "A", "signal", "receiver", "decorator", "that", "fetches", "the", "complete", "instance", "from", "the", "db", "when", "it", "s", "passed", "as", "raw" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/management/__init__.py#L24-L35
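nonraw_instance is meant to sit between a Django signal and its receiver so that fixture loading (raw=True) still hands the receiver a fully loaded instance; a minimal sketch, where the Article model and reindex helper are hypothetical:

from django.db.models.signals import post_save
from django.dispatch import receiver

@receiver(post_save, sender=Article)  # Article is a hypothetical model
@nonraw_instance
def update_search_index(sender, instance, raw, using, **kwargs):
    # during loaddata, `instance` has been re-fetched from the database,
    # so related-field lookups behave normally here
    reindex(instance)  # hypothetical helper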
charettes/django-mutant
mutant/management/__init__.py
base_definition_pre_delete
def base_definition_pre_delete(sender, instance, **kwargs): """ This is used to pass data required for deletion to the post_delete signal that is no longer available thereafter. """ # see CASCADE_MARK_ORIGIN's docstring cascade_deletion_origin = popattr( instance._state, '_cascade_deletion_origin', None ) if cascade_deletion_origin == 'model_def': return if (instance.base and issubclass(instance.base, models.Model) and instance.base._meta.abstract): instance._state._deletion = instance.model_def.model_class().render_state()
python
def base_definition_pre_delete(sender, instance, **kwargs): """ This is used to pass data required for deletion to the post_delete signal that is no longer available thereafter. """ # see CASCADE_MARK_ORIGIN's docstring cascade_deletion_origin = popattr( instance._state, '_cascade_deletion_origin', None ) if cascade_deletion_origin == 'model_def': return if (instance.base and issubclass(instance.base, models.Model) and instance.base._meta.abstract): instance._state._deletion = instance.model_def.model_class().render_state()
[ "def", "base_definition_pre_delete", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "# see CASCADE_MARK_ORIGIN's docstring", "cascade_deletion_origin", "=", "popattr", "(", "instance", ".", "_state", ",", "'_cascade_deletion_origin'", ",", "None", ")", "if", "cascade_deletion_origin", "==", "'model_def'", ":", "return", "if", "(", "instance", ".", "base", "and", "issubclass", "(", "instance", ".", "base", ",", "models", ".", "Model", ")", "and", "instance", ".", "base", ".", "_meta", ".", "abstract", ")", ":", "instance", ".", "_state", ".", "_deletion", "=", "instance", ".", "model_def", ".", "model_class", "(", ")", ".", "render_state", "(", ")" ]
This is used to pass data required for deletion to the post_delete signal that is no longer available thereafter.
[ "This", "is", "used", "to", "pass", "data", "required", "for", "deletion", "to", "the", "post_delete", "signal", "that", "is", "no", "longer", "available", "thereafter", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/management/__init__.py#L132-L145
charettes/django-mutant
mutant/management/__init__.py
base_definition_post_delete
def base_definition_post_delete(sender, instance, **kwargs): """ Make sure to delete fields inherited from an abstract model base. """ if hasattr(instance._state, '_deletion'): # Make sure to flatten abstract bases since Django # migrations can't deal with them. model = popattr(instance._state, '_deletion') for field in instance.base._meta.fields: perform_ddl('remove_field', model, field)
python
def base_definition_post_delete(sender, instance, **kwargs): """ Make sure to delete fields inherited from an abstract model base. """ if hasattr(instance._state, '_deletion'): # Make sure to flatten abstract bases since Django # migrations can't deal with them. model = popattr(instance._state, '_deletion') for field in instance.base._meta.fields: perform_ddl('remove_field', model, field)
[ "def", "base_definition_post_delete", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "if", "hasattr", "(", "instance", ".", "_state", ",", "'_deletion'", ")", ":", "# Make sure to flatten abstract bases since Django", "# migrations can't deal with them.", "model", "=", "popattr", "(", "instance", ".", "_state", ",", "'_deletion'", ")", "for", "field", "in", "instance", ".", "base", ".", "_meta", ".", "fields", ":", "perform_ddl", "(", "'remove_field'", ",", "model", ",", "field", ")" ]
Make sure to delete fields inherited from an abstract model base.
[ "Make", "sure", "to", "delete", "fields", "inherited", "from", "an", "abstract", "model", "base", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/management/__init__.py#L148-L157
charettes/django-mutant
mutant/management/__init__.py
raw_field_definition_proxy_post_save
def raw_field_definition_proxy_post_save(sender, instance, raw, **kwargs): """ When proxy field definitions are loaded from a fixture, they don't pass through the `field_definition_post_save` signal. Make sure they do. """ if raw: model_class = instance.content_type.model_class() opts = model_class._meta if opts.proxy and opts.concrete_model is sender: field_definition_post_save( sender=model_class, instance=instance.type_cast(), raw=raw, **kwargs )
python
def raw_field_definition_proxy_post_save(sender, instance, raw, **kwargs): """ When proxy field definitions are loaded from a fixture, they don't pass through the `field_definition_post_save` signal. Make sure they do. """ if raw: model_class = instance.content_type.model_class() opts = model_class._meta if opts.proxy and opts.concrete_model is sender: field_definition_post_save( sender=model_class, instance=instance.type_cast(), raw=raw, **kwargs )
[ "def", "raw_field_definition_proxy_post_save", "(", "sender", ",", "instance", ",", "raw", ",", "*", "*", "kwargs", ")", ":", "if", "raw", ":", "model_class", "=", "instance", ".", "content_type", ".", "model_class", "(", ")", "opts", "=", "model_class", ".", "_meta", "if", "opts", ".", "proxy", "and", "opts", ".", "concrete_model", "is", "sender", ":", "field_definition_post_save", "(", "sender", "=", "model_class", ",", "instance", "=", "instance", ".", "type_cast", "(", ")", ",", "raw", "=", "raw", ",", "*", "*", "kwargs", ")" ]
When proxy field definitions are loaded from a fixture, they don't pass through the `field_definition_post_save` signal. Make sure they do.
[ "When", "proxy", "field", "definitions", "are", "loaded", "from", "a", "fixture", ",", "they", "don", "t", "pass", "through", "the", "field_definition_post_save", "signal", ".", "Make", "sure", "they", "do", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/management/__init__.py#L172-L185
charettes/django-mutant
mutant/management/__init__.py
field_definition_post_save
def field_definition_post_save(sender, instance, created, raw, **kwargs): """ This signal is connected by all FieldDefinition subclasses; see comment in FieldDefinitionBase for more details """ model_class = instance.model_def.model_class().render_state() field = instance.construct_for_migrate() field.model = model_class if created: if hasattr(instance._state, '_creation_default_value'): field.default = instance._state._creation_default_value delattr(instance._state, '_creation_default_value') add_column = popattr(instance._state, '_add_column', True) if add_column: perform_ddl('add_field', model_class, field) # If the field definition is raw we must re-create the model class # since ModelDefinitionAttribute.save won't be called if raw: instance.model_def.model_class().mark_as_obsolete() else: old_field = instance._state._pre_save_field delattr(instance._state, '_pre_save_field') perform_ddl('alter_field', model_class, old_field, field, strict=True)
python
def field_definition_post_save(sender, instance, created, raw, **kwargs): """ This signal is connected by all FieldDefinition subclasses; see comment in FieldDefinitionBase for more details """ model_class = instance.model_def.model_class().render_state() field = instance.construct_for_migrate() field.model = model_class if created: if hasattr(instance._state, '_creation_default_value'): field.default = instance._state._creation_default_value delattr(instance._state, '_creation_default_value') add_column = popattr(instance._state, '_add_column', True) if add_column: perform_ddl('add_field', model_class, field) # If the field definition is raw we must re-create the model class # since ModelDefinitionAttribute.save won't be called if raw: instance.model_def.model_class().mark_as_obsolete() else: old_field = instance._state._pre_save_field delattr(instance._state, '_pre_save_field') perform_ddl('alter_field', model_class, old_field, field, strict=True)
[ "def", "field_definition_post_save", "(", "sender", ",", "instance", ",", "created", ",", "raw", ",", "*", "*", "kwargs", ")", ":", "model_class", "=", "instance", ".", "model_def", ".", "model_class", "(", ")", ".", "render_state", "(", ")", "field", "=", "instance", ".", "construct_for_migrate", "(", ")", "field", ".", "model", "=", "model_class", "if", "created", ":", "if", "hasattr", "(", "instance", ".", "_state", ",", "'_creation_default_value'", ")", ":", "field", ".", "default", "=", "instance", ".", "_state", ".", "_creation_default_value", "delattr", "(", "instance", ".", "_state", ",", "'_creation_default_value'", ")", "add_column", "=", "popattr", "(", "instance", ".", "_state", ",", "'_add_column'", ",", "True", ")", "if", "add_column", ":", "perform_ddl", "(", "'add_field'", ",", "model_class", ",", "field", ")", "# If the field definition is raw we must re-create the model class", "# since ModelDefinitionAttribute.save won't be called", "if", "raw", ":", "instance", ".", "model_def", ".", "model_class", "(", ")", ".", "mark_as_obsolete", "(", ")", "else", ":", "old_field", "=", "instance", ".", "_state", ".", "_pre_save_field", "delattr", "(", "instance", ".", "_state", ",", "'_pre_save_field'", ")", "perform_ddl", "(", "'alter_field'", ",", "model_class", ",", "old_field", ",", "field", ",", "strict", "=", "True", ")" ]
This signal is connected by all FieldDefinition subclasses; see comment in FieldDefinitionBase for more details
[ "This", "signal", "is", "connected", "by", "all", "FieldDefinition", "subclasses", ";", "see", "comment", "in", "FieldDefinitionBase", "for", "more", "details" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/management/__init__.py#L189-L211
charettes/django-mutant
mutant/utils.py
popattr
def popattr(obj, attr, default=NOT_PROVIDED): """ Useful for retrieving an object attr and removing it if it's part of its dict while allowing retrieving from subclass. i.e. class A: a = 'a' class B(A): b = 'b' >>> popattr(B, 'a', None) 'a' >>> A.a 'a' """ val = getattr(obj, attr, default) try: delattr(obj, attr) except AttributeError: if default is NOT_PROVIDED: raise return val
python
def popattr(obj, attr, default=NOT_PROVIDED): """ Useful for retrieving an object attr and removing it if it's part of its dict while allowing retrieving from subclass. i.e. class A: a = 'a' class B(A): b = 'b' >>> popattr(B, 'a', None) 'a' >>> A.a 'a' """ val = getattr(obj, attr, default) try: delattr(obj, attr) except AttributeError: if default is NOT_PROVIDED: raise return val
[ "def", "popattr", "(", "obj", ",", "attr", ",", "default", "=", "NOT_PROVIDED", ")", ":", "val", "=", "getattr", "(", "obj", ",", "attr", ",", "default", ")", "try", ":", "delattr", "(", "obj", ",", "attr", ")", "except", "AttributeError", ":", "if", "default", "is", "NOT_PROVIDED", ":", "raise", "return", "val" ]
Useful for retrieving an object attr and removing it if it's part of its dict while allowing retrieving from subclass. i.e. class A: a = 'a' class B(A): b = 'b' >>> popattr(B, 'a', None) 'a' >>> A.a 'a'
[ "Useful", "for", "retrieving", "an", "object", "attr", "and", "removing", "it", "if", "it", "s", "part", "of", "its", "dict", "while", "allowing", "retrieving", "from", "subclass", ".", "i", ".", "e", ".", "class", "A", ":", "a", "=", "a", "class", "B", "(", "A", ")", ":", "b", "=", "b", ">>>", "popattr", "(", "B", "a", "None", ")", "a", ">>>", "A", ".", "a", "a" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/utils.py#L27-L47
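The doctest above covers the inherited case; this sketch also shows the removal behaviour when the attribute lives on the class itself:

class A(object):
    a = 'a'

class B(A):
    b = 'b'

assert popattr(B, 'a', None) == 'a'  # found on A; nothing to delete on B
assert A.a == 'a'                    # the base class is untouched
B.c = 'c'
assert popattr(B, 'c') == 'c'
assert not hasattr(B, 'c')           # removed from B's own __dict__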
charettes/django-mutant
mutant/utils.py
_app_cache_deepcopy
def _app_cache_deepcopy(obj): """ A helper that correctly deep-copies model cache state """ if isinstance(obj, defaultdict): return deepcopy(obj) elif isinstance(obj, dict): return type(obj)((_app_cache_deepcopy(key), _app_cache_deepcopy(val)) for key, val in obj.items()) elif isinstance(obj, list): return list(_app_cache_deepcopy(val) for val in obj) elif isinstance(obj, AppConfig): app_conf = Empty() app_conf.__class__ = AppConfig app_conf.__dict__ = _app_cache_deepcopy(obj.__dict__) return app_conf return obj
python
def _app_cache_deepcopy(obj): """ A helper that correctly deep-copies model cache state """ if isinstance(obj, defaultdict): return deepcopy(obj) elif isinstance(obj, dict): return type(obj)((_app_cache_deepcopy(key), _app_cache_deepcopy(val)) for key, val in obj.items()) elif isinstance(obj, list): return list(_app_cache_deepcopy(val) for val in obj) elif isinstance(obj, AppConfig): app_conf = Empty() app_conf.__class__ = AppConfig app_conf.__dict__ = _app_cache_deepcopy(obj.__dict__) return app_conf return obj
[ "def", "_app_cache_deepcopy", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "defaultdict", ")", ":", "return", "deepcopy", "(", "obj", ")", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "type", "(", "obj", ")", "(", "(", "_app_cache_deepcopy", "(", "key", ")", ",", "_app_cache_deepcopy", "(", "val", ")", ")", "for", "key", ",", "val", "in", "obj", ".", "items", "(", ")", ")", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "list", "(", "_app_cache_deepcopy", "(", "val", ")", "for", "val", "in", "obj", ")", "elif", "isinstance", "(", "obj", ",", "AppConfig", ")", ":", "app_conf", "=", "Empty", "(", ")", "app_conf", ".", "__class__", "=", "AppConfig", "app_conf", ".", "__dict__", "=", "_app_cache_deepcopy", "(", "obj", ".", "__dict__", ")", "return", "app_conf", "return", "obj" ]
A helper that correctly deep-copies model cache state
[ "A", "helper", "that", "correctly", "deep", "-", "copies", "model", "cache", "state" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/utils.py#L139-L154
charettes/django-mutant
mutant/utils.py
app_cache_restorer
def app_cache_restorer(): """ A context manager that restores model cache state as it was before entering the context. """ state = _app_cache_deepcopy(apps.__dict__) try: yield state finally: with apps_lock(): apps.__dict__ = state # Rebind the app registry models cache to # individual app config ones. for app_conf in apps.get_app_configs(): app_conf.models = apps.all_models[app_conf.label] apps.clear_cache()
python
def app_cache_restorer(): """ A context manager that restores model cache state as it was before entering the context. """ state = _app_cache_deepcopy(apps.__dict__) try: yield state finally: with apps_lock(): apps.__dict__ = state # Rebind the app registry models cache to # individual app config ones. for app_conf in apps.get_app_configs(): app_conf.models = apps.all_models[app_conf.label] apps.clear_cache()
[ "def", "app_cache_restorer", "(", ")", ":", "state", "=", "_app_cache_deepcopy", "(", "apps", ".", "__dict__", ")", "try", ":", "yield", "state", "finally", ":", "with", "apps_lock", "(", ")", ":", "apps", ".", "__dict__", "=", "state", "# Rebind the app registry models cache to", "# individual app config ones.", "for", "app_conf", "in", "apps", ".", "get_app_configs", "(", ")", ":", "app_conf", ".", "models", "=", "apps", ".", "all_models", "[", "app_conf", ".", "label", "]", "apps", ".", "clear_cache", "(", ")" ]
A context manager that restores model cache state as it was before entering the context.
[ "A", "context", "manager", "that", "restores", "model", "cache", "state", "as", "it", "was", "before", "entering", "the", "context", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/utils.py#L158-L173
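Typical use is as a guard around code that mutates Django's app registry, for instance when rendering throw-away model classes in tests; a rough sketch (the helper name is invented):

with app_cache_restorer():
    # register temporary/dynamic models; the registry state was
    # deep-copied on entry...
    build_temporary_models()  # hypothetical helper
# ...and restored on exit, with each app config's model cache rebound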
charettes/django-mutant
mutant/db/deletion.py
CASCADE_MARK_ORIGIN
def CASCADE_MARK_ORIGIN(collector, field, sub_objs, using): """ Custom on_delete handler which sets _cascade_deletion_origin on the _state of all related objects that will be deleted. We use this handler on ModelDefinitionAttribute.model_def, so when we delete a ModelDefinition we can skip field_definition_post_delete and base_definition_post_delete and avoid an incremental column deletion before the entire table is dropped. """ CASCADE(collector, field, sub_objs, using) if sub_objs: for obj in sub_objs: obj._state._cascade_deletion_origin = field.name
python
def CASCADE_MARK_ORIGIN(collector, field, sub_objs, using): """ Custom on_delete handler which sets _cascade_deletion_origin on the _state of all related objects that will be deleted. We use this handler on ModelDefinitionAttribute.model_def, so when we delete a ModelDefinition we can skip field_definition_post_delete and base_definition_post_delete and avoid an incremental column deletion before the entire table is dropped. """ CASCADE(collector, field, sub_objs, using) if sub_objs: for obj in sub_objs: obj._state._cascade_deletion_origin = field.name
[ "def", "CASCADE_MARK_ORIGIN", "(", "collector", ",", "field", ",", "sub_objs", ",", "using", ")", ":", "CASCADE", "(", "collector", ",", "field", ",", "sub_objs", ",", "using", ")", "if", "sub_objs", ":", "for", "obj", "in", "sub_objs", ":", "obj", ".", "_state", ".", "_cascade_deletion_origin", "=", "field", ".", "name" ]
Custom on_delete handler which sets _cascade_deletion_origin on the _state of all related objects that will be deleted. We use this handler on ModelDefinitionAttribute.model_def, so when we delete a ModelDefinition we can skip field_definition_post_delete and base_definition_post_delete and avoid an incremental column deletion before the entire table is dropped.
[ "Custom", "on_delete", "handler", "which", "sets", "_cascade_deletion_origin", "on", "the", "_state", "of", "all", "related", "objects", "that", "will", "be", "deleted", ".", "We", "use", "this", "handler", "on", "ModelDefinitionAttribute", ".", "model_def", "so", "when", "we", "delete", "a", "ModelDefinition", "we", "can", "skip", "field_definition_post_delete", "and", "base_definition_post_delete", "and", "avoid", "an", "incremental", "column", "deletion", "before", "the", "entire", "table", "is", "dropped", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/db/deletion.py#L6-L18
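As the docstring indicates, the handler is wired in as a ForeignKey on_delete callback; a sketch of the intended wiring (field options abbreviated, related_name invented):

class ModelDefinitionAttribute(models.Model):
    model_def = models.ForeignKey(
        'ModelDefinition',
        on_delete=CASCADE_MARK_ORIGIN,  # tags collected sub-objects with the origin
        related_name='+',
    )

pre_delete receivers can then check instance._state._cascade_deletion_origin == 'model_def' and skip per-column DDL, as base_definition_pre_delete does above.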
charettes/django-mutant
mutant/contrib/related/management/__init__.py
mutable_model_prepared
def mutable_model_prepared(signal, sender, definition, existing_model_class, **kwargs): """ Make sure all related model classes are created and marked as dependencies when a mutable model class is prepared """ referenced_models = set() # Collect all model class the obsolete model class was referring to if existing_model_class: for field in existing_model_class._meta.local_fields: if isinstance(field, RelatedField): remote_field_model = get_remote_field_model(field) if not isinstance(remote_field_model, string_types): referenced_models.add(remote_field_model) # Add sender as a dependency of all mutable models it refers to for field in sender._meta.local_fields: if isinstance(field, RelatedField): remote_field_model = get_remote_field_model(field) if not isinstance(remote_field_model, string_types): referenced_models.add(remote_field_model) if (issubclass(remote_field_model, MutableModel) and remote_field_model._definition != sender._definition): remote_field_model._dependencies.add(sender._definition) # Mark all model referring to this one as dependencies related_model_defs = ModelDefinition.objects.filter( Q(fielddefinitions__foreignkeydefinition__to=definition) | Q(fielddefinitions__manytomanyfielddefinition__to=definition) ).distinct() for model_def in related_model_defs: if model_def != definition: # Generate model class from definition and add it as a dependency sender._dependencies.add(model_def.model_class()._definition) # Clear the referenced models opts related cache for model_class in referenced_models: clear_opts_related_cache(model_class)
python
def mutable_model_prepared(signal, sender, definition, existing_model_class, **kwargs): """ Make sure all related model classes are created and marked as dependencies when a mutable model class is prepared """ referenced_models = set() # Collect all model class the obsolete model class was referring to if existing_model_class: for field in existing_model_class._meta.local_fields: if isinstance(field, RelatedField): remote_field_model = get_remote_field_model(field) if not isinstance(remote_field_model, string_types): referenced_models.add(remote_field_model) # Add sender as a dependency of all mutable models it refers to for field in sender._meta.local_fields: if isinstance(field, RelatedField): remote_field_model = get_remote_field_model(field) if not isinstance(remote_field_model, string_types): referenced_models.add(remote_field_model) if (issubclass(remote_field_model, MutableModel) and remote_field_model._definition != sender._definition): remote_field_model._dependencies.add(sender._definition) # Mark all model referring to this one as dependencies related_model_defs = ModelDefinition.objects.filter( Q(fielddefinitions__foreignkeydefinition__to=definition) | Q(fielddefinitions__manytomanyfielddefinition__to=definition) ).distinct() for model_def in related_model_defs: if model_def != definition: # Generate model class from definition and add it as a dependency sender._dependencies.add(model_def.model_class()._definition) # Clear the referenced models opts related cache for model_class in referenced_models: clear_opts_related_cache(model_class)
[ "def", "mutable_model_prepared", "(", "signal", ",", "sender", ",", "definition", ",", "existing_model_class", ",", "*", "*", "kwargs", ")", ":", "referenced_models", "=", "set", "(", ")", "# Collect all model class the obsolete model class was referring to", "if", "existing_model_class", ":", "for", "field", "in", "existing_model_class", ".", "_meta", ".", "local_fields", ":", "if", "isinstance", "(", "field", ",", "RelatedField", ")", ":", "remote_field_model", "=", "get_remote_field_model", "(", "field", ")", "if", "not", "isinstance", "(", "remote_field_model", ",", "string_types", ")", ":", "referenced_models", ".", "add", "(", "remote_field_model", ")", "# Add sender as a dependency of all mutable models it refers to", "for", "field", "in", "sender", ".", "_meta", ".", "local_fields", ":", "if", "isinstance", "(", "field", ",", "RelatedField", ")", ":", "remote_field_model", "=", "get_remote_field_model", "(", "field", ")", "if", "not", "isinstance", "(", "remote_field_model", ",", "string_types", ")", ":", "referenced_models", ".", "add", "(", "remote_field_model", ")", "if", "(", "issubclass", "(", "remote_field_model", ",", "MutableModel", ")", "and", "remote_field_model", ".", "_definition", "!=", "sender", ".", "_definition", ")", ":", "remote_field_model", ".", "_dependencies", ".", "add", "(", "sender", ".", "_definition", ")", "# Mark all model referring to this one as dependencies", "related_model_defs", "=", "ModelDefinition", ".", "objects", ".", "filter", "(", "Q", "(", "fielddefinitions__foreignkeydefinition__to", "=", "definition", ")", "|", "Q", "(", "fielddefinitions__manytomanyfielddefinition__to", "=", "definition", ")", ")", ".", "distinct", "(", ")", "for", "model_def", "in", "related_model_defs", ":", "if", "model_def", "!=", "definition", ":", "# Generate model class from definition and add it as a dependency", "sender", ".", "_dependencies", ".", "add", "(", "model_def", ".", "model_class", "(", ")", ".", "_definition", ")", "# Clear the referenced models opts related cache", "for", "model_class", "in", "referenced_models", ":", "clear_opts_related_cache", "(", "model_class", ")" ]
Make sure all related model classes are created and marked as dependencies when a mutable model class is prepared
[ "Make", "sure", "all", "related", "model", "classes", "are", "created", "and", "marked", "as", "dependencies", "when", "a", "mutable", "model", "class", "is", "prepared" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/contrib/related/management/__init__.py#L13-L47
charettes/django-mutant
mutant/models/model/__init__.py
_model_class_from_pk
def _model_class_from_pk(definition_cls, definition_pk): """ Helper used to unpickle MutableModel model classes from their definition pk. """ try: return definition_cls.objects.get(pk=definition_pk).model_class() except definition_cls.DoesNotExist: pass
python
def _model_class_from_pk(definition_cls, definition_pk): """ Helper used to unpickle MutableModel model classes from their definition pk. """ try: return definition_cls.objects.get(pk=definition_pk).model_class() except definition_cls.DoesNotExist: pass
[ "def", "_model_class_from_pk", "(", "definition_cls", ",", "definition_pk", ")", ":", "try", ":", "return", "definition_cls", ".", "objects", ".", "get", "(", "pk", "=", "definition_pk", ")", ".", "model_class", "(", ")", "except", "definition_cls", ".", "DoesNotExist", ":", "pass" ]
Helper used to unpickle MutableModel model classes from their definition pk.
[ "Helper", "used", "to", "unpickle", "MutableModel", "model", "classes", "from", "their", "definition", "pk", "." ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/models/model/__init__.py#L30-L38
charettes/django-mutant
mutant/models/model/__init__.py
OrderingFieldDefinition.clean
def clean(self): """ Make sure the lookup makes sense """ if self.lookup == '?': # Randomly sort return else: lookups = self.lookup.split(LOOKUP_SEP) opts = self.model_def.model_class()._meta valid = True while len(lookups): lookup = lookups.pop(0) try: field = opts.get_field(lookup) except FieldDoesNotExist: valid = False else: if isinstance(field, models.ForeignKey): opts = get_remote_field_model(field)._meta elif len(lookups): # Cannot go any deeper valid = False finally: if not valid: msg = _("This field doesn't exist") raise ValidationError({'lookup': [msg]})
python
def clean(self): """ Make sure the lookup makes sense """ if self.lookup == '?': # Randomly sort return else: lookups = self.lookup.split(LOOKUP_SEP) opts = self.model_def.model_class()._meta valid = True while len(lookups): lookup = lookups.pop(0) try: field = opts.get_field(lookup) except FieldDoesNotExist: valid = False else: if isinstance(field, models.ForeignKey): opts = get_remote_field_model(field)._meta elif len(lookups): # Cannot go any deeper valid = False finally: if not valid: msg = _("This field doesn't exist") raise ValidationError({'lookup': [msg]})
[ "def", "clean", "(", "self", ")", ":", "if", "self", ".", "lookup", "==", "'?'", ":", "# Randomly sort", "return", "else", ":", "lookups", "=", "self", ".", "lookup", ".", "split", "(", "LOOKUP_SEP", ")", "opts", "=", "self", ".", "model_def", ".", "model_class", "(", ")", ".", "_meta", "valid", "=", "True", "while", "len", "(", "lookups", ")", ":", "lookup", "=", "lookups", ".", "pop", "(", "0", ")", "try", ":", "field", "=", "opts", ".", "get_field", "(", "lookup", ")", "except", "FieldDoesNotExist", ":", "valid", "=", "False", "else", ":", "if", "isinstance", "(", "field", ",", "models", ".", "ForeignKey", ")", ":", "opts", "=", "get_remote_field_model", "(", "field", ")", ".", "_meta", "elif", "len", "(", "lookups", ")", ":", "# Cannot go any deeper", "valid", "=", "False", "finally", ":", "if", "not", "valid", ":", "msg", "=", "_", "(", "\"This field doesn't exist\"", ")", "raise", "ValidationError", "(", "{", "'lookup'", ":", "[", "msg", "]", "}", ")" ]
Make sure the lookup makes sense
[ "Make", "sure", "the", "lookup", "makes", "sense" ]
train
https://github.com/charettes/django-mutant/blob/865a1b712ce30501901c4691ce2110ab03f0f93b/mutant/models/model/__init__.py#L436-L460
jakevdp/JSAnimation
make_lorenz_animation.py
lorentz_deriv
def lorentz_deriv((x, y, z), t0, sigma=10., beta=8./3, rho=28.0): """Compute the time-derivative of a Lorenz system.""" return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
python
def lorentz_deriv((x, y, z), t0, sigma=10., beta=8./3, rho=28.0): """Compute the time-derivative of a Lorenz system.""" return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
[ "def", "lorentz_deriv", "(", "(", "x", ",", "y", ",", "z", ")", ",", "t0", ",", "sigma", "=", "10.", ",", "beta", "=", "8.", "/", "3", ",", "rho", "=", "28.0", ")", ":", "return", "[", "sigma", "*", "(", "y", "-", "x", ")", ",", "x", "*", "(", "rho", "-", "z", ")", "-", "y", ",", "x", "*", "y", "-", "beta", "*", "z", "]" ]
Compute the time-derivative of a Lorenz system.
[ "Compute", "the", "time", "-", "derivative", "of", "a", "Lorenz", "system", "." ]
train
https://github.com/jakevdp/JSAnimation/blob/d290db11ebb5427769b76fc532f2d10c4ea3391f/make_lorenz_animation.py#L20-L22
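Note the tuple-unpacking parameter (x, y, z) is Python 2 only (removed by PEP 3113); a portable variant unpacks inside the body and feeds straight into scipy's odeint:

import numpy as np
from scipy.integrate import odeint

def lorenz_deriv(xyz, t0, sigma=10., beta=8. / 3, rho=28.0):
    """Compute the time-derivative of a Lorenz system."""
    x, y, z = xyz  # explicit unpacking works on both Python 2 and 3
    return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

t = np.linspace(0, 4, 1000)
trajectory = odeint(lorenz_deriv, [1., 1., 1.], t)  # shape (1000, 3)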
aktaylor08/RosbagPandas
scripts/bag_graph.py
buildParser
def buildParser(): ''' Builds the parser for reading the command line arguments''' parser = argparse.ArgumentParser(description='Bagfile reader') parser.add_argument('-b', '--bag', help='Bag file to read', required=True, type=str) parser.add_argument('-s', '--series', help='Msg data fields to graph', required=True, nargs='*') parser.add_argument('-y', '--ylim', help='Set min and max y lim', required=False, nargs=2) parser.add_argument('-c', '--combined', help="Graph them all on one", required=False, action="store_true", dest="sharey") return parser
python
def buildParser(): ''' Builds the parser for reading the command line arguments''' parser = argparse.ArgumentParser(description='Bagfile reader') parser.add_argument('-b', '--bag', help='Bag file to read', required=True, type=str) parser.add_argument('-s', '--series', help='Msg data fields to graph', required=True, nargs='*') parser.add_argument('-y', '--ylim', help='Set min and max y lim', required=False, nargs=2) parser.add_argument('-c', '--combined', help="Graph them all on one", required=False, action="store_true", dest="sharey") return parser
[ "def", "buildParser", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Bagfile reader'", ")", "parser", ".", "add_argument", "(", "'-b'", ",", "'--bag'", ",", "help", "=", "'Bag file to read'", ",", "required", "=", "True", ",", "type", "=", "str", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--series'", ",", "help", "=", "'Msg data fields to graph'", ",", "required", "=", "True", ",", "nargs", "=", "'*'", ")", "parser", ".", "add_argument", "(", "'-y'", ",", "'--ylim'", ",", "help", "=", "'Set min and max y lim'", ",", "required", "=", "False", ",", "nargs", "=", "2", ")", "parser", ".", "add_argument", "(", "'-c'", ",", "'--combined'", ",", "help", "=", "\"Graph them all on one\"", ",", "required", "=", "False", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"sharey\"", ")", "return", "parser" ]
Builds the parser for reading the command line arguments
[ "Builds", "the", "parser", "for", "reading", "the", "command", "line", "arguments" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/scripts/bag_graph.py#L10-L24
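A hypothetical invocation of the parser above (bag and topic names invented):

parser = buildParser()
args = parser.parse_args(
    ['-b', 'flight.bag', '-s', '/imu/data/orientation/x', '-c'])
print(args.bag)     # 'flight.bag'
print(args.series)  # ['/imu/data/orientation/x']
print(args.sharey)  # True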
aktaylor08/RosbagPandas
scripts/bag_graph.py
parse_series_args
def parse_series_args(topics, fields): '''Return which topics and which field keys need to be examined for plotting''' keys = {} for field in fields: for topic in topics: if field.startswith(topic): keys[field] = (topic, field[len(topic) + 1:]) return keys
python
def parse_series_args(topics, fields): '''Return which topics and which field keys need to be examined for plotting''' keys = {} for field in fields: for topic in topics: if field.startswith(topic): keys[field] = (topic, field[len(topic) + 1:]) return keys
[ "def", "parse_series_args", "(", "topics", ",", "fields", ")", ":", "keys", "=", "{", "}", "for", "field", "in", "fields", ":", "for", "topic", "in", "topics", ":", "if", "field", ".", "startswith", "(", "topic", ")", ":", "keys", "[", "field", "]", "=", "(", "topic", ",", "field", "[", "len", "(", "topic", ")", "+", "1", ":", "]", ")", "return", "keys" ]
Return which topics and which field keys need to be examined for plotting
[ "Return", "which", "topics", "and", "which", "field", "keys", "need", "to", "be", "examined", "for", "plotting" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/scripts/bag_graph.py#L27-L36
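A worked example of the matching above (topic and field names invented): each requested field is split into its owning topic and the remaining attribute path.

topics = ['/imu/data', '/odom']
fields = ['/imu/data/orientation/x', '/odom/pose/pose/position/y']
print(parse_series_args(topics, fields))
# {'/imu/data/orientation/x': ('/imu/data', 'orientation/x'),
#  '/odom/pose/pose/position/y': ('/odom', 'pose/pose/position/y')}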
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
bag_to_dataframe
def bag_to_dataframe(bag_name, include=None, exclude=None, parse_header=False, seconds=False): ''' Read in a rosbag file and create a pandas data frame that is indexed by the time the message was recorded in the bag. :bag_name: String name for the bag file :include: None, String, or List Topics to include in the dataframe if None all topics added, if string it is used as regular expression, if list that list is used. :exclude: None, String, or List Topics to be removed from those added using the include option using set difference. If None no topics removed. If String it is treated as a regular expression. A list removes those in the list. :seconds: time index is in seconds :returns: a pandas dataframe object ''' # get list of topics to parse yaml_info = get_bag_info(bag_name) bag_topics = get_topics(yaml_info) bag_topics = prune_topics(bag_topics, include, exclude) length = get_length(bag_topics, yaml_info) msgs_to_read, msg_type = get_msg_info(yaml_info, bag_topics, parse_header) bag = rosbag.Bag(bag_name) dmap = create_data_map(msgs_to_read) # create datastore datastore = {} for topic in dmap.keys(): for f, key in dmap[topic].iteritems(): t = msg_type[topic][f] if isinstance(t, int) or isinstance(t, float): arr = np.empty(length) arr.fill(np.NAN) elif isinstance(t, list): arr = np.empty(length) arr.fill(np.NAN) for i in range(len(t)): key_i = '{0}{1}'.format(key, i) datastore[key_i] = arr.copy() continue else: arr = np.empty(length, dtype=np.object) datastore[key] = arr # create the index index = np.empty(length) index.fill(np.NAN) # all of the data is loaded for idx, (topic, msg, mt) in enumerate(bag.read_messages(topics=bag_topics)): try: if seconds: index[idx] = msg.header.stamp.to_sec() else: index[idx] = msg.header.stamp.to_nsec() except: if seconds: index[idx] = mt.to_sec() else: index[idx] = mt.to_nsec() fields = dmap[topic] for f, key in fields.iteritems(): try: d = get_message_data(msg, f) if isinstance(d, tuple): for i, val in enumerate(d): key_i = '{0}{1}'.format(key, i) datastore[key_i][idx] = val else: datastore[key][idx] = d except: pass bag.close() # convert the index if not seconds: index = pd.to_datetime(index, unit='ns') # now we have read all of the messages its time to assemble the dataframe return pd.DataFrame(data=datastore, index=index)
python
def bag_to_dataframe(bag_name, include=None, exclude=None, parse_header=False, seconds=False): ''' Read in a rosbag file and create a pandas data frame that is indexed by the time the message was recorded in the bag. :bag_name: String name for the bag file :include: None, String, or List Topics to include in the dataframe if None all topics added, if string it is used as regular expression, if list that list is used. :exclude: None, String, or List Topics to be removed from those added using the include option using set difference. If None no topics removed. If String it is treated as a regular expression. A list removes those in the list. :seconds: time index is in seconds :returns: a pandas dataframe object ''' # get list of topics to parse yaml_info = get_bag_info(bag_name) bag_topics = get_topics(yaml_info) bag_topics = prune_topics(bag_topics, include, exclude) length = get_length(bag_topics, yaml_info) msgs_to_read, msg_type = get_msg_info(yaml_info, bag_topics, parse_header) bag = rosbag.Bag(bag_name) dmap = create_data_map(msgs_to_read) # create datastore datastore = {} for topic in dmap.keys(): for f, key in dmap[topic].iteritems(): t = msg_type[topic][f] if isinstance(t, int) or isinstance(t, float): arr = np.empty(length) arr.fill(np.NAN) elif isinstance(t, list): arr = np.empty(length) arr.fill(np.NAN) for i in range(len(t)): key_i = '{0}{1}'.format(key, i) datastore[key_i] = arr.copy() continue else: arr = np.empty(length, dtype=np.object) datastore[key] = arr # create the index index = np.empty(length) index.fill(np.NAN) # all of the data is loaded for idx, (topic, msg, mt) in enumerate(bag.read_messages(topics=bag_topics)): try: if seconds: index[idx] = msg.header.stamp.to_sec() else: index[idx] = msg.header.stamp.to_nsec() except: if seconds: index[idx] = mt.to_sec() else: index[idx] = mt.to_nsec() fields = dmap[topic] for f, key in fields.iteritems(): try: d = get_message_data(msg, f) if isinstance(d, tuple): for i, val in enumerate(d): key_i = '{0}{1}'.format(key, i) datastore[key_i][idx] = val else: datastore[key][idx] = d except: pass bag.close() # convert the index if not seconds: index = pd.to_datetime(index, unit='ns') # now we have read all of the messages its time to assemble the dataframe return pd.DataFrame(data=datastore, index=index)
[ "def", "bag_to_dataframe", "(", "bag_name", ",", "include", "=", "None", ",", "exclude", "=", "None", ",", "parse_header", "=", "False", ",", "seconds", "=", "False", ")", ":", "# get list of topics to parse", "yaml_info", "=", "get_bag_info", "(", "bag_name", ")", "bag_topics", "=", "get_topics", "(", "yaml_info", ")", "bag_topics", "=", "prune_topics", "(", "bag_topics", ",", "include", ",", "exclude", ")", "length", "=", "get_length", "(", "bag_topics", ",", "yaml_info", ")", "msgs_to_read", ",", "msg_type", "=", "get_msg_info", "(", "yaml_info", ",", "bag_topics", ",", "parse_header", ")", "bag", "=", "rosbag", ".", "Bag", "(", "bag_name", ")", "dmap", "=", "create_data_map", "(", "msgs_to_read", ")", "# create datastore", "datastore", "=", "{", "}", "for", "topic", "in", "dmap", ".", "keys", "(", ")", ":", "for", "f", ",", "key", "in", "dmap", "[", "topic", "]", ".", "iteritems", "(", ")", ":", "t", "=", "msg_type", "[", "topic", "]", "[", "f", "]", "if", "isinstance", "(", "t", ",", "int", ")", "or", "isinstance", "(", "t", ",", "float", ")", ":", "arr", "=", "np", ".", "empty", "(", "length", ")", "arr", ".", "fill", "(", "np", ".", "NAN", ")", "elif", "isinstance", "(", "t", ",", "list", ")", ":", "arr", "=", "np", ".", "empty", "(", "length", ")", "arr", ".", "fill", "(", "np", ".", "NAN", ")", "for", "i", "in", "range", "(", "len", "(", "t", ")", ")", ":", "key_i", "=", "'{0}{1}'", ".", "format", "(", "key", ",", "i", ")", "datastore", "[", "key_i", "]", "=", "arr", ".", "copy", "(", ")", "continue", "else", ":", "arr", "=", "np", ".", "empty", "(", "length", ",", "dtype", "=", "np", ".", "object", ")", "datastore", "[", "key", "]", "=", "arr", "# create the index", "index", "=", "np", ".", "empty", "(", "length", ")", "index", ".", "fill", "(", "np", ".", "NAN", ")", "# all of the data is loaded", "for", "idx", ",", "(", "topic", ",", "msg", ",", "mt", ")", "in", "enumerate", "(", "bag", ".", "read_messages", "(", "topics", "=", "bag_topics", ")", ")", ":", "try", ":", "if", "seconds", ":", "index", "[", "idx", "]", "=", "msg", ".", "header", ".", "stamp", ".", "to_sec", "(", ")", "else", ":", "index", "[", "idx", "]", "=", "msg", ".", "header", ".", "stamp", ".", "to_nsec", "(", ")", "except", ":", "if", "seconds", ":", "index", "[", "idx", "]", "=", "mt", ".", "to_sec", "(", ")", "else", ":", "index", "[", "idx", "]", "=", "mt", ".", "to_nsec", "(", ")", "fields", "=", "dmap", "[", "topic", "]", "for", "f", ",", "key", "in", "fields", ".", "iteritems", "(", ")", ":", "try", ":", "d", "=", "get_message_data", "(", "msg", ",", "f", ")", "if", "isinstance", "(", "d", ",", "tuple", ")", ":", "for", "i", ",", "val", "in", "enumerate", "(", "d", ")", ":", "key_i", "=", "'{0}{1}'", ".", "format", "(", "key", ",", "i", ")", "datastore", "[", "key_i", "]", "[", "idx", "]", "=", "val", "else", ":", "datastore", "[", "key", "]", "[", "idx", "]", "=", "d", "except", ":", "pass", "bag", ".", "close", "(", ")", "# convert the index", "if", "not", "seconds", ":", "index", "=", "pd", ".", "to_datetime", "(", "index", ",", "unit", "=", "'ns'", ")", "# now we have read all of the messages its time to assemble the dataframe", "return", "pd", ".", "DataFrame", "(", "data", "=", "datastore", ",", "index", "=", "index", ")" ]
Read in a rosbag file and create a pandas data frame that is indexed by the time the message was recorded in the bag. :bag_name: String name for the bag file :include: None, String, or List Topics to include in the dataframe if None all topics added, if string it is used as regular expression, if list that list is used. :exclude: None, String, or List Topics to be removed from those added using the include option using set difference. If None no topics removed. If String it is treated as a regular expression. A list removes those in the list. :seconds: time index is in seconds :returns: a pandas dataframe object
[ "Read", "in", "a", "rosbag", "file", "and", "create", "a", "pandas", "data", "frame", "that", "is", "indexed", "by", "the", "time", "the", "message", "was", "recorded", "in", "the", "bag", "." ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L15-L98
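A minimal usage sketch for bag_to_dataframe (the bag path and topic name are hypothetical, a ROS environment with this module importable is assumed, and the printed column names follow the topic__field convention built by create_data_map below):

import rosbag_pandas

# Read only the pose topic; index the frame by seconds instead of nanoseconds.
df = rosbag_pandas.bag_to_dataframe('example.bag',
                                    include=['/robot/pose'],
                                    seconds=True)
print(df.columns)  # e.g. ['robot_pose__pose_position_x', ...]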
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_length
def get_length(topics, yaml_info): ''' Find the length (# of rows) in the created dataframe ''' total = 0 info = yaml_info['topics'] for topic in topics: for t in info: if t['topic'] == topic: total = total + t['messages'] break return total
python
def get_length(topics, yaml_info): ''' Find the length (# of rows) in the created dataframe ''' total = 0 info = yaml_info['topics'] for topic in topics: for t in info: if t['topic'] == topic: total = total + t['messages'] break return total
[ "def", "get_length", "(", "topics", ",", "yaml_info", ")", ":", "total", "=", "0", "info", "=", "yaml_info", "[", "'topics'", "]", "for", "topic", "in", "topics", ":", "for", "t", "in", "info", ":", "if", "t", "[", "'topic'", "]", "==", "topic", ":", "total", "=", "total", "+", "t", "[", "'messages'", "]", "break", "return", "total" ]
Find the length (# of rows) in the created dataframe
[ "Find", "the", "length", "(", "#", "of", "rows", ")", "in", "the", "created", "dataframe" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L101-L112
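For reference, a sketch of the yaml_info structure this function walks; the topic names and message counts below are invented, but rosbag info --yaml emits this layout:

yaml_info = {'topics': [
    {'topic': '/robot/pose', 'messages': 120},
    {'topic': '/robot/twist', 'messages': 60},
]}
get_length(['/robot/pose', '/robot/twist'], yaml_info)  # -> 180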
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
create_data_map
def create_data_map(msgs_to_read): ''' Create a data map for usage when parsing the bag ''' dmap = {} for topic in msgs_to_read.keys(): base_name = get_key_name(topic) + '__' fields = {} for f in msgs_to_read[topic]: key = (base_name + f).replace('.', '_') fields[f] = key dmap[topic] = fields return dmap
python
def create_data_map(msgs_to_read): ''' Create a data map for usage when parsing the bag ''' dmap = {} for topic in msgs_to_read.keys(): base_name = get_key_name(topic) + '__' fields = {} for f in msgs_to_read[topic]: key = (base_name + f).replace('.', '_') fields[f] = key dmap[topic] = fields return dmap
[ "def", "create_data_map", "(", "msgs_to_read", ")", ":", "dmap", "=", "{", "}", "for", "topic", "in", "msgs_to_read", ".", "keys", "(", ")", ":", "base_name", "=", "get_key_name", "(", "topic", ")", "+", "'__'", "fields", "=", "{", "}", "for", "f", "in", "msgs_to_read", "[", "topic", "]", ":", "key", "=", "(", "base_name", "+", "f", ")", ".", "replace", "(", "'.'", ",", "'_'", ")", "fields", "[", "f", "]", "=", "key", "dmap", "[", "topic", "]", "=", "fields", "return", "dmap" ]
Create a data map for usage when parsing the bag
[ "Create", "a", "data", "map", "for", "usage", "when", "parsing", "the", "bag" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L115-L127
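A hedged illustration of the mapping this builds; get_key_name is not shown in this excerpt, so the 'robot_pose' key prefix below is an assumption about how it flattens a topic name:

msgs_to_read = {'/robot/pose': ['pose.position.x', 'pose.position.y']}
create_data_map(msgs_to_read)
# -> {'/robot/pose': {'pose.position.x': 'robot_pose__pose_position_x',
#                     'pose.position.y': 'robot_pose__pose_position_y'}}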
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
prune_topics
def prune_topics(bag_topics, include, exclude): '''prune the topics. If include is None add all to the set of topics to use if include is a string regex match that string, if it is a list use the list If exclude is None do nothing, if string remove the topics with regex, if it is a list remove those topics''' topics_to_use = set() # add all of the topics if include is None: for t in bag_topics: topics_to_use.add(t) elif isinstance(include, basestring): check = re.compile(include) for t in bag_topics: if re.match(check, t) is not None: topics_to_use.add(t) else: try: # add all of the includes if it is in the topic for topic in include: if topic in bag_topics: topics_to_use.add(topic) except: warnings.warn('Error in topic selection Using All!') topics_to_use = set() for t in bag_topics: topics_to_use.add(t) to_remove = set() # now exclude the exclusions if exclude is None: pass elif isinstance(exclude, basestring): check = re.compile(exclude) for t in list(topics_to_use): if re.match(check, t) is not None: to_remove.add(t) else: for remove in exclude: if remove in exclude: to_remove.add(remove) # final set stuff to get topics to use topics_to_use = topics_to_use - to_remove # return a list for the results return list(topics_to_use)
python
def prune_topics(bag_topics, include, exclude): '''prune the topics. If include is None add all to the set of topics to use if include is a string regex match that string, if it is a list use the list If exclude is None do nothing, if string remove the topics with regex, if it is a list remove those topics''' topics_to_use = set() # add all of the topics if include is None: for t in bag_topics: topics_to_use.add(t) elif isinstance(include, basestring): check = re.compile(include) for t in bag_topics: if re.match(check, t) is not None: topics_to_use.add(t) else: try: # add all of the includes if it is in the topic for topic in include: if topic in bag_topics: topics_to_use.add(topic) except: warnings.warn('Error in topic selection Using All!') topics_to_use = set() for t in bag_topics: topics_to_use.add(t) to_remove = set() # now exclude the exclusions if exclude is None: pass elif isinstance(exclude, basestring): check = re.compile(exclude) for t in list(topics_to_use): if re.match(check, t) is not None: to_remove.add(t) else: for remove in exclude: if remove in exclude: to_remove.add(remove) # final set stuff to get topics to use topics_to_use = topics_to_use - to_remove # return a list for the results return list(topics_to_use)
[ "def", "prune_topics", "(", "bag_topics", ",", "include", ",", "exclude", ")", ":", "topics_to_use", "=", "set", "(", ")", "# add all of the topics", "if", "include", "is", "None", ":", "for", "t", "in", "bag_topics", ":", "topics_to_use", ".", "add", "(", "t", ")", "elif", "isinstance", "(", "include", ",", "basestring", ")", ":", "check", "=", "re", ".", "compile", "(", "include", ")", "for", "t", "in", "bag_topics", ":", "if", "re", ".", "match", "(", "check", ",", "t", ")", "is", "not", "None", ":", "topics_to_use", ".", "add", "(", "t", ")", "else", ":", "try", ":", "# add all of the includes if it is in the topic", "for", "topic", "in", "include", ":", "if", "topic", "in", "bag_topics", ":", "topics_to_use", ".", "add", "(", "topic", ")", "except", ":", "warnings", ".", "warn", "(", "'Error in topic selection Using All!'", ")", "topics_to_use", "=", "set", "(", ")", "for", "t", "in", "bag_topics", ":", "topics_to_use", ".", "add", "(", "t", ")", "to_remove", "=", "set", "(", ")", "# now exclude the exclusions", "if", "exclude", "is", "None", ":", "pass", "elif", "isinstance", "(", "exclude", ",", "basestring", ")", ":", "check", "=", "re", ".", "compile", "(", "exclude", ")", "for", "t", "in", "list", "(", "topics_to_use", ")", ":", "if", "re", ".", "match", "(", "check", ",", "t", ")", "is", "not", "None", ":", "to_remove", ".", "add", "(", "t", ")", "else", ":", "for", "remove", "in", "exclude", ":", "if", "remove", "in", "exclude", ":", "to_remove", ".", "add", "(", "remove", ")", "# final set stuff to get topics to use", "topics_to_use", "=", "topics_to_use", "-", "to_remove", "# return a list for the results", "return", "list", "(", "topics_to_use", ")" ]
prune the topics. If include is None add all to the set of topics to use if include is a string regex match that string, if it is a list use the list If exclude is None do nothing, if string remove the topics with regex, if it is a list remove those topics
[ "prune", "the", "topics", ".", "If", "include", "is", "None", "add", "all", "to", "the", "set", "of", "topics", "to", "use", "if", "include", "is", "a", "string", "regex", "match", "that", "string", "if", "it", "is", "a", "list", "use", "the", "list" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L130-L177
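A quick sketch of the include/exclude semantics with invented topic names (the basestring checks mean this module targets Python 2):

bag_topics = ['/robot/pose', '/robot/twist', '/diagnostics']
prune_topics(bag_topics, include='/robot/.*', exclude=['/robot/twist'])
# the regex include keeps the two /robot topics; the list exclude then drops
# /robot/twist, leaving ['/robot/pose']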
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_msg_info
def get_msg_info(yaml_info, topics, parse_header=True): ''' Get info from all of the messages about what they contain and will be added to the dataframe ''' topic_info = yaml_info['topics'] msgs = {} classes = {} for topic in topics: base_key = get_key_name(topic) msg_paths = [] msg_types = {} for info in topic_info: if info['topic'] == topic: msg_class = get_message_class(info['type']) if msg_class is None: warnings.warn( 'Could not find types for ' + topic + ' skipping ') else: (msg_paths, msg_types) = get_base_fields(msg_class(), "", parse_header) msgs[topic] = msg_paths classes[topic] = msg_types return (msgs, classes)
python
def get_msg_info(yaml_info, topics, parse_header=True): ''' Get info from all of the messages about what they contain and will be added to the dataframe ''' topic_info = yaml_info['topics'] msgs = {} classes = {} for topic in topics: base_key = get_key_name(topic) msg_paths = [] msg_types = {} for info in topic_info: if info['topic'] == topic: msg_class = get_message_class(info['type']) if msg_class is None: warnings.warn( 'Could not find types for ' + topic + ' skipping ') else: (msg_paths, msg_types) = get_base_fields(msg_class(), "", parse_header) msgs[topic] = msg_paths classes[topic] = msg_types return (msgs, classes)
[ "def", "get_msg_info", "(", "yaml_info", ",", "topics", ",", "parse_header", "=", "True", ")", ":", "topic_info", "=", "yaml_info", "[", "'topics'", "]", "msgs", "=", "{", "}", "classes", "=", "{", "}", "for", "topic", "in", "topics", ":", "base_key", "=", "get_key_name", "(", "topic", ")", "msg_paths", "=", "[", "]", "msg_types", "=", "{", "}", "for", "info", "in", "topic_info", ":", "if", "info", "[", "'topic'", "]", "==", "topic", ":", "msg_class", "=", "get_message_class", "(", "info", "[", "'type'", "]", ")", "if", "msg_class", "is", "None", ":", "warnings", ".", "warn", "(", "'Could not find types for '", "+", "topic", "+", "' skipping '", ")", "else", ":", "(", "msg_paths", ",", "msg_types", ")", "=", "get_base_fields", "(", "msg_class", "(", ")", ",", "\"\"", ",", "parse_header", ")", "msgs", "[", "topic", "]", "=", "msg_paths", "classes", "[", "topic", "]", "=", "msg_types", "return", "(", "msgs", ",", "classes", ")" ]
Get info from all of the messages about what they contain and will be added to the dataframe
[ "Get", "info", "from", "all", "of", "the", "messages", "about", "what", "they", "contain", "and", "will", "be", "added", "to", "the", "dataframe" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L180-L204
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_bag_info
def get_bag_info(bag_file): '''Get yaml dict of the bag information by calling the subprocess -- used to create correct sized arrays''' # Get the info on the bag bag_info = yaml.load(subprocess.Popen( ['rosbag', 'info', '--yaml', bag_file], stdout=subprocess.PIPE).communicate()[0]) return bag_info
python
def get_bag_info(bag_file): '''Get yaml dict of the bag information by calling the subprocess -- used to create correct sized arrays''' # Get the info on the bag bag_info = yaml.load(subprocess.Popen( ['rosbag', 'info', '--yaml', bag_file], stdout=subprocess.PIPE).communicate()[0]) return bag_info
[ "def", "get_bag_info", "(", "bag_file", ")", ":", "# Get the info on the bag", "bag_info", "=", "yaml", ".", "load", "(", "subprocess", ".", "Popen", "(", "[", "'rosbag'", ",", "'info'", ",", "'--yaml'", ",", "bag_file", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", ".", "communicate", "(", ")", "[", "0", "]", ")", "return", "bag_info" ]
Get yaml dict of the bag information by calling the subprocess -- used to create correct sized arrays
[ "Get", "yaml", "dict", "of", "the", "bag", "information", "by", "calling", "the", "subprocess", "--", "used", "to", "create", "correct", "sized", "arrays" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L207-L215
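Usage sketch, equivalent to running rosbag info --yaml example.bag from a shell with ROS sourced (the bag name is hypothetical and the exact top-level keys depend on the rosbag version):

info = get_bag_info('example.bag')
print(len(info['topics']))  # number of recorded topics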
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_topics
def get_topics(yaml_info): ''' Returns the names of all of the topics in the bag, and prints them to stdout if requested ''' # Pull out the topic info names = [] # Store all of the topics in a dictionary topics = yaml_info['topics'] for topic in topics: names.append(topic['topic']) return names
python
def get_topics(yaml_info): ''' Returns the names of all of the topics in the bag, and prints them to stdout if requested ''' # Pull out the topic info names = [] # Store all of the topics in a dictionary topics = yaml_info['topics'] for topic in topics: names.append(topic['topic']) return names
[ "def", "get_topics", "(", "yaml_info", ")", ":", "# Pull out the topic info", "names", "=", "[", "]", "# Store all of the topics in a dictionary", "topics", "=", "yaml_info", "[", "'topics'", "]", "for", "topic", "in", "topics", ":", "names", ".", "append", "(", "topic", "[", "'topic'", "]", ")", "return", "names" ]
Returns the names of all of the topics in the bag, and prints them to stdout if requested
[ "Returns", "the", "names", "of", "all", "of", "the", "topics", "in", "the", "bag", "and", "prints", "them", "to", "stdout", "if", "requested" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L218-L229
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_base_fields
def get_base_fields(msg, prefix='', parse_header=True): '''function to get the full names of every message field in the message''' slots = msg.__slots__ ret_val = [] msg_types = dict() for i in slots: slot_msg = getattr(msg, i) if not parse_header and i == 'header': continue if hasattr(slot_msg, '__slots__'): (subs, type_map) = get_base_fields( slot_msg, prefix=prefix + i + '.', parse_header=parse_header, ) for i in subs: ret_val.append(i) for k, v in type_map.items(): msg_types[k] = v else: ret_val.append(prefix + i) msg_types[prefix + i] = slot_msg return (ret_val, msg_types)
python
def get_base_fields(msg, prefix='', parse_header=True): '''function to get the full names of every message field in the message''' slots = msg.__slots__ ret_val = [] msg_types = dict() for i in slots: slot_msg = getattr(msg, i) if not parse_header and i == 'header': continue if hasattr(slot_msg, '__slots__'): (subs, type_map) = get_base_fields( slot_msg, prefix=prefix + i + '.', parse_header=parse_header, ) for i in subs: ret_val.append(i) for k, v in type_map.items(): msg_types[k] = v else: ret_val.append(prefix + i) msg_types[prefix + i] = slot_msg return (ret_val, msg_types)
[ "def", "get_base_fields", "(", "msg", ",", "prefix", "=", "''", ",", "parse_header", "=", "True", ")", ":", "slots", "=", "msg", ".", "__slots__", "ret_val", "=", "[", "]", "msg_types", "=", "dict", "(", ")", "for", "i", "in", "slots", ":", "slot_msg", "=", "getattr", "(", "msg", ",", "i", ")", "if", "not", "parse_header", "and", "i", "==", "'header'", ":", "continue", "if", "hasattr", "(", "slot_msg", ",", "'__slots__'", ")", ":", "(", "subs", ",", "type_map", ")", "=", "get_base_fields", "(", "slot_msg", ",", "prefix", "=", "prefix", "+", "i", "+", "'.'", ",", "parse_header", "=", "parse_header", ",", ")", "for", "i", "in", "subs", ":", "ret_val", ".", "append", "(", "i", ")", "for", "k", ",", "v", "in", "type_map", ".", "items", "(", ")", ":", "msg_types", "[", "k", "]", "=", "v", "else", ":", "ret_val", ".", "append", "(", "prefix", "+", "i", ")", "msg_types", "[", "prefix", "+", "i", "]", "=", "slot_msg", "return", "(", "ret_val", ",", "msg_types", ")" ]
function to get the full names of every message field in the message
[ "function", "to", "get", "the", "full", "names", "of", "every", "message", "field", "in", "the", "message" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L232-L254
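A self-contained sketch with stand-in slotted classes; real ROS message classes declare __slots__ the same way, but these two types are made up for illustration:

class Point(object):
    __slots__ = ['x', 'y']
    def __init__(self):
        self.x = 0.0
        self.y = 0.0

class Pose(object):
    __slots__ = ['position']
    def __init__(self):
        self.position = Point()

paths, types = get_base_fields(Pose())
# paths -> ['position.x', 'position.y']
# types -> {'position.x': 0.0, 'position.y': 0.0}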
aktaylor08/RosbagPandas
src/rosbag_pandas/rosbag_pandas.py
get_message_data
def get_message_data(msg, key): '''get the datapoint from the dot delimited message field key e.g. translation.x looks up translation then x and returns the value found in x''' data = msg paths = key.split('.') for i in paths: data = getattr(data, i) return data
python
def get_message_data(msg, key): '''get the datapoint from the dot delimited message field key e.g. translation.x looks up translation then x and returns the value found in x''' data = msg paths = key.split('.') for i in paths: data = getattr(data, i) return data
[ "def", "get_message_data", "(", "msg", ",", "key", ")", ":", "data", "=", "msg", "paths", "=", "key", ".", "split", "(", "'.'", ")", "for", "i", "in", "paths", ":", "data", "=", "getattr", "(", "data", ",", "i", ")", "return", "data" ]
get the datapoint from the dot delimited message field key e.g. translation.x looks up translation then x and returns the value found in x
[ "get", "the", "datapoint", "from", "the", "dot", "delimited", "message", "field", "key", "e", ".", "g", ".", "translation", ".", "x", "looks", "up", "translation", "then", "x", "and", "returns", "the", "value", "found", "in", "x" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/src/rosbag_pandas/rosbag_pandas.py#L257-L265
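A tiny self-contained example of the dotted attribute lookup, with namedtuples standing in for ROS messages:

from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])
Pose = namedtuple('Pose', ['position'])
msg = Pose(position=Point(x=3.5, y=-1.0))
get_message_data(msg, 'position.x')  # -> 3.5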
aktaylor08/RosbagPandas
scripts/bag2csv.py
buildParser
def buildParser(): ''' Builds the parser for reading the command line arguments''' parser = argparse.ArgumentParser( description='Script to parse bagfile to csv file') parser.add_argument('bag', help='Bag file to read', type=str) parser.add_argument('-i', '--include', help='list or regex for topics to include', nargs='*') parser.add_argument('-e', '--exclude', help='list or regex for topics to exclude', nargs='*') parser.add_argument('-o', '--output', help='name of the output file', nargs='*') parser.add_argument('-f', '--fill', help='Fill the bag forward and backwards so no missing values when present', action='store_true') parser.add_argument('--include-header', help='Include the header fields. By default they are excluded', action='store_true') return parser
python
def buildParser(): ''' Builds the parser for reading the command line arguments''' parser = argparse.ArgumentParser( description='Script to parse bagfile to csv file') parser.add_argument('bag', help='Bag file to read', type=str) parser.add_argument('-i', '--include', help='list or regex for topics to include', nargs='*') parser.add_argument('-e', '--exclude', help='list or regex for topics to exclude', nargs='*') parser.add_argument('-o', '--output', help='name of the output file', nargs='*') parser.add_argument('-f', '--fill', help='Fill the bag forward and backwards so no missing values when present', action='store_true') parser.add_argument('--include-header', help='Include the header fields. By default they are excluded', action='store_true') return parser
[ "def", "buildParser", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Script to parse bagfile to csv file'", ")", "parser", ".", "add_argument", "(", "'bag'", ",", "help", "=", "'Bag file to read'", ",", "type", "=", "str", ")", "parser", ".", "add_argument", "(", "'-i'", ",", "'--include'", ",", "help", "=", "'list or regex for topics to include'", ",", "nargs", "=", "'*'", ")", "parser", ".", "add_argument", "(", "'-e'", ",", "'--exclude'", ",", "help", "=", "'list or regex for topics to exclude'", ",", "nargs", "=", "'*'", ")", "parser", ".", "add_argument", "(", "'-o'", ",", "'--output'", ",", "help", "=", "'name of the output file'", ",", "nargs", "=", "'*'", ")", "parser", ".", "add_argument", "(", "'-f'", ",", "'--fill'", ",", "help", "=", "'Fill the bag forward and backwards so no missing values when present'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--include-header'", ",", "help", "=", "'Include the header fields. By default they are excluded'", ",", "action", "=", "'store_true'", ")", "return", "parser" ]
Builds the parser for reading the command line arguments
[ "Builds", "the", "parser", "for", "reading", "the", "command", "line", "arguments" ]
train
https://github.com/aktaylor08/RosbagPandas/blob/c2af9f22537102696dffdf2e61790362726a8403/scripts/bag2csv.py#L11-L32
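Parsing a sample command line with the parser above (the arguments are hypothetical):

parser = buildParser()
args = parser.parse_args(['example.bag', '-i', '/robot/.*', '--fill'])
# args.bag == 'example.bag', args.include == ['/robot/.*'], args.fill is True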
pydanny/webhooks
webhooks/senders/base.py
Senderable.jsonify_payload
def jsonify_payload(self): """ Dump the payload to JSON """ # Assume already json serialized if isinstance(self.payload, string_types): return self.payload return json.dumps(self.payload, cls=StandardJSONEncoder)
python
def jsonify_payload(self): """ Dump the payload to JSON """ # Assume already json serialized if isinstance(self.payload, string_types): return self.payload return json.dumps(self.payload, cls=StandardJSONEncoder)
[ "def", "jsonify_payload", "(", "self", ")", ":", "# Assume already json serialized", "if", "isinstance", "(", "self", ".", "payload", ",", "string_types", ")", ":", "return", "self", ".", "payload", "return", "json", ".", "dumps", "(", "self", ".", "payload", ",", "cls", "=", "StandardJSONEncoder", ")" ]
Dump the payload to JSON
[ "Dump", "the", "payload", "to", "JSON" ]
train
https://github.com/pydanny/webhooks/blob/84aa1c0b8f995308f4f5977270c9a0532abc16cc/webhooks/senders/base.py#L94-L99
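The early return avoids double-encoding: a payload that already arrives as a JSON string passes through untouched. A sketch, assuming a Senderable instance named sender:

sender.payload = {'event': 'order.paid', 'amount': 10}
sender.jsonify_payload()  # -> '{"event": "order.paid", "amount": 10}'
sender.payload = '{"already": "encoded"}'
sender.jsonify_payload()  # -> '{"already": "encoded"}'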
pydanny/webhooks
webhooks/senders/base.py
Senderable._send
def _send(self): """ Send the webhook method """ payload = self.payload sending_metadata = {'success': False} post_attributes = {'timeout': self.timeout} if self.custom_headers: post_attributes['headers'] = self.custom_headers if not post_attributes.get('headers', None): post_attributes['headers'] = {} post_attributes['headers']['Content-Type'] = self.encoding post_attributes['data'] = self.format_payload() if self.signing_secret: post_attributes['headers']['x-hub-signature'] = self.create_signature(post_attributes['data'], \ self.signing_secret) for i, wait in enumerate(range(len(self.attempts) - 1)): self.attempt = i + 1 sending_metadata['attempt'] = self.attempt try: print(self.url) self.response = requests.post(self.url, **post_attributes) if sys.version > '3': # Converts bytes object to str object in Python 3+ self.response_content = self.response.content.decode('utf-8') else: self.response_content = self.response.content sending_metadata['status_code'] = self.response.status_code # anything with a 200 status code is a success if self.response.status_code >= 200 and self.response.status_code < 300: # Exit the sender method. Here we provide the payload as a result. # This is useful for reporting. self.notify("Attempt {}: Successfully sent webhook {}".format( self.attempt, self.hash_value) ) sending_metadata['response'] = self.response_content sending_metadata['success'] = True break else: self.error = "Status code (%d). Message: %s" % (self.response.status_code, self.response.text) except Exception as ex: err_formatted = str(ex).replace('"',"'") sending_metadata['response'] = '{"status_code": 500, "status":"failure","error":"'+err_formatted+'"}' self.error = err_formatted self.notify("Attempt {}: Could not send webhook {}".format( self.attempt, self.hash_value) ) self.notify_debug("Webhook {}. Body: {}".format( self.hash_value, self.payload) ) # If last attempt if self.attempt == (len(self.attempts) - 1): self.notify_error("Failed to send webhook {}. Body: {}".format( self.hash_value, self.payload) ) else: # Wait a bit before the next attempt sleep(wait) sending_metadata['error'] = None if sending_metadata['success'] or not self.error else self.error sending_metadata['post_attributes'] = post_attributes merged_dict = sending_metadata.copy() if isinstance(payload, string_types): payload = {'payload': payload} # Add the hash value if there is one. if self.hash_value is not None and len(self.hash_value) > 0: payload['hash'] = self.hash_value merged_dict.update(payload) return merged_dict
python
def _send(self): """ Send the webhook method """ payload = self.payload sending_metadata = {'success': False} post_attributes = {'timeout': self.timeout} if self.custom_headers: post_attributes['headers'] = self.custom_headers if not post_attributes.get('headers', None): post_attributes['headers'] = {} post_attributes['headers']['Content-Type'] = self.encoding post_attributes['data'] = self.format_payload() if self.signing_secret: post_attributes['headers']['x-hub-signature'] = self.create_signature(post_attributes['data'], \ self.signing_secret) for i, wait in enumerate(range(len(self.attempts) - 1)): self.attempt = i + 1 sending_metadata['attempt'] = self.attempt try: print(self.url) self.response = requests.post(self.url, **post_attributes) if sys.version > '3': # Converts bytes object to str object in Python 3+ self.response_content = self.response.content.decode('utf-8') else: self.response_content = self.response.content sending_metadata['status_code'] = self.response.status_code # anything with a 200 status code is a success if self.response.status_code >= 200 and self.response.status_code < 300: # Exit the sender method. Here we provide the payload as a result. # This is useful for reporting. self.notify("Attempt {}: Successfully sent webhook {}".format( self.attempt, self.hash_value) ) sending_metadata['response'] = self.response_content sending_metadata['success'] = True break else: self.error = "Status code (%d). Message: %s" % (self.response.status_code, self.response.text) except Exception as ex: err_formatted = str(ex).replace('"',"'") sending_metadata['response'] = '{"status_code": 500, "status":"failure","error":"'+err_formatted+'"}' self.error = err_formatted self.notify("Attempt {}: Could not send webhook {}".format( self.attempt, self.hash_value) ) self.notify_debug("Webhook {}. Body: {}".format( self.hash_value, self.payload) ) # If last attempt if self.attempt == (len(self.attempts) - 1): self.notify_error("Failed to send webhook {}. Body: {}".format( self.hash_value, self.payload) ) else: # Wait a bit before the next attempt sleep(wait) sending_metadata['error'] = None if sending_metadata['success'] or not self.error else self.error sending_metadata['post_attributes'] = post_attributes merged_dict = sending_metadata.copy() if isinstance(payload, string_types): payload = {'payload': payload} # Add the hash value if there is one. if self.hash_value is not None and len(self.hash_value) > 0: payload['hash'] = self.hash_value merged_dict.update(payload) return merged_dict
[ "def", "_send", "(", "self", ")", ":", "payload", "=", "self", ".", "payload", "sending_metadata", "=", "{", "'success'", ":", "False", "}", "post_attributes", "=", "{", "'timeout'", ":", "self", ".", "timeout", "}", "if", "self", ".", "custom_headers", ":", "post_attributes", "[", "'headers'", "]", "=", "self", ".", "custom_headers", "if", "not", "post_attributes", ".", "get", "(", "'headers'", ",", "None", ")", ":", "post_attributes", "[", "'headers'", "]", "=", "{", "}", "post_attributes", "[", "'headers'", "]", "[", "'Content-Type'", "]", "=", "self", ".", "encoding", "post_attributes", "[", "'data'", "]", "=", "self", ".", "format_payload", "(", ")", "if", "self", ".", "signing_secret", ":", "post_attributes", "[", "'headers'", "]", "[", "'x-hub-signature'", "]", "=", "self", ".", "create_signature", "(", "post_attributes", "[", "'data'", "]", ",", "self", ".", "signing_secret", ")", "for", "i", ",", "wait", "in", "enumerate", "(", "range", "(", "len", "(", "self", ".", "attempts", ")", "-", "1", ")", ")", ":", "self", ".", "attempt", "=", "i", "+", "1", "sending_metadata", "[", "'attempt'", "]", "=", "self", ".", "attempt", "try", ":", "print", "(", "self", ".", "url", ")", "self", ".", "response", "=", "requests", ".", "post", "(", "self", ".", "url", ",", "*", "*", "post_attributes", ")", "if", "sys", ".", "version", ">", "'3'", ":", "# Converts bytes object to str object in Python 3+", "self", ".", "response_content", "=", "self", ".", "response", ".", "content", ".", "decode", "(", "'utf-8'", ")", "else", ":", "self", ".", "response_content", "=", "self", ".", "response", ".", "content", "sending_metadata", "[", "'status_code'", "]", "=", "self", ".", "response", ".", "status_code", "# anything with a 200 status code is a success", "if", "self", ".", "response", ".", "status_code", ">=", "200", "and", "self", ".", "response", ".", "status_code", "<", "300", ":", "# Exit the sender method. Here we provide the payload as a result.", "# This is useful for reporting.", "self", ".", "notify", "(", "\"Attempt {}: Successfully sent webhook {}\"", ".", "format", "(", "self", ".", "attempt", ",", "self", ".", "hash_value", ")", ")", "sending_metadata", "[", "'response'", "]", "=", "self", ".", "response_content", "sending_metadata", "[", "'success'", "]", "=", "True", "break", "else", ":", "self", ".", "error", "=", "\"Status code (%d). Message: %s\"", "%", "(", "self", ".", "response", ".", "status_code", ",", "self", ".", "response", ".", "text", ")", "except", "Exception", "as", "ex", ":", "err_formatted", "=", "str", "(", "ex", ")", ".", "replace", "(", "'\"'", ",", "\"'\"", ")", "sending_metadata", "[", "'response'", "]", "=", "'{\"status_code\": 500, \"status\":\"failure\",\"error\":\"'", "+", "err_formatted", "+", "'\"}'", "self", ".", "error", "=", "err_formatted", "self", ".", "notify", "(", "\"Attempt {}: Could not send webhook {}\"", ".", "format", "(", "self", ".", "attempt", ",", "self", ".", "hash_value", ")", ")", "self", ".", "notify_debug", "(", "\"Webhook {}. Body: {}\"", ".", "format", "(", "self", ".", "hash_value", ",", "self", ".", "payload", ")", ")", "# If last attempt", "if", "self", ".", "attempt", "==", "(", "len", "(", "self", ".", "attempts", ")", "-", "1", ")", ":", "self", ".", "notify_error", "(", "\"Failed to send webhook {}. 
Body: {}\"", ".", "format", "(", "self", ".", "hash_value", ",", "self", ".", "payload", ")", ")", "else", ":", "# Wait a bit before the next attempt", "sleep", "(", "wait", ")", "sending_metadata", "[", "'error'", "]", "=", "None", "if", "sending_metadata", "[", "'success'", "]", "or", "not", "self", ".", "error", "else", "self", ".", "error", "sending_metadata", "[", "'post_attributes'", "]", "=", "post_attributes", "merged_dict", "=", "sending_metadata", ".", "copy", "(", ")", "if", "isinstance", "(", "payload", ",", "string_types", ")", ":", "payload", "=", "{", "'payload'", ":", "payload", "}", "# Add the hash value if there is one.", "if", "self", ".", "hash_value", "is", "not", "None", "and", "len", "(", "self", ".", "hash_value", ")", ">", "0", ":", "payload", "[", "'hash'", "]", "=", "self", ".", "hash_value", "merged_dict", ".", "update", "(", "payload", ")", "return", "merged_dict" ]
Send the webhook method
[ "Send", "the", "webhook", "method" ]
train
https://github.com/pydanny/webhooks/blob/84aa1c0b8f995308f4f5977270c9a0532abc16cc/webhooks/senders/base.py#L114-L196
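create_signature is not shown in this excerpt; a common convention for an x-hub-signature header (GitHub-style webhooks) is an HMAC hex digest of the raw request body, sketched below under that assumption:

import hashlib
import hmac

def example_signature(body, secret):
    # HMAC-SHA1 over the raw request body, keyed by the shared signing secret
    mac = hmac.new(secret.encode('utf-8'), body.encode('utf-8'), hashlib.sha1)
    return 'sha1=' + mac.hexdigest()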
tgalal/python-axolotl
axolotl/protocol/senderkeymessage.py
SenderKeyMessage.verifySignature
def verifySignature(self, signatureKey): """ :type signatureKey: ECPublicKey """ try: parts = ByteUtil.split(self.serialized, len(self.serialized) - self.__class__.SIGNATURE_LENGTH, self.__class__.SIGNATURE_LENGTH) if not Curve.verifySignature(signatureKey, parts[0], parts[1]): raise InvalidMessageException("Invalid signature!") except InvalidKeyException as e: raise InvalidMessageException(e)
python
def verifySignature(self, signatureKey): """ :type signatureKey: ECPublicKey """ try: parts = ByteUtil.split(self.serialized, len(self.serialized) - self.__class__.SIGNATURE_LENGTH, self.__class__.SIGNATURE_LENGTH) if not Curve.verifySignature(signatureKey, parts[0], parts[1]): raise InvalidMessageException("Invalid signature!") except InvalidKeyException as e: raise InvalidMessageException(e)
[ "def", "verifySignature", "(", "self", ",", "signatureKey", ")", ":", "try", ":", "parts", "=", "ByteUtil", ".", "split", "(", "self", ".", "serialized", ",", "len", "(", "self", ".", "serialized", ")", "-", "self", ".", "__class__", ".", "SIGNATURE_LENGTH", ",", "self", ".", "__class__", ".", "SIGNATURE_LENGTH", ")", "if", "not", "Curve", ".", "verifySignature", "(", "signatureKey", ",", "parts", "[", "0", "]", ",", "parts", "[", "1", "]", ")", ":", "raise", "InvalidMessageException", "(", "\"Invalid signature!\"", ")", "except", "InvalidKeyException", "as", "e", ":", "raise", "InvalidMessageException", "(", "e", ")" ]
:type signatureKey: ECPublicKey
[ ":", "type", "signatureKey", ":", "ECPublicKey" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/protocol/senderkeymessage.py#L76-L88
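The buffer layout being split is body-then-signature; a plain-Python sketch of the same split, assuming SIGNATURE_LENGTH is 64 as in libsignal-style Curve25519 signatures:

SIGNATURE_LENGTH = 64
serialized = b'\x00' * 100  # stand-in for a serialized SenderKeyMessage
body, signature = serialized[:-SIGNATURE_LENGTH], serialized[-SIGNATURE_LENGTH:]
# Curve.verifySignature(signatureKey, body, signature) is then checked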
tgalal/python-axolotl
axolotl/protocol/senderkeymessage.py
SenderKeyMessage.getSignature
def getSignature(self, signatureKey, serialized): """ :type signatureKey: ECPrivateKey :type serialized: bytearray """ try: return Curve.calculateSignature(signatureKey, serialized) except InvalidKeyException as e: raise AssertionError(e)
python
def getSignature(self, signatureKey, serialized): """ :type signatureKey: ECPrivateKey :type serialized: bytearray """ try: return Curve.calculateSignature(signatureKey, serialized) except InvalidKeyException as e: raise AssertionError(e)
[ "def", "getSignature", "(", "self", ",", "signatureKey", ",", "serialized", ")", ":", "try", ":", "return", "Curve", ".", "calculateSignature", "(", "signatureKey", ",", "serialized", ")", "except", "InvalidKeyException", "as", "e", ":", "raise", "AssertionError", "(", "e", ")" ]
:type signatureKey: ECPrivateKey :type serialized: bytearray
[ ":", "type", "signatureKey", ":", "ECPrivateKey", ":", "type", "serialized", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/protocol/senderkeymessage.py#L90-L98
tgalal/python-axolotl
axolotl/sessionbuilder.py
SessionBuilder.process
def process(self, sessionRecord, message): """ :param sessionRecord: :param message: :type message: PreKeyWhisperMessage """ messageVersion = message.getMessageVersion() theirIdentityKey = message.getIdentityKey() unsignedPreKeyId = None if not self.identityKeyStore.isTrustedIdentity(self.recipientId, theirIdentityKey): raise UntrustedIdentityException(self.recipientId, theirIdentityKey) if messageVersion == 2: unsignedPreKeyId = self.processV2(sessionRecord, message) elif messageVersion == 3: unsignedPreKeyId = self.processV3(sessionRecord, message) else: raise AssertionError("Unknown version %s" % messageVersion) self.identityKeyStore.saveIdentity(self.recipientId, theirIdentityKey) return unsignedPreKeyId
python
def process(self, sessionRecord, message): """ :param sessionRecord: :param message: :type message: PreKeyWhisperMessage """ messageVersion = message.getMessageVersion() theirIdentityKey = message.getIdentityKey() unsignedPreKeyId = None if not self.identityKeyStore.isTrustedIdentity(self.recipientId, theirIdentityKey): raise UntrustedIdentityException(self.recipientId, theirIdentityKey) if messageVersion == 2: unsignedPreKeyId = self.processV2(sessionRecord, message) elif messageVersion == 3: unsignedPreKeyId = self.processV3(sessionRecord, message) else: raise AssertionError("Unknown version %s" % messageVersion) self.identityKeyStore.saveIdentity(self.recipientId, theirIdentityKey) return unsignedPreKeyId
[ "def", "process", "(", "self", ",", "sessionRecord", ",", "message", ")", ":", "messageVersion", "=", "message", ".", "getMessageVersion", "(", ")", "theirIdentityKey", "=", "message", ".", "getIdentityKey", "(", ")", "unsignedPreKeyId", "=", "None", "if", "not", "self", ".", "identityKeyStore", ".", "isTrustedIdentity", "(", "self", ".", "recipientId", ",", "theirIdentityKey", ")", ":", "raise", "UntrustedIdentityException", "(", "self", ".", "recipientId", ",", "theirIdentityKey", ")", "if", "messageVersion", "==", "2", ":", "unsignedPreKeyId", "=", "self", ".", "processV2", "(", "sessionRecord", ",", "message", ")", "elif", "messageVersion", "==", "3", ":", "unsignedPreKeyId", "=", "self", ".", "processV3", "(", "sessionRecord", ",", "message", ")", "else", ":", "raise", "AssertionError", "(", "\"Unknown version %s\"", "%", "messageVersion", ")", "self", ".", "identityKeyStore", ".", "saveIdentity", "(", "self", ".", "recipientId", ",", "theirIdentityKey", ")", "return", "unsignedPreKeyId" ]
:param sessionRecord: :param message: :type message: PreKeyWhisperMessage
[ ":", "param", "sessionRecord", ":", ":", "param", "message", ":", ":", "type", "message", ":", "PreKeyWhisperMessage" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessionbuilder.py#L32-L56
tgalal/python-axolotl
axolotl/sessionbuilder.py
SessionBuilder.processV2
def processV2(self, sessionRecord, message): """ :type sessionRecord: SessionRecord :type message: PreKeyWhisperMessage """ if message.getPreKeyId() is None: raise InvalidKeyIdException("V2 message requires one time prekey id!") if not self.preKeyStore.containsPreKey(message.getPreKeyId()) and \ self.sessionStore.containsSession(self.recipientId, self.deviceId): logging.warn("We've already processed the prekey part of this V2 session, " "letting bundled message fall through...") return None ourPreKey = self.preKeyStore.loadPreKey(message.getPreKeyId()).getKeyPair() parameters = BobAxolotlParameters.newBuilder() parameters.setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setOurSignedPreKey(ourPreKey)\ .setOurRatchetKey(ourPreKey)\ .setOurOneTimePreKey(None)\ .setTheirIdentityKey(message.getIdentityKey())\ .setTheirBaseKey(message.getBaseKey()) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsBob(sessionRecord.getSessionState(), message.getMessageVersion(), parameters.create()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(message.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(message.getBaseKey().serialize()) if message.getPreKeyId() != Medium.MAX_VALUE: return message.getPreKeyId() else: return None
python
def processV2(self, sessionRecord, message): """ :type sessionRecord: SessionRecord :type message: PreKeyWhisperMessage """ if message.getPreKeyId() is None: raise InvalidKeyIdException("V2 message requires one time prekey id!") if not self.preKeyStore.containsPreKey(message.getPreKeyId()) and \ self.sessionStore.containsSession(self.recipientId, self.deviceId): logging.warn("We've already processed the prekey part of this V2 session, " "letting bundled message fall through...") return None ourPreKey = self.preKeyStore.loadPreKey(message.getPreKeyId()).getKeyPair() parameters = BobAxolotlParameters.newBuilder() parameters.setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setOurSignedPreKey(ourPreKey)\ .setOurRatchetKey(ourPreKey)\ .setOurOneTimePreKey(None)\ .setTheirIdentityKey(message.getIdentityKey())\ .setTheirBaseKey(message.getBaseKey()) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsBob(sessionRecord.getSessionState(), message.getMessageVersion(), parameters.create()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(message.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(message.getBaseKey().serialize()) if message.getPreKeyId() != Medium.MAX_VALUE: return message.getPreKeyId() else: return None
[ "def", "processV2", "(", "self", ",", "sessionRecord", ",", "message", ")", ":", "if", "message", ".", "getPreKeyId", "(", ")", "is", "None", ":", "raise", "InvalidKeyIdException", "(", "\"V2 message requires one time prekey id!\"", ")", "if", "not", "self", ".", "preKeyStore", ".", "containsPreKey", "(", "message", ".", "getPreKeyId", "(", ")", ")", "and", "self", ".", "sessionStore", ".", "containsSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", ":", "logging", ".", "warn", "(", "\"We've already processed the prekey part of this V2 session, \"", "\"letting bundled message fall through...\"", ")", "return", "None", "ourPreKey", "=", "self", ".", "preKeyStore", ".", "loadPreKey", "(", "message", ".", "getPreKeyId", "(", ")", ")", ".", "getKeyPair", "(", ")", "parameters", "=", "BobAxolotlParameters", ".", "newBuilder", "(", ")", "parameters", ".", "setOurIdentityKey", "(", "self", ".", "identityKeyStore", ".", "getIdentityKeyPair", "(", ")", ")", ".", "setOurSignedPreKey", "(", "ourPreKey", ")", ".", "setOurRatchetKey", "(", "ourPreKey", ")", ".", "setOurOneTimePreKey", "(", "None", ")", ".", "setTheirIdentityKey", "(", "message", ".", "getIdentityKey", "(", ")", ")", ".", "setTheirBaseKey", "(", "message", ".", "getBaseKey", "(", ")", ")", "if", "not", "sessionRecord", ".", "isFresh", "(", ")", ":", "sessionRecord", ".", "archiveCurrentState", "(", ")", "RatchetingSession", ".", "initializeSessionAsBob", "(", "sessionRecord", ".", "getSessionState", "(", ")", ",", "message", ".", "getMessageVersion", "(", ")", ",", "parameters", ".", "create", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setLocalRegistrationId", "(", "self", ".", "identityKeyStore", ".", "getLocalRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setRemoteRegistrationId", "(", "message", ".", "getRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setAliceBaseKey", "(", "message", ".", "getBaseKey", "(", ")", ".", "serialize", "(", ")", ")", "if", "message", ".", "getPreKeyId", "(", ")", "!=", "Medium", ".", "MAX_VALUE", ":", "return", "message", ".", "getPreKeyId", "(", ")", "else", ":", "return", "None" ]
:type sessionRecord: SessionRecord :type message: PreKeyWhisperMessage
[ ":", "type", "sessionRecord", ":", "SessionRecord", ":", "type", "message", ":", "PreKeyWhisperMessage" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessionbuilder.py#L58-L98
tgalal/python-axolotl
axolotl/sessionbuilder.py
SessionBuilder.processV3
def processV3(self, sessionRecord, message): """ :param sessionRecord: :param message: :type message: PreKeyWhisperMessage :return: """ if sessionRecord.hasSessionState(message.getMessageVersion(), message.getBaseKey().serialize()): logger.warn("We've already setup a session for this V3 message, letting bundled message fall through...") return None ourSignedPreKey = self.signedPreKeyStore.loadSignedPreKey(message.getSignedPreKeyId()).getKeyPair() parameters = BobAxolotlParameters.newBuilder() parameters.setTheirBaseKey(message.getBaseKey())\ .setTheirIdentityKey(message.getIdentityKey())\ .setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setOurSignedPreKey(ourSignedPreKey)\ .setOurRatchetKey(ourSignedPreKey) if message.getPreKeyId() is not None: parameters.setOurOneTimePreKey(self.preKeyStore.loadPreKey(message.getPreKeyId()).getKeyPair()) else: parameters.setOurOneTimePreKey(None) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsBob(sessionRecord.getSessionState(), message.getMessageVersion(), parameters.create()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(message.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(message.getBaseKey().serialize()) if message.getPreKeyId() is not None and message.getPreKeyId() != Medium.MAX_VALUE: return message.getPreKeyId() else: return None
python
def processV3(self, sessionRecord, message): """ :param sessionRecord: :param message: :type message: PreKeyWhisperMessage :return: """ if sessionRecord.hasSessionState(message.getMessageVersion(), message.getBaseKey().serialize()): logger.warn("We've already setup a session for this V3 message, letting bundled message fall through...") return None ourSignedPreKey = self.signedPreKeyStore.loadSignedPreKey(message.getSignedPreKeyId()).getKeyPair() parameters = BobAxolotlParameters.newBuilder() parameters.setTheirBaseKey(message.getBaseKey())\ .setTheirIdentityKey(message.getIdentityKey())\ .setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setOurSignedPreKey(ourSignedPreKey)\ .setOurRatchetKey(ourSignedPreKey) if message.getPreKeyId() is not None: parameters.setOurOneTimePreKey(self.preKeyStore.loadPreKey(message.getPreKeyId()).getKeyPair()) else: parameters.setOurOneTimePreKey(None) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsBob(sessionRecord.getSessionState(), message.getMessageVersion(), parameters.create()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(message.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(message.getBaseKey().serialize()) if message.getPreKeyId() is not None and message.getPreKeyId() != Medium.MAX_VALUE: return message.getPreKeyId() else: return None
[ "def", "processV3", "(", "self", ",", "sessionRecord", ",", "message", ")", ":", "if", "sessionRecord", ".", "hasSessionState", "(", "message", ".", "getMessageVersion", "(", ")", ",", "message", ".", "getBaseKey", "(", ")", ".", "serialize", "(", ")", ")", ":", "logger", ".", "warn", "(", "\"We've already setup a session for this V3 message, letting bundled message fall through...\"", ")", "return", "None", "ourSignedPreKey", "=", "self", ".", "signedPreKeyStore", ".", "loadSignedPreKey", "(", "message", ".", "getSignedPreKeyId", "(", ")", ")", ".", "getKeyPair", "(", ")", "parameters", "=", "BobAxolotlParameters", ".", "newBuilder", "(", ")", "parameters", ".", "setTheirBaseKey", "(", "message", ".", "getBaseKey", "(", ")", ")", ".", "setTheirIdentityKey", "(", "message", ".", "getIdentityKey", "(", ")", ")", ".", "setOurIdentityKey", "(", "self", ".", "identityKeyStore", ".", "getIdentityKeyPair", "(", ")", ")", ".", "setOurSignedPreKey", "(", "ourSignedPreKey", ")", ".", "setOurRatchetKey", "(", "ourSignedPreKey", ")", "if", "message", ".", "getPreKeyId", "(", ")", "is", "not", "None", ":", "parameters", ".", "setOurOneTimePreKey", "(", "self", ".", "preKeyStore", ".", "loadPreKey", "(", "message", ".", "getPreKeyId", "(", ")", ")", ".", "getKeyPair", "(", ")", ")", "else", ":", "parameters", ".", "setOurOneTimePreKey", "(", "None", ")", "if", "not", "sessionRecord", ".", "isFresh", "(", ")", ":", "sessionRecord", ".", "archiveCurrentState", "(", ")", "RatchetingSession", ".", "initializeSessionAsBob", "(", "sessionRecord", ".", "getSessionState", "(", ")", ",", "message", ".", "getMessageVersion", "(", ")", ",", "parameters", ".", "create", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setLocalRegistrationId", "(", "self", ".", "identityKeyStore", ".", "getLocalRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setRemoteRegistrationId", "(", "message", ".", "getRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setAliceBaseKey", "(", "message", ".", "getBaseKey", "(", ")", ".", "serialize", "(", ")", ")", "if", "message", ".", "getPreKeyId", "(", ")", "is", "not", "None", "and", "message", ".", "getPreKeyId", "(", ")", "!=", "Medium", ".", "MAX_VALUE", ":", "return", "message", ".", "getPreKeyId", "(", ")", "else", ":", "return", "None" ]
:param sessionRecord: :param message: :type message: PreKeyWhisperMessage :return:
[ ":", "param", "sessionRecord", ":", ":", "param", "message", ":", ":", "type", "message", ":", "PreKeyWhisperMessage", ":", "return", ":" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessionbuilder.py#L100-L138
tgalal/python-axolotl
axolotl/sessionbuilder.py
SessionBuilder.processPreKeyBundle
def processPreKeyBundle(self, preKey): """ :type preKey: PreKeyBundle """ if not self.identityKeyStore.isTrustedIdentity(self.recipientId, preKey.getIdentityKey()): raise UntrustedIdentityException(self.recipientId, preKey.getIdentityKey()) if preKey.getSignedPreKey() is not None and\ not Curve.verifySignature(preKey.getIdentityKey().getPublicKey(), preKey.getSignedPreKey().serialize(), preKey.getSignedPreKeySignature()): raise InvalidKeyException("Invalid signature on device key!") if preKey.getSignedPreKey() is None and preKey.getPreKey() is None: raise InvalidKeyException("Both signed and unsigned prekeys are absent!") supportsV3 = preKey.getSignedPreKey() is not None sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId) ourBaseKey = Curve.generateKeyPair() theirSignedPreKey = preKey.getSignedPreKey() if supportsV3 else preKey.getPreKey() theirOneTimePreKey = preKey.getPreKey() theirOneTimePreKeyId = preKey.getPreKeyId() if theirOneTimePreKey is not None else None parameters = AliceAxolotlParameters.newBuilder() parameters.setOurBaseKey(ourBaseKey)\ .setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setTheirIdentityKey(preKey.getIdentityKey())\ .setTheirSignedPreKey(theirSignedPreKey)\ .setTheirRatchetKey(theirSignedPreKey)\ .setTheirOneTimePreKey(theirOneTimePreKey if supportsV3 else None) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsAlice(sessionRecord.getSessionState(), 3 if supportsV3 else 2, parameters.create()) sessionRecord.getSessionState().setUnacknowledgedPreKeyMessage(theirOneTimePreKeyId, preKey.getSignedPreKeyId(), ourBaseKey.getPublicKey()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(preKey.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(ourBaseKey.getPublicKey().serialize()) self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord) self.identityKeyStore.saveIdentity(self.recipientId, preKey.getIdentityKey())
python
def processPreKeyBundle(self, preKey): """ :type preKey: PreKeyBundle """ if not self.identityKeyStore.isTrustedIdentity(self.recipientId, preKey.getIdentityKey()): raise UntrustedIdentityException(self.recipientId, preKey.getIdentityKey()) if preKey.getSignedPreKey() is not None and\ not Curve.verifySignature(preKey.getIdentityKey().getPublicKey(), preKey.getSignedPreKey().serialize(), preKey.getSignedPreKeySignature()): raise InvalidKeyException("Invalid signature on device key!") if preKey.getSignedPreKey() is None and preKey.getPreKey() is None: raise InvalidKeyException("Both signed and unsigned prekeys are absent!") supportsV3 = preKey.getSignedPreKey() is not None sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId) ourBaseKey = Curve.generateKeyPair() theirSignedPreKey = preKey.getSignedPreKey() if supportsV3 else preKey.getPreKey() theirOneTimePreKey = preKey.getPreKey() theirOneTimePreKeyId = preKey.getPreKeyId() if theirOneTimePreKey is not None else None parameters = AliceAxolotlParameters.newBuilder() parameters.setOurBaseKey(ourBaseKey)\ .setOurIdentityKey(self.identityKeyStore.getIdentityKeyPair())\ .setTheirIdentityKey(preKey.getIdentityKey())\ .setTheirSignedPreKey(theirSignedPreKey)\ .setTheirRatchetKey(theirSignedPreKey)\ .setTheirOneTimePreKey(theirOneTimePreKey if supportsV3 else None) if not sessionRecord.isFresh(): sessionRecord.archiveCurrentState() RatchetingSession.initializeSessionAsAlice(sessionRecord.getSessionState(), 3 if supportsV3 else 2, parameters.create()) sessionRecord.getSessionState().setUnacknowledgedPreKeyMessage(theirOneTimePreKeyId, preKey.getSignedPreKeyId(), ourBaseKey.getPublicKey()) sessionRecord.getSessionState().setLocalRegistrationId(self.identityKeyStore.getLocalRegistrationId()) sessionRecord.getSessionState().setRemoteRegistrationId(preKey.getRegistrationId()) sessionRecord.getSessionState().setAliceBaseKey(ourBaseKey.getPublicKey().serialize()) self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord) self.identityKeyStore.saveIdentity(self.recipientId, preKey.getIdentityKey())
[ "def", "processPreKeyBundle", "(", "self", ",", "preKey", ")", ":", "if", "not", "self", ".", "identityKeyStore", ".", "isTrustedIdentity", "(", "self", ".", "recipientId", ",", "preKey", ".", "getIdentityKey", "(", ")", ")", ":", "raise", "UntrustedIdentityException", "(", "self", ".", "recipientId", ",", "preKey", ".", "getIdentityKey", "(", ")", ")", "if", "preKey", ".", "getSignedPreKey", "(", ")", "is", "not", "None", "and", "not", "Curve", ".", "verifySignature", "(", "preKey", ".", "getIdentityKey", "(", ")", ".", "getPublicKey", "(", ")", ",", "preKey", ".", "getSignedPreKey", "(", ")", ".", "serialize", "(", ")", ",", "preKey", ".", "getSignedPreKeySignature", "(", ")", ")", ":", "raise", "InvalidKeyException", "(", "\"Invalid signature on device key!\"", ")", "if", "preKey", ".", "getSignedPreKey", "(", ")", "is", "None", "and", "preKey", ".", "getPreKey", "(", ")", "is", "None", ":", "raise", "InvalidKeyException", "(", "\"Both signed and unsigned prekeys are absent!\"", ")", "supportsV3", "=", "preKey", ".", "getSignedPreKey", "(", ")", "is", "not", "None", "sessionRecord", "=", "self", ".", "sessionStore", ".", "loadSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", "ourBaseKey", "=", "Curve", ".", "generateKeyPair", "(", ")", "theirSignedPreKey", "=", "preKey", ".", "getSignedPreKey", "(", ")", "if", "supportsV3", "else", "preKey", ".", "getPreKey", "(", ")", "theirOneTimePreKey", "=", "preKey", ".", "getPreKey", "(", ")", "theirOneTimePreKeyId", "=", "preKey", ".", "getPreKeyId", "(", ")", "if", "theirOneTimePreKey", "is", "not", "None", "else", "None", "parameters", "=", "AliceAxolotlParameters", ".", "newBuilder", "(", ")", "parameters", ".", "setOurBaseKey", "(", "ourBaseKey", ")", ".", "setOurIdentityKey", "(", "self", ".", "identityKeyStore", ".", "getIdentityKeyPair", "(", ")", ")", ".", "setTheirIdentityKey", "(", "preKey", ".", "getIdentityKey", "(", ")", ")", ".", "setTheirSignedPreKey", "(", "theirSignedPreKey", ")", ".", "setTheirRatchetKey", "(", "theirSignedPreKey", ")", ".", "setTheirOneTimePreKey", "(", "theirOneTimePreKey", "if", "supportsV3", "else", "None", ")", "if", "not", "sessionRecord", ".", "isFresh", "(", ")", ":", "sessionRecord", ".", "archiveCurrentState", "(", ")", "RatchetingSession", ".", "initializeSessionAsAlice", "(", "sessionRecord", ".", "getSessionState", "(", ")", ",", "3", "if", "supportsV3", "else", "2", ",", "parameters", ".", "create", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setUnacknowledgedPreKeyMessage", "(", "theirOneTimePreKeyId", ",", "preKey", ".", "getSignedPreKeyId", "(", ")", ",", "ourBaseKey", ".", "getPublicKey", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setLocalRegistrationId", "(", "self", ".", "identityKeyStore", ".", "getLocalRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setRemoteRegistrationId", "(", "preKey", ".", "getRegistrationId", "(", ")", ")", "sessionRecord", ".", "getSessionState", "(", ")", ".", "setAliceBaseKey", "(", "ourBaseKey", ".", "getPublicKey", "(", ")", ".", "serialize", "(", ")", ")", "self", ".", "sessionStore", ".", "storeSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ",", "sessionRecord", ")", "self", ".", "identityKeyStore", ".", "saveIdentity", "(", "self", ".", "recipientId", ",", "preKey", ".", "getIdentityKey", "(", ")", ")" ]
:type preKey: PreKeyBundle
[ ":", "type", "preKey", ":", "PreKeyBundle" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessionbuilder.py#L140-L186
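A hedged end-to-end sketch of establishing a session from a fetched bundle; the SessionBuilder constructor arguments are assumed from the store attributes used in these methods, and the store/bundle objects come from the application:

builder = SessionBuilder(sessionStore, preKeyStore, signedPreKeyStore,
                         identityKeyStore, recipientId, deviceId)
builder.processPreKeyBundle(retrievedPreKeyBundle)  # Alice-side session is now stored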
tgalal/python-axolotl
axolotl/groups/groupsessionbuilder.py
GroupSessionBuilder.process
def process(self, senderKeyName, senderKeyDistributionMessage): """ :type senderKeyName: SenderKeyName :type senderKeyDistributionMessage: SenderKeyDistributionMessage """ senderKeyRecord = self.senderKeyStore.loadSenderKey(senderKeyName) senderKeyRecord.addSenderKeyState(senderKeyDistributionMessage.getId(), senderKeyDistributionMessage.getIteration(), senderKeyDistributionMessage.getChainKey(), senderKeyDistributionMessage.getSignatureKey()) self.senderKeyStore.storeSenderKey(senderKeyName, senderKeyRecord)
python
def process(self, senderKeyName, senderKeyDistributionMessage): """ :type senderKeyName: SenderKeyName :type senderKeyDistributionMessage: SenderKeyDistributionMessage """ senderKeyRecord = self.senderKeyStore.loadSenderKey(senderKeyName) senderKeyRecord.addSenderKeyState(senderKeyDistributionMessage.getId(), senderKeyDistributionMessage.getIteration(), senderKeyDistributionMessage.getChainKey(), senderKeyDistributionMessage.getSignatureKey()) self.senderKeyStore.storeSenderKey(senderKeyName, senderKeyRecord)
[ "def", "process", "(", "self", ",", "senderKeyName", ",", "senderKeyDistributionMessage", ")", ":", "senderKeyRecord", "=", "self", ".", "senderKeyStore", ".", "loadSenderKey", "(", "senderKeyName", ")", "senderKeyRecord", ".", "addSenderKeyState", "(", "senderKeyDistributionMessage", ".", "getId", "(", ")", ",", "senderKeyDistributionMessage", ".", "getIteration", "(", ")", ",", "senderKeyDistributionMessage", ".", "getChainKey", "(", ")", ",", "senderKeyDistributionMessage", ".", "getSignatureKey", "(", ")", ")", "self", ".", "senderKeyStore", ".", "storeSenderKey", "(", "senderKeyName", ",", "senderKeyRecord", ")" ]
:type senderKeyName: SenderKeyName
:type senderKeyDistributionMessage: SenderKeyDistributionMessage
[ ":", "type", "senderKeyName", ":", "SenderKeyName", ":", "type", "senderKeyDistributionMessage", ":", "SenderKeyDistributionMessage" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupsessionbuilder.py#L10-L20
tgalal/python-axolotl
axolotl/groups/groupsessionbuilder.py
GroupSessionBuilder.create
def create(self, senderKeyName):
    """
    :type senderKeyName: SenderKeyName
    """
    try:
        senderKeyRecord = self.senderKeyStore.loadSenderKey(senderKeyName);
        if senderKeyRecord.isEmpty() :
            senderKeyRecord.setSenderKeyState(KeyHelper.generateSenderKeyId(),
                                              0,
                                              KeyHelper.generateSenderKey(),
                                              KeyHelper.generateSenderSigningKey());
            self.senderKeyStore.storeSenderKey(senderKeyName, senderKeyRecord);

        state = senderKeyRecord.getSenderKeyState();

        return SenderKeyDistributionMessage(state.getKeyId(),
                                            state.getSenderChainKey().getIteration(),
                                            state.getSenderChainKey().getSeed(),
                                            state.getSigningKeyPublic());
    except (InvalidKeyException, InvalidKeyIdException) as e:
        raise AssertionError(e)
python
def create(self, senderKeyName):
    """
    :type senderKeyName: SenderKeyName
    """
    try:
        senderKeyRecord = self.senderKeyStore.loadSenderKey(senderKeyName);
        if senderKeyRecord.isEmpty() :
            senderKeyRecord.setSenderKeyState(KeyHelper.generateSenderKeyId(),
                                              0,
                                              KeyHelper.generateSenderKey(),
                                              KeyHelper.generateSenderSigningKey());
            self.senderKeyStore.storeSenderKey(senderKeyName, senderKeyRecord);

        state = senderKeyRecord.getSenderKeyState();

        return SenderKeyDistributionMessage(state.getKeyId(),
                                            state.getSenderChainKey().getIteration(),
                                            state.getSenderChainKey().getSeed(),
                                            state.getSigningKeyPublic());
    except (InvalidKeyException, InvalidKeyIdException) as e:
        raise AssertionError(e)
[ "def", "create", "(", "self", ",", "senderKeyName", ")", ":", "try", ":", "senderKeyRecord", "=", "self", ".", "senderKeyStore", ".", "loadSenderKey", "(", "senderKeyName", ")", "if", "senderKeyRecord", ".", "isEmpty", "(", ")", ":", "senderKeyRecord", ".", "setSenderKeyState", "(", "KeyHelper", ".", "generateSenderKeyId", "(", ")", ",", "0", ",", "KeyHelper", ".", "generateSenderKey", "(", ")", ",", "KeyHelper", ".", "generateSenderSigningKey", "(", ")", ")", "self", ".", "senderKeyStore", ".", "storeSenderKey", "(", "senderKeyName", ",", "senderKeyRecord", ")", "state", "=", "senderKeyRecord", ".", "getSenderKeyState", "(", ")", "return", "SenderKeyDistributionMessage", "(", "state", ".", "getKeyId", "(", ")", ",", "state", ".", "getSenderChainKey", "(", ")", ".", "getIteration", "(", ")", ",", "state", ".", "getSenderChainKey", "(", ")", ".", "getSeed", "(", ")", ",", "state", ".", "getSigningKeyPublic", "(", ")", ")", "except", "(", "InvalidKeyException", ",", "InvalidKeyIdException", ")", "as", "e", ":", "raise", "AssertionError", "(", "e", ")" ]
:type senderKeyName: SenderKeyName
[ ":", "type", "senderKeyName", ":", "SenderKeyName" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupsessionbuilder.py#L23-L44
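Usage illustration for the two GroupSessionBuilder methods above: the sender calls create() and distributes the result; each receiver feeds it to process(). A hedged sketch; `aliceStore`/`bobStore` stand for any SenderKeyStore implementations and are not library names.

from axolotl.groups.groupsessionbuilder import GroupSessionBuilder
from axolotl.groups.senderkeyname import SenderKeyName
from axolotl.axolotladdress import AxolotlAddress

groupSender = SenderKeyName("summer-camp", AxolotlAddress("alice", 1))

aliceBuilder = GroupSessionBuilder(aliceStore)  # aliceStore: a SenderKeyStore
bobBuilder = GroupSessionBuilder(bobStore)      # bobStore: a SenderKeyStore

distributionMessage = aliceBuilder.create(groupSender)  # sender announces her key
bobBuilder.process(groupSender, distributionMessage)    # receiver installs it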
tgalal/python-axolotl
axolotl/ratchet/ratchetingsession.py
RatchetingSession.initializeSession
def initializeSession(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: SymmetricAxolotlParameters
    """
    if RatchetingSession.isAlice(parameters.getOurBaseKey().getPublicKey(), parameters.getTheirBaseKey()):
        aliceParameters = AliceAxolotlParameters.newBuilder()
        aliceParameters.setOurBaseKey(parameters.getOurBaseKey()) \
            .setOurIdentityKey(parameters.getOurIdentityKey()) \
            .setTheirRatchetKey(parameters.getTheirRatchetKey()) \
            .setTheirIdentityKey(parameters.getTheirIdentityKey()) \
            .setTheirSignedPreKey(parameters.getTheirBaseKey()) \
            .setTheirOneTimePreKey(None)
        RatchetingSession.initializeSessionAsAlice(sessionState, sessionVersion, aliceParameters.create())
    else:
        bobParameters = BobAxolotlParameters.newBuilder()
        bobParameters.setOurIdentityKey(parameters.getOurIdentityKey()) \
            .setOurRatchetKey(parameters.getOurRatchetKey()) \
            .setOurSignedPreKey(parameters.getOurBaseKey()) \
            .setOurOneTimePreKey(None) \
            .setTheirBaseKey(parameters.getTheirBaseKey()) \
            .setTheirIdentityKey(parameters.getTheirIdentityKey())
        RatchetingSession.initializeSessionAsBob(sessionState, sessionVersion, bobParameters.create())
python
def initializeSession(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: SymmetricAxolotlParameters
    """
    if RatchetingSession.isAlice(parameters.getOurBaseKey().getPublicKey(), parameters.getTheirBaseKey()):
        aliceParameters = AliceAxolotlParameters.newBuilder()
        aliceParameters.setOurBaseKey(parameters.getOurBaseKey()) \
            .setOurIdentityKey(parameters.getOurIdentityKey()) \
            .setTheirRatchetKey(parameters.getTheirRatchetKey()) \
            .setTheirIdentityKey(parameters.getTheirIdentityKey()) \
            .setTheirSignedPreKey(parameters.getTheirBaseKey()) \
            .setTheirOneTimePreKey(None)
        RatchetingSession.initializeSessionAsAlice(sessionState, sessionVersion, aliceParameters.create())
    else:
        bobParameters = BobAxolotlParameters.newBuilder()
        bobParameters.setOurIdentityKey(parameters.getOurIdentityKey()) \
            .setOurRatchetKey(parameters.getOurRatchetKey()) \
            .setOurSignedPreKey(parameters.getOurBaseKey()) \
            .setOurOneTimePreKey(None) \
            .setTheirBaseKey(parameters.getTheirBaseKey()) \
            .setTheirIdentityKey(parameters.getTheirIdentityKey())
        RatchetingSession.initializeSessionAsBob(sessionState, sessionVersion, bobParameters.create())
[ "def", "initializeSession", "(", "sessionState", ",", "sessionVersion", ",", "parameters", ")", ":", "if", "RatchetingSession", ".", "isAlice", "(", "parameters", ".", "getOurBaseKey", "(", ")", ".", "getPublicKey", "(", ")", ",", "parameters", ".", "getTheirBaseKey", "(", ")", ")", ":", "aliceParameters", "=", "AliceAxolotlParameters", ".", "newBuilder", "(", ")", "aliceParameters", ".", "setOurBaseKey", "(", "parameters", ".", "getOurBaseKey", "(", ")", ")", ".", "setOurIdentityKey", "(", "parameters", ".", "getOurIdentityKey", "(", ")", ")", ".", "setTheirRatchetKey", "(", "parameters", ".", "getTheirRatchetKey", "(", ")", ")", ".", "setTheirIdentityKey", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ")", ".", "setTheirSignedPreKey", "(", "parameters", ".", "getTheirBaseKey", "(", ")", ")", ".", "setTheirOneTimePreKey", "(", "None", ")", "RatchetingSession", ".", "initializeSessionAsAlice", "(", "sessionState", ",", "sessionVersion", ",", "aliceParameters", ".", "create", "(", ")", ")", "else", ":", "bobParameters", "=", "BobAxolotlParameters", ".", "newBuilder", "(", ")", "bobParameters", ".", "setOurIdentityKey", "(", "parameters", ".", "getOurIdentityKey", "(", ")", ")", ".", "setOurRatchetKey", "(", "parameters", ".", "getOurRatchetKey", "(", ")", ")", ".", "setOurSignedPreKey", "(", "parameters", ".", "getOurBaseKey", "(", ")", ")", ".", "setOurOneTimePreKey", "(", "None", ")", ".", "setTheirBaseKey", "(", "parameters", ".", "getTheirBaseKey", "(", ")", ")", ".", "setTheirIdentityKey", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ")", "RatchetingSession", ".", "initializeSessionAsBob", "(", "sessionState", ",", "sessionVersion", ",", "bobParameters", ".", "create", "(", ")", ")" ]
:type sessionState: SessionState
:type sessionVersion: int
:type parameters: SymmetricAxolotlParameters
[ ":", "type", "sessionState", ":", "SessionState", ":", "type", "sessionVersion", ":", "int", ":", "type", "parameters", ":", "SymmetricAxolotlParameters" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ratchet/ratchetingsession.py#L14-L37
tgalal/python-axolotl
axolotl/ratchet/ratchetingsession.py
RatchetingSession.initializeSessionAsAlice
def initializeSessionAsAlice(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: AliceAxolotlParameters
    """
    sessionState.setSessionVersion(sessionVersion)
    sessionState.setRemoteIdentityKey(parameters.getTheirIdentityKey())
    sessionState.setLocalIdentityKey(parameters.getOurIdentityKey().getPublicKey())

    sendingRatchetKey = Curve.generateKeyPair()
    secrets = bytearray()

    if sessionVersion >= 3:
        secrets.extend(RatchetingSession.getDiscontinuityBytes())

    secrets.extend(Curve.calculateAgreement(parameters.getTheirSignedPreKey(),
                                            parameters.getOurIdentityKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirIdentityKey().getPublicKey(),
                                            parameters.getOurBaseKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirSignedPreKey(),
                                            parameters.getOurBaseKey().getPrivateKey()))

    if sessionVersion >= 3 and parameters.getTheirOneTimePreKey() is not None:
        secrets.extend(Curve.calculateAgreement(parameters.getTheirOneTimePreKey(),
                                                parameters.getOurBaseKey().getPrivateKey()))

    derivedKeys = RatchetingSession.calculateDerivedKeys(sessionVersion, secrets)
    sendingChain = derivedKeys.getRootKey().createChain(parameters.getTheirRatchetKey(), sendingRatchetKey)

    sessionState.addReceiverChain(parameters.getTheirRatchetKey(), derivedKeys.getChainKey())
    sessionState.setSenderChain(sendingRatchetKey, sendingChain[1])
    sessionState.setRootKey(sendingChain[0])
python
def initializeSessionAsAlice(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: AliceAxolotlParameters
    """
    sessionState.setSessionVersion(sessionVersion)
    sessionState.setRemoteIdentityKey(parameters.getTheirIdentityKey())
    sessionState.setLocalIdentityKey(parameters.getOurIdentityKey().getPublicKey())

    sendingRatchetKey = Curve.generateKeyPair()
    secrets = bytearray()

    if sessionVersion >= 3:
        secrets.extend(RatchetingSession.getDiscontinuityBytes())

    secrets.extend(Curve.calculateAgreement(parameters.getTheirSignedPreKey(),
                                            parameters.getOurIdentityKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirIdentityKey().getPublicKey(),
                                            parameters.getOurBaseKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirSignedPreKey(),
                                            parameters.getOurBaseKey().getPrivateKey()))

    if sessionVersion >= 3 and parameters.getTheirOneTimePreKey() is not None:
        secrets.extend(Curve.calculateAgreement(parameters.getTheirOneTimePreKey(),
                                                parameters.getOurBaseKey().getPrivateKey()))

    derivedKeys = RatchetingSession.calculateDerivedKeys(sessionVersion, secrets)
    sendingChain = derivedKeys.getRootKey().createChain(parameters.getTheirRatchetKey(), sendingRatchetKey)

    sessionState.addReceiverChain(parameters.getTheirRatchetKey(), derivedKeys.getChainKey())
    sessionState.setSenderChain(sendingRatchetKey, sendingChain[1])
    sessionState.setRootKey(sendingChain[0])
[ "def", "initializeSessionAsAlice", "(", "sessionState", ",", "sessionVersion", ",", "parameters", ")", ":", "sessionState", ".", "setSessionVersion", "(", "sessionVersion", ")", "sessionState", ".", "setRemoteIdentityKey", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ")", "sessionState", ".", "setLocalIdentityKey", "(", "parameters", ".", "getOurIdentityKey", "(", ")", ".", "getPublicKey", "(", ")", ")", "sendingRatchetKey", "=", "Curve", ".", "generateKeyPair", "(", ")", "secrets", "=", "bytearray", "(", ")", "if", "sessionVersion", ">=", "3", ":", "secrets", ".", "extend", "(", "RatchetingSession", ".", "getDiscontinuityBytes", "(", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirSignedPreKey", "(", ")", ",", "parameters", ".", "getOurIdentityKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ".", "getPublicKey", "(", ")", ",", "parameters", ".", "getOurBaseKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirSignedPreKey", "(", ")", ",", "parameters", ".", "getOurBaseKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "if", "sessionVersion", ">=", "3", "and", "parameters", ".", "getTheirOneTimePreKey", "(", ")", "is", "not", "None", ":", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirOneTimePreKey", "(", ")", ",", "parameters", ".", "getOurBaseKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "derivedKeys", "=", "RatchetingSession", ".", "calculateDerivedKeys", "(", "sessionVersion", ",", "secrets", ")", "sendingChain", "=", "derivedKeys", ".", "getRootKey", "(", ")", ".", "createChain", "(", "parameters", ".", "getTheirRatchetKey", "(", ")", ",", "sendingRatchetKey", ")", "sessionState", ".", "addReceiverChain", "(", "parameters", ".", "getTheirRatchetKey", "(", ")", ",", "derivedKeys", ".", "getChainKey", "(", ")", ")", "sessionState", ".", "setSenderChain", "(", "sendingRatchetKey", ",", "sendingChain", "[", "1", "]", ")", "sessionState", ".", "setRootKey", "(", "sendingChain", "[", "0", "]", ")" ]
:type sessionState: SessionState
:type sessionVersion: int
:type parameters: AliceAxolotlParameters
[ ":", "type", "sessionState", ":", "SessionState", ":", "type", "sessionVersion", ":", "int", ":", "type", "parameters", ":", "AliceAxolotlParameters" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ratchet/ratchetingsession.py#L40-L72
tgalal/python-axolotl
axolotl/ratchet/ratchetingsession.py
RatchetingSession.initializeSessionAsBob
def initializeSessionAsBob(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: BobAxolotlParameters
    """
    sessionState.setSessionVersion(sessionVersion)
    sessionState.setRemoteIdentityKey(parameters.getTheirIdentityKey())
    sessionState.setLocalIdentityKey(parameters.getOurIdentityKey().getPublicKey())

    secrets = bytearray()

    if sessionVersion >= 3:
        secrets.extend(RatchetingSession.getDiscontinuityBytes())

    secrets.extend(Curve.calculateAgreement(parameters.getTheirIdentityKey().getPublicKey(),
                                            parameters.getOurSignedPreKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                            parameters.getOurIdentityKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                            parameters.getOurSignedPreKey().getPrivateKey()))

    if sessionVersion >= 3 and parameters.getOurOneTimePreKey() is not None:
        secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                                parameters.getOurOneTimePreKey().getPrivateKey()))

    derivedKeys = RatchetingSession.calculateDerivedKeys(sessionVersion, secrets)

    sessionState.setSenderChain(parameters.getOurRatchetKey(), derivedKeys.getChainKey())
    sessionState.setRootKey(derivedKeys.getRootKey())
python
def initializeSessionAsBob(sessionState, sessionVersion, parameters):
    """
    :type sessionState: SessionState
    :type sessionVersion: int
    :type parameters: BobAxolotlParameters
    """
    sessionState.setSessionVersion(sessionVersion)
    sessionState.setRemoteIdentityKey(parameters.getTheirIdentityKey())
    sessionState.setLocalIdentityKey(parameters.getOurIdentityKey().getPublicKey())

    secrets = bytearray()

    if sessionVersion >= 3:
        secrets.extend(RatchetingSession.getDiscontinuityBytes())

    secrets.extend(Curve.calculateAgreement(parameters.getTheirIdentityKey().getPublicKey(),
                                            parameters.getOurSignedPreKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                            parameters.getOurIdentityKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                            parameters.getOurSignedPreKey().getPrivateKey()))

    if sessionVersion >= 3 and parameters.getOurOneTimePreKey() is not None:
        secrets.extend(Curve.calculateAgreement(parameters.getTheirBaseKey(),
                                                parameters.getOurOneTimePreKey().getPrivateKey()))

    derivedKeys = RatchetingSession.calculateDerivedKeys(sessionVersion, secrets)

    sessionState.setSenderChain(parameters.getOurRatchetKey(), derivedKeys.getChainKey())
    sessionState.setRootKey(derivedKeys.getRootKey())
[ "def", "initializeSessionAsBob", "(", "sessionState", ",", "sessionVersion", ",", "parameters", ")", ":", "sessionState", ".", "setSessionVersion", "(", "sessionVersion", ")", "sessionState", ".", "setRemoteIdentityKey", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ")", "sessionState", ".", "setLocalIdentityKey", "(", "parameters", ".", "getOurIdentityKey", "(", ")", ".", "getPublicKey", "(", ")", ")", "secrets", "=", "bytearray", "(", ")", "if", "sessionVersion", ">=", "3", ":", "secrets", ".", "extend", "(", "RatchetingSession", ".", "getDiscontinuityBytes", "(", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirIdentityKey", "(", ")", ".", "getPublicKey", "(", ")", ",", "parameters", ".", "getOurSignedPreKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirBaseKey", "(", ")", ",", "parameters", ".", "getOurIdentityKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirBaseKey", "(", ")", ",", "parameters", ".", "getOurSignedPreKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "if", "sessionVersion", ">=", "3", "and", "parameters", ".", "getOurOneTimePreKey", "(", ")", "is", "not", "None", ":", "secrets", ".", "extend", "(", "Curve", ".", "calculateAgreement", "(", "parameters", ".", "getTheirBaseKey", "(", ")", ",", "parameters", ".", "getOurOneTimePreKey", "(", ")", ".", "getPrivateKey", "(", ")", ")", ")", "derivedKeys", "=", "RatchetingSession", ".", "calculateDerivedKeys", "(", "sessionVersion", ",", "secrets", ")", "sessionState", ".", "setSenderChain", "(", "parameters", ".", "getOurRatchetKey", "(", ")", ",", "derivedKeys", ".", "getChainKey", "(", ")", ")", "sessionState", ".", "setRootKey", "(", "derivedKeys", ".", "getRootKey", "(", ")", ")" ]
:type sessionState: SessionState
:type sessionVersion: int
:type parameters: BobAxolotlParameters
[ ":", "type", "sessionState", ":", "SessionState", ":", "type", "sessionVersion", ":", "int", ":", "type", "parameters", ":", "BobAxolotlParameters" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ratchet/ratchetingsession.py#L75-L104
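Taken together, the Alice and Bob initializers above compute the same agreement terms from opposite ends: every Curve.calculateAgreement call pairs one party's private key with the other party's public key, so the concatenated secrets match byte for byte before key derivation. A minimal sketch of that symmetry (the variable names are local stand-ins, not library API beyond Curve):

from axolotl.ecc.curve import Curve

identityA = Curve.generateKeyPair()   # stands in for Alice's identity key
signedPreB = Curve.generateKeyPair()  # stands in for Bob's signed prekey

# Alice's view vs. Bob's view of the same DH term:
aliceSide = Curve.calculateAgreement(signedPreB.getPublicKey(), identityA.getPrivateKey())
bobSide = Curve.calculateAgreement(identityA.getPublicKey(), signedPreB.getPrivateKey())
assert aliceSide == bobSide  # identical bytes feed calculateDerivedKeys on both ends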
tgalal/python-axolotl
axolotl/groups/state/senderkeyrecord.py
SenderKeyRecord.addSenderKeyState
def addSenderKeyState(self, id, iteration, chainKey, signatureKey):
    """
    :type id: int
    :type iteration: int
    :type chainKey: bytearray
    :type signatureKey: ECPublicKey
    """
    self.senderKeyStates.append(SenderKeyState(id, iteration, chainKey, signatureKey))
python
def addSenderKeyState(self, id, iteration, chainKey, signatureKey):
    """
    :type id: int
    :type iteration: int
    :type chainKey: bytearray
    :type signatureKey: ECPublicKey
    """
    self.senderKeyStates.append(SenderKeyState(id, iteration, chainKey, signatureKey))
[ "def", "addSenderKeyState", "(", "self", ",", "id", ",", "iteration", ",", "chainKey", ",", "signatureKey", ")", ":", "self", ".", "senderKeyStates", ".", "append", "(", "SenderKeyState", "(", "id", ",", "iteration", ",", "chainKey", ",", "signatureKey", ")", ")" ]
:type id: int
:type iteration: int
:type chainKey: bytearray
:type signatureKey: ECPublicKey
[ ":", "type", "id", ":", "int", ":", "type", "iteration", ":", "int", ":", "type", "chainKey", ":", "bytearray", ":", "type", "signatureKey", ":", "ECPublicKey" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/state/senderkeyrecord.py#L35-L42
tgalal/python-axolotl
axolotl/groups/state/senderkeyrecord.py
SenderKeyRecord.setSenderKeyState
def setSenderKeyState(self, id, iteration, chainKey, signatureKey):
    """
    :type id: int
    :type iteration: int
    :type chainKey: bytearray
    :type signatureKey: ECKeyPair
    """
    del self.senderKeyStates[:]
    self.senderKeyStates.append(SenderKeyState(id, iteration, chainKey, signatureKeyPair=signatureKey))
python
def setSenderKeyState(self, id, iteration, chainKey, signatureKey):
    """
    :type id: int
    :type iteration: int
    :type chainKey: bytearray
    :type signatureKey: ECKeyPair
    """
    del self.senderKeyStates[:]
    self.senderKeyStates.append(SenderKeyState(id, iteration, chainKey, signatureKeyPair=signatureKey))
[ "def", "setSenderKeyState", "(", "self", ",", "id", ",", "iteration", ",", "chainKey", ",", "signatureKey", ")", ":", "del", "self", ".", "senderKeyStates", "[", ":", "]", "self", ".", "senderKeyStates", ".", "append", "(", "SenderKeyState", "(", "id", ",", "iteration", ",", "chainKey", ",", "signatureKeyPair", "=", "signatureKey", ")", ")" ]
:type id: int
:type iteration: int
:type chainKey: bytearray
:type signatureKey: ECKeyPair
[ ":", "type", "id", ":", "int", ":", "type", "iteration", ":", "int", ":", "type", "chainKey", ":", "bytearray", ":", "type", "signatureKey", ":", "ECKeyPair" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/state/senderkeyrecord.py#L44-L52
tgalal/python-axolotl
axolotl/sessioncipher.py
SessionCipher.encrypt
def encrypt(self, paddedMessage):
    """
    :type paddedMessage: str
    """
    # TODO: make this less ugly and python 2 and 3 compatible
    # paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)
    if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode:
        paddedMessage = bytearray(paddedMessage.encode())
    else:
        paddedMessage = bytearray(paddedMessage)

    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    sessionState = sessionRecord.getSessionState()
    chainKey = sessionState.getSenderChainKey()
    messageKeys = chainKey.getMessageKeys()
    senderEphemeral = sessionState.getSenderRatchetKey()
    previousCounter = sessionState.getPreviousCounter()
    sessionVersion = sessionState.getSessionVersion()

    ciphertextBody = self.getCiphertext(sessionVersion, messageKeys, paddedMessage)
    ciphertextMessage = WhisperMessage(sessionVersion, messageKeys.getMacKey(),
                                       senderEphemeral, chainKey.getIndex(),
                                       previousCounter, ciphertextBody,
                                       sessionState.getLocalIdentityKey(),
                                       sessionState.getRemoteIdentityKey())

    if sessionState.hasUnacknowledgedPreKeyMessage():
        items = sessionState.getUnacknowledgedPreKeyMessageItems()
        localRegistrationid = sessionState.getLocalRegistrationId()
        ciphertextMessage = PreKeyWhisperMessage(sessionVersion, localRegistrationid, items.getPreKeyId(),
                                                 items.getSignedPreKeyId(), items.getBaseKey(),
                                                 sessionState.getLocalIdentityKey(),
                                                 ciphertextMessage)

    sessionState.setSenderChainKey(chainKey.getNextChainKey())
    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)
    return ciphertextMessage
python
def encrypt(self, paddedMessage):
    """
    :type paddedMessage: str
    """
    # TODO: make this less ugly and python 2 and 3 compatible
    # paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)
    if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode:
        paddedMessage = bytearray(paddedMessage.encode())
    else:
        paddedMessage = bytearray(paddedMessage)

    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    sessionState = sessionRecord.getSessionState()
    chainKey = sessionState.getSenderChainKey()
    messageKeys = chainKey.getMessageKeys()
    senderEphemeral = sessionState.getSenderRatchetKey()
    previousCounter = sessionState.getPreviousCounter()
    sessionVersion = sessionState.getSessionVersion()

    ciphertextBody = self.getCiphertext(sessionVersion, messageKeys, paddedMessage)
    ciphertextMessage = WhisperMessage(sessionVersion, messageKeys.getMacKey(),
                                       senderEphemeral, chainKey.getIndex(),
                                       previousCounter, ciphertextBody,
                                       sessionState.getLocalIdentityKey(),
                                       sessionState.getRemoteIdentityKey())

    if sessionState.hasUnacknowledgedPreKeyMessage():
        items = sessionState.getUnacknowledgedPreKeyMessageItems()
        localRegistrationid = sessionState.getLocalRegistrationId()
        ciphertextMessage = PreKeyWhisperMessage(sessionVersion, localRegistrationid, items.getPreKeyId(),
                                                 items.getSignedPreKeyId(), items.getBaseKey(),
                                                 sessionState.getLocalIdentityKey(),
                                                 ciphertextMessage)

    sessionState.setSenderChainKey(chainKey.getNextChainKey())
    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)
    return ciphertextMessage
[ "def", "encrypt", "(", "self", ",", "paddedMessage", ")", ":", "# TODO: make this less ugly and python 2 and 3 compatible", "# paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)", "if", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", "and", "not", "type", "(", "paddedMessage", ")", "in", "(", "bytes", ",", "bytearray", ")", ")", "or", "type", "(", "paddedMessage", ")", "is", "unicode", ":", "paddedMessage", "=", "bytearray", "(", "paddedMessage", ".", "encode", "(", ")", ")", "else", ":", "paddedMessage", "=", "bytearray", "(", "paddedMessage", ")", "sessionRecord", "=", "self", ".", "sessionStore", ".", "loadSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", "sessionState", "=", "sessionRecord", ".", "getSessionState", "(", ")", "chainKey", "=", "sessionState", ".", "getSenderChainKey", "(", ")", "messageKeys", "=", "chainKey", ".", "getMessageKeys", "(", ")", "senderEphemeral", "=", "sessionState", ".", "getSenderRatchetKey", "(", ")", "previousCounter", "=", "sessionState", ".", "getPreviousCounter", "(", ")", "sessionVersion", "=", "sessionState", ".", "getSessionVersion", "(", ")", "ciphertextBody", "=", "self", ".", "getCiphertext", "(", "sessionVersion", ",", "messageKeys", ",", "paddedMessage", ")", "ciphertextMessage", "=", "WhisperMessage", "(", "sessionVersion", ",", "messageKeys", ".", "getMacKey", "(", ")", ",", "senderEphemeral", ",", "chainKey", ".", "getIndex", "(", ")", ",", "previousCounter", ",", "ciphertextBody", ",", "sessionState", ".", "getLocalIdentityKey", "(", ")", ",", "sessionState", ".", "getRemoteIdentityKey", "(", ")", ")", "if", "sessionState", ".", "hasUnacknowledgedPreKeyMessage", "(", ")", ":", "items", "=", "sessionState", ".", "getUnacknowledgedPreKeyMessageItems", "(", ")", "localRegistrationid", "=", "sessionState", ".", "getLocalRegistrationId", "(", ")", "ciphertextMessage", "=", "PreKeyWhisperMessage", "(", "sessionVersion", ",", "localRegistrationid", ",", "items", ".", "getPreKeyId", "(", ")", ",", "items", ".", "getSignedPreKeyId", "(", ")", ",", "items", ".", "getBaseKey", "(", ")", ",", "sessionState", ".", "getLocalIdentityKey", "(", ")", ",", "ciphertextMessage", ")", "sessionState", ".", "setSenderChainKey", "(", "chainKey", ".", "getNextChainKey", "(", ")", ")", "self", ".", "sessionStore", ".", "storeSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ",", "sessionRecord", ")", "return", "ciphertextMessage" ]
:type paddedMessage: str
[ ":", "type", "paddedMessage", ":", "str" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessioncipher.py#L34-L72
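A short usage sketch for the encrypt record above, continuing the earlier session-setup sketch (so `cipher` is the SessionCipher returned there; the message text is arbitrary):

outgoing = cipher.encrypt("hello")  # PreKeyWhisperMessage until the first reply is processed
wire_bytes = outgoing.serialize()   # the bytes actually handed to the transport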
tgalal/python-axolotl
axolotl/sessioncipher.py
SessionCipher.decryptMsg
def decryptMsg(self, ciphertext, textMsg=True):
    """
    :type ciphertext: WhisperMessage
    :type textMsg: Bool set this to False if you are decrypting bytes instead of string
    """
    if not self.sessionStore.containsSession(self.recipientId, self.deviceId):
        raise NoSessionException("No session for: %s, %s" % (self.recipientId, self.deviceId))

    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext)

    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)

    return plaintext
python
def decryptMsg(self, ciphertext, textMsg=True):
    """
    :type ciphertext: WhisperMessage
    :type textMsg: Bool set this to False if you are decrypting bytes instead of string
    """
    if not self.sessionStore.containsSession(self.recipientId, self.deviceId):
        raise NoSessionException("No session for: %s, %s" % (self.recipientId, self.deviceId))

    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext)

    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)

    return plaintext
[ "def", "decryptMsg", "(", "self", ",", "ciphertext", ",", "textMsg", "=", "True", ")", ":", "if", "not", "self", ".", "sessionStore", ".", "containsSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", ":", "raise", "NoSessionException", "(", "\"No session for: %s, %s\"", "%", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", ")", "sessionRecord", "=", "self", ".", "sessionStore", ".", "loadSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", "plaintext", "=", "self", ".", "decryptWithSessionRecord", "(", "sessionRecord", ",", "ciphertext", ")", "self", ".", "sessionStore", ".", "storeSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ",", "sessionRecord", ")", "return", "plaintext" ]
:type ciphertext: WhisperMessage
:type textMsg: Bool set this to False if you are decrypting bytes instead of string
[ ":", "type", "ciphertext", ":", "WhisperMessage", ":", "type", "textMsg", ":", "Bool", "set", "this", "to", "False", "if", "you", "are", "decrypting", "bytes", "instead", "of", "string" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessioncipher.py#L74-L89
tgalal/python-axolotl
axolotl/sessioncipher.py
SessionCipher.decryptPkmsg
def decryptPkmsg(self, ciphertext, textMsg=True):
    """
    :type ciphertext: PreKeyWhisperMessage
    """
    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    unsignedPreKeyId = self.sessionBuilder.process(sessionRecord, ciphertext)
    plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext.getWhisperMessage())

    # callback.handlePlaintext(plaintext)
    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)

    if unsignedPreKeyId is not None:
        self.preKeyStore.removePreKey(unsignedPreKeyId)

    return plaintext
python
def decryptPkmsg(self, ciphertext, textMsg=True):
    """
    :type ciphertext: PreKeyWhisperMessage
    """
    sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId)
    unsignedPreKeyId = self.sessionBuilder.process(sessionRecord, ciphertext)
    plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext.getWhisperMessage())

    # callback.handlePlaintext(plaintext)
    self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord)

    if unsignedPreKeyId is not None:
        self.preKeyStore.removePreKey(unsignedPreKeyId)

    return plaintext
[ "def", "decryptPkmsg", "(", "self", ",", "ciphertext", ",", "textMsg", "=", "True", ")", ":", "sessionRecord", "=", "self", ".", "sessionStore", ".", "loadSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ")", "unsignedPreKeyId", "=", "self", ".", "sessionBuilder", ".", "process", "(", "sessionRecord", ",", "ciphertext", ")", "plaintext", "=", "self", ".", "decryptWithSessionRecord", "(", "sessionRecord", ",", "ciphertext", ".", "getWhisperMessage", "(", ")", ")", "# callback.handlePlaintext(plaintext)", "self", ".", "sessionStore", ".", "storeSession", "(", "self", ".", "recipientId", ",", "self", ".", "deviceId", ",", "sessionRecord", ")", "if", "unsignedPreKeyId", "is", "not", "None", ":", "self", ".", "preKeyStore", ".", "removePreKey", "(", "unsignedPreKeyId", ")", "return", "plaintext" ]
:type ciphertext: PreKeyWhisperMessage
[ ":", "type", "ciphertext", ":", "PreKeyWhisperMessage" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessioncipher.py#L91-L105
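A hedged receive-path sketch tying the two decrypt entry points above together; message-type dispatch is the caller's job in this library, and the module paths are read from this revision, so treat them as assumptions:

from axolotl.protocol.prekeywhispermessage import PreKeyWhisperMessage
from axolotl.protocol.whispermessage import WhisperMessage

def receive(cipher, wire_bytes, is_prekey_message):
    # The transport must carry the message-type flag out of band.
    if is_prekey_message:
        return cipher.decryptPkmsg(PreKeyWhisperMessage(serialized=wire_bytes))
    return cipher.decryptMsg(WhisperMessage(serialized=wire_bytes))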
tgalal/python-axolotl
axolotl/sessioncipher.py
SessionCipher.decryptWithSessionRecord
def decryptWithSessionRecord(self, sessionRecord, cipherText):
    """
    :type sessionRecord: SessionRecord
    :type cipherText: WhisperMessage
    """
    previousStates = sessionRecord.getPreviousSessionStates()
    exceptions = []
    try:
        sessionState = SessionState(sessionRecord.getSessionState())
        plaintext = self.decryptWithSessionState(sessionState, cipherText)
        sessionRecord.setState(sessionState)
        return plaintext
    except InvalidMessageException as e:
        exceptions.append(e)

    for i in range(0, len(previousStates)):
        previousState = previousStates[i]
        try:
            promotedState = SessionState(previousState)
            plaintext = self.decryptWithSessionState(promotedState, cipherText)
            previousStates.pop(i)
            sessionRecord.promoteState(promotedState)
            return plaintext
        except InvalidMessageException as e:
            exceptions.append(e)

    raise InvalidMessageException("No valid sessions", exceptions)
python
def decryptWithSessionRecord(self, sessionRecord, cipherText):
    """
    :type sessionRecord: SessionRecord
    :type cipherText: WhisperMessage
    """
    previousStates = sessionRecord.getPreviousSessionStates()
    exceptions = []
    try:
        sessionState = SessionState(sessionRecord.getSessionState())
        plaintext = self.decryptWithSessionState(sessionState, cipherText)
        sessionRecord.setState(sessionState)
        return plaintext
    except InvalidMessageException as e:
        exceptions.append(e)

    for i in range(0, len(previousStates)):
        previousState = previousStates[i]
        try:
            promotedState = SessionState(previousState)
            plaintext = self.decryptWithSessionState(promotedState, cipherText)
            previousStates.pop(i)
            sessionRecord.promoteState(promotedState)
            return plaintext
        except InvalidMessageException as e:
            exceptions.append(e)

    raise InvalidMessageException("No valid sessions", exceptions)
[ "def", "decryptWithSessionRecord", "(", "self", ",", "sessionRecord", ",", "cipherText", ")", ":", "previousStates", "=", "sessionRecord", ".", "getPreviousSessionStates", "(", ")", "exceptions", "=", "[", "]", "try", ":", "sessionState", "=", "SessionState", "(", "sessionRecord", ".", "getSessionState", "(", ")", ")", "plaintext", "=", "self", ".", "decryptWithSessionState", "(", "sessionState", ",", "cipherText", ")", "sessionRecord", ".", "setState", "(", "sessionState", ")", "return", "plaintext", "except", "InvalidMessageException", "as", "e", ":", "exceptions", ".", "append", "(", "e", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "previousStates", ")", ")", ":", "previousState", "=", "previousStates", "[", "i", "]", "try", ":", "promotedState", "=", "SessionState", "(", "previousState", ")", "plaintext", "=", "self", ".", "decryptWithSessionState", "(", "promotedState", ",", "cipherText", ")", "previousStates", ".", "pop", "(", "i", ")", "sessionRecord", ".", "promoteState", "(", "promotedState", ")", "return", "plaintext", "except", "InvalidMessageException", "as", "e", ":", "exceptions", ".", "append", "(", "e", ")", "raise", "InvalidMessageException", "(", "\"No valid sessions\"", ",", "exceptions", ")" ]
:type sessionRecord: SessionRecord
:type cipherText: WhisperMessage
[ ":", "type", "sessionRecord", ":", "SessionRecord", ":", "type", "cipherText", ":", "WhisperMessage" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessioncipher.py#L107-L134
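The method above is an instance of a try-each-candidate fallback: attempt the current state first, then each archived state, collecting the failures in case nothing works. A stripped-down sketch of the same control flow with invented names:

def first_success(candidates, attempt):
    # Try each candidate in order; remember why each one failed.
    errors = []
    for candidate in candidates:
        try:
            return attempt(candidate)
        except ValueError as exc:  # stand-in for InvalidMessageException
            errors.append(exc)
    raise ValueError("no candidate succeeded", errors)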
tgalal/python-axolotl
axolotl/sessioncipher.py
SessionCipher.getCiphertext
def getCiphertext(self, version, messageKeys, plainText):
    """
    :type version: int
    :type messageKeys: MessageKeys
    :type plainText: bytearray
    """
    cipher = None
    if version >= 3:
        cipher = self.getCipher(messageKeys.getCipherKey(), messageKeys.getIv())
    else:
        cipher = self.getCipher_v2(messageKeys.getCipherKey(), messageKeys.getCounter())

    return cipher.encrypt(bytes(plainText))
python
def getCiphertext(self, version, messageKeys, plainText):
    """
    :type version: int
    :type messageKeys: MessageKeys
    :type plainText: bytearray
    """
    cipher = None
    if version >= 3:
        cipher = self.getCipher(messageKeys.getCipherKey(), messageKeys.getIv())
    else:
        cipher = self.getCipher_v2(messageKeys.getCipherKey(), messageKeys.getCounter())

    return cipher.encrypt(bytes(plainText))
[ "def", "getCiphertext", "(", "self", ",", "version", ",", "messageKeys", ",", "plainText", ")", ":", "cipher", "=", "None", "if", "version", ">=", "3", ":", "cipher", "=", "self", ".", "getCipher", "(", "messageKeys", ".", "getCipherKey", "(", ")", ",", "messageKeys", ".", "getIv", "(", ")", ")", "else", ":", "cipher", "=", "self", ".", "getCipher_v2", "(", "messageKeys", ".", "getCipherKey", "(", ")", ",", "messageKeys", ".", "getCounter", "(", ")", ")", "return", "cipher", ".", "encrypt", "(", "bytes", "(", "plainText", ")", ")" ]
:type version: int
:type messageKeys: MessageKeys
:type plainText: bytearray
[ ":", "type", "version", ":", "int", ":", "type", "messageKeys", ":", "MessageKeys", ":", "type", "plainText", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/sessioncipher.py#L197-L209
tgalal/python-axolotl
axolotl/ecc/curve.py
Curve.calculateAgreement
def calculateAgreement(publicKey, privateKey):
    """
    :type publicKey: ECPublicKey
    :type privateKey: ECPrivateKey
    """
    if publicKey.getType() != privateKey.getType():
        raise InvalidKeyException("Public and private keys must be of the same type!")

    if publicKey.getType() == Curve.DJB_TYPE:
        return _curve.calculateAgreement(privateKey.getPrivateKey(), publicKey.getPublicKey())
    else:
        raise InvalidKeyException("Unknown type: %s" % publicKey.getType())
python
def calculateAgreement(publicKey, privateKey):
    """
    :type publicKey: ECPublicKey
    :type privateKey: ECPrivateKey
    """
    if publicKey.getType() != privateKey.getType():
        raise InvalidKeyException("Public and private keys must be of the same type!")

    if publicKey.getType() == Curve.DJB_TYPE:
        return _curve.calculateAgreement(privateKey.getPrivateKey(), publicKey.getPublicKey())
    else:
        raise InvalidKeyException("Unknown type: %s" % publicKey.getType())
[ "def", "calculateAgreement", "(", "publicKey", ",", "privateKey", ")", ":", "if", "publicKey", ".", "getType", "(", ")", "!=", "privateKey", ".", "getType", "(", ")", ":", "raise", "InvalidKeyException", "(", "\"Public and private keys must be of the same type!\"", ")", "if", "publicKey", ".", "getType", "(", ")", "==", "Curve", ".", "DJB_TYPE", ":", "return", "_curve", ".", "calculateAgreement", "(", "privateKey", ".", "getPrivateKey", "(", ")", ",", "publicKey", ".", "getPublicKey", "(", ")", ")", "else", ":", "raise", "InvalidKeyException", "(", "\"Unknown type: %s\"", "%", "publicKey", ".", "getType", "(", ")", ")" ]
:type publicKey: ECPublicKey
:type privateKey: ECPrivateKey
[ ":", "type", "publicKey", ":", "ECPublicKey", ":", "type", "privateKey", ":", "ECPrivateKey" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ecc/curve.py#L53-L64
tgalal/python-axolotl
axolotl/ecc/curve.py
Curve.verifySignature
def verifySignature(ecPublicSigningKey, message, signature):
    """
    :type ecPublicSigningKey: ECPublicKey
    :type message: bytearray
    :type signature: bytearray
    """
    if ecPublicSigningKey.getType() == Curve.DJB_TYPE:
        result = _curve.verifySignature(ecPublicSigningKey.getPublicKey(), message, signature)
        return result == 0
    else:
        raise InvalidKeyException("Unknown type: %s" % ecPublicSigningKey.getType())
python
def verifySignature(ecPublicSigningKey, message, signature):
    """
    :type ecPublicSigningKey: ECPublicKey
    :type message: bytearray
    :type signature: bytearray
    """
    if ecPublicSigningKey.getType() == Curve.DJB_TYPE:
        result = _curve.verifySignature(ecPublicSigningKey.getPublicKey(), message, signature)
        return result == 0
    else:
        raise InvalidKeyException("Unknown type: %s" % ecPublicSigningKey.getType())
[ "def", "verifySignature", "(", "ecPublicSigningKey", ",", "message", ",", "signature", ")", ":", "if", "ecPublicSigningKey", ".", "getType", "(", ")", "==", "Curve", ".", "DJB_TYPE", ":", "result", "=", "_curve", ".", "verifySignature", "(", "ecPublicSigningKey", ".", "getPublicKey", "(", ")", ",", "message", ",", "signature", ")", "return", "result", "==", "0", "else", ":", "raise", "InvalidKeyException", "(", "\"Unknown type: %s\"", "%", "ecPublicSigningKey", ".", "getType", "(", ")", ")" ]
:type ecPublicSigningKey: ECPublicKey
:type message: bytearray
:type signature: bytearray
[ ":", "type", "ecPublicSigningKey", ":", "ECPublicKey", ":", "type", "message", ":", "bytearray", ":", "type", "signature", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ecc/curve.py#L67-L78
tgalal/python-axolotl
axolotl/ecc/curve.py
Curve.calculateSignature
def calculateSignature(privateSigningKey, message):
    """
    :type privateSigningKey: ECPrivateKey
    :type message: bytearray
    """
    if privateSigningKey.getType() == Curve.DJB_TYPE:
        rand = os.urandom(64)
        res = _curve.calculateSignature(rand, privateSigningKey.getPrivateKey(), message)
        return res
    else:
        raise InvalidKeyException("Unknown type: %s" % privateSigningKey.getType())
python
def calculateSignature(privateSigningKey, message):
    """
    :type privateSigningKey: ECPrivateKey
    :type message: bytearray
    """
    if privateSigningKey.getType() == Curve.DJB_TYPE:
        rand = os.urandom(64)
        res = _curve.calculateSignature(rand, privateSigningKey.getPrivateKey(), message)
        return res
    else:
        raise InvalidKeyException("Unknown type: %s" % privateSigningKey.getType())
[ "def", "calculateSignature", "(", "privateSigningKey", ",", "message", ")", ":", "if", "privateSigningKey", ".", "getType", "(", ")", "==", "Curve", ".", "DJB_TYPE", ":", "rand", "=", "os", ".", "urandom", "(", "64", ")", "res", "=", "_curve", ".", "calculateSignature", "(", "rand", ",", "privateSigningKey", ".", "getPrivateKey", "(", ")", ",", "message", ")", "return", "res", "else", ":", "raise", "InvalidKeyException", "(", "\"Unknown type: %s\"", "%", "privateSigningKey", ".", "getType", "(", ")", ")" ]
:type privateSigningKey: ECPrivateKey
:type message: bytearray
[ ":", "type", "privateSigningKey", ":", "ECPrivateKey", ":", "type", "message", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/ecc/curve.py#L81-L91
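A sign-then-verify roundtrip using the two Curve methods documented above; a minimal sketch with locally generated keys:

from axolotl.ecc.curve import Curve

signingKey = Curve.generateKeyPair()
message = bytearray(b"device key material")
signature = Curve.calculateSignature(signingKey.getPrivateKey(), message)
assert Curve.verifySignature(signingKey.getPublicKey(), message, signature)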
tgalal/python-axolotl
axolotl/groups/groupcipher.py
GroupCipher.encrypt
def encrypt(self, paddedPlaintext):
    """
    :type paddedPlaintext: str
    """
    # TODO: make this less ugly and python 2 and 3 compatible
    # paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)
    if (sys.version_info >= (3, 0) and not type(paddedPlaintext) in (bytes, bytearray)) or type(paddedPlaintext) is unicode:
        paddedPlaintext = bytearray(paddedPlaintext.encode())
    else:
        paddedPlaintext = bytearray(paddedPlaintext)

    try:
        record = self.senderKeyStore.loadSenderKey(self.senderKeyName)
        senderKeyState = record.getSenderKeyState()
        senderKey = senderKeyState.getSenderChainKey().getSenderMessageKey()
        ciphertext = self.getCipherText(senderKey.getIv(), senderKey.getCipherKey(), paddedPlaintext)

        senderKeyMessage = SenderKeyMessage(senderKeyState.getKeyId(),
                                            senderKey.getIteration(),
                                            ciphertext,
                                            senderKeyState.getSigningKeyPrivate())

        senderKeyState.setSenderChainKey(senderKeyState.getSenderChainKey().getNext())
        self.senderKeyStore.storeSenderKey(self.senderKeyName, record)

        return senderKeyMessage.serialize()
    except InvalidKeyIdException as e:
        raise NoSessionException(e)
python
def encrypt(self, paddedPlaintext):
    """
    :type paddedPlaintext: str
    """
    # TODO: make this less ugly and python 2 and 3 compatible
    # paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)
    if (sys.version_info >= (3, 0) and not type(paddedPlaintext) in (bytes, bytearray)) or type(paddedPlaintext) is unicode:
        paddedPlaintext = bytearray(paddedPlaintext.encode())
    else:
        paddedPlaintext = bytearray(paddedPlaintext)

    try:
        record = self.senderKeyStore.loadSenderKey(self.senderKeyName)
        senderKeyState = record.getSenderKeyState()
        senderKey = senderKeyState.getSenderChainKey().getSenderMessageKey()
        ciphertext = self.getCipherText(senderKey.getIv(), senderKey.getCipherKey(), paddedPlaintext)

        senderKeyMessage = SenderKeyMessage(senderKeyState.getKeyId(),
                                            senderKey.getIteration(),
                                            ciphertext,
                                            senderKeyState.getSigningKeyPrivate())

        senderKeyState.setSenderChainKey(senderKeyState.getSenderChainKey().getNext())
        self.senderKeyStore.storeSenderKey(self.senderKeyName, record)

        return senderKeyMessage.serialize()
    except InvalidKeyIdException as e:
        raise NoSessionException(e)
[ "def", "encrypt", "(", "self", ",", "paddedPlaintext", ")", ":", "# TODO: make this less ugly and python 2 and 3 compatible", "# paddedMessage = bytearray(paddedMessage.encode() if (sys.version_info >= (3, 0) and not type(paddedMessage) in (bytes, bytearray)) or type(paddedMessage) is unicode else paddedMessage)", "if", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", "and", "not", "type", "(", "paddedPlaintext", ")", "in", "(", "bytes", ",", "bytearray", ")", ")", "or", "type", "(", "paddedPlaintext", ")", "is", "unicode", ":", "paddedPlaintext", "=", "bytearray", "(", "paddedPlaintext", ".", "encode", "(", ")", ")", "else", ":", "paddedPlaintext", "=", "bytearray", "(", "paddedPlaintext", ")", "try", ":", "record", "=", "self", ".", "senderKeyStore", ".", "loadSenderKey", "(", "self", ".", "senderKeyName", ")", "senderKeyState", "=", "record", ".", "getSenderKeyState", "(", ")", "senderKey", "=", "senderKeyState", ".", "getSenderChainKey", "(", ")", ".", "getSenderMessageKey", "(", ")", "ciphertext", "=", "self", ".", "getCipherText", "(", "senderKey", ".", "getIv", "(", ")", ",", "senderKey", ".", "getCipherKey", "(", ")", ",", "paddedPlaintext", ")", "senderKeyMessage", "=", "SenderKeyMessage", "(", "senderKeyState", ".", "getKeyId", "(", ")", ",", "senderKey", ".", "getIteration", "(", ")", ",", "ciphertext", ",", "senderKeyState", ".", "getSigningKeyPrivate", "(", ")", ")", "senderKeyState", ".", "setSenderChainKey", "(", "senderKeyState", ".", "getSenderChainKey", "(", ")", ".", "getNext", "(", ")", ")", "self", ".", "senderKeyStore", ".", "storeSenderKey", "(", "self", ".", "senderKeyName", ",", "record", ")", "return", "senderKeyMessage", ".", "serialize", "(", ")", "except", "InvalidKeyIdException", "as", "e", ":", "raise", "NoSessionException", "(", "e", ")" ]
:type paddedPlaintext: str
[ ":", "type", "paddedPlaintext", ":", "str" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupcipher.py#L23-L50
tgalal/python-axolotl
axolotl/groups/groupcipher.py
GroupCipher.decrypt
def decrypt(self, senderKeyMessageBytes):
    """
    :type senderKeyMessageBytes: bytearray
    """
    try:
        record = self.senderKeyStore.loadSenderKey(self.senderKeyName)

        if record.isEmpty():
            raise NoSessionException("No sender key for: %s" % self.senderKeyName)

        senderKeyMessage = SenderKeyMessage(serialized = bytes(senderKeyMessageBytes))
        senderKeyState = record.getSenderKeyState(senderKeyMessage.getKeyId())

        senderKeyMessage.verifySignature(senderKeyState.getSigningKeyPublic())
        senderKey = self.getSenderKey(senderKeyState, senderKeyMessage.getIteration())

        plaintext = self.getPlainText(senderKey.getIv(), senderKey.getCipherKey(), senderKeyMessage.getCipherText())

        self.senderKeyStore.storeSenderKey(self.senderKeyName, record)

        return plaintext
    except (InvalidKeyException, InvalidKeyIdException) as e:
        raise InvalidMessageException(e)
python
def decrypt(self, senderKeyMessageBytes):
    """
    :type senderKeyMessageBytes: bytearray
    """
    try:
        record = self.senderKeyStore.loadSenderKey(self.senderKeyName)

        if record.isEmpty():
            raise NoSessionException("No sender key for: %s" % self.senderKeyName)

        senderKeyMessage = SenderKeyMessage(serialized = bytes(senderKeyMessageBytes))
        senderKeyState = record.getSenderKeyState(senderKeyMessage.getKeyId())

        senderKeyMessage.verifySignature(senderKeyState.getSigningKeyPublic())
        senderKey = self.getSenderKey(senderKeyState, senderKeyMessage.getIteration())

        plaintext = self.getPlainText(senderKey.getIv(), senderKey.getCipherKey(), senderKeyMessage.getCipherText())

        self.senderKeyStore.storeSenderKey(self.senderKeyName, record)

        return plaintext
    except (InvalidKeyException, InvalidKeyIdException) as e:
        raise InvalidMessageException(e)
[ "def", "decrypt", "(", "self", ",", "senderKeyMessageBytes", ")", ":", "try", ":", "record", "=", "self", ".", "senderKeyStore", ".", "loadSenderKey", "(", "self", ".", "senderKeyName", ")", "if", "record", ".", "isEmpty", "(", ")", ":", "raise", "NoSessionException", "(", "\"No sender key for: %s\"", "%", "self", ".", "senderKeyName", ")", "senderKeyMessage", "=", "SenderKeyMessage", "(", "serialized", "=", "bytes", "(", "senderKeyMessageBytes", ")", ")", "senderKeyState", "=", "record", ".", "getSenderKeyState", "(", "senderKeyMessage", ".", "getKeyId", "(", ")", ")", "senderKeyMessage", ".", "verifySignature", "(", "senderKeyState", ".", "getSigningKeyPublic", "(", ")", ")", "senderKey", "=", "self", ".", "getSenderKey", "(", "senderKeyState", ",", "senderKeyMessage", ".", "getIteration", "(", ")", ")", "plaintext", "=", "self", ".", "getPlainText", "(", "senderKey", ".", "getIv", "(", ")", ",", "senderKey", ".", "getCipherKey", "(", ")", ",", "senderKeyMessage", ".", "getCipherText", "(", ")", ")", "self", ".", "senderKeyStore", ".", "storeSenderKey", "(", "self", ".", "senderKeyName", ",", "record", ")", "return", "plaintext", "except", "(", "InvalidKeyException", ",", "InvalidKeyIdException", ")", "as", "e", ":", "raise", "InvalidMessageException", "(", "e", ")" ]
:type senderKeyMessageBytes: bytearray
[ ":", "type", "senderKeyMessageBytes", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupcipher.py#L52-L73
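A group-message roundtrip sketch for the encrypt/decrypt pair above, continuing the earlier GroupSessionBuilder handshake (so `aliceStore`, `bobStore`, and `groupSender` are the illustrative names from that sketch):

from axolotl.groups.groupcipher import GroupCipher

aliceGroupCipher = GroupCipher(aliceStore, groupSender)
bobGroupCipher = GroupCipher(bobStore, groupSender)

wire = aliceGroupCipher.encrypt("hello group")   # serialized SenderKeyMessage
print(bobGroupCipher.decrypt(bytearray(wire)))   # -> "hello group"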
tgalal/python-axolotl
axolotl/groups/groupcipher.py
GroupCipher.getPlainText
def getPlainText(self, iv, key, ciphertext):
    """
    :type iv: bytearray
    :type key: bytearray
    :type ciphertext: bytearray
    """
    try:
        cipher = AESCipher(key, iv)
        plaintext = cipher.decrypt(ciphertext)
        if sys.version_info >= (3, 0):
            return plaintext.decode()
        return plaintext
    except Exception as e:
        raise InvalidMessageException(e)
python
def getPlainText(self, iv, key, ciphertext):
    """
    :type iv: bytearray
    :type key: bytearray
    :type ciphertext: bytearray
    """
    try:
        cipher = AESCipher(key, iv)
        plaintext = cipher.decrypt(ciphertext)
        if sys.version_info >= (3, 0):
            return plaintext.decode()
        return plaintext
    except Exception as e:
        raise InvalidMessageException(e)
[ "def", "getPlainText", "(", "self", ",", "iv", ",", "key", ",", "ciphertext", ")", ":", "try", ":", "cipher", "=", "AESCipher", "(", "key", ",", "iv", ")", "plaintext", "=", "cipher", ".", "decrypt", "(", "ciphertext", ")", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", ":", "return", "plaintext", ".", "decode", "(", ")", "return", "plaintext", "except", "Exception", "as", "e", ":", "raise", "InvalidMessageException", "(", "e", ")" ]
:type iv: bytearray
:type key: bytearray
:type ciphertext: bytearray
[ ":", "type", "iv", ":", "bytearray", ":", "type", "key", ":", "bytearray", ":", "type", "ciphertext", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupcipher.py#L95-L108
tgalal/python-axolotl
axolotl/groups/groupcipher.py
GroupCipher.getCipherText
def getCipherText(self, iv, key, plaintext):
    """
    :type iv: bytearray
    :type key: bytearray
    :type plaintext: bytearray
    """
    cipher = AESCipher(key, iv)
    return cipher.encrypt(bytes(plaintext))
python
def getCipherText(self, iv, key, plaintext):
    """
    :type iv: bytearray
    :type key: bytearray
    :type plaintext: bytearray
    """
    cipher = AESCipher(key, iv)
    return cipher.encrypt(bytes(plaintext))
[ "def", "getCipherText", "(", "self", ",", "iv", ",", "key", ",", "plaintext", ")", ":", "cipher", "=", "AESCipher", "(", "key", ",", "iv", ")", "return", "cipher", ".", "encrypt", "(", "bytes", "(", "plaintext", ")", ")" ]
:type iv: bytearray
:type key: bytearray
:type plaintext: bytearray
[ ":", "type", "iv", ":", "bytearray", ":", "type", "key", ":", "bytearray", ":", "type", "plaintext", ":", "bytearray" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/groups/groupcipher.py#L110-L117
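The two helpers above are inverses for a fixed key/IV pair. A hedged roundtrip sketch reusing `bobGroupCipher` from the earlier group sketch, since both helpers only touch their arguments; the key and IV sizes are assumptions for the underlying AES-CBC cipher:

import os

key, iv = os.urandom(32), os.urandom(16)  # sizes assumed, not library-verified
ct = bobGroupCipher.getCipherText(iv, key, bytearray(b"payload"))
assert bobGroupCipher.getPlainText(iv, key, ct) == "payload"  # str on Python 3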
tgalal/python-axolotl
axolotl/state/sessionstate.py
SessionState.setPendingKeyExchange
def setPendingKeyExchange(self, sequence, ourBaseKey, ourRatchetKey, ourIdentityKey):
    """
    :type sequence: int
    :type ourBaseKey: ECKeyPair
    :type ourRatchetKey: ECKeyPair
    :type ourIdentityKey: IdentityKeyPair
    """
    structure = self.sessionStructure.PendingKeyExchange()
    structure.sequence = sequence
    structure.localBaseKey = ourBaseKey.getPublicKey().serialize()
    structure.localBaseKeyPrivate = ourBaseKey.getPrivateKey().serialize()
    structure.localRatchetKey = ourRatchetKey.getPublicKey().serialize()
    structure.localRatchetKeyPrivate = ourRatchetKey.getPrivateKey().serialize()
    structure.localIdentityKey = ourIdentityKey.getPublicKey().serialize()
    structure.localIdentityKeyPrivate = ourIdentityKey.getPrivateKey().serialize()

    self.sessionStructure.pendingKeyExchange.MergeFrom(structure)
python
def setPendingKeyExchange(self, sequence, ourBaseKey, ourRatchetKey, ourIdentityKey):
    """
    :type sequence: int
    :type ourBaseKey: ECKeyPair
    :type ourRatchetKey: ECKeyPair
    :type ourIdentityKey: IdentityKeyPair
    """
    structure = self.sessionStructure.PendingKeyExchange()
    structure.sequence = sequence
    structure.localBaseKey = ourBaseKey.getPublicKey().serialize()
    structure.localBaseKeyPrivate = ourBaseKey.getPrivateKey().serialize()
    structure.localRatchetKey = ourRatchetKey.getPublicKey().serialize()
    structure.localRatchetKeyPrivate = ourRatchetKey.getPrivateKey().serialize()
    structure.localIdentityKey = ourIdentityKey.getPublicKey().serialize()
    structure.localIdentityKeyPrivate = ourIdentityKey.getPrivateKey().serialize()

    self.sessionStructure.pendingKeyExchange.MergeFrom(structure)
[ "def", "setPendingKeyExchange", "(", "self", ",", "sequence", ",", "ourBaseKey", ",", "ourRatchetKey", ",", "ourIdentityKey", ")", ":", "structure", "=", "self", ".", "sessionStructure", ".", "PendingKeyExchange", "(", ")", "structure", ".", "sequence", "=", "sequence", "structure", ".", "localBaseKey", "=", "ourBaseKey", ".", "getPublicKey", "(", ")", ".", "serialize", "(", ")", "structure", ".", "localBaseKeyPrivate", "=", "ourBaseKey", ".", "getPrivateKey", "(", ")", ".", "serialize", "(", ")", "structure", ".", "localRatchetKey", "=", "ourRatchetKey", ".", "getPublicKey", "(", ")", ".", "serialize", "(", ")", "structure", ".", "localRatchetKeyPrivate", "=", "ourRatchetKey", ".", "getPrivateKey", "(", ")", ".", "serialize", "(", ")", "structure", ".", "localIdentityKey", "=", "ourIdentityKey", ".", "getPublicKey", "(", ")", ".", "serialize", "(", ")", "structure", ".", "localIdentityKeyPrivate", "=", "ourIdentityKey", ".", "getPrivateKey", "(", ")", ".", "serialize", "(", ")", "self", ".", "sessionStructure", ".", "pendingKeyExchange", ".", "MergeFrom", "(", "structure", ")" ]
:type sequence: int
:type ourBaseKey: ECKeyPair
:type ourRatchetKey: ECKeyPair
:type ourIdentityKey: IdentityKeyPair
[ ":", "type", "sequence", ":", "int", ":", "type", "ourBaseKey", ":", "ECKeyPair", ":", "type", "ourRatchetKey", ":", "ECKeyPair", ":", "type", "ourIdentityKey", ":", "IdentityKeyPair" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/state/sessionstate.py#L194-L210
tgalal/python-axolotl
axolotl/state/sessionstate.py
SessionState.setUnacknowledgedPreKeyMessage
def setUnacknowledgedPreKeyMessage(self, preKeyId, signedPreKeyId, baseKey): """ :type preKeyId: int :type signedPreKeyId: int :type baseKey: ECPublicKey """ self.sessionStructure.pendingPreKey.signedPreKeyId = signedPreKeyId self.sessionStructure.pendingPreKey.baseKey = baseKey.serialize() if preKeyId is not None: self.sessionStructure.pendingPreKey.preKeyId = preKeyId
python
def setUnacknowledgedPreKeyMessage(self, preKeyId, signedPreKeyId, baseKey):
    """
    :type preKeyId: int
    :type signedPreKeyId: int
    :type baseKey: ECPublicKey
    """
    self.sessionStructure.pendingPreKey.signedPreKeyId = signedPreKeyId
    self.sessionStructure.pendingPreKey.baseKey = baseKey.serialize()

    if preKeyId is not None:
        self.sessionStructure.pendingPreKey.preKeyId = preKeyId
[ "def", "setUnacknowledgedPreKeyMessage", "(", "self", ",", "preKeyId", ",", "signedPreKeyId", ",", "baseKey", ")", ":", "self", ".", "sessionStructure", ".", "pendingPreKey", ".", "signedPreKeyId", "=", "signedPreKeyId", "self", ".", "sessionStructure", ".", "pendingPreKey", ".", "baseKey", "=", "baseKey", ".", "serialize", "(", ")", "if", "preKeyId", "is", "not", "None", ":", "self", ".", "sessionStructure", ".", "pendingPreKey", ".", "preKeyId", "=", "preKeyId" ]
:type preKeyId: int
:type signedPreKeyId: int
:type baseKey: ECPublicKey
[ ":", "type", "preKeyId", ":", "int", ":", "type", "signedPreKeyId", ":", "int", ":", "type", "baseKey", ":", "ECPublicKey" ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/state/sessionstate.py#L234-L244
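A sketch of the companion setter, under the same assumed imports as the previous sketch: before the first ciphertext is acknowledged, the session remembers which prekeys the outgoing PreKey message referenced. Passing preKeyId=None exercises the guard in the function body, so only the signed-prekey id and base public key are persisted; signedPreKeyId=22 is an arbitrary illustration value.

from axolotl.state.sessionstate import SessionState   # path taken from this record
from axolotl.ecc.curve import Curve                   # assumed module path

state = SessionState()                                 # assumed no-arg constructor
ourBaseKey = Curve.generateKeyPair()
# preKeyId=None: the optional one-time prekey id field is left unset.
state.setUnacknowledgedPreKeyMessage(preKeyId=None, signedPreKeyId=22,
                                     baseKey=ourBaseKey.getPublicKey())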
tgalal/python-axolotl
axolotl/util/keyhelper.py
KeyHelper.generateIdentityKeyPair
def generateIdentityKeyPair():
    """
    Generate an identity key pair.  Clients should only do this once,
    at install time.

    @return the generated IdentityKeyPair.
    """
    keyPair = Curve.generateKeyPair()
    publicKey = IdentityKey(keyPair.getPublicKey())
    serialized = '0a21056e8936e8367f768a7bba008ade7cf58407bdc7a6aae293e2c' \
                 'b7c06668dcd7d5e12205011524f0c15467100dd603e0d6020f4d293' \
                 'edfbcd82129b14a88791ac81365c'
    serialized = binascii.unhexlify(serialized.encode())
    identityKeyPair = IdentityKeyPair(publicKey, keyPair.getPrivateKey())
    return identityKeyPair
python
def generateIdentityKeyPair():
    """
    Generate an identity key pair.  Clients should only do this once,
    at install time.

    @return the generated IdentityKeyPair.
    """
    keyPair = Curve.generateKeyPair()
    publicKey = IdentityKey(keyPair.getPublicKey())
    serialized = '0a21056e8936e8367f768a7bba008ade7cf58407bdc7a6aae293e2c' \
                 'b7c06668dcd7d5e12205011524f0c15467100dd603e0d6020f4d293' \
                 'edfbcd82129b14a88791ac81365c'
    serialized = binascii.unhexlify(serialized.encode())
    identityKeyPair = IdentityKeyPair(publicKey, keyPair.getPrivateKey())
    return identityKeyPair
[ "def", "generateIdentityKeyPair", "(", ")", ":", "keyPair", "=", "Curve", ".", "generateKeyPair", "(", ")", "publicKey", "=", "IdentityKey", "(", "keyPair", ".", "getPublicKey", "(", ")", ")", "serialized", "=", "'0a21056e8936e8367f768a7bba008ade7cf58407bdc7a6aae293e2c'", "'b7c06668dcd7d5e12205011524f0c15467100dd603e0d6020f4d293'", "'edfbcd82129b14a88791ac81365c'", "serialized", "=", "binascii", ".", "unhexlify", "(", "serialized", ".", "encode", "(", ")", ")", "identityKeyPair", "=", "IdentityKeyPair", "(", "publicKey", ",", "keyPair", ".", "getPrivateKey", "(", ")", ")", "return", "identityKeyPair" ]
Generate an identity key pair.  Clients should only do this once,
at install time.

@return the generated IdentityKeyPair.
[ "Generate", "an", "identity", "key", "pair", ".", "Clients", "should", "only", "do", "this", "once", "at", "install", "time", "." ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/util/keyhelper.py#L21-L34
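A short sketch of calling this helper. Worth noting: the record's code builds a hex 'serialized' blob and unhexlifies it but never uses it, so the returned pair comes purely from the freshly generated Curve key pair. The getPublicKey/getPrivateKey accessors on the result are confirmed by the setPendingKeyExchange record above; the import path comes from this record's func_path_in_repository.

from axolotl.util.keyhelper import KeyHelper   # path taken from this record

identity = KeyHelper.generateIdentityKeyPair()
pub = identity.getPublicKey()      # IdentityKey wrapping the EC public key
priv = identity.getPrivateKey()
print(len(pub.serialize()))        # length of the serialized public-key bytes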
tgalal/python-axolotl
axolotl/util/keyhelper.py
KeyHelper.generatePreKeys
def generatePreKeys(start, count):
    """
    Generate a list of PreKeys.  Clients should do this at install time, and
    subsequently any time the list of PreKeys stored on the server runs low.

    PreKey IDs are shorts, so they will eventually be repeated.  Clients should
    store PreKeys in a circular buffer, so that they are repeated as infrequently
    as possible.

    @param start The starting PreKey ID, inclusive.
    @param count The number of PreKeys to generate.
    @return the list of generated PreKeyRecords.
    """
    results = []
    start -= 1
    for i in range(0, count):
        preKeyId = ((start + i) % (Medium.MAX_VALUE - 1)) + 1
        results.append(PreKeyRecord(preKeyId, Curve.generateKeyPair()))

    return results
python
def generatePreKeys(start, count):
    """
    Generate a list of PreKeys.  Clients should do this at install time, and
    subsequently any time the list of PreKeys stored on the server runs low.

    PreKey IDs are shorts, so they will eventually be repeated.  Clients should
    store PreKeys in a circular buffer, so that they are repeated as infrequently
    as possible.

    @param start The starting PreKey ID, inclusive.
    @param count The number of PreKeys to generate.
    @return the list of generated PreKeyRecords.
    """
    results = []
    start -= 1
    for i in range(0, count):
        preKeyId = ((start + i) % (Medium.MAX_VALUE - 1)) + 1
        results.append(PreKeyRecord(preKeyId, Curve.generateKeyPair()))

    return results
[ "def", "generatePreKeys", "(", "start", ",", "count", ")", ":", "results", "=", "[", "]", "start", "-=", "1", "for", "i", "in", "range", "(", "0", ",", "count", ")", ":", "preKeyId", "=", "(", "(", "start", "+", "i", ")", "%", "(", "Medium", ".", "MAX_VALUE", "-", "1", ")", ")", "+", "1", "results", ".", "append", "(", "PreKeyRecord", "(", "preKeyId", ",", "Curve", ".", "generateKeyPair", "(", ")", ")", ")", "return", "results" ]
Generate a list of PreKeys.  Clients should do this at install time, and
subsequently any time the list of PreKeys stored on the server runs low.

PreKey IDs are shorts, so they will eventually be repeated.  Clients should
store PreKeys in a circular buffer, so that they are repeated as infrequently
as possible.

@param start The starting PreKey ID, inclusive.
@param count The number of PreKeys to generate.
@return the list of generated PreKeyRecords.
[ "Generate", "a", "list", "of", "PreKeys", ".", "Clients", "should", "do", "this", "at", "install", "time", "and", "subsequently", "any", "time", "the", "list", "of", "PreKeys", "stored", "on", "the", "server", "runs", "low", "." ]
train
https://github.com/tgalal/python-axolotl/blob/0c681af4b756f556e23a9bf961abfbc6f82800cc/axolotl/util/keyhelper.py#L54-L73
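The id arithmetic is the interesting part: ids live in [1, Medium.MAX_VALUE - 1] and wrap around instead of overflowing. A plain-Python rehearsal of just that arithmetic, assuming Medium.MAX_VALUE is 0xFFFFFF as in the upstream libsignal sources (an assumption; only the modular form is taken from the record):

MAX_VALUE = 0xFFFFFF                 # assumed value of Medium.MAX_VALUE
start, count = MAX_VALUE - 2, 4      # start near the top to force wraparound
start -= 1                           # same off-by-one shift as the record
ids = [((start + i) % (MAX_VALUE - 1)) + 1 for i in range(count)]
print(ids)                           # [16777213, 16777214, 1, 2], wraps to 1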
5monkeys/django-enumfield
django_enumfield/enum.py
Enum.choices
def choices(cls, blank=False):
    """
    Choices for Enum
    :return: List of tuples (<value>, <human-readable value>)
    :rtype: list
    """
    choices = sorted([(key, value) for key, value in cls.values.items()],
                     key=lambda x: x[0])
    if blank:
        choices.insert(0, ('', Enum.Value('', None, '', cls)))
    return choices
python
def choices(cls, blank=False):
    """
    Choices for Enum
    :return: List of tuples (<value>, <human-readable value>)
    :rtype: list
    """
    choices = sorted([(key, value) for key, value in cls.values.items()],
                     key=lambda x: x[0])
    if blank:
        choices.insert(0, ('', Enum.Value('', None, '', cls)))
    return choices
[ "def", "choices", "(", "cls", ",", "blank", "=", "False", ")", ":", "choices", "=", "sorted", "(", "[", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "cls", ".", "values", ".", "items", "(", ")", "]", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "if", "blank", ":", "choices", ".", "insert", "(", "0", ",", "(", "''", ",", "Enum", ".", "Value", "(", "''", ",", "None", ",", "''", ",", "cls", ")", ")", ")", "return", "choices" ]
Choices for Enum :return: List of tuples (<value>, <human-readable value>) :rtype: list
[ "Choices", "for", "Enum", ":", "return", ":", "List", "of", "tuples", "(", "<value", ">", "<human", "-", "readable", "value", ">", ")", ":", "rtype", ":", "list" ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/enum.py#L69-L77
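A usage sketch, assuming the declaration style from the project's README: uppercase integer attributes that the Enum metaclass collects into cls.values. The BeerStyle name and the printed reprs are illustrative only.

from django_enumfield import enum    # assumed public import path

class BeerStyle(enum.Enum):
    LAGER = 0
    STOUT = 1

print(BeerStyle.choices())
# [(0, <LAGER value>), (1, <STOUT value>)], sorted by the numeric key
print(BeerStyle.choices(blank=True)[0][0])   # '': blank choice prepended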
5monkeys/django-enumfield
django_enumfield/enum.py
Enum.get
def get(cls, name_or_numeric):
    """
    Get Enum.Value object matching the value argument.

    :param name_or_numeric: Integer value or attribute name
    :type name_or_numeric: int or str
    :rtype: Enum.Value
    """
    if isinstance(name_or_numeric, six.string_types):
        name_or_numeric = getattr(cls, name_or_numeric.upper())

    return cls.values.get(name_or_numeric)
python
def get(cls, name_or_numeric):
    """
    Get Enum.Value object matching the value argument.

    :param name_or_numeric: Integer value or attribute name
    :type name_or_numeric: int or str
    :rtype: Enum.Value
    """
    if isinstance(name_or_numeric, six.string_types):
        name_or_numeric = getattr(cls, name_or_numeric.upper())

    return cls.values.get(name_or_numeric)
[ "def", "get", "(", "cls", ",", "name_or_numeric", ")", ":", "if", "isinstance", "(", "name_or_numeric", ",", "six", ".", "string_types", ")", ":", "name_or_numeric", "=", "getattr", "(", "cls", ",", "name_or_numeric", ".", "upper", "(", ")", ")", "return", "cls", ".", "values", ".", "get", "(", "name_or_numeric", ")" ]
Get Enum.Value object matching the value argument.

:param name_or_numeric: Integer value or attribute name
:type name_or_numeric: int or str
:rtype: Enum.Value
[ "Get", "Enum", ".", "Value", "object", "matching", "the", "value", "argument", ".", ":", "param", "name_or_numeric", ":", "Integer", "value", "or", "attribute", "name", ":", "type", "name_or_numeric", ":", "int", "or", "str", ":", "rtype", ":", "Enum", ".", "Value" ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/enum.py#L105-L114
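A sketch of the two lookup paths, using the same enum import assumed in the earlier sketch. The name path upper-cases before the getattr, so lowercase names work; an unknown numeric value falls through dict.get to None, while an unknown name raises AttributeError at the getattr.

from django_enumfield import enum    # assumed public import path

class BeerStyle(enum.Enum):          # same illustrative enum as above
    LAGER = 0
    STOUT = 1

BeerStyle.get('lager')   # name path: .upper() -> getattr -> values lookup
BeerStyle.get(1)         # numeric path: straight values lookup
BeerStyle.get(99)        # None: dict.get swallows unknown numbers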
5monkeys/django-enumfield
django_enumfield/enum.py
Enum.items
def items(cls):
    """
    :return: List of tuples consisting of every enum value in the form
        [('NAME', value), ...]
    :rtype: list
    """
    items = [(value.name, key) for key, value in cls.values.items()]
    return sorted(items, key=lambda x: x[1])
python
def items(cls):
    """
    :return: List of tuples consisting of every enum value in the form
        [('NAME', value), ...]
    :rtype: list
    """
    items = [(value.name, key) for key, value in cls.values.items()]
    return sorted(items, key=lambda x: x[1])
[ "def", "items", "(", "cls", ")", ":", "items", "=", "[", "(", "value", ".", "name", ",", "key", ")", "for", "key", ",", "value", "in", "cls", ".", "values", ".", "items", "(", ")", "]", "return", "sorted", "(", "items", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")" ]
:return: List of tuples consisting of every enum value in the form
    [('NAME', value), ...]
:rtype: list
[ ":", "return", ":", "List", "of", "tuples", "consisting", "of", "every", "enum", "value", "in", "the", "form", "[", "(", "NAME", "value", ")", "...", "]", ":", "rtype", ":", "list" ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/enum.py#L137-L143
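For the same illustrative BeerStyle declaration, items() is the name-first mirror of choices(), sorted by the numeric value rather than the name:

print(BeerStyle.items())   # [('LAGER', 0), ('STOUT', 1)]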
5monkeys/django-enumfield
django_enumfield/enum.py
Enum.is_valid_transition
def is_valid_transition(cls, from_value, to_value):
    """
    Will check if to_value is a valid transition from from_value.
    Returns true if it is a valid transition.

    :param from_value: Start transition point
    :param to_value: End transition point
    :type from_value: int
    :type to_value: int
    :return: Success flag
    :rtype: bool
    """
    try:
        return from_value == to_value or from_value in cls.transition_origins(to_value)
    except KeyError:
        return False
python
def is_valid_transition(cls, from_value, to_value):
    """
    Will check if to_value is a valid transition from from_value.
    Returns true if it is a valid transition.

    :param from_value: Start transition point
    :param to_value: End transition point
    :type from_value: int
    :type to_value: int
    :return: Success flag
    :rtype: bool
    """
    try:
        return from_value == to_value or from_value in cls.transition_origins(to_value)
    except KeyError:
        return False
[ "def", "is_valid_transition", "(", "cls", ",", "from_value", ",", "to_value", ")", ":", "try", ":", "return", "from_value", "==", "to_value", "or", "from_value", "in", "cls", ".", "transition_origins", "(", "to_value", ")", "except", "KeyError", ":", "return", "False" ]
Will check if to_value is a valid transition from from_value.
Returns true if it is a valid transition.

:param from_value: Start transition point
:param to_value: End transition point
:type from_value: int
:type to_value: int
:return: Success flag
:rtype: bool
[ "Will", "check", "if", "to_value", "is", "a", "valid", "transition", "from", "from_value", ".", "Returns", "true", "if", "it", "is", "a", "valid", "transition", ".", ":", "param", "from_value", ":", "Start", "transition", "point", ":", "param", "to_value", ":", "End", "transition", "point", ":", "type", "from_value", ":", "int", ":", "type", "to_value", ":", "int", ":", "return", ":", "Success", "flag", ":", "rtype", ":", "bool" ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/enum.py#L146-L158
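A sketch with an explicit _transitions table, following the convention from the project's README (keys are destination values, tuples are the allowed origins). Self-transitions are always allowed by the from_value == to_value short-circuit, and a destination missing from the table raises KeyError inside transition_origins, which the except clause turns into False.

from django_enumfield import enum    # assumed public import path

class State(enum.Enum):
    UNPUBLISHED = 0
    PUBLISHED = 1
    DELETED = 2

    _transitions = {
        PUBLISHED: (UNPUBLISHED,),
        DELETED: (UNPUBLISHED, PUBLISHED),
    }

State.is_valid_transition(State.UNPUBLISHED, State.PUBLISHED)  # True
State.is_valid_transition(State.DELETED, State.PUBLISHED)      # False
State.is_valid_transition(State.DELETED, State.DELETED)        # True (self-loop)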
5monkeys/django-enumfield
django_enumfield/db/fields.py
EnumField._setup_validation
def _setup_validation(self, sender, **kwargs):
    """
    Use a custom setter for the field to validate the new value against the old one.
    The current value is set as '_enum_[att_name]' on the model instance.
    """
    att_name = self.get_attname()
    private_att_name = '_enum_%s' % att_name
    enum = self.enum

    def set_enum(self, new_value):
        if hasattr(self, private_att_name):
            # Fetch previous value from private enum attribute.
            old_value = getattr(self, private_att_name)
        else:
            # First setattr no previous value on instance.
            old_value = new_value
        # Update private enum attribute with new value
        setattr(self, private_att_name, new_value)
        self.__dict__[att_name] = new_value
        # Run validation for new value.
        validators.validate_valid_transition(enum, old_value, new_value)

    def get_enum(self):
        return getattr(self, private_att_name)

    def delete_enum(self):
        self.__dict__[att_name] = None
        return setattr(self, private_att_name, None)

    if not sender._meta.abstract:
        setattr(sender, att_name, property(get_enum, set_enum, delete_enum))
python
def _setup_validation(self, sender, **kwargs):
    """
    Use a custom setter for the field to validate the new value against the old one.
    The current value is set as '_enum_[att_name]' on the model instance.
    """
    att_name = self.get_attname()
    private_att_name = '_enum_%s' % att_name
    enum = self.enum

    def set_enum(self, new_value):
        if hasattr(self, private_att_name):
            # Fetch previous value from private enum attribute.
            old_value = getattr(self, private_att_name)
        else:
            # First setattr no previous value on instance.
            old_value = new_value
        # Update private enum attribute with new value
        setattr(self, private_att_name, new_value)
        self.__dict__[att_name] = new_value
        # Run validation for new value.
        validators.validate_valid_transition(enum, old_value, new_value)

    def get_enum(self):
        return getattr(self, private_att_name)

    def delete_enum(self):
        self.__dict__[att_name] = None
        return setattr(self, private_att_name, None)

    if not sender._meta.abstract:
        setattr(sender, att_name, property(get_enum, set_enum, delete_enum))
[ "def", "_setup_validation", "(", "self", ",", "sender", ",", "*", "*", "kwargs", ")", ":", "att_name", "=", "self", ".", "get_attname", "(", ")", "private_att_name", "=", "'_enum_%s'", "%", "att_name", "enum", "=", "self", ".", "enum", "def", "set_enum", "(", "self", ",", "new_value", ")", ":", "if", "hasattr", "(", "self", ",", "private_att_name", ")", ":", "# Fetch previous value from private enum attribute.", "old_value", "=", "getattr", "(", "self", ",", "private_att_name", ")", "else", ":", "# First setattr no previous value on instance.", "old_value", "=", "new_value", "# Update private enum attribute with new value", "setattr", "(", "self", ",", "private_att_name", ",", "new_value", ")", "self", ".", "__dict__", "[", "att_name", "]", "=", "new_value", "# Run validation for new value.", "validators", ".", "validate_valid_transition", "(", "enum", ",", "old_value", ",", "new_value", ")", "def", "get_enum", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "private_att_name", ")", "def", "delete_enum", "(", "self", ")", ":", "self", ".", "__dict__", "[", "att_name", "]", "=", "None", "return", "setattr", "(", "self", ",", "private_att_name", ",", "None", ")", "if", "not", "sender", ".", "_meta", ".", "abstract", ":", "setattr", "(", "sender", ",", "att_name", ",", "property", "(", "get_enum", ",", "set_enum", ",", "delete_enum", ")", ")" ]
Use a custom setter for the field to validate the new value against the old one.
The current value is set as '_enum_[att_name]' on the model instance.
[ "User", "a", "customer", "setter", "for", "the", "field", "to", "validate", "new", "value", "against", "the", "old", "one", ".", "The", "current", "value", "is", "set", "as", "_enum_", "[", "att_name", "]", "on", "the", "model", "instance", "." ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/db/fields.py#L27-L57
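The mechanics here are plain descriptor plumbing: the real value lives in a shadow attribute, and the property setter sees both the old and the new value, so it can validate the transition before accepting it. A Django-free rehearsal of the same pattern; the Article class and the print are illustration only, standing in for the validator call.

class Article(object):
    def _get_state(self):
        return getattr(self, '_enum_state')      # mirrors '_enum_[att_name]'

    def _set_state(self, new_value):
        # On the first assignment there is no previous value, so old == new
        # and the transition check trivially passes.
        old_value = getattr(self, '_enum_state', new_value)
        self._enum_state = new_value             # shadow attribute holds the value
        # django-enumfield would call validate_valid_transition(enum, old, new) here.
        print('transition %r -> %r' % (old_value, new_value))

    state = property(_get_state, _set_state)

a = Article()
a.state = 0     # first set: old == new, always allowed
a.state = 1     # later sets are checked against the remembered old value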
5monkeys/django-enumfield
django_enumfield/validators.py
validate_valid_transition
def validate_valid_transition(enum, from_value, to_value):
    """
    Validate that to_value is a valid choice and that to_value is a valid
    transition from from_value.
    """
    validate_available_choice(enum, to_value)
    if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
        message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
        raise InvalidStatusOperationError(message.format(
            enum=enum.__name__,
            from_value=enum.name(from_value),
            to_value=enum.name(to_value) or to_value
        ))
python
def validate_valid_transition(enum, from_value, to_value):
    """
    Validate that to_value is a valid choice and that to_value is a valid
    transition from from_value.
    """
    validate_available_choice(enum, to_value)
    if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
        message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
        raise InvalidStatusOperationError(message.format(
            enum=enum.__name__,
            from_value=enum.name(from_value),
            to_value=enum.name(to_value) or to_value
        ))
[ "def", "validate_valid_transition", "(", "enum", ",", "from_value", ",", "to_value", ")", ":", "validate_available_choice", "(", "enum", ",", "to_value", ")", "if", "hasattr", "(", "enum", ",", "'_transitions'", ")", "and", "not", "enum", ".", "is_valid_transition", "(", "from_value", ",", "to_value", ")", ":", "message", "=", "_", "(", "six", ".", "text_type", "(", "'{enum} can not go from \"{from_value}\" to \"{to_value}\"'", ")", ")", "raise", "InvalidStatusOperationError", "(", "message", ".", "format", "(", "enum", "=", "enum", ".", "__name__", ",", "from_value", "=", "enum", ".", "name", "(", "from_value", ")", ",", "to_value", "=", "enum", ".", "name", "(", "to_value", ")", "or", "to_value", ")", ")" ]
Validate that to_value is a valid choice and that to_value is a valid
transition from from_value.
[ "Validate", "that", "to_value", "is", "a", "valid", "choice", "and", "that", "to_value", "is", "a", "valid", "transition", "from", "from_value", "." ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/validators.py#L7-L18
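A sketch of the failure path, reusing the State enum with the _transitions table from the earlier sketch. The validators module path comes from this record's func_path_in_repository; the exception's home in django_enumfield.exceptions is an assumption.

from django_enumfield.validators import validate_valid_transition   # path from this record
from django_enumfield.exceptions import InvalidStatusOperationError  # assumed path

try:
    validate_valid_transition(State, State.DELETED, State.PUBLISHED)
except InvalidStatusOperationError as exc:
    print(exc)   # State can not go from "DELETED" to "PUBLISHED"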
5monkeys/django-enumfield
django_enumfield/validators.py
validate_available_choice
def validate_available_choice(enum, to_value):
    """
    Validate that to_value is defined as a value in enum.
    """
    if to_value is None:
        return

    if type(to_value) is not int:
        try:
            to_value = int(to_value)
        except ValueError:
            message_str = "'{value}' cannot be converted to int"
            message = _(six.text_type(message_str))
            raise InvalidStatusOperationError(message.format(value=to_value))

    if to_value not in list(dict(enum.choices()).keys()):
        message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
        raise InvalidStatusOperationError(message.format(value=to_value))
python
def validate_available_choice(enum, to_value):
    """
    Validate that to_value is defined as a value in enum.
    """
    if to_value is None:
        return

    if type(to_value) is not int:
        try:
            to_value = int(to_value)
        except ValueError:
            message_str = "'{value}' cannot be converted to int"
            message = _(six.text_type(message_str))
            raise InvalidStatusOperationError(message.format(value=to_value))

    if to_value not in list(dict(enum.choices()).keys()):
        message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
        raise InvalidStatusOperationError(message.format(value=to_value))
[ "def", "validate_available_choice", "(", "enum", ",", "to_value", ")", ":", "if", "to_value", "is", "None", ":", "return", "if", "type", "(", "to_value", ")", "is", "not", "int", ":", "try", ":", "to_value", "=", "int", "(", "to_value", ")", "except", "ValueError", ":", "message_str", "=", "\"'{value}' cannot be converted to int\"", "message", "=", "_", "(", "six", ".", "text_type", "(", "message_str", ")", ")", "raise", "InvalidStatusOperationError", "(", "message", ".", "format", "(", "value", "=", "to_value", ")", ")", "if", "to_value", "not", "in", "list", "(", "dict", "(", "enum", ".", "choices", "(", ")", ")", ".", "keys", "(", ")", ")", ":", "message", "=", "_", "(", "six", ".", "text_type", "(", "'Select a valid choice. {value} is not one of the available choices.'", ")", ")", "raise", "InvalidStatusOperationError", "(", "message", ".", "format", "(", "value", "=", "to_value", ")", ")" ]
Validate that to_value is defined as a value in enum.
[ "Validate", "that", "to_value", "is", "defined", "as", "a", "value", "in", "enum", "." ]
train
https://github.com/5monkeys/django-enumfield/blob/6cf20c0fba013d39960af0f4d2c9a3b399955eb3/django_enumfield/validators.py#L21-L38
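Finally, the choice validator's three outcomes, again against the illustrative BeerStyle enum from the earlier sketch: None is an explicit no-op, strings go through int() coercion before the membership check, and anything outside the declared values raises.

from django_enumfield.validators import validate_available_choice  # path from this record

validate_available_choice(BeerStyle, None)    # returns silently: None short-circuits
validate_available_choice(BeerStyle, '1')     # coerced to int 1, which is declared
validate_available_choice(BeerStyle, 'abc')   # raises: cannot be converted to int
validate_available_choice(BeerStyle, 42)      # raises: not one of the available choices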