Columns: source_code (string, length 45 to 7.24k), function_name (string, length 1 to 111), messages (list)
def x(coords, faces, stat_map=None, elev=0, azim=0, cmap='coolwarm', threshold=None, alpha='auto', bg_map=None, bg_on_stat=False, figsize= None, **kwargs): import numpy as np import matplotlib.pyplot as plt import matplotlib.tri as tri from mpl_toolkits.mplot3d import Axes3D faces = np.array(faces, dtype=int) limits = [coords.min(), coords.max()] if alpha == 'auto': if bg_map is None: alpha = 0.5 else: alpha = 1 if type(cmap) == str: cmap = plt.cm.get_cmap(cmap) if figsize is not None: fig = plt.figure(figsize=figsize) else: fig = plt.figure() ax = fig.add_subplot(111, projection='3d', xlim=limits, ylim=limits) ax.view_init(elev=elev, azim=azim) ax.set_axis_off() p3dcollec = ax.plot_trisurf(coords[:, 0], coords[:, 1], coords[:, 2], triangles=faces, linewidth=0.0, antialiased=False, color='white') if bg_map is not None or stat_map is not None: face_colors = np.ones((faces.shape[0], 4)) face_colors[:, :3] = 0.5 * face_colors[:, :3] if bg_map is not None: bg_data = bg_map if bg_data.shape[0] != coords.shape[0]: raise ValueError( 'The bg_map does not have the same number of vertices as the mesh.' ) bg_faces = np.mean(bg_data[faces], axis=1) bg_faces = bg_faces - bg_faces.min() bg_faces = bg_faces / bg_faces.max() face_colors = plt.cm.gray_r(bg_faces) face_colors[:, 3] = alpha * face_colors[:, 3] if stat_map is not None: stat_map_data = stat_map stat_map_faces = np.mean(stat_map_data[faces], axis=1) vmax = max(-np.nanmin(stat_map_faces), np.nanmax(stat_map_faces)) vmin = -vmax if threshold is not None: kept_indices = np.where(abs(stat_map_faces) >= threshold)[0] stat_map_faces = stat_map_faces - vmin stat_map_faces = stat_map_faces / (vmax - vmin) if bg_on_stat: face_colors[kept_indices] = cmap(stat_map_faces[ kept_indices]) * face_colors[kept_indices] else: face_colors[kept_indices] = cmap(stat_map_faces[ kept_indices]) else: stat_map_faces = stat_map_faces - vmin stat_map_faces = stat_map_faces / (vmax - vmin) if bg_on_stat: face_colors = cmap(stat_map_faces) * face_colors else: face_colors = cmap(stat_map_faces) p3dcollec.set_facecolors(face_colors) return fig, ax
viz
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(coords, faces, stat_map=None, elev=0, azim=0, cmap='coolwarm',\n threshold=None, alpha='auto', bg_map=None, bg_on_stat=False, figsize=\n None, **kwargs):\n import numpy as np\n import matplotlib.pyplot as plt\n import matplotlib.tri as tri\n from mpl_toolkits.mplot3d import Axes3D\n faces = np.array(faces, dtype=int)\n limits = [coords.min(), coords.max()]\n if alpha == 'auto':\n if bg_map is None:\n alpha = 0.5\n else:\n alpha = 1\n if type(cmap) == str:\n cmap = plt.cm.get_cmap(cmap)\n if figsize is not None:\n fig = plt.figure(figsize=figsize)\n else:\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='3d', xlim=limits, ylim=limits)\n ax.view_init(elev=elev, azim=azim)\n ax.set_axis_off()\n p3dcollec = ax.plot_trisurf(coords[:, 0], coords[:, 1], coords[:, 2],\n triangles=faces, linewidth=0.0, antialiased=False, color='white')\n if bg_map is not None or stat_map is not None:\n face_colors = np.ones((faces.shape[0], 4))\n face_colors[:, :3] = 0.5 * face_colors[:, :3]\n if bg_map is not None:\n bg_data = bg_map\n if bg_data.shape[0] != coords.shape[0]:\n raise ValueError(\n 'The bg_map does not have the same number of vertices as the mesh.'\n )\n bg_faces = np.mean(bg_data[faces], axis=1)\n bg_faces = bg_faces - bg_faces.min()\n bg_faces = bg_faces / bg_faces.max()\n face_colors = plt.cm.gray_r(bg_faces)\n face_colors[:, 3] = alpha * face_colors[:, 3]\n if stat_map is not None:\n stat_map_data = stat_map\n stat_map_faces = np.mean(stat_map_data[faces], axis=1)\n vmax = max(-np.nanmin(stat_map_faces), np.nanmax(stat_map_faces))\n vmin = -vmax\n if threshold is not None:\n kept_indices = np.where(abs(stat_map_faces) >= threshold)[0]\n stat_map_faces = stat_map_faces - vmin\n stat_map_faces = stat_map_faces / (vmax - vmin)\n if bg_on_stat:\n face_colors[kept_indices] = cmap(stat_map_faces[\n kept_indices]) * face_colors[kept_indices]\n else:\n face_colors[kept_indices] = cmap(stat_map_faces[\n kept_indices])\n else:\n stat_map_faces = stat_map_faces - vmin\n stat_map_faces = stat_map_faces / (vmax - vmin)\n if bg_on_stat:\n face_colors = cmap(stat_map_faces) * face_colors\n else:\n face_colors = cmap(stat_map_faces)\n p3dcollec.set_facecolors(face_colors)\n return fig, ax\n", "role": "user" }, { "content": "viz", "role": "assistant" } ]
def x(surf, cortex): vertices, triangles = surf cortex_vertices = np.array(vertices[cortex], dtype=np.float64) cortex_triangles = triangles_keep_cortex(triangles, cortex) return cortex_vertices, cortex_triangles
surf_keep_cortex
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(surf, cortex):\n vertices, triangles = surf\n cortex_vertices = np.array(vertices[cortex], dtype=np.float64)\n cortex_triangles = triangles_keep_cortex(triangles, cortex)\n return cortex_vertices, cortex_triangles\n", "role": "user" }, { "content": "surf_keep_cortex", "role": "assistant" } ]
def x(triangles, cortex): input_shape = triangles.shape triangle_is_in_cortex = np.all(np.reshape(np.in1d(triangles.ravel(), cortex), input_shape), axis=1) cortex_triangles_old = np.array(triangles[triangle_is_in_cortex], dtype =np.int32) new_index = np.digitize(cortex_triangles_old.ravel(), cortex, right=True) cortex_triangles = np.array(np.arange(len(cortex))[new_index].reshape( cortex_triangles_old.shape), dtype=np.int32) return cortex_triangles
triangles_keep_cortex
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(triangles, cortex):\n input_shape = triangles.shape\n triangle_is_in_cortex = np.all(np.reshape(np.in1d(triangles.ravel(),\n cortex), input_shape), axis=1)\n cortex_triangles_old = np.array(triangles[triangle_is_in_cortex], dtype\n =np.int32)\n new_index = np.digitize(cortex_triangles_old.ravel(), cortex, right=True)\n cortex_triangles = np.array(np.arange(len(cortex))[new_index].reshape(\n cortex_triangles_old.shape), dtype=np.int32)\n return cortex_triangles\n", "role": "user" }, { "content": "triangles_keep_cortex", "role": "assistant" } ]
def x(src, cortex): src_new = np.array(np.where(np.in1d(cortex, src))[0], dtype=np.int32) return src_new
translate_src
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(src, cortex):\n src_new = np.array(np.where(np.in1d(cortex, src))[0], dtype=np.int32)\n return src_new\n", "role": "user" }, { "content": "translate_src", "role": "assistant" } ]
def x(input_data, surf, cortex): data = np.zeros(len(surf[0])) data[cortex] = input_data return data
recort
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(input_data, surf, cortex):\n data = np.zeros(len(surf[0]))\n data[cortex] = input_data\n return data\n", "role": "user" }, { "content": "recort", "role": "assistant" } ]
def x(simple_vertices, complex_vertices): import scipy.spatial voronoi_seed_idx = np.zeros((simple_vertices.shape[0],), dtype='int64') - 1 missing = np.where(voronoi_seed_idx == -1)[0].shape[0] mapping_single = np.zeros_like(voronoi_seed_idx) neighbours = 0 col = 0 while missing != 0: neighbours += 100 inaccuracy, mapping = scipy.spatial.KDTree(complex_vertices).query( simple_vertices, k=neighbours) while col < neighbours: missing_idx = np.where(voronoi_seed_idx == -1)[0] missing = missing_idx.shape[0] if missing == 0: break else: mapping_single[missing_idx] = np.copy(mapping[missing_idx, col] ) unique, double_idx = np.unique(mapping_single, return_inverse=True) voronoi_seed_idx = np.zeros((simple_vertices.shape[0],), dtype='int64') - 1 for u in range(unique.shape[0]): entries = np.where(double_idx == u)[0] voronoi_seed_idx[entries[0]] = unique[u] col += 1 return voronoi_seed_idx, inaccuracy
find_node_match
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(simple_vertices, complex_vertices):\n import scipy.spatial\n voronoi_seed_idx = np.zeros((simple_vertices.shape[0],), dtype='int64') - 1\n missing = np.where(voronoi_seed_idx == -1)[0].shape[0]\n mapping_single = np.zeros_like(voronoi_seed_idx)\n neighbours = 0\n col = 0\n while missing != 0:\n neighbours += 100\n inaccuracy, mapping = scipy.spatial.KDTree(complex_vertices).query(\n simple_vertices, k=neighbours)\n while col < neighbours:\n missing_idx = np.where(voronoi_seed_idx == -1)[0]\n missing = missing_idx.shape[0]\n if missing == 0:\n break\n else:\n mapping_single[missing_idx] = np.copy(mapping[missing_idx, col]\n )\n unique, double_idx = np.unique(mapping_single,\n return_inverse=True)\n voronoi_seed_idx = np.zeros((simple_vertices.shape[0],),\n dtype='int64') - 1\n for u in range(unique.shape[0]):\n entries = np.where(double_idx == u)[0]\n voronoi_seed_idx[entries[0]] = unique[u]\n col += 1\n return voronoi_seed_idx, inaccuracy\n", "role": "user" }, { "content": "find_node_match", "role": "assistant" } ]
def x(self): try: return super(EventSummRep, self).parse().parse_away_shots( ).parse_home_shots().parse_away_fo().parse_home_fo( ).parse_away_by_player().parse_home_by_player() except: return None
parse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n return super(EventSummRep, self).parse().parse_away_shots(\n ).parse_home_shots().parse_away_fo().parse_home_fo(\n ).parse_away_by_player().parse_home_by_player()\n except:\n return None\n", "role": "user" }, { "content": "parse", "role": "assistant" } ]
def x(self): try: self.__set_shot_tables() self.shots['home'] = self.__parse_shot_tables(self.__home_top, self .__home_bot) return self except: return None
parse_home_shots
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n self.__set_shot_tables()\n self.shots['home'] = self.__parse_shot_tables(self.__home_top, self\n .__home_bot)\n return self\n except:\n return None\n", "role": "user" }, { "content": "parse_home_shots", "role": "assistant" } ]
def x(self): try: self.__set_shot_tables() self.shots['away'] = self.__parse_shot_tables(self.__aw_top, self. __aw_bot) return self except: return None
parse_away_shots
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n self.__set_shot_tables()\n self.shots['away'] = self.__parse_shot_tables(self.__aw_top, self.\n __aw_bot)\n return self\n except:\n return None\n", "role": "user" }, { "content": "parse_away_shots", "role": "assistant" } ]
def x(self): try: self.__set_fo_tables() self.face_offs['home'] = self.__parse_fo_table(self.__home_fo) return self except: return None
parse_home_fo
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n self.__set_fo_tables()\n self.face_offs['home'] = self.__parse_fo_table(self.__home_fo)\n return self\n except:\n return None\n", "role": "user" }, { "content": "parse_home_fo", "role": "assistant" } ]
def x(self): try: self.__set_fo_tables() self.face_offs['away'] = self.__parse_fo_table(self.__away_fo) return self except: return None
parse_away_fo
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n self.__set_fo_tables()\n self.face_offs['away'] = self.__parse_fo_table(self.__away_fo)\n return self\n except:\n return None\n", "role": "user" }, { "content": "parse_away_fo", "role": "assistant" } ]
def x(surf, cortex, source_nodes): cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex) translated_source_nodes = translate_src(source_nodes, cortex) data = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes) dist = recort(data, surf, cortex) del data return dist
dist_calc
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(surf, cortex, source_nodes):\n cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)\n translated_source_nodes = translate_src(source_nodes, cortex)\n data = gdist.compute_gdist(cortex_vertices, cortex_triangles,\n source_indices=translated_source_nodes)\n dist = recort(data, surf, cortex)\n del data\n return dist\n", "role": "user" }, { "content": "dist_calc", "role": "assistant" } ]
def x(surf, cortex, src): cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex) dist_vals = np.zeros((len(source_nodes), len(cortex_vertices))) for x in range(len(source_nodes)): translated_source_nodes = translate_src(source_nodes[x], cortex) dist_vals[x, :] = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes) data = np.argsort(dist_vals, axis=0)[0, :] + 1 zone = recort(data, surf, cortex) del data return zone
zone_calc
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(surf, cortex, src):\n cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)\n dist_vals = np.zeros((len(source_nodes), len(cortex_vertices)))\n for x in range(len(source_nodes)):\n translated_source_nodes = translate_src(source_nodes[x], cortex)\n dist_vals[x, :] = gdist.compute_gdist(cortex_vertices,\n cortex_triangles, source_indices=translated_source_nodes)\n data = np.argsort(dist_vals, axis=0)[0, :] + 1\n zone = recort(data, surf, cortex)\n del data\n return zone\n", "role": "user" }, { "content": "zone_calc", "role": "assistant" } ]
def x(surf, cortex, labels, exceptions=['Unknown', 'Medial_wall'], verbose=True ): cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex) label_list = sd.load.get_freesurfer_label(labels, verbose=False) rs = np.where([(a not in exceptions) for a in label_list])[0] rois = [label_list[r] for r in rs] if verbose: print('# of regions: ' + str(len(rois))) dist_roi = [] for roi in rois: source_nodes = sd.load.load_freesurfer_label(labels, roi) translated_source_nodes = translate_src(source_nodes, cortex) dist_roi.append(gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes)) if verbose: print(roi) dist_roi = np.array(dist_roi) dist_mat = [] for roi in rois: source_nodes = sd.load.load_freesurfer_label(labels, roi) translated_source_nodes = translate_src(source_nodes, cortex) dist_mat.append(np.min(dist_roi[:, translated_source_nodes], axis=1)) dist_mat = np.array(dist_mat) return dist_mat, rois
dist_calc_matrix
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(surf, cortex, labels, exceptions=['Unknown', 'Medial_wall'], verbose=True\n ):\n cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)\n label_list = sd.load.get_freesurfer_label(labels, verbose=False)\n rs = np.where([(a not in exceptions) for a in label_list])[0]\n rois = [label_list[r] for r in rs]\n if verbose:\n print('# of regions: ' + str(len(rois)))\n dist_roi = []\n for roi in rois:\n source_nodes = sd.load.load_freesurfer_label(labels, roi)\n translated_source_nodes = translate_src(source_nodes, cortex)\n dist_roi.append(gdist.compute_gdist(cortex_vertices,\n cortex_triangles, source_indices=translated_source_nodes))\n if verbose:\n print(roi)\n dist_roi = np.array(dist_roi)\n dist_mat = []\n for roi in rois:\n source_nodes = sd.load.load_freesurfer_label(labels, roi)\n translated_source_nodes = translate_src(source_nodes, cortex)\n dist_mat.append(np.min(dist_roi[:, translated_source_nodes], axis=1))\n dist_mat = np.array(dist_mat)\n return dist_mat, rois\n", "role": "user" }, { "content": "dist_calc_matrix", "role": "assistant" } ]
def x(self): try: return super(FaceOffRep, self).parse() and self.parse_home_face_offs( ) and self.parse_away_face_offs() except: return None
parse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n return super(FaceOffRep, self).parse() and self.parse_home_face_offs(\n ) and self.parse_away_face_offs()\n except:\n return None\n", "role": "user" }, { "content": "parse", "role": "assistant" } ]
def x(self): self.__set_team_docs() self.face_offs['home'] = FaceOffRep.__read_team_doc(self.__home_doc) return self
parse_home_face_offs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.__set_team_docs()\n self.face_offs['home'] = FaceOffRep.__read_team_doc(self.__home_doc)\n return self\n", "role": "user" }, { "content": "parse_home_face_offs", "role": "assistant" } ]
def x(self): self.__set_team_docs() self.face_offs['away'] = FaceOffRep.__read_team_doc(self.__vis_doc) return self
parse_away_face_offs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.__set_team_docs()\n self.face_offs['away'] = FaceOffRep.__read_team_doc(self.__vis_doc)\n return self\n", "role": "user" }, { "content": "parse_away_face_offs", "role": "assistant" } ]
def x(filename): basename = os.path.basename(filename) path = os.path.dirname(filename) sys.path.append(path) return __import__(os.path.splitext(basename)[0])
load_module
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(filename):\n basename = os.path.basename(filename)\n path = os.path.dirname(filename)\n sys.path.append(path)\n return __import__(os.path.splitext(basename)[0])\n", "role": "user" }, { "content": "load_module", "role": "assistant" } ]
def x(machine_list): if machine_list is None: return {} else: mapping = {} for pair in machine_list: if constants.MACHINE_SEPARATOR not in pair or pair.count(constants .MACHINE_SEPARATOR) != 1: raise ValueError( 'machine pairs must be passed as two strings separted by a %s' , constants.MACHINE_SEPARATOR) logical, physical = pair.split(constants.MACHINE_SEPARATOR) mapping[logical] = physical return mapping
make_machine_mapping
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(machine_list):\n if machine_list is None:\n return {}\n else:\n mapping = {}\n for pair in machine_list:\n if constants.MACHINE_SEPARATOR not in pair or pair.count(constants\n .MACHINE_SEPARATOR) != 1:\n raise ValueError(\n 'machine pairs must be passed as two strings separted by a %s'\n , constants.MACHINE_SEPARATOR)\n logical, physical = pair.split(constants.MACHINE_SEPARATOR)\n mapping[logical] = physical\n return mapping\n", "role": "user" }, { "content": "make_machine_mapping", "role": "assistant" } ]
def x(config_list): if config_list is None: return {} else: mapping = {} for pair in config_list: if constants.CONFIG_SEPARATOR not in pair or pair.count(constants .CONFIG_SEPARATOR) != 1: raise ValueError( 'configs must be passed as two strings separted by a %s', constants.CONFIG_SEPARATOR) config, value = pair.split(constants.CONFIG_SEPARATOR) mapping[config] = value return mapping
parse_config_list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(config_list):\n if config_list is None:\n return {}\n else:\n mapping = {}\n for pair in config_list:\n if constants.CONFIG_SEPARATOR not in pair or pair.count(constants\n .CONFIG_SEPARATOR) != 1:\n raise ValueError(\n 'configs must be passed as two strings separted by a %s',\n constants.CONFIG_SEPARATOR)\n config, value = pair.split(constants.CONFIG_SEPARATOR)\n mapping[config] = value\n return mapping\n", "role": "user" }, { "content": "parse_config_list", "role": "assistant" } ]
def x(config_file_path): extension = os.path.splitext(config_file_path)[1] if extension == '.pyc': raise ValueError('Skipping .pyc file as config') if extension == '.json': with open(config_file_path) as config_file: try: mapping = json.load(config_file) except ValueError as e: logger.error('Did not load json configs', e) raise SyntaxError( 'Unable to parse config file:%s due to malformed JSON. Aborting' % config_file_path) elif extension == '.py': mapping = {} file_dict = load_module(config_file_path) for attr_name in dir(file_dict): if not (attr_name.startswith('_') or attr_name.startswith('__')): attr = getattr(file_dict, attr_name) if type(attr) is dict: mapping.update(attr) else: with open(config_file_path) as config_file: lines = [line.rstrip() for line in config_file if line.rstrip() != '' and not line.startswith('#')] mapping = parse_config_list(lines) return mapping
parse_config_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(config_file_path):\n extension = os.path.splitext(config_file_path)[1]\n if extension == '.pyc':\n raise ValueError('Skipping .pyc file as config')\n if extension == '.json':\n with open(config_file_path) as config_file:\n try:\n mapping = json.load(config_file)\n except ValueError as e:\n logger.error('Did not load json configs', e)\n raise SyntaxError(\n 'Unable to parse config file:%s due to malformed JSON. Aborting'\n % config_file_path)\n elif extension == '.py':\n mapping = {}\n file_dict = load_module(config_file_path)\n for attr_name in dir(file_dict):\n if not (attr_name.startswith('_') or attr_name.startswith('__')):\n attr = getattr(file_dict, attr_name)\n if type(attr) is dict:\n mapping.update(attr)\n else:\n with open(config_file_path) as config_file:\n lines = [line.rstrip() for line in config_file if line.rstrip() !=\n '' and not line.startswith('#')]\n mapping = parse_config_list(lines)\n return mapping\n", "role": "user" }, { "content": "parse_config_file", "role": "assistant" } ]
def x(ssh, command, msg='', env={}, **kwargs): bash_profile_command = 'source .bash_profile > /dev/null 2> /dev/null;' env_command = build_os_environment_string(env) new_command = bash_profile_command + env_command + command if kwargs.get('sync', True): return better_exec_command(ssh, new_command, msg) else: return ssh.exec_command(new_command)
exec_with_env
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ssh, command, msg='', env={}, **kwargs):\n bash_profile_command = 'source .bash_profile > /dev/null 2> /dev/null;'\n env_command = build_os_environment_string(env)\n new_command = bash_profile_command + env_command + command\n if kwargs.get('sync', True):\n return better_exec_command(ssh, new_command, msg)\n else:\n return ssh.exec_command(new_command)\n", "role": "user" }, { "content": "exec_with_env", "role": "assistant" } ]
def x(ssh, command, msg): chan = ssh.get_transport().open_session() chan.exec_command(command) exit_status = chan.recv_exit_status() if exit_status != 0: msg_str = chan.recv_stderr(1024) err_msgs = [] while len(msg_str) > 0: err_msgs.append(msg_str) msg_str = chan.recv_stderr(1024) err_msg = ''.join(err_msgs) logger.error(err_msg) raise ParamikoError(msg, err_msg) return chan
better_exec_command
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ssh, command, msg):\n chan = ssh.get_transport().open_session()\n chan.exec_command(command)\n exit_status = chan.recv_exit_status()\n if exit_status != 0:\n msg_str = chan.recv_stderr(1024)\n err_msgs = []\n while len(msg_str) > 0:\n err_msgs.append(msg_str)\n msg_str = chan.recv_stderr(1024)\n err_msg = ''.join(err_msgs)\n logger.error(err_msg)\n raise ParamikoError(msg, err_msg)\n return chan\n", "role": "user" }, { "content": "better_exec_command", "role": "assistant" } ]
def x(chan): if hasattr(chan, 'recv'): str = chan.recv(1024) msgs = [] while len(str) > 0: msgs.append(str) str = chan.recv(1024) msg = ''.join(msgs).strip() if len(msg) > 0: logger.info(msg)
log_output
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(chan):\n if hasattr(chan, 'recv'):\n str = chan.recv(1024)\n msgs = []\n while len(str) > 0:\n msgs.append(str)\n str = chan.recv(1024)\n msg = ''.join(msgs).strip()\n if len(msg) > 0:\n logger.info(msg)\n", "role": "user" }, { "content": "log_output", "role": "assistant" } ]
def x(hostname, filename, mode='r', bufsize=-1, username=None, password=None): with get_ssh_client(hostname, username=username, password=password) as ssh: sftp = None f = None try: sftp = ssh.open_sftp() f = sftp.open(filename, mode, bufsize) yield f finally: if f is not None: f.close() if sftp is not None: sftp.close()
open_remote_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(hostname, filename, mode='r', bufsize=-1, username=None, password=None):\n with get_ssh_client(hostname, username=username, password=password) as ssh:\n sftp = None\n f = None\n try:\n sftp = ssh.open_sftp()\n f = sftp.open(filename, mode, bufsize)\n yield f\n finally:\n if f is not None:\n f.close()\n if sftp is not None:\n sftp.close()\n", "role": "user" }, { "content": "open_remote_file", "role": "assistant" } ]
def x(self, unique_id, configs=None): self.install(unique_id, configs) self.start(unique_id, configs)
deploy
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self.install(unique_id, configs)\n self.start(unique_id, configs)\n", "role": "user" }, { "content": "deploy", "role": "assistant" } ]
def x(self, unique_id, configs=None): self.stop(unique_id, configs) self.uninstall(unique_id, configs)
undeploy
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self.stop(unique_id, configs)\n self.uninstall(unique_id, configs)\n", "role": "user" }, { "content": "undeploy", "role": "assistant" } ]
def x(self, unique_id, configs=None): self.stop(unique_id, configs) self.start(unique_id, configs)
soft_bounce
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self.stop(unique_id, configs)\n self.start(unique_id, configs)\n", "role": "user" }, { "content": "soft_bounce", "role": "assistant" } ]
def x(self, unique_id, configs=None): self.kill(unique_id, configs) self.start(unique_id, configs)
hard_bounce
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self.kill(unique_id, configs)\n self.start(unique_id, configs)\n", "role": "user" }, { "content": "hard_bounce", "role": "assistant" } ]
def x(self, unique_id, delay, configs=None): self.pause(unique_id, configs) time.sleep(delay) self.resume(unique_id, configs)
sleep
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, delay, configs=None):\n self.pause(unique_id, configs)\n time.sleep(delay)\n self.resume(unique_id, configs)\n", "role": "user" }, { "content": "sleep", "role": "assistant" } ]
def x(self, unique_id, configs=None): pids = self.get_pid(unique_id, configs) if pids != constants.PROCESS_NOT_RUNNING_PID: pid_str = ' '.join(str(pid) for pid in pids) hostname = self.processes[unique_id].hostname with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh: better_exec_command(ssh, 'kill -SIGSTOP {0}'.format(pid_str), 'PAUSING PROCESS {0}'.format(unique_id))
pause
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n pids = self.get_pid(unique_id, configs)\n if pids != constants.PROCESS_NOT_RUNNING_PID:\n pid_str = ' '.join(str(pid) for pid in pids)\n hostname = self.processes[unique_id].hostname\n with get_ssh_client(hostname, username=runtime.get_username(),\n password=runtime.get_password()) as ssh:\n better_exec_command(ssh, 'kill -SIGSTOP {0}'.format(pid_str),\n 'PAUSING PROCESS {0}'.format(unique_id))\n", "role": "user" }, { "content": "pause", "role": "assistant" } ]
def x(self, unique_id, signalno, configs): pids = self.get_pid(unique_id, configs) if pids != constants.PROCESS_NOT_RUNNING_PID: pid_str = ' '.join(str(pid) for pid in pids) hostname = self.processes[unique_id].hostname msg = Deployer._signalnames.get(signalno, 'SENDING SIGNAL %s TO' % signalno) with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh: better_exec_command(ssh, 'kill -{0} {1}'.format(signalno, pid_str), '{0} PROCESS {1}'.format(msg, unique_id))
send_signal
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, signalno, configs):\n pids = self.get_pid(unique_id, configs)\n if pids != constants.PROCESS_NOT_RUNNING_PID:\n pid_str = ' '.join(str(pid) for pid in pids)\n hostname = self.processes[unique_id].hostname\n msg = Deployer._signalnames.get(signalno, 'SENDING SIGNAL %s TO' %\n signalno)\n with get_ssh_client(hostname, username=runtime.get_username(),\n password=runtime.get_password()) as ssh:\n better_exec_command(ssh, 'kill -{0} {1}'.format(signalno,\n pid_str), '{0} PROCESS {1}'.format(msg, unique_id))\n", "role": "user" }, { "content": "send_signal", "role": "assistant" } ]
def x(self, unique_id, configs=None): self._send_signal(unique_id, signal.SIGCONT, configs)
resume
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self._send_signal(unique_id, signal.SIGCONT, configs)\n", "role": "user" }, { "content": "resume", "role": "assistant" } ]
def x(self, unique_id, configs=None): self._send_signal(unique_id, signal.SIGKILL, configs)
kill
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self._send_signal(unique_id, signal.SIGKILL, configs)\n", "role": "user" }, { "content": "kill", "role": "assistant" } ]
def x(self, unique_id, configs=None): self._send_signal(unique_id, signal.SIGTERM, configs)
terminate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self._send_signal(unique_id, signal.SIGTERM, configs)\n", "role": "user" }, { "content": "terminate", "role": "assistant" } ]
def x(self, unique_id, configs=None): self._send_signal(unique_id, signal.SIGHUP, configs)
hangup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n self._send_signal(unique_id, signal.SIGHUP, configs)\n", "role": "user" }, { "content": "hangup", "role": "assistant" } ]
def x(self, unique_id, logs, directory, pattern=constants. FILTER_NAME_ALLOW_NONE): self.fetch_logs(unique_id, logs, directory, pattern)
get_logs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, logs, directory, pattern=constants.\n FILTER_NAME_ALLOW_NONE):\n self.fetch_logs(unique_id, logs, directory, pattern)\n", "role": "user" }, { "content": "get_logs", "role": "assistant" } ]
def x(self, unique_id, logs, directory, pattern=constants. FILTER_NAME_ALLOW_NONE): hostname = self.processes[unique_id].hostname install_path = self.processes[unique_id].install_path self.fetch_logs_from_host(hostname, install_path, unique_id, logs, directory, pattern)
fetch_logs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, logs, directory, pattern=constants.\n FILTER_NAME_ALLOW_NONE):\n hostname = self.processes[unique_id].hostname\n install_path = self.processes[unique_id].install_path\n self.fetch_logs_from_host(hostname, install_path, unique_id, logs,\n directory, pattern)\n", "role": "user" }, { "content": "fetch_logs", "role": "assistant" } ]
def x(self): self._setup() for config_name in self.report_info.config_to_test_names_map.keys(): config_dir = os.path.join(self.report_info.resource_dir, config_name) utils.makedirs(config_dir) testsuite = self._generate_junit_xml(config_name) with open(os.path.join(self.report_info.junit_xml_path, 'zopkio_junit_reports.xml'), 'w') as file: TestSuite.to_file(file, [testsuite], prettyprint=False)
generate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self._setup()\n for config_name in self.report_info.config_to_test_names_map.keys():\n config_dir = os.path.join(self.report_info.resource_dir, config_name)\n utils.makedirs(config_dir)\n testsuite = self._generate_junit_xml(config_name)\n with open(os.path.join(self.report_info.junit_xml_path,\n 'zopkio_junit_reports.xml'), 'w') as file:\n TestSuite.to_file(file, [testsuite], prettyprint=False)\n", "role": "user" }, { "content": "generate", "role": "assistant" } ]
def x(self, unique_id, configs=None): if configs is None: configs = {} tmp = self.default_configs.copy() tmp.update(configs) configs = tmp hostname = None is_tarfile = False is_zipfile = False if unique_id in self.processes and 'hostname' in configs: self.uninstall(unique_id, configs) hostname = configs['hostname'] elif 'hostname' in configs: hostname = configs['hostname'] elif unique_id not in self.processes: raise DeploymentError('hostname was not provided for unique_id: ' + unique_id) env = configs.get('env', {}) install_path = configs.get('install_path') or self.default_configs.get( 'install_path') pid_file = configs.get('pid_file') or self.default_configs.get('pid_file') if install_path is None: logger.error('install_path was not provided for unique_id: ' + unique_id) raise DeploymentError( 'install_path was not provided for unique_id: ' + unique_id) if not configs.get('no_copy', False): with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh: log_output(better_exec_command(ssh, 'mkdir -p {0}'.format( install_path), 'Failed to create path {0}'.format( install_path))) log_output(better_exec_command(ssh, 'chmod 755 {0}'.format( install_path), 'Failed to make path {0} writeable'.format( install_path))) executable = configs.get('executable') or self.default_configs.get( 'executable') if executable is None: logger.error('executable was not provided for unique_id: ' + unique_id) raise DeploymentError( 'executable was not provided for unique_id: ' + unique_id) copy_from_remote_location = False if ':' in executable: copy_from_remote_location = True if 'http' not in executable: remote_location_server = executable.split(':')[0] remote_file_path = executable.split(':')[1] remote_file_name = os.path.basename(remote_file_path) local_temp_file_name = os.path.join(configs.get( 'tmp_dir', '/tmp'), remote_file_name) if not os.path.exists(local_temp_file_name): with get_sftp_client(remote_location_server, username=runtime.get_username(), password= runtime.get_password()) as ftp: try: ftp.get(remote_file_path, local_temp_file_name) executable = local_temp_file_name except: raise DeploymentError( 'Unable to load file from remote server ' + executable) else: remote_file_name = executable.split('/')[-1] local_temp_file_name = os.path.join(configs.get( 'tmp_dir', '/tmp'), remote_file_name) if not os.path.exists(local_temp_file_name): try: urllib.urlretrieve(executable, local_temp_file_name ) except: raise DeploymentError( 'Unable to load file from remote server ' + executable) executable = local_temp_file_name try: exec_name = os.path.basename(executable) install_location = os.path.join(install_path, exec_name) with get_sftp_client(hostname, username=runtime. get_username(), password=runtime.get_password()) as ftp: ftp.put(executable, install_location) except: raise DeploymentError( 'Unable to copy executable to install_location:' + install_location) finally: is_tarfile = tarfile.is_tarfile(executable) is_zipfile = zipfile.is_zipfile(executable) if copy_from_remote_location and not configs.get('cache', False ): os.remove(executable) if configs.get('extract', False) or self.default_configs.get( 'extract', False): if is_tarfile: log_output(better_exec_command(ssh, 'tar -xf {0} -C {1}'.format(install_location, install_path), 'Failed to extract tarfile {0}'. format(exec_name))) elif is_zipfile: log_output(better_exec_command(ssh, 'unzip -o {0} -d {1}'.format(install_location, install_path), 'Failed to extract zipfile {0}'. format(exec_name))) else: logger.error(executable + ' is not a supported filetype for extracting') raise DeploymentError(executable + ' is not a supported filetype for extracting') post_install_cmds = configs.get('post_install_cmds', False ) or self.default_configs.get('post_install_cmds', []) for cmd in post_install_cmds: relative_cmd = 'cd {0}; {1}'.format(install_path, cmd) log_output(exec_with_env(ssh, relative_cmd, msg= 'Failed to execute post install command: {0}'.format( relative_cmd), env=env)) self.processes[unique_id] = Process(unique_id, self.service_name, hostname, install_path) self.processes[unique_id].pid_file = pid_file
install
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n    if configs is None:\n        configs = {}\n    tmp = self.default_configs.copy()\n    tmp.update(configs)\n    configs = tmp\n    hostname = None\n    is_tarfile = False\n    is_zipfile = False\n    if unique_id in self.processes and 'hostname' in configs:\n        self.uninstall(unique_id, configs)\n        hostname = configs['hostname']\n    elif 'hostname' in configs:\n        hostname = configs['hostname']\n    elif unique_id not in self.processes:\n        raise DeploymentError('hostname was not provided for unique_id: ' +\n            unique_id)\n    env = configs.get('env', {})\n    install_path = configs.get('install_path') or self.default_configs.get(\n        'install_path')\n    pid_file = configs.get('pid_file') or self.default_configs.get('pid_file')\n    if install_path is None:\n        logger.error('install_path was not provided for unique_id: ' +\n            unique_id)\n        raise DeploymentError(\n            'install_path was not provided for unique_id: ' + unique_id)\n    if not configs.get('no_copy', False):\n        with get_ssh_client(hostname, username=runtime.get_username(),\n            password=runtime.get_password()) as ssh:\n            log_output(better_exec_command(ssh, 'mkdir -p {0}'.format(\n                install_path), 'Failed to create path {0}'.format(\n                install_path)))\n            log_output(better_exec_command(ssh, 'chmod 755 {0}'.format(\n                install_path), 'Failed to make path {0} writeable'.format(\n                install_path)))\n            executable = configs.get('executable') or self.default_configs.get(\n                'executable')\n            if executable is None:\n                logger.error('executable was not provided for unique_id: ' +\n                    unique_id)\n                raise DeploymentError(\n                    'executable was not provided for unique_id: ' + unique_id)\n            copy_from_remote_location = False\n            if ':' in executable:\n                copy_from_remote_location = True\n                if 'http' not in executable:\n                    remote_location_server = executable.split(':')[0]\n                    remote_file_path = executable.split(':')[1]\n                    remote_file_name = os.path.basename(remote_file_path)\n                    local_temp_file_name = os.path.join(configs.get(\n                        'tmp_dir', '/tmp'), remote_file_name)\n                    if not os.path.exists(local_temp_file_name):\n                        with get_sftp_client(remote_location_server,\n                            username=runtime.get_username(), password=\n                            runtime.get_password()) as ftp:\n                            try:\n                                ftp.get(remote_file_path, local_temp_file_name)\n                                executable = local_temp_file_name\n                            except:\n                                raise DeploymentError(\n                                    'Unable to load file from remote server ' +\n                                    executable)\n                else:\n                    remote_file_name = executable.split('/')[-1]\n                    local_temp_file_name = os.path.join(configs.get(\n                        'tmp_dir', '/tmp'), remote_file_name)\n                    if not os.path.exists(local_temp_file_name):\n                        try:\n                            urllib.urlretrieve(executable, local_temp_file_name\n                                )\n                        except:\n                            raise DeploymentError(\n                                'Unable to load file from remote server ' +\n                                executable)\n                    executable = local_temp_file_name\n            try:\n                exec_name = os.path.basename(executable)\n                install_location = os.path.join(install_path, exec_name)\n                with get_sftp_client(hostname, username=runtime.\n                    get_username(), password=runtime.get_password()) as ftp:\n                    ftp.put(executable, install_location)\n            except:\n                raise DeploymentError(\n                    'Unable to copy executable to install_location:' +\n                    install_location)\n            finally:\n                is_tarfile = tarfile.is_tarfile(executable)\n                is_zipfile = zipfile.is_zipfile(executable)\n                if copy_from_remote_location and not configs.get('cache', False\n                    ):\n                    os.remove(executable)\n            if configs.get('extract', False) or self.default_configs.get(\n                'extract', False):\n                if is_tarfile:\n                    log_output(better_exec_command(ssh,\n                        'tar -xf {0} -C {1}'.format(install_location,\n                        install_path), 'Failed to extract tarfile {0}'.\n                        format(exec_name)))\n                elif is_zipfile:\n                    log_output(better_exec_command(ssh,\n                        'unzip -o {0} -d {1}'.format(install_location,\n                        install_path), 'Failed to extract zipfile {0}'.\n                        format(exec_name)))\n                else:\n                    logger.error(executable +\n                        ' is not a supported filetype for extracting')\n                    raise DeploymentError(executable +\n                        ' is not a supported filetype for extracting')\n            post_install_cmds = configs.get('post_install_cmds', False\n                ) or self.default_configs.get('post_install_cmds', [])\n            for cmd in post_install_cmds:\n                relative_cmd = 'cd {0}; {1}'.format(install_path, cmd)\n                log_output(exec_with_env(ssh, relative_cmd, msg=\n                    'Failed to execute post install command: {0}'.format(\n                    relative_cmd), env=env))\n    self.processes[unique_id] = Process(unique_id, self.service_name,\n        hostname, install_path)\n    self.processes[unique_id].pid_file = pid_file\n", "role": "user" }, { "content": "install", "role": "assistant" } ]
def x(self, unique_id, configs=None): if configs is None: configs = {} tmp = self.default_configs.copy() tmp.update(configs) configs = tmp logger.debug('starting ' + unique_id) if self.get_pid(unique_id, configs ) is not constants.PROCESS_NOT_RUNNING_PID: return None if unique_id not in self.processes: self.install(unique_id, configs) hostname = self.processes[unique_id].hostname install_path = self.processes[unique_id].install_path start_command = configs.get('start_command') or self.processes[unique_id ].start_command or self.default_configs.get('start_command') pid_file = configs.get('pid_file') or self.default_configs.get('pid_file') if start_command is None: logger.error('start_command was not provided for unique_id: ' + unique_id) raise DeploymentError( 'start_command was not provided for unique_id: ' + unique_id) args = configs.get('args') or self.processes[unique_id ].args or self.default_configs.get('args') if args is not None: full_start_command = '{0} {1}'.format(start_command, ' '.join(args)) else: full_start_command = start_command command = 'cd {0}; {1}'.format(install_path, full_start_command) env = configs.get('env', {}) with get_ssh_client(hostname, username=runtime.get_username(), password =runtime.get_password()) as ssh: exec_with_env(ssh, command, msg='Failed to start', env=env, sync= configs.get('sync', False)) self.processes[unique_id].start_command = start_command self.processes[unique_id].args = args if self.processes[unique_id].pid_file is None: self.processes[unique_id].pid_file = pid_file if 'delay' in configs: time.sleep(configs['delay'])
start
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n if configs is None:\n configs = {}\n tmp = self.default_configs.copy()\n tmp.update(configs)\n configs = tmp\n logger.debug('starting ' + unique_id)\n if self.get_pid(unique_id, configs\n ) is not constants.PROCESS_NOT_RUNNING_PID:\n return None\n if unique_id not in self.processes:\n self.install(unique_id, configs)\n hostname = self.processes[unique_id].hostname\n install_path = self.processes[unique_id].install_path\n start_command = configs.get('start_command') or self.processes[unique_id\n ].start_command or self.default_configs.get('start_command')\n pid_file = configs.get('pid_file') or self.default_configs.get('pid_file')\n if start_command is None:\n logger.error('start_command was not provided for unique_id: ' +\n unique_id)\n raise DeploymentError(\n 'start_command was not provided for unique_id: ' + unique_id)\n args = configs.get('args') or self.processes[unique_id\n ].args or self.default_configs.get('args')\n if args is not None:\n full_start_command = '{0} {1}'.format(start_command, ' '.join(args))\n else:\n full_start_command = start_command\n command = 'cd {0}; {1}'.format(install_path, full_start_command)\n env = configs.get('env', {})\n with get_ssh_client(hostname, username=runtime.get_username(), password\n =runtime.get_password()) as ssh:\n exec_with_env(ssh, command, msg='Failed to start', env=env, sync=\n configs.get('sync', False))\n self.processes[unique_id].start_command = start_command\n self.processes[unique_id].args = args\n if self.processes[unique_id].pid_file is None:\n self.processes[unique_id].pid_file = pid_file\n if 'delay' in configs:\n time.sleep(configs['delay'])\n", "role": "user" }, { "content": "start", "role": "assistant" } ]
def x(self, unique_id, configs=None): if configs is None: configs = {} tmp = self.default_configs.copy() tmp.update(configs) configs = tmp logger.debug('stopping ' + unique_id) if unique_id in self.processes: hostname = self.processes[unique_id].hostname else: logger.error("Can't stop {0}: process not known".format(unique_id)) raise DeploymentError("Can't stop {0}: process not known".format( unique_id)) if configs.get('terminate_only', False): self.terminate(unique_id, configs) else: stop_command = configs.get('stop_command') or self.default_configs.get( 'stop_command') env = configs.get('env', {}) if stop_command is not None: install_path = self.processes[unique_id].install_path with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh: log_output(exec_with_env(ssh, 'cd {0}; {1}'.format( install_path, stop_command), msg='Failed to stop {0}'. format(unique_id), env=env)) else: self.terminate(unique_id, configs) if 'delay' in configs: time.sleep(configs['delay'])
stop
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n if configs is None:\n configs = {}\n tmp = self.default_configs.copy()\n tmp.update(configs)\n configs = tmp\n logger.debug('stopping ' + unique_id)\n if unique_id in self.processes:\n hostname = self.processes[unique_id].hostname\n else:\n logger.error(\"Can't stop {0}: process not known\".format(unique_id))\n raise DeploymentError(\"Can't stop {0}: process not known\".format(\n unique_id))\n if configs.get('terminate_only', False):\n self.terminate(unique_id, configs)\n else:\n stop_command = configs.get('stop_command') or self.default_configs.get(\n 'stop_command')\n env = configs.get('env', {})\n if stop_command is not None:\n install_path = self.processes[unique_id].install_path\n with get_ssh_client(hostname, username=runtime.get_username(),\n password=runtime.get_password()) as ssh:\n log_output(exec_with_env(ssh, 'cd {0}; {1}'.format(\n install_path, stop_command), msg='Failed to stop {0}'.\n format(unique_id), env=env))\n else:\n self.terminate(unique_id, configs)\n if 'delay' in configs:\n time.sleep(configs['delay'])\n", "role": "user" }, { "content": "stop", "role": "assistant" } ]
def x(self, unique_id, configs=None): if configs is None: configs = {} tmp = self.default_configs.copy() tmp.update(configs) configs = tmp if unique_id in self.processes: hostname = self.processes[unique_id].hostname else: logger.error("Can't uninstall {0}: process not known".format(unique_id) ) raise DeploymentError("Can't uninstall {0}: process not known". format(unique_id)) install_path = self.processes[unique_id].install_path directories_to_remove = self.default_configs.get('directories_to_clean', [] ) directories_to_remove.extend(configs.get('additional_directories', [])) if install_path not in directories_to_remove: directories_to_remove.append(install_path) with get_ssh_client(hostname, username=runtime.get_username(), password =runtime.get_password()) as ssh: for directory_to_remove in directories_to_remove: log_output(better_exec_command(ssh, 'rm -rf {0}'.format( directory_to_remove), 'Failed to remove {0}'.format( directory_to_remove)))
uninstall
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n if configs is None:\n configs = {}\n tmp = self.default_configs.copy()\n tmp.update(configs)\n configs = tmp\n if unique_id in self.processes:\n hostname = self.processes[unique_id].hostname\n else:\n logger.error(\"Can't uninstall {0}: process not known\".format(unique_id)\n )\n raise DeploymentError(\"Can't uninstall {0}: process not known\".\n format(unique_id))\n install_path = self.processes[unique_id].install_path\n directories_to_remove = self.default_configs.get('directories_to_clean', []\n )\n directories_to_remove.extend(configs.get('additional_directories', []))\n if install_path not in directories_to_remove:\n directories_to_remove.append(install_path)\n with get_ssh_client(hostname, username=runtime.get_username(), password\n =runtime.get_password()) as ssh:\n for directory_to_remove in directories_to_remove:\n log_output(better_exec_command(ssh, 'rm -rf {0}'.format(\n directory_to_remove), 'Failed to remove {0}'.format(\n directory_to_remove)))\n", "role": "user" }, { "content": "uninstall", "role": "assistant" } ]
def x(self, unique_id, configs=None): RECV_BLOCK_SIZE = 16 if configs is None: configs = {} tmp = self.default_configs.copy() tmp.update(configs) configs = tmp if unique_id in self.processes: hostname = self.processes[unique_id].hostname else: return constants.PROCESS_NOT_RUNNING_PID if self.processes[unique_id].start_command is None: return constants.PROCESS_NOT_RUNNING_PID if self.processes[unique_id].pid_file is not None: with open_remote_file(hostname, self.processes[unique_id].pid_file, username=runtime.get_username(), password=runtime.get_password() ) as pid_file: full_output = pid_file.read() elif 'pid_file' in configs.keys(): with open_remote_file(hostname, configs['pid_file'], username= runtime.get_username(), password=runtime.get_password() ) as pid_file: full_output = pid_file.read() else: pid_keyword = self.processes[unique_id].start_command if self.processes[unique_id].args is not None: pid_keyword = '{0} {1}'.format(pid_keyword, ' '.join(self. processes[unique_id].args)) pid_keyword = configs.get('pid_keyword', pid_keyword) pid_command = ( "ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'" .format(pid_keyword)) pid_command = configs.get('pid_command', pid_command) non_failing_command = ( '{0}; if [ $? -le 1 ]; then true; else false; fi;'.format( pid_command)) env = configs.get('env', {}) with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh: chan = exec_with_env(ssh, non_failing_command, msg= 'Failed to get PID', env=env) output = chan.recv(RECV_BLOCK_SIZE) full_output = output while len(output) > 0: output = chan.recv(RECV_BLOCK_SIZE) full_output += output if len(full_output) > 0: pids = [int(pid_str) for pid_str in full_output.split('\n') if pid_str.isdigit()] if len(pids) > 0: return pids return constants.PROCESS_NOT_RUNNING_PID
get_pid
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id, configs=None):\n RECV_BLOCK_SIZE = 16\n if configs is None:\n configs = {}\n tmp = self.default_configs.copy()\n tmp.update(configs)\n configs = tmp\n if unique_id in self.processes:\n hostname = self.processes[unique_id].hostname\n else:\n return constants.PROCESS_NOT_RUNNING_PID\n if self.processes[unique_id].start_command is None:\n return constants.PROCESS_NOT_RUNNING_PID\n if self.processes[unique_id].pid_file is not None:\n with open_remote_file(hostname, self.processes[unique_id].pid_file,\n username=runtime.get_username(), password=runtime.get_password()\n ) as pid_file:\n full_output = pid_file.read()\n elif 'pid_file' in configs.keys():\n with open_remote_file(hostname, configs['pid_file'], username=\n runtime.get_username(), password=runtime.get_password()\n ) as pid_file:\n full_output = pid_file.read()\n else:\n pid_keyword = self.processes[unique_id].start_command\n if self.processes[unique_id].args is not None:\n pid_keyword = '{0} {1}'.format(pid_keyword, ' '.join(self.\n processes[unique_id].args))\n pid_keyword = configs.get('pid_keyword', pid_keyword)\n pid_command = (\n \"ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'\"\n .format(pid_keyword))\n pid_command = configs.get('pid_command', pid_command)\n non_failing_command = (\n '{0}; if [ $? -le 1 ]; then true; else false; fi;'.format(\n pid_command))\n env = configs.get('env', {})\n with get_ssh_client(hostname, username=runtime.get_username(),\n password=runtime.get_password()) as ssh:\n chan = exec_with_env(ssh, non_failing_command, msg=\n 'Failed to get PID', env=env)\n output = chan.recv(RECV_BLOCK_SIZE)\n full_output = output\n while len(output) > 0:\n output = chan.recv(RECV_BLOCK_SIZE)\n full_output += output\n if len(full_output) > 0:\n pids = [int(pid_str) for pid_str in full_output.split('\\n') if\n pid_str.isdigit()]\n if len(pids) > 0:\n return pids\n return constants.PROCESS_NOT_RUNNING_PID\n", "role": "user" }, { "content": "get_pid", "role": "assistant" } ]
def x(self, unique_id): if unique_id in self.processes: return self.processes[unique_id].hostname logger.error('{0} not a known process'.format(unique_id)) raise NameError('{0} not a known process'.format(unique_id))
get_host
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, unique_id):\n if unique_id in self.processes:\n return self.processes[unique_id].hostname\n logger.error('{0} not a known process'.format(unique_id))\n raise NameError('{0} not a known process'.format(unique_id))\n", "role": "user" }, { "content": "get_host", "role": "assistant" } ]
def x(self): if runtime.get_active_config('cleanup_pending_process', False): for process in self.get_processes(): self.terminate(process.unique_id)
kill_all_process
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if runtime.get_active_config('cleanup_pending_process', False):\n for process in self.get_processes():\n self.terminate(process.unique_id)\n", "role": "user" }, { "content": "kill_all_process", "role": "assistant" } ]
def x(log_level): if log_level.strip().upper() == 'DEBUG': return logging.DEBUG if log_level.strip().upper() == 'INFO': return logging.INFO if log_level.strip().upper() == 'WARNING': return logging.WARNING if log_level.strip().upper() == 'ERROR': return logging.ERROR
string_to_level
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(log_level):\n if log_level.strip().upper() == 'DEBUG':\n return logging.DEBUG\n if log_level.strip().upper() == 'INFO':\n return logging.INFO\n if log_level.strip().upper() == 'WARNING':\n return logging.WARNING\n if log_level.strip().upper() == 'ERROR':\n return logging.ERROR\n", "role": "user" }, { "content": "string_to_level", "role": "assistant" } ]
def x(): parser = argparse.ArgumentParser(description='A distributed test framework' ) parser.add_argument('testfile', help= 'The file that is used to determine the test suite run') parser.add_argument('--test-only', nargs='*', dest='test_list', help= 'run only the named tests to help debug broken tests') parser.add_argument('--machine-list', nargs='*', dest='machine_list', help= """mapping of logical host names to physical names allowing the same test suite to run on different hardware, each argument is a pair of logical name and physical name separated by a =""" ) parser.add_argument('--config-overrides', nargs='*', dest= 'config_overrides', help= """config overrides at execution time, each argument is a config with its value separated by a =. This has the highest priority of all configs""" ) parser.add_argument('-d', '--output-dir', dest='output_dir', help= """Directory to write output files and logs. Defaults to the current directory.""" ) parser.add_argument('--log-level', dest='log_level', help= 'Log level (default INFO)', default='INFO') parser.add_argument('--console-log-level', dest='console_level', help= 'Console Log level (default ERROR)', default='ERROR') parser.add_argument('--nopassword', action='store_true', dest= 'nopassword', help='Disable password prompt') parser.add_argument('--user', dest='user', help= 'user to run the test as (defaults to current user)') args = parser.parse_args() try: call_main(args) except ValueError: sys.exit(1)
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n parser = argparse.ArgumentParser(description='A distributed test framework'\n )\n parser.add_argument('testfile', help=\n 'The file that is used to determine the test suite run')\n parser.add_argument('--test-only', nargs='*', dest='test_list', help=\n 'run only the named tests to help debug broken tests')\n parser.add_argument('--machine-list', nargs='*', dest='machine_list',\n help=\n \"\"\"mapping of logical host names to physical names allowing the same\n test suite to run on different hardware, each argument is a pair\n of logical name and physical name separated by a =\"\"\"\n )\n parser.add_argument('--config-overrides', nargs='*', dest=\n 'config_overrides', help=\n \"\"\"config overrides at execution time, each argument is a config with\n its value separated by a =. This has the highest priority of all\n configs\"\"\"\n )\n parser.add_argument('-d', '--output-dir', dest='output_dir', help=\n \"\"\"Directory to write output files and logs. Defaults to the current\n directory.\"\"\"\n )\n parser.add_argument('--log-level', dest='log_level', help=\n 'Log level (default INFO)', default='INFO')\n parser.add_argument('--console-log-level', dest='console_level', help=\n 'Console Log level (default ERROR)', default='ERROR')\n parser.add_argument('--nopassword', action='store_true', dest=\n 'nopassword', help='Disable password prompt')\n parser.add_argument('--user', dest='user', help=\n 'user to run the test as (defaults to current user)')\n args = parser.parse_args()\n try:\n call_main(args)\n except ValueError:\n sys.exit(1)\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(): global _username global _password global _active_config global _active_tests global _machine_names global _deployers reset_deployers() reset_collector() _username = None _password = None _active_config = None _active_tests = {} _machine_names = defaultdict()
reset_all
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n global _username\n global _password\n global _active_config\n global _active_tests\n global _machine_names\n global _deployers\n reset_deployers()\n reset_collector()\n _username = None\n _password = None\n _active_config = None\n _active_tests = {}\n _machine_names = defaultdict()\n", "role": "user" }, { "content": "reset_all", "role": "assistant" } ]
def x(config_option, default=None): return _active_config.mapping[config_option ] if default is None else _active_config.mapping.get(config_option, default)
get_active_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(config_option, default=None):\n return _active_config.mapping[config_option\n ] if default is None else _active_config.mapping.get(config_option,\n default)\n", "role": "user" }, { "content": "get_active_config", "role": "assistant" } ]
def x(self): self._setup() header_html = self._generate_header() footer_html = self._generate_footer() results_topbar_html = self._generate_topbar('results') summary_topbar_html = self._generate_topbar('summary') logs_topbar_html = self._generate_topbar('logs') diff_topbar_html = self._generate_topbar('diff') summary_body_html = self._generate_summary_body() diff_body_html = self._generate_diff_body() summary_html = (header_html + summary_topbar_html + summary_body_html + footer_html) diff_html = header_html + diff_topbar_html + diff_body_html + footer_html Reporter._make_file(summary_html, self.report_info.home_page) Reporter._make_file(diff_html, self.report_info.diff_page) log_body_html = self._generate_log_body() log_html = header_html + logs_topbar_html + log_body_html + footer_html Reporter._make_file(log_html, self.report_info.log_page) for config_name in self.report_info.config_to_test_names_map.keys(): config_dir = os.path.join(self.report_info.resource_dir, config_name) utils.makedirs(config_dir) config_body_html = self._generate_config_body(config_name) config_html = (header_html + results_topbar_html + config_body_html + footer_html) config_file = os.path.join(config_dir, config_name + self. report_info.report_file_sfx) Reporter._make_file(config_html, config_file) for test_name in self.data_source.get_test_names(config_name): test_body_html = self._generate_test_body(config_name, test_name) test_html = (header_html + results_topbar_html + test_body_html + footer_html) test_file = os.path.join(config_dir, test_name + self. report_info.report_file_sfx) Reporter._make_file(test_html, test_file)
generate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self._setup()\n header_html = self._generate_header()\n footer_html = self._generate_footer()\n results_topbar_html = self._generate_topbar('results')\n summary_topbar_html = self._generate_topbar('summary')\n logs_topbar_html = self._generate_topbar('logs')\n diff_topbar_html = self._generate_topbar('diff')\n summary_body_html = self._generate_summary_body()\n diff_body_html = self._generate_diff_body()\n summary_html = (header_html + summary_topbar_html + summary_body_html +\n footer_html)\n diff_html = header_html + diff_topbar_html + diff_body_html + footer_html\n Reporter._make_file(summary_html, self.report_info.home_page)\n Reporter._make_file(diff_html, self.report_info.diff_page)\n log_body_html = self._generate_log_body()\n log_html = header_html + logs_topbar_html + log_body_html + footer_html\n Reporter._make_file(log_html, self.report_info.log_page)\n for config_name in self.report_info.config_to_test_names_map.keys():\n config_dir = os.path.join(self.report_info.resource_dir, config_name)\n utils.makedirs(config_dir)\n config_body_html = self._generate_config_body(config_name)\n config_html = (header_html + results_topbar_html + config_body_html +\n footer_html)\n config_file = os.path.join(config_dir, config_name + self.\n report_info.report_file_sfx)\n Reporter._make_file(config_html, config_file)\n for test_name in self.data_source.get_test_names(config_name):\n test_body_html = self._generate_test_body(config_name, test_name)\n test_html = (header_html + results_topbar_html + test_body_html +\n footer_html)\n test_file = os.path.join(config_dir, test_name + self.\n report_info.report_file_sfx)\n Reporter._make_file(test_html, test_file)\n", "role": "user" }, { "content": "generate", "role": "assistant" } ]
def x(self, conn, dataset, dataset_access_type, transaction=False): if not conn: dbsExceptionHandler('dbsException-failed-connect2host', 'Oracle/Dataset/UpdateType. Expects db connection from upper layer.' , self.logger.exception) binds = {'dataset': dataset, 'dataset_access_type': dataset_access_type, 'myuser': dbsUtils().getCreateBy(), 'mydate': dbsUtils().getTime()} result = self.dbi.processData(self.sql, binds, conn, transaction)
execute
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, conn, dataset, dataset_access_type, transaction=False):\n if not conn:\n dbsExceptionHandler('dbsException-failed-connect2host',\n 'Oracle/Dataset/UpdateType. Expects db connection from upper layer.'\n , self.logger.exception)\n binds = {'dataset': dataset, 'dataset_access_type': dataset_access_type,\n 'myuser': dbsUtils().getCreateBy(), 'mydate': dbsUtils().getTime()}\n result = self.dbi.processData(self.sql, binds, conn, transaction)\n", "role": "user" }, { "content": "execute", "role": "assistant" } ]
def x(input_key, input_data, read=False): log = clog.error_log func = None if '*' in input_data or '%' in input_data: func = validationFunctionWildcard.get(input_key) if func is None: func = searchstr elif input_key == 'migration_input': if input_data.find('#') != -1: func = block else: func = dataset elif not read: func = validationFunction.get(input_key) if func is None: func = namestr elif input_key == 'dataset': func = reading_dataset_check elif input_key == 'block_name': func = reading_block_check elif input_key == 'logical_file_name': func = reading_lfn_check else: func = namestr try: func(input_data) except AssertionError as ae: serverLog = str(ae ) + ' key-value pair (%s, %s) cannot pass input checking' % ( input_key, input_data) dbsExceptionHandler('dbsException-invalid-input2', message= 'Invalid Input Data %s...: Not Match Required Format' % input_data[:10], logger=log.error, serverError=serverLog) return input_data
validateStringInput
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(input_key, input_data, read=False):\n log = clog.error_log\n func = None\n if '*' in input_data or '%' in input_data:\n func = validationFunctionWildcard.get(input_key)\n if func is None:\n func = searchstr\n elif input_key == 'migration_input':\n if input_data.find('#') != -1:\n func = block\n else:\n func = dataset\n elif not read:\n func = validationFunction.get(input_key)\n if func is None:\n func = namestr\n elif input_key == 'dataset':\n func = reading_dataset_check\n elif input_key == 'block_name':\n func = reading_block_check\n elif input_key == 'logical_file_name':\n func = reading_lfn_check\n else:\n func = namestr\n try:\n func(input_data)\n except AssertionError as ae:\n serverLog = str(ae\n ) + ' key-value pair (%s, %s) cannot pass input checking' % (\n input_key, input_data)\n dbsExceptionHandler('dbsException-invalid-input2', message=\n 'Invalid Input Data %s...: Not Match Required Format' %\n input_data[:10], logger=log.error, serverError=serverLog)\n return input_data\n", "role": "user" }, { "content": "validateStringInput", "role": "assistant" } ]
def x(func): def x(self, *args, **kwds): gen = func(self, *args, **kwds) yield '[' firstItem = True for item in gen: if not firstItem: yield ',' else: firstItem = False yield cjson.encode(item) yield ']' return wrapper
jsonstreamer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(func):\n\n def x(self, *args, **kwds):\n gen = func(self, *args, **kwds)\n yield '['\n firstItem = True\n for item in gen:\n if not firstItem:\n yield ','\n else:\n firstItem = False\n yield cjson.encode(item)\n yield ']'\n return wrapper\n", "role": "user" }, { "content": "jsonstreamer", "role": "assistant" } ]
def x(self, dataset_access_type=''): if isinstance(dataset_access_type, basestring): try: dataset_access_type = str(dataset_access_type) except: dbsExceptionHandler('dbsException-invalid-input', 'dataset_access_type given is not valid : %s' % dataset_access_type) else: dbsExceptionHandler('dbsException-invalid-input', 'dataset_access_type given is not valid : %s' % dataset_access_type ) conn = self.dbi.connection() try: plist = self.datasetAccessType.execute(conn, dataset_access_type. upper()) result = [{}] if plist: t = [] for i in plist: for k, v in i.iteritems(): t.append(v) result[0]['dataset_access_type'] = t return result finally: if conn: conn.close()
listDatasetAccessTypes
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dataset_access_type=''):\n if isinstance(dataset_access_type, basestring):\n try:\n dataset_access_type = str(dataset_access_type)\n except:\n dbsExceptionHandler('dbsException-invalid-input', \n 'dataset_access_type given is not valid : %s' %\n dataset_access_type)\n else:\n dbsExceptionHandler('dbsException-invalid-input', \n 'dataset_access_type given is not valid : %s' % dataset_access_type\n )\n conn = self.dbi.connection()\n try:\n plist = self.datasetAccessType.execute(conn, dataset_access_type.\n upper())\n result = [{}]\n if plist:\n t = []\n for i in plist:\n for k, v in i.iteritems():\n t.append(v)\n result[0]['dataset_access_type'] = t\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listDatasetAccessTypes", "role": "assistant" } ]
def x(self): if request.path.startswith(url_for('static', filename='')): return ignored_extensions = 'ico', 'png', 'txt', 'xml' if request.path.rsplit('.', 1)[-1] in ignored_extensions: return ips = request.headers.getlist('X-Forwarded-For') if not ips: return ip = ips[0].strip() if ip[-1] == ',': ip = ip[:-1] ip = ip.rsplit(',', 1)[-1].strip() if self.matches_ip(ip): if self.logger is not None: self.logger.info('IPBlock: matched {}, {}'.format(ip, self. block_msg)) if self.blocking_enabled: return 'IP Blocked', 200
block_before
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if request.path.startswith(url_for('static', filename='')):\n return\n ignored_extensions = 'ico', 'png', 'txt', 'xml'\n if request.path.rsplit('.', 1)[-1] in ignored_extensions:\n return\n ips = request.headers.getlist('X-Forwarded-For')\n if not ips:\n return\n ip = ips[0].strip()\n if ip[-1] == ',':\n ip = ip[:-1]\n ip = ip.rsplit(',', 1)[-1].strip()\n if self.matches_ip(ip):\n if self.logger is not None:\n self.logger.info('IPBlock: matched {}, {}'.format(ip, self.\n block_msg))\n if self.blocking_enabled:\n return 'IP Blocked', 200\n", "role": "user" }, { "content": "block_before", "role": "assistant" } ]
def x(self, ip): if self.cache is not None: matches_ip = self.cache.get(ip) if matches_ip is not None: return matches_ip matches_ip = IPNetwork.matches_ip(ip, read_preference=self.read_preference) if self.cache is not None: self.cache[ip] = matches_ip return matches_ip
matches_ip
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, ip):\n if self.cache is not None:\n matches_ip = self.cache.get(ip)\n if matches_ip is not None:\n return matches_ip\n matches_ip = IPNetwork.matches_ip(ip, read_preference=self.read_preference)\n if self.cache is not None:\n self.cache[ip] = matches_ip\n return matches_ip\n", "role": "user" }, { "content": "matches_ip", "role": "assistant" } ]
def x(self, conn, logical_file_name, block_name, block_id, transaction=False): binds = {} sql = '' if logical_file_name: if isinstance(logical_file_name, basestring): wheresql = 'WHERE F.LOGICAL_FILE_NAME = :logical_file_name' binds = {'logical_file_name': logical_file_name} sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql) elif isinstance(logical_file_name, list): wheresql = ( 'WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)' ) lfn_generator, binds = create_token_generator(logical_file_name) sql = '{lfn_generator} {sql} {wheresql}'.format(lfn_generator= lfn_generator, sql=self.sql, wheresql=wheresql) elif block_name: joins = 'JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID'.format(owner =self.owner) wheresql = 'WHERE B.BLOCK_NAME = :block_name' binds = {'block_name': block_name} sql = '{sql} {joins} {wheresql}'.format(sql=self.sql, joins=joins, wheresql=wheresql) elif block_id: wheresql = 'WHERE F.BLOCK_ID = :block_id' binds = {'block_id': block_id} sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql) else: dbsExceptionHandler('dbsException-invalid-input', 'Logical_file_names is required for listChild dao.', self. logger.exception) cursors = self.dbi.processData(sql, binds, conn, transaction= transaction, returnCursor=True) result = [] for c in cursors: result.extend(self.formatCursor(c, size=100)) return result
execute
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, conn, logical_file_name, block_name, block_id, transaction=False):\n binds = {}\n sql = ''\n if logical_file_name:\n if isinstance(logical_file_name, basestring):\n wheresql = 'WHERE F.LOGICAL_FILE_NAME = :logical_file_name'\n binds = {'logical_file_name': logical_file_name}\n sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)\n elif isinstance(logical_file_name, list):\n wheresql = (\n 'WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)'\n )\n lfn_generator, binds = create_token_generator(logical_file_name)\n sql = '{lfn_generator} {sql} {wheresql}'.format(lfn_generator=\n lfn_generator, sql=self.sql, wheresql=wheresql)\n elif block_name:\n joins = 'JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID'.format(owner\n =self.owner)\n wheresql = 'WHERE B.BLOCK_NAME = :block_name'\n binds = {'block_name': block_name}\n sql = '{sql} {joins} {wheresql}'.format(sql=self.sql, joins=joins,\n wheresql=wheresql)\n elif block_id:\n wheresql = 'WHERE F.BLOCK_ID = :block_id'\n binds = {'block_id': block_id}\n sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)\n else:\n dbsExceptionHandler('dbsException-invalid-input',\n 'Logical_file_names is required for listChild dao.', self.\n logger.exception)\n cursors = self.dbi.processData(sql, binds, conn, transaction=\n transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result\n", "role": "user" }, { "content": "execute", "role": "assistant" } ]
def x(self, acq=''): try: acq = str(acq) except: dbsExceptionHandler('dbsException-invalid-input', 'acquistion_era_name given is not valid : %s' % acq) conn = self.dbi.connection() try: result = self.acqlst.execute(conn, acq) return result finally: if conn: conn.close()
listAcquisitionEras
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, acq=''):\n try:\n acq = str(acq)\n except:\n dbsExceptionHandler('dbsException-invalid-input', \n 'acquistion_era_name given is not valid : %s' % acq)\n conn = self.dbi.connection()\n try:\n result = self.acqlst.execute(conn, acq)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listAcquisitionEras", "role": "assistant" } ]
def x(self, acq=''): try: acq = str(acq) except: dbsExceptionHandler('dbsException-invalid-input', 'aquistion_era_name given is not valid : %s' % acq) conn = self.dbi.connection() try: result = self.acqlst_ci.execute(conn, acq) return result finally: if conn: conn.close()
listAcquisitionEras_CI
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, acq=''):\n try:\n acq = str(acq)\n except:\n dbsExceptionHandler('dbsException-invalid-input', \n 'aquistion_era_name given is not valid : %s' % acq)\n conn = self.dbi.connection()\n try:\n result = self.acqlst_ci.execute(conn, acq)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listAcquisitionEras_CI", "role": "assistant" } ]
def x(self, businput): conn = self.dbi.connection() tran = conn.begin() try: businput['acquisition_era_id'] = self.sm.increment(conn, 'SEQ_AQE', tran) businput['acquisition_era_name'] = businput['acquisition_era_name'] self.acqin.execute(conn, businput, tran) tran.commit() tran = None except KeyError as ke: dbsExceptionHandler('dbsException-invalid-input', 'Invalid input:' + ke.args[0]) except Exception as ex: if str(ex).lower().find('unique constraint') != -1 or str(ex).lower( ).find('duplicate') != -1: dbsExceptionHandler('dbsException-invalid-input2', 'Invalid input: acquisition_era_name already exists in DB', serverError='%s' % ex) else: raise finally: if tran: tran.rollback() if conn: conn.close()
insertAcquisitionEra
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, businput):\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n businput['acquisition_era_id'] = self.sm.increment(conn, 'SEQ_AQE',\n tran)\n businput['acquisition_era_name'] = businput['acquisition_era_name']\n self.acqin.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except KeyError as ke:\n dbsExceptionHandler('dbsException-invalid-input', 'Invalid input:' +\n ke.args[0])\n except Exception as ex:\n if str(ex).lower().find('unique constraint') != -1 or str(ex).lower(\n ).find('duplicate') != -1:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid input: acquisition_era_name already exists in DB',\n serverError='%s' % ex)\n else:\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "insertAcquisitionEra", "role": "assistant" } ]
def x(self, acquisition_era_name='', end_date=0): if acquisition_era_name == '' or end_date == 0: dbsExceptionHandler('dbsException-invalid-input', 'acquisition_era_name and end_date are required') conn = self.dbi.connection() tran = conn.begin() try: self.acqud.execute(conn, acquisition_era_name, end_date, tran) if tran: tran.commit() tran = None finally: if tran: tran.rollback() if conn: conn.close()
UpdateAcqEraEndDate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, acquisition_era_name='', end_date=0):\n if acquisition_era_name == '' or end_date == 0:\n dbsExceptionHandler('dbsException-invalid-input',\n 'acquisition_era_name and end_date are required')\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n self.acqud.execute(conn, acquisition_era_name, end_date, tran)\n if tran:\n tran.commit()\n tran = None\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "UpdateAcqEraEndDate", "role": "assistant" } ]
def x(self, migration_rqst): conn = self.dbi.connection() try: tran = conn.begin() self.mgrremove.execute(conn, migration_rqst) tran.commit() except dbsException as he: if conn: conn.close() raise except Exception as ex: if conn: conn.close() raise if conn: conn.close()
removeMigrationRequest
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, migration_rqst):\n conn = self.dbi.connection()\n try:\n tran = conn.begin()\n self.mgrremove.execute(conn, migration_rqst)\n tran.commit()\n except dbsException as he:\n if conn:\n conn.close()\n raise\n except Exception as ex:\n if conn:\n conn.close()\n raise\n if conn:\n conn.close()\n", "role": "user" }, { "content": "removeMigrationRequest", "role": "assistant" } ]
def x(self, migration_request_id='', block_name='', dataset='', user='', oldest=False): conn = self.dbi.connection() migratee = '' try: if block_name: migratee = block_name elif dataset: migratee = dataset result = self.mgrlist.execute(conn, migration_url='', migration_input=migratee, create_by=user, migration_request_id= migration_request_id, oldest=oldest) return result finally: if conn: conn.close()
listMigrationRequests
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, migration_request_id='', block_name='', dataset='', user='',\n oldest=False):\n conn = self.dbi.connection()\n migratee = ''\n try:\n if block_name:\n migratee = block_name\n elif dataset:\n migratee = dataset\n result = self.mgrlist.execute(conn, migration_url='',\n migration_input=migratee, create_by=user, migration_request_id=\n migration_request_id, oldest=oldest)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listMigrationRequests", "role": "assistant" } ]
def x(self, migration_request_id=''): conn = self.dbi.connection() try: return self.mgrblklist.execute(conn, migration_request_id= migration_request_id) finally: if conn: conn.close()
listMigrationBlocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, migration_request_id=''):\n conn = self.dbi.connection()\n try:\n return self.mgrblklist.execute(conn, migration_request_id=\n migration_request_id)\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listMigrationBlocks", "role": "assistant" } ]
def x(self, migration_status, migration_request_id): conn = self.dbi.connection() tran = conn.begin() try: upst = dict(migration_status=migration_status, migration_request_id =migration_request_id, last_modification_date=dbsUtils().getTime()) self.mgrRqUp.execute(conn, upst) except: if tran: tran.rollback() raise else: if tran: tran.commit() finally: if conn: conn.close()
updateMigrationRequestStatus
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, migration_status, migration_request_id):\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n upst = dict(migration_status=migration_status, migration_request_id\n =migration_request_id, last_modification_date=dbsUtils().getTime())\n self.mgrRqUp.execute(conn, upst)\n except:\n if tran:\n tran.rollback()\n raise\n else:\n if tran:\n tran.commit()\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "updateMigrationRequestStatus", "role": "assistant" } ]
def x(self, migration_status=0, migration_block=None, migration_request=None): conn = self.dbi.connection() tran = conn.begin() try: if migration_block: upst = dict(migration_status=migration_status, migration_block_id=migration_block, last_modification_date= dbsUtils().getTime()) elif migration_request: upst = dict(migration_status=migration_status, migration_request_id=migration_request, last_modification_date=dbsUtils().getTime()) self.mgrup.execute(conn, upst) except: if tran: tran.rollback() raise else: if tran: tran.commit() finally: if conn: conn.close()
updateMigrationBlockStatus
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, migration_status=0, migration_block=None, migration_request=None):\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n if migration_block:\n upst = dict(migration_status=migration_status,\n migration_block_id=migration_block, last_modification_date=\n dbsUtils().getTime())\n elif migration_request:\n upst = dict(migration_status=migration_status,\n migration_request_id=migration_request,\n last_modification_date=dbsUtils().getTime())\n self.mgrup.execute(conn, upst)\n except:\n if tran:\n tran.rollback()\n raise\n else:\n if tran:\n tran.commit()\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "updateMigrationBlockStatus", "role": "assistant" } ]
def x(self, url, dataset): params = {'dataset': dataset} return cjson.decode(self.callDBSService(url, 'datasetparents', params, {}))
getSrcDatasetParents
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, url, dataset):\n params = {'dataset': dataset}\n return cjson.decode(self.callDBSService(url, 'datasetparents', params, {}))\n", "role": "user" }, { "content": "getSrcDatasetParents", "role": "assistant" } ]
def x(self, url, block): params = {'block_name': block} return cjson.decode(self.callDBSService(url, 'blockparents', params, {}))
getSrcBlockParents
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, url, block):\n params = {'block_name': block}\n return cjson.decode(self.callDBSService(url, 'blockparents', params, {}))\n", "role": "user" }, { "content": "getSrcBlockParents", "role": "assistant" } ]
def x(self, url, dataset='', block=''): if block: params = {'block_name': block, 'open_for_writing': 0} elif dataset: params = {'dataset': dataset, 'open_for_writing': 0} else: m = ( 'DBSMigration: Invalid input. Either block or dataset name has to be provided' ) e = ( 'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided' ) dbsExceptionHandler('dbsException-invalid-input2', m, self.logger. exception, e) return cjson.decode(self.callDBSService(url, 'blocks', params, {}))
getSrcBlocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, url, dataset='', block=''):\n if block:\n params = {'block_name': block, 'open_for_writing': 0}\n elif dataset:\n params = {'dataset': dataset, 'open_for_writing': 0}\n else:\n m = (\n 'DBSMigration: Invalid input. Either block or dataset name has to be provided'\n )\n e = (\n 'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided'\n )\n dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.\n exception, e)\n return cjson.decode(self.callDBSService(url, 'blocks', params, {}))\n", "role": "user" }, { "content": "getSrcBlocks", "role": "assistant" } ]
def x(self, value): if value.lower() not in self._BOOLEANS: raise ValueError('Not a boolean: %s' % value) return self._BOOLEANS[value.lower()]
cast_boolean
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n if value.lower() not in self._BOOLEANS:\n raise ValueError('Not a boolean: %s' % value)\n return self._BOOLEANS[value.lower()]\n", "role": "user" }, { "content": "cast_boolean", "role": "assistant" } ]
def x(self, option, default=undefined, cast=undefined): if option in self.repository: value = self.repository.get(option) else: value = default if isinstance(value, Undefined): raise UndefinedValueError( '%s option not found and default value was not defined.' % option) if isinstance(cast, Undefined): cast = lambda v: v elif cast is bool: cast = self._cast_boolean return cast(value)
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, option, default=undefined, cast=undefined):\n if option in self.repository:\n value = self.repository.get(option)\n else:\n value = default\n if isinstance(value, Undefined):\n raise UndefinedValueError(\n '%s option not found and default value was not defined.' % option)\n if isinstance(cast, Undefined):\n cast = lambda v: v\n elif cast is bool:\n cast = self._cast_boolean\n return cast(value)\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(requirements_file): if os.path.exists(requirements_file): return open(requirements_file, 'r').read().splitlines() else: print('ERROR: requirements file ' + requirements_file + ' not found.') sys.exit(1)
parse_requirements
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(requirements_file):\n if os.path.exists(requirements_file):\n return open(requirements_file, 'r').read().splitlines()\n else:\n print('ERROR: requirements file ' + requirements_file + ' not found.')\n sys.exit(1)\n", "role": "user" }, { "content": "parse_requirements", "role": "assistant" } ]
def x(self, logical_file_name='', block_name='', run_num=-1, validFileOnly= 0, input_body=-1): if (logical_file_name == '' or '*' in logical_file_name or '%' in logical_file_name) and (block_name == '' or '*' in block_name or '%' in block_name) and input_body == -1: dbsExceptionHandler('dbsException-invalid-input', 'Fully specified logical_file_name or block_name is required if GET is called. No wildcards are allowed.' , self.logger.exception, 'Fully specified logical_file_name or block_name is required if GET is called. No wildcards are allowed.' ) elif input_body != -1: try: logical_file_name = input_body['logical_file_name'] run_num = input_body.get('run_num', -1) validFileOnly = input_body.get('validFileOnly', 0) block_name = '' except cjson.DecodeError as de: msg = ( 'business/listFileLumis requires at least a list of logical_file_name. %s' % de) dbsExceptionHandler('dbsException-invalid-input2', 'Invalid input', self.logger.exception, msg) elif input_body != -1 and (logical_file_name is not None or block_name is not None): dbsExceptionHandler('dbsException-invalid-input', 'listFileLumis may have input in the command or in the payload, not mixed.' , self.logger.exception, 'listFileLumis may have input in the command or in the payload, not mixed.' ) with self.dbi.connection() as conn: for item in self.filelumilist.execute(conn, logical_file_name, block_name, run_num, validFileOnly=validFileOnly): yield item
listFileLumis
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, logical_file_name='', block_name='', run_num=-1, validFileOnly=\n 0, input_body=-1):\n if (logical_file_name == '' or '*' in logical_file_name or '%' in\n logical_file_name) and (block_name == '' or '*' in block_name or \n '%' in block_name) and input_body == -1:\n dbsExceptionHandler('dbsException-invalid-input',\n 'Fully specified logical_file_name or block_name is required if GET is called. No wildcards are allowed.'\n , self.logger.exception,\n 'Fully specified logical_file_name or block_name is required if GET is called. No wildcards are allowed.'\n )\n elif input_body != -1:\n try:\n logical_file_name = input_body['logical_file_name']\n run_num = input_body.get('run_num', -1)\n validFileOnly = input_body.get('validFileOnly', 0)\n block_name = ''\n except cjson.DecodeError as de:\n msg = (\n 'business/listFileLumis requires at least a list of logical_file_name. %s'\n % de)\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid input', self.logger.exception, msg)\n elif input_body != -1 and (logical_file_name is not None or block_name\n is not None):\n dbsExceptionHandler('dbsException-invalid-input',\n 'listFileLumis may have input in the command or in the payload, not mixed.'\n , self.logger.exception,\n 'listFileLumis may have input in the command or in the payload, not mixed.'\n )\n with self.dbi.connection() as conn:\n for item in self.filelumilist.execute(conn, logical_file_name,\n block_name, run_num, validFileOnly=validFileOnly):\n yield item\n", "role": "user" }, { "content": "listFileLumis", "role": "assistant" } ]
def x(self, block_name='', dataset='', run_num=-1, validFileOnly=0, sumOverLumi=0): if not block_name and not dataset: msg = 'Block_name or dataset is required for listFileSummary API' dbsExceptionHandler('dbsException-invalid-input', msg, self.logger. exception) if ('%' in block_name or '*' in block_name or '%' in dataset or '*' in dataset): msg = ( 'No wildcard is allowed in block_name or dataset for filesummaries API' ) dbsExceptionHandler('dbsException-invalid-input', msg, self.logger. exception) with self.dbi.connection() as conn: for item in self.filesummarylist.execute(conn, block_name, dataset, run_num, validFileOnly=validFileOnly, sumOverLumi=sumOverLumi): if item['num_file'] == 0 and item['num_block'] == 0 and item[ 'num_event'] == 0 and item['file_size'] == 0: pass else: yield item
listFileSummary
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_name='', dataset='', run_num=-1, validFileOnly=0,\n sumOverLumi=0):\n if not block_name and not dataset:\n msg = 'Block_name or dataset is required for listFileSummary API'\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.\n exception)\n if ('%' in block_name or '*' in block_name or '%' in dataset or '*' in\n dataset):\n msg = (\n 'No wildcard is allowed in block_name or dataset for filesummaries API'\n )\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.\n exception)\n with self.dbi.connection() as conn:\n for item in self.filesummarylist.execute(conn, block_name, dataset,\n run_num, validFileOnly=validFileOnly, sumOverLumi=sumOverLumi):\n if item['num_file'] == 0 and item['num_block'] == 0 and item[\n 'num_event'] == 0 and item['file_size'] == 0:\n pass\n else:\n yield item\n", "role": "user" }, { "content": "listFileSummary", "role": "assistant" } ]
def x(self, logical_file_name='', block_id=0, block_name=''): if not logical_file_name and not block_name and not block_id: dbsExceptionHandler('dbsException-invalid-input', 'Logical_file_name, block_id or block_name is required for fileparents api' , self.logger.exception) with self.dbi.connection() as conn: sqlresult = self.fileparentlist.execute(conn, logical_file_name, block_id, block_name) d = {} for i in sqlresult: k = i['this_logical_file_name'] v = i['parent_logical_file_name'] d.setdefault(k, []).append(v) for k, v in d.iteritems(): yield {'logical_file_name': k, 'parent_logical_file_name': v} del d
listFileParents
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, logical_file_name='', block_id=0, block_name=''):\n if not logical_file_name and not block_name and not block_id:\n dbsExceptionHandler('dbsException-invalid-input',\n 'Logical_file_name, block_id or block_name is required for fileparents api'\n , self.logger.exception)\n with self.dbi.connection() as conn:\n sqlresult = self.fileparentlist.execute(conn, logical_file_name,\n block_id, block_name)\n d = {}\n for i in sqlresult:\n k = i['this_logical_file_name']\n v = i['parent_logical_file_name']\n d.setdefault(k, []).append(v)\n for k, v in d.iteritems():\n yield {'logical_file_name': k, 'parent_logical_file_name': v}\n del d\n", "role": "user" }, { "content": "listFileParents", "role": "assistant" } ]
def x(self, block_name='', logical_file_name=[]): if not block_name: dbsExceptionHandler('dbsException-invalid-input', 'Child block_name is required for fileparents/listFileParentsByLumi api' , self.logger.exception) with self.dbi.connection() as conn: sqlresult = self.fileparentbylumi.execute(conn, block_name, logical_file_name) return [{'child_parent_id_list': sqlresult}]
listFileParentsByLumi
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_name='', logical_file_name=[]):\n if not block_name:\n dbsExceptionHandler('dbsException-invalid-input',\n 'Child block_name is required for fileparents/listFileParentsByLumi api'\n , self.logger.exception)\n with self.dbi.connection() as conn:\n sqlresult = self.fileparentbylumi.execute(conn, block_name,\n logical_file_name)\n return [{'child_parent_id_list': sqlresult}]\n", "role": "user" }, { "content": "listFileParentsByLumi", "role": "assistant" } ]
def x(self, logical_file_name='', block_name='', block_id=0): conn = self.dbi.connection() try: if not logical_file_name and not block_name and not block_id: dbsExceptionHandler('dbsException-invalid-input', 'Logical_file_name, block_id or block_name is required for listFileChildren api' ) sqlresult = self.filechildlist.execute(conn, logical_file_name, block_name, block_id) d = {} result = [] for i in range(len(sqlresult)): k = sqlresult[i]['logical_file_name'] v = sqlresult[i]['child_logical_file_name'] if k in d: d[k].append(v) else: d[k] = [v] for k, v in d.iteritems(): r = {'logical_file_name': k, 'child_logical_file_name': v} result.append(r) return result finally: if conn: conn.close()
listFileChildren
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, logical_file_name='', block_name='', block_id=0):\n conn = self.dbi.connection()\n try:\n if not logical_file_name and not block_name and not block_id:\n dbsExceptionHandler('dbsException-invalid-input',\n 'Logical_file_name, block_id or block_name is required for listFileChildren api'\n )\n sqlresult = self.filechildlist.execute(conn, logical_file_name,\n block_name, block_id)\n d = {}\n result = []\n for i in range(len(sqlresult)):\n k = sqlresult[i]['logical_file_name']\n v = sqlresult[i]['child_logical_file_name']\n if k in d:\n d[k].append(v)\n else:\n d[k] = [v]\n for k, v in d.iteritems():\n r = {'logical_file_name': k, 'child_logical_file_name': v}\n result.append(r)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listFileChildren", "role": "assistant" } ]
def x(self, logical_file_name, is_file_valid, lost, dataset): conn = self.dbi.connection() trans = conn.begin() try: self.updatestatus.execute(conn, logical_file_name, is_file_valid, lost, dataset, trans) trans.commit() trans = None except Exception as ex: if trans: trans.rollback() trans = None raise ex finally: if trans: trans.rollback() if conn: conn.close()
updateStatus
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, logical_file_name, is_file_valid, lost, dataset):\n conn = self.dbi.connection()\n trans = conn.begin()\n try:\n self.updatestatus.execute(conn, logical_file_name, is_file_valid,\n lost, dataset, trans)\n trans.commit()\n trans = None\n except Exception as ex:\n if trans:\n trans.rollback()\n trans = None\n raise ex\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "updateStatus", "role": "assistant" } ]
def x(self, businput): if 'block_name' not in businput.keys( ) or 'child_parent_id_list' not in businput.keys() or not businput[ 'child_parent_id_list'] or not businput['block_name']: dbsExceptionHandler('dbsException-invalid-input2', 'DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs' , self.logger.exception, 'DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs' ) tran = None conn = None try: conn = self.dbi.connection() tran = conn.begin() self.logger.info('Insert File parentage mapping') self.fparentin2.execute(conn, businput, tran) self.logger.info('Insert block parentage mapping') self.blkparentin3.execute(conn, businput, tran) if tran: tran.commit() if conn: conn.close() except SQLAlchemyIntegrityError as ex: if tran: tran.rollback() if conn: conn.close() if str(ex).find('ORA-01400') > -1: dbsExceptionHandler('dbsException-missing-data', 'Missing data when insert filei/block parent. ', self. logger.exception, 'Missing data when insert file/block parent. ' + str(ex)) else: dbsExceptionHandler('dbsException-invalid-input2', 'Invalid data when insert file/block parent. ', self. logger.exception, 'Invalid data when insert file/block parent. ' + str(ex)) finally: if tran: tran.rollback() if conn: conn.close()
insertFileParents
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, businput):\n if 'block_name' not in businput.keys(\n ) or 'child_parent_id_list' not in businput.keys() or not businput[\n 'child_parent_id_list'] or not businput['block_name']:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs'\n , self.logger.exception,\n 'DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs'\n )\n tran = None\n conn = None\n try:\n conn = self.dbi.connection()\n tran = conn.begin()\n self.logger.info('Insert File parentage mapping')\n self.fparentin2.execute(conn, businput, tran)\n self.logger.info('Insert block parentage mapping')\n self.blkparentin3.execute(conn, businput, tran)\n if tran:\n tran.commit()\n if conn:\n conn.close()\n except SQLAlchemyIntegrityError as ex:\n if tran:\n tran.rollback()\n if conn:\n conn.close()\n if str(ex).find('ORA-01400') > -1:\n dbsExceptionHandler('dbsException-missing-data',\n 'Missing data when insert filei/block parent. ', self.\n logger.exception, \n 'Missing data when insert file/block parent. ' + str(ex))\n else:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert file/block parent. ', self.\n logger.exception, \n 'Invalid data when insert file/block parent. ' + str(ex))\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "insertFileParents", "role": "assistant" } ]
def x(self, conn, seqName, transaction=False, incCount=1): sql = 'select %s%s.nextval as val from dual' % (self.owner, seqName) result = self.dbi.processData(sql, conn=conn, transaction=transaction) resultlist = self.formatDict(result) return resultlist[0]['val']
increment
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, conn, seqName, transaction=False, incCount=1):\n sql = 'select %s%s.nextval as val from dual' % (self.owner, seqName)\n result = self.dbi.processData(sql, conn=conn, transaction=transaction)\n resultlist = self.formatDict(result)\n return resultlist[0]['val']\n", "role": "user" }, { "content": "increment", "role": "assistant" } ]
def x(self, release_version='', dataset='', logical_file_name=''): if dataset and ('%' in dataset or '*' in dataset): dbsExceptionHandler('dbsException-invalid-input', ' DBSReleaseVersion/listReleaseVersions. No wildcards are' + """ allowed in dataset. .""") if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name): dbsExceptionHandler('dbsException-invalid-input', ' DBSReleaseVersion/listReleaseVersions. No wildcards are' + """ allowed in logical_file_name. .""") conn = self.dbi.connection() try: plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name) result = [{}] if plist: t = [] for i in plist: for k, v in i.iteritems(): t.append(v) result[0]['release_version'] = t return result finally: if conn: conn.close()
listReleaseVersions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, release_version='', dataset='', logical_file_name=''):\n if dataset and ('%' in dataset or '*' in dataset):\n dbsExceptionHandler('dbsException-invalid-input', \n ' DBSReleaseVersion/listReleaseVersions. No wildcards are' +\n \"\"\" allowed in dataset.\n.\"\"\")\n if logical_file_name and ('%' in logical_file_name or '*' in\n logical_file_name):\n dbsExceptionHandler('dbsException-invalid-input', \n ' DBSReleaseVersion/listReleaseVersions. No wildcards are' +\n \"\"\" allowed in logical_file_name.\n.\"\"\")\n conn = self.dbi.connection()\n try:\n plist = self.releaseVersion.execute(conn, release_version.upper(),\n dataset, logical_file_name)\n result = [{}]\n if plist:\n t = []\n for i in plist:\n for k, v in i.iteritems():\n t.append(v)\n result[0]['release_version'] = t\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listReleaseVersions", "role": "assistant" } ]
def x(self): if 'X509_CERT_DIR' in os.environ: self._ca_path = os.environ['X509_CERT_DIR'] elif os.path.exists('/etc/grid-security/certificates'): self._ca_path = '/etc/grid-security/certificates' else: raise ClientAuthException('Could not find a valid CA path')
search_ca_path
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if 'X509_CERT_DIR' in os.environ:\n self._ca_path = os.environ['X509_CERT_DIR']\n elif os.path.exists('/etc/grid-security/certificates'):\n self._ca_path = '/etc/grid-security/certificates'\n else:\n raise ClientAuthException('Could not find a valid CA path')\n", "role": "user" }, { "content": "search_ca_path", "role": "assistant" } ]
def x(self): if 'X509_HOST_CERT' in os.environ: self._ssl_cert = os.environ['X509_HOST_CERT'] self._ssl_key = os.environ['X509_HOST_KEY'] elif 'X509_USER_PROXY' in os.environ and os.path.exists(os.environ[ 'X509_USER_PROXY']): self._ssl_cert = os.environ['X509_USER_PROXY'] self._ssl_key = self._ssl_cert elif 'X509_USER_CERT' in os.environ and 'X509_USER_KEY' in os.environ: self._ssl_cert = os.environ['X509_USER_CERT'] self._ssl_key = os.environ['X509_USER_KEY'] elif os.path.exists('/tmp/x509up_u%s' % str(os.getuid())): self._ssl_cert = '/tmp/x509up_u%s' % str(os.getuid()) self._ssl_key = self._ssl_cert elif sys.stdin.isatty(): home_dir = os.environ['HOME'] user_cert = os.path.join(home_dir, '.globus/usercert.pem') user_key = os.path.join(home_dir, '.globus/userkey.pem') if os.path.exists(user_cert): self._ssl_cert = user_cert if os.path.exists(user_key): self._ssl_key = user_key self._ssl_key_pass = getpass('Password for %s: ' % self. _ssl_key) else: self._ssl_key = self._ssl_cert else: raise ClientAuthException('No valid X509 cert-key-pair found.') else: raise ClientAuthException('No valid X509 cert-key-pair found.')
search_cert_key
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if 'X509_HOST_CERT' in os.environ:\n self._ssl_cert = os.environ['X509_HOST_CERT']\n self._ssl_key = os.environ['X509_HOST_KEY']\n elif 'X509_USER_PROXY' in os.environ and os.path.exists(os.environ[\n 'X509_USER_PROXY']):\n self._ssl_cert = os.environ['X509_USER_PROXY']\n self._ssl_key = self._ssl_cert\n elif 'X509_USER_CERT' in os.environ and 'X509_USER_KEY' in os.environ:\n self._ssl_cert = os.environ['X509_USER_CERT']\n self._ssl_key = os.environ['X509_USER_KEY']\n elif os.path.exists('/tmp/x509up_u%s' % str(os.getuid())):\n self._ssl_cert = '/tmp/x509up_u%s' % str(os.getuid())\n self._ssl_key = self._ssl_cert\n elif sys.stdin.isatty():\n home_dir = os.environ['HOME']\n user_cert = os.path.join(home_dir, '.globus/usercert.pem')\n user_key = os.path.join(home_dir, '.globus/userkey.pem')\n if os.path.exists(user_cert):\n self._ssl_cert = user_cert\n if os.path.exists(user_key):\n self._ssl_key = user_key\n self._ssl_key_pass = getpass('Password for %s: ' % self.\n _ssl_key)\n else:\n self._ssl_key = self._ssl_cert\n else:\n raise ClientAuthException('No valid X509 cert-key-pair found.')\n else:\n raise ClientAuthException('No valid X509 cert-key-pair found.')\n", "role": "user" }, { "content": "search_cert_key", "role": "assistant" } ]
def x(user, role, group, site): if not role: return True for k, v in user['roles'].iteritems(): for g in v['group']: if k in role.get(g, '').split(':'): return True return False
authInsert
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(user, role, group, site):\n if not role:\n return True\n for k, v in user['roles'].iteritems():\n for g in v['group']:\n if k in role.get(g, '').split(':'):\n return True\n return False\n", "role": "user" }, { "content": "authInsert", "role": "assistant" } ]
def x(self): body = request.body.read() indata = cjson.decode(body) try: indata = validateJSONInputNoCopy('migration_rqst', indata) return self.dbsMigrate.removeMigrationRequest(indata) except dbsException as he: dbsExceptionHandler(he.eCode, he.message, self.logger.exception, he .message) except Exception as e: if e.code == 400: dbsExceptionHandler('dbsException-invalid-input2', str(e), self .logger.exception, str(e)) else: dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger. exception, str(e))
remove
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n body = request.body.read()\n indata = cjson.decode(body)\n try:\n indata = validateJSONInputNoCopy('migration_rqst', indata)\n return self.dbsMigrate.removeMigrationRequest(indata)\n except dbsException as he:\n dbsExceptionHandler(he.eCode, he.message, self.logger.exception, he\n .message)\n except Exception as e:\n if e.code == 400:\n dbsExceptionHandler('dbsException-invalid-input2', str(e), self\n .logger.exception, str(e))\n else:\n dbsExceptionHandler('dbsException-server-error',\n dbsExceptionCode['dbsException-server-error'], self.logger.\n exception, str(e))\n", "role": "user" }, { "content": "remove", "role": "assistant" } ]
def x(self, conn, logical_file_name='', block_id=0, block_name='', transaction=False): sql = '' binds = {} if logical_file_name: if isinstance(logical_file_name, basestring): wheresql = 'WHERE F.LOGICAL_FILE_NAME = :logical_file_name' binds = {'logical_file_name': logical_file_name} sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql) elif isinstance(logical_file_name, list): wheresql = ( 'WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)' ) lfn_generator, binds = create_token_generator(logical_file_name) sql = '{lfn_generator} {sql} {wheresql}'.format(lfn_generator= lfn_generator, sql=self.sql, wheresql=wheresql) elif block_id != 0: wheresql = 'WHERE F.BLOCK_ID = :block_id' binds = {'block_id': block_id} sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql) elif block_name: joins = 'JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID'.format(owner =self.owner) wheresql = 'WHERE B.BLOCK_NAME= :block_name' binds = {'block_name': block_name} sql = '{sql} {joins} {wheresql}'.format(sql=self.sql, joins=joins, wheresql=wheresql) else: return cursors = self.dbi.processData(sql, binds, conn, transaction= transaction, returnCursor=True) for i in cursors: d = self.formatCursor(i, size=100) if isinstance(d, list) or isinstance(d, GeneratorType): for elem in d: yield elem elif d: yield d
execute
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, conn, logical_file_name='', block_id=0, block_name='',\n transaction=False):\n sql = ''\n binds = {}\n if logical_file_name:\n if isinstance(logical_file_name, basestring):\n wheresql = 'WHERE F.LOGICAL_FILE_NAME = :logical_file_name'\n binds = {'logical_file_name': logical_file_name}\n sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)\n elif isinstance(logical_file_name, list):\n wheresql = (\n 'WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)'\n )\n lfn_generator, binds = create_token_generator(logical_file_name)\n sql = '{lfn_generator} {sql} {wheresql}'.format(lfn_generator=\n lfn_generator, sql=self.sql, wheresql=wheresql)\n elif block_id != 0:\n wheresql = 'WHERE F.BLOCK_ID = :block_id'\n binds = {'block_id': block_id}\n sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)\n elif block_name:\n joins = 'JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID'.format(owner\n =self.owner)\n wheresql = 'WHERE B.BLOCK_NAME= :block_name'\n binds = {'block_name': block_name}\n sql = '{sql} {joins} {wheresql}'.format(sql=self.sql, joins=joins,\n wheresql=wheresql)\n else:\n return\n cursors = self.dbi.processData(sql, binds, conn, transaction=\n transaction, returnCursor=True)\n for i in cursors:\n d = self.formatCursor(i, size=100)\n if isinstance(d, list) or isinstance(d, GeneratorType):\n for elem in d:\n yield elem\n elif d:\n yield d\n", "role": "user" }, { "content": "execute", "role": "assistant" } ]
def x(self, dataset=''): if dataset == '': dbsExceptionHandler('dbsException-invalid-input', 'DBSDataset/listDatasetParents. Child Dataset name is required.') conn = self.dbi.connection() try: result = self.datasetparentlist.execute(conn, dataset) return result finally: if conn: conn.close()
listDatasetParents
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dataset=''):\n if dataset == '':\n dbsExceptionHandler('dbsException-invalid-input',\n 'DBSDataset/listDatasetParents. Child Dataset name is required.')\n conn = self.dbi.connection()\n try:\n result = self.datasetparentlist.execute(conn, dataset)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listDatasetParents", "role": "assistant" } ]
def x(self, dataset): if dataset == '': dbsExceptionHandler('dbsException-invalid-input', 'DBSDataset/listDatasetChildren. Parent Dataset name is required.') conn = self.dbi.connection() try: result = self.datasetchildlist.execute(conn, dataset) return result finally: if conn: conn.close()
listDatasetChildren
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dataset):\n if dataset == '':\n dbsExceptionHandler('dbsException-invalid-input',\n 'DBSDataset/listDatasetChildren. Parent Dataset name is required.')\n conn = self.dbi.connection()\n try:\n result = self.datasetchildlist.execute(conn, dataset)\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listDatasetChildren", "role": "assistant" } ]
def x(self, dataset, is_dataset_valid): if dataset == '': dbsExceptionHandler('dbsException-invalid-input', 'DBSDataset/updateStatus. dataset is required.') conn = self.dbi.connection() trans = conn.begin() try: self.updatestatus.execute(conn, dataset, is_dataset_valid, trans) trans.commit() trans = None except Exception as ex: if trans: trans.rollback() raise ex finally: if trans: trans.rollback() if conn: conn.close()
updateStatus
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dataset, is_dataset_valid):\n if dataset == '':\n dbsExceptionHandler('dbsException-invalid-input',\n 'DBSDataset/updateStatus. dataset is required.')\n conn = self.dbi.connection()\n trans = conn.begin()\n try:\n self.updatestatus.execute(conn, dataset, is_dataset_valid, trans)\n trans.commit()\n trans = None\n except Exception as ex:\n if trans:\n trans.rollback()\n raise ex\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "updateStatus", "role": "assistant" } ]
def x(self, dataset, dataset_access_type): if dataset == '': dbsExceptionHandler('dbsException-invalid-input', 'DBSDataset/updateType. dataset is required.') conn = self.dbi.connection() trans = conn.begin() try: self.updatetype.execute(conn, dataset, dataset_access_type.upper(), trans) trans.commit() trans = None except SQLAlchemyDatabaseError as ex: if str(ex).find('ORA-01407') != -1: dbsExceptionHandler('dbsException-invalid-input2', 'Invalid Input', None, 'DBSDataset/updateType. A Valid dataset_access_type is required.' ) finally: if trans: trans.rollback() if conn: conn.close()
updateType
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dataset, dataset_access_type):\n if dataset == '':\n dbsExceptionHandler('dbsException-invalid-input',\n 'DBSDataset/updateType. dataset is required.')\n conn = self.dbi.connection()\n trans = conn.begin()\n try:\n self.updatetype.execute(conn, dataset, dataset_access_type.upper(),\n trans)\n trans.commit()\n trans = None\n except SQLAlchemyDatabaseError as ex:\n if str(ex).find('ORA-01407') != -1:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid Input', None,\n 'DBSDataset/updateType. A Valid dataset_access_type is required.'\n )\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()\n", "role": "user" }, { "content": "updateType", "role": "assistant" } ]
def x(self, conn, block_name, origin_site_name, transaction=False): if not conn: dbsExceptionHandler('dbsException-failed-connect2host', 'Oracle/Block/UpdateStatus. Expects db connection from upper layer.' , self.logger.exception) binds = {'block_name': block_name, 'origin_site_name': origin_site_name, 'mtime': dbsUtils().getTime(), 'myuser': dbsUtils().getCreateBy()} self.dbi.processData(self.sql, binds, conn, transaction)
execute
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, conn, block_name, origin_site_name, transaction=False):\n if not conn:\n dbsExceptionHandler('dbsException-failed-connect2host',\n 'Oracle/Block/UpdateStatus. Expects db connection from upper layer.'\n , self.logger.exception)\n binds = {'block_name': block_name, 'origin_site_name': origin_site_name,\n 'mtime': dbsUtils().getTime(), 'myuser': dbsUtils().getCreateBy()}\n self.dbi.processData(self.sql, binds, conn, transaction)\n", "role": "user" }, { "content": "execute", "role": "assistant" } ]
def x(self, run_num=-1, logical_file_name='', block_name='', dataset=''): if '%' in logical_file_name or '%' in block_name or '%' in dataset: dbsExceptionHandler('dbsException-invalid-input', """ DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset. .""" ) conn = self.dbi.connection() tran = False try: ret = self.runlist.execute(conn, run_num, logical_file_name, block_name, dataset, tran) result = [] rnum = [] for i in ret: rnum.append(i['run_num']) result.append({'run_num': rnum}) return result finally: if conn: conn.close()
listRuns
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, run_num=-1, logical_file_name='', block_name='', dataset=''):\n if '%' in logical_file_name or '%' in block_name or '%' in dataset:\n dbsExceptionHandler('dbsException-invalid-input',\n \"\"\" DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset.\n.\"\"\"\n )\n conn = self.dbi.connection()\n tran = False\n try:\n ret = self.runlist.execute(conn, run_num, logical_file_name,\n block_name, dataset, tran)\n result = []\n rnum = []\n for i in ret:\n rnum.append(i['run_num'])\n result.append({'run_num': rnum})\n return result\n finally:\n if conn:\n conn.close()\n", "role": "user" }, { "content": "listRuns", "role": "assistant" } ]
def x(self): try: body = request.body.read() indata = cjson.decode(body) indata = validateJSONInputNoCopy('primds', indata) indata.update({'creation_date': dbsUtils().getTime(), 'create_by': dbsUtils().getCreateBy()}) self.dbsPrimaryDataset.insertPrimaryDataset(indata) except cjson.DecodeError as dc: dbsExceptionHandler('dbsException-invalid-input2', 'Wrong format/data from insert PrimaryDataset input', self. logger.exception, str(dc)) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de .message) except HTTPError as he: raise he except Exception as ex: sError = ( 'DBSWriterModel/insertPrimaryDataset. %s\n Exception trace: \n %s' % (ex, traceback.format_exc())) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[ 'dbsException-server-error'], self.logger.exception, sError)
insertPrimaryDataset
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n body = request.body.read()\n indata = cjson.decode(body)\n indata = validateJSONInputNoCopy('primds', indata)\n indata.update({'creation_date': dbsUtils().getTime(), 'create_by':\n dbsUtils().getCreateBy()})\n self.dbsPrimaryDataset.insertPrimaryDataset(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Wrong format/data from insert PrimaryDataset input', self.\n logger.exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de\n .message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = (\n 'DBSWriterModel/insertPrimaryDataset. %s\\n Exception trace: \\n %s'\n % (ex, traceback.format_exc()))\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[\n 'dbsException-server-error'], self.logger.exception, sError)\n", "role": "user" }, { "content": "insertPrimaryDataset", "role": "assistant" } ]
def x(self, acquisition_era_name='', end_date=0): try: self.dbsAcqEra.UpdateAcqEraEndDate(acquisition_era_name, end_date) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de .message) except HTTPError as he: raise he except Exception as ex: sError = ( 'DBSWriterModel/update.AcqEraEndDate %s\n. Exception trace: \n %s' % (ex, traceback.format_exc())) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[ 'dbsException-server-error'], self.logger.exception, sError)
updateAcqEraEndDate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, acquisition_era_name='', end_date=0):\n try:\n self.dbsAcqEra.UpdateAcqEraEndDate(acquisition_era_name, end_date)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de\n .message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = (\n 'DBSWriterModel/update.AcqEraEndDate %s\\n. Exception trace: \\n %s'\n % (ex, traceback.format_exc()))\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[\n 'dbsException-server-error'], self.logger.exception, sError)\n", "role": "user" }, { "content": "updateAcqEraEndDate", "role": "assistant" } ]
def x(self): try: body = request.body.read() indata = cjson.decode(body) if indata.get('file_parent_list', []) and indata.get( 'dataset_parent_list', []): dbsExceptionHandler('dbsException-invalid-input2', 'insertBulkBlock: dataset and file parentages cannot be in the input at the same time' , self.logger.exception, 'insertBulkBlock: datset and file parentages cannot be in the input at the same time.' ) indata = validateJSONInputNoCopy('blockBulk', indata) self.dbsBlockInsert.putBlock(indata) except cjson.DecodeError as dc: dbsExceptionHandler('dbsException-invalid-input2', 'Wrong format/data from insert BulkBlock input', self.logger. exception, str(dc)) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de .message) except HTTPError as he: raise he except Exception as ex: if str(ex).find('ORA-01036') != -1: dbsExceptionHandler('dbsException-invalid-input2', 'illegal variable name/number from input', self.logger. exception, str(ex)) else: sError = ( 'DBSWriterModel/insertBulkBlock. %s\n. Exception trace: \n %s' % (ex, traceback.format_exc())) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger. exception, sError)
insertBulkBlock
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n body = request.body.read()\n indata = cjson.decode(body)\n if indata.get('file_parent_list', []) and indata.get(\n 'dataset_parent_list', []):\n dbsExceptionHandler('dbsException-invalid-input2',\n 'insertBulkBlock: dataset and file parentages cannot be in the input at the same time'\n , self.logger.exception,\n 'insertBulkBlock: datset and file parentages cannot be in the input at the same time.'\n )\n indata = validateJSONInputNoCopy('blockBulk', indata)\n self.dbsBlockInsert.putBlock(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Wrong format/data from insert BulkBlock input', self.logger.\n exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de\n .message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n if str(ex).find('ORA-01036') != -1:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'illegal variable name/number from input', self.logger.\n exception, str(ex))\n else:\n sError = (\n 'DBSWriterModel/insertBulkBlock. %s\\n. Exception trace: \\n %s'\n % (ex, traceback.format_exc()))\n dbsExceptionHandler('dbsException-server-error',\n dbsExceptionCode['dbsException-server-error'], self.logger.\n exception, sError)\n", "role": "user" }, { "content": "insertBulkBlock", "role": "assistant" } ]
def x(self): try: body = request.body.read() indata = cjson.decode(body) indata = validateJSONInputNoCopy('block', indata) self.dbsBlock.insertBlock(indata) except cjson.DecodeError as dc: dbsExceptionHandler('dbsException-invalid-input2', 'Wrong format/data from insert Block input', self.logger. exception, str(dc)) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de .message) except Exception as ex: sError = 'DBSWriterModel/insertBlock. %s\n. Exception trace: \n %s' % ( ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[ 'dbsException-server-error'], self.logger.exception, sError)
insertBlock
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n body = request.body.read()\n indata = cjson.decode(body)\n indata = validateJSONInputNoCopy('block', indata)\n self.dbsBlock.insertBlock(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Wrong format/data from insert Block input', self.logger.\n exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de\n .message)\n except Exception as ex:\n sError = 'DBSWriterModel/insertBlock. %s\\n. Exception trace: \\n %s' % (\n ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[\n 'dbsException-server-error'], self.logger.exception, sError)\n", "role": "user" }, { "content": "insertBlock", "role": "assistant" } ]
def x(self, qInserts=False): if qInserts in (False, 'False'): qInserts = False try: body = request.body.read() indata = cjson.decode(body)['files'] if not isinstance(indata, (list, dict)): dbsExceptionHandler('dbsException-invalid-input', 'Invalid Input DataType', self.logger.exception, 'insertFile expects input as list or dirc') businput = [] if isinstance(indata, dict): indata = [indata] indata = validateJSONInputNoCopy('files', indata) for f in indata: f.update({'creation_date': f.get('creation_date', dbsUtils(). getTime()), 'create_by': dbsUtils().getCreateBy(), 'last_modification_date': f.get('last_modification_date', dbsUtils().getTime()), 'last_modified_by': f.get( 'last_modified_by', dbsUtils().getCreateBy()), 'file_lumi_list': f.get('file_lumi_list', []), 'file_parent_list': f.get('file_parent_list', []), 'file_assoc_list': f.get('assoc_list', []), 'file_output_config_list': f.get('file_output_config_list', [])}) businput.append(f) self.dbsFile.insertFile(businput, qInserts) except cjson.DecodeError as dc: dbsExceptionHandler('dbsException-invalid-input2', 'Wrong format/data from insert File input', self.logger. exception, str(dc)) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de .message) except HTTPError as he: raise he except Exception as ex: sError = 'DBSWriterModel/insertFile. %s\n. Exception trace: \n %s' % ( ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[ 'dbsException-server-error'], self.logger.exception, sError)
insertFile
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, qInserts=False):\n if qInserts in (False, 'False'):\n qInserts = False\n try:\n body = request.body.read()\n indata = cjson.decode(body)['files']\n if not isinstance(indata, (list, dict)):\n dbsExceptionHandler('dbsException-invalid-input',\n 'Invalid Input DataType', self.logger.exception,\n 'insertFile expects input as list or dirc')\n businput = []\n if isinstance(indata, dict):\n indata = [indata]\n indata = validateJSONInputNoCopy('files', indata)\n for f in indata:\n f.update({'creation_date': f.get('creation_date', dbsUtils().\n getTime()), 'create_by': dbsUtils().getCreateBy(),\n 'last_modification_date': f.get('last_modification_date',\n dbsUtils().getTime()), 'last_modified_by': f.get(\n 'last_modified_by', dbsUtils().getCreateBy()),\n 'file_lumi_list': f.get('file_lumi_list', []),\n 'file_parent_list': f.get('file_parent_list', []),\n 'file_assoc_list': f.get('assoc_list', []),\n 'file_output_config_list': f.get('file_output_config_list',\n [])})\n businput.append(f)\n self.dbsFile.insertFile(businput, qInserts)\n except cjson.DecodeError as dc:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Wrong format/data from insert File input', self.logger.\n exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de\n .message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = 'DBSWriterModel/insertFile. %s\\n. Exception trace: \\n %s' % (\n ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[\n 'dbsException-server-error'], self.logger.exception, sError)\n", "role": "user" }, { "content": "insertFile", "role": "assistant" } ]
def x(self, logical_file_name=[], is_file_valid=1, lost=0, dataset=''): if lost in [1, True, 'True', 'true', '1', 'y', 'yes']: lost = 1 if is_file_valid in [1, True, 'True', 'true', '1', 'y', 'yes']: dbsExceptionHandler('dbsException-invalid-input2', dbsExceptionCode['dbsException-invalid-input2'], self. logger.exception, 'Lost file must set to invalid') else: lost = 0 for f in (logical_file_name, dataset): if '*' in f or '%' in f: dbsExceptionHandler('dbsException-invalid-input2', dbsExceptionCode['dbsException-invalid-input2'], self. logger.exception, 'No wildcard allow in LFN or dataset for updatefile API.' ) try: self.dbsFile.updateStatus(logical_file_name, is_file_valid, lost, dataset) except HTTPError as he: raise he except Exception as ex: sError = 'DBSWriterModel/updateFile. %s\n. Exception trace: \n %s' % ( ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[ 'dbsException-server-error'], self.logger.exception, sError)
updateFile
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, logical_file_name=[], is_file_valid=1, lost=0, dataset=''):\n if lost in [1, True, 'True', 'true', '1', 'y', 'yes']:\n lost = 1\n if is_file_valid in [1, True, 'True', 'true', '1', 'y', 'yes']:\n dbsExceptionHandler('dbsException-invalid-input2',\n dbsExceptionCode['dbsException-invalid-input2'], self.\n logger.exception, 'Lost file must set to invalid')\n else:\n lost = 0\n for f in (logical_file_name, dataset):\n if '*' in f or '%' in f:\n dbsExceptionHandler('dbsException-invalid-input2',\n dbsExceptionCode['dbsException-invalid-input2'], self.\n logger.exception,\n 'No wildcard allow in LFN or dataset for updatefile API.'\n )\n try:\n self.dbsFile.updateStatus(logical_file_name, is_file_valid, lost,\n dataset)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = 'DBSWriterModel/updateFile. %s\\n. Exception trace: \\n %s' % (\n ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode[\n 'dbsException-server-error'], self.logger.exception, sError)\n", "role": "user" }, { "content": "updateFile", "role": "assistant" } ]
def x(cls, cidr, label=None, whitelist=False): network = netaddr.IPNetwork(cidr) start = network.first stop = start + network.size - 1 obj = cls.objects.create(label=label, start=start, stop=stop, whitelist =whitelist) return obj
create_from_string
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, cidr, label=None, whitelist=False):\n network = netaddr.IPNetwork(cidr)\n start = network.first\n stop = start + network.size - 1\n obj = cls.objects.create(label=label, start=start, stop=stop, whitelist\n =whitelist)\n return obj\n", "role": "user" }, { "content": "create_from_string", "role": "assistant" } ]