| source_code (string, lengths 45–7.24k) | function_name (string, lengths 1–111) | messages (list) |
|---|---|---|
def x(self):
for lv_load_area in self._grid._graph.nodes():
if isinstance(lv_load_area, LVLoadAreaDing0):
if lv_load_area.ring == self:
yield lv_load_area
|
lv_load_areas
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for lv_load_area in self._grid._graph.nodes():\n if isinstance(lv_load_area, LVLoadAreaDing0):\n if lv_load_area.ring == self:\n yield lv_load_area\n",
"role": "user"
},
{
"content": "lv_load_areas",
"role": "assistant"
}
] |
def x(self):
self.branch_nodes = self.grid.graph_nodes_from_branch(self.branch)
self.grid._graph.remove_edge(self.branch_nodes[0], self.branch_nodes[1])
self.status = 'open'
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.branch_nodes = self.grid.graph_nodes_from_branch(self.branch)\n self.grid._graph.remove_edge(self.branch_nodes[0], self.branch_nodes[1])\n self.status = 'open'\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
def x(self):
self.grid._graph.add_edge(self.branch_nodes[0], self.branch_nodes[1],
branch=self.branch)
self.status = 'closed'
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.grid._graph.add_edge(self.branch_nodes[0], self.branch_nodes[1],\n branch=self.branch)\n self.status = 'closed'\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
def x(self):
for load_area in sorted(self._lv_load_areas, key=lambda _: repr(_)):
yield load_area
|
lv_load_areas
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for load_area in sorted(self._lv_load_areas, key=lambda _: repr(_)):\n yield load_area\n",
"role": "user"
},
{
"content": "lv_load_areas",
"role": "assistant"
}
] |
def x(self, lv_load_area):
if lv_load_area not in self.lv_load_areas() and isinstance(lv_load_area,
LVLoadAreaDing0):
self._lv_load_areas.append(lv_load_area)
self.mv_grid.graph_add_node(lv_load_area.lv_load_area_centre)
|
add_lv_load_area
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, lv_load_area):\n if lv_load_area not in self.lv_load_areas() and isinstance(lv_load_area,\n LVLoadAreaDing0):\n self._lv_load_areas.append(lv_load_area)\n self.mv_grid.graph_add_node(lv_load_area.lv_load_area_centre)\n",
"role": "user"
},
{
"content": "add_lv_load_area",
"role": "assistant"
}
] |
def x(self, lv_load_area_group):
if lv_load_area_group not in self.lv_load_area_groups():
self._lv_load_area_groups.append(lv_load_area_group)
|
add_lv_load_area_group
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, lv_load_area_group):\n if lv_load_area_group not in self.lv_load_area_groups():\n self._lv_load_area_groups.append(lv_load_area_group)\n",
"role": "user"
},
{
"content": "add_lv_load_area_group",
"role": "assistant"
}
] |
def x(self):
peak_load = peak_load_satellites = 0
for lv_load_area in self.lv_load_areas():
peak_load += lv_load_area.peak_load
if lv_load_area.is_satellite:
peak_load_satellites += lv_load_area.peak_load
self.peak_load = peak_load
self.peak_load_satellites = peak_load_satellites
|
add_peak_demand
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n peak_load = peak_load_satellites = 0\n for lv_load_area in self.lv_load_areas():\n peak_load += lv_load_area.peak_load\n if lv_load_area.is_satellite:\n peak_load_satellites += lv_load_area.peak_load\n self.peak_load = peak_load\n self.peak_load_satellites = peak_load_satellites\n",
"role": "user"
},
{
"content": "add_peak_demand",
"role": "assistant"
}
] |
def x(self):
peak_load_aggregated = 0
for lv_load_area in self.lv_load_areas():
if lv_load_area.is_aggregated:
peak_load_aggregated += lv_load_area.peak_load
self.peak_load_aggregated = peak_load_aggregated
|
add_aggregated_peak_demand
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n peak_load_aggregated = 0\n for lv_load_area in self.lv_load_areas():\n if lv_load_area.is_aggregated:\n peak_load_aggregated += lv_load_area.peak_load\n self.peak_load_aggregated = peak_load_aggregated\n",
"role": "user"
},
{
"content": "add_aggregated_peak_demand",
"role": "assistant"
}
] |
def x(self):
for lv_grid_district in sorted(self._lv_grid_districts, key=lambda _:
repr(_)):
yield lv_grid_district
|
lv_grid_districts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for lv_grid_district in sorted(self._lv_grid_districts, key=lambda _:\n repr(_)):\n yield lv_grid_district\n",
"role": "user"
},
{
"content": "lv_grid_districts",
"role": "assistant"
}
] |
def x(self, lv_grid_district):
if lv_grid_district not in self._lv_grid_districts and isinstance(
lv_grid_district, LVGridDistrictDing0):
self._lv_grid_districts.append(lv_grid_district)
|
add_lv_grid_district
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, lv_grid_district):\n if lv_grid_district not in self._lv_grid_districts and isinstance(\n lv_grid_district, LVGridDistrictDing0):\n self._lv_grid_districts.append(lv_grid_district)\n",
"role": "user"
},
{
"content": "add_lv_grid_district",
"role": "assistant"
}
] |
def x(self):
cum_peak_generation = 0
for lv_grid_district in self._lv_grid_districts:
cum_peak_generation += lv_grid_district.lv_grid.station(
).peak_generation
return cum_peak_generation
|
peak_generation
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n cum_peak_generation = 0\n for lv_grid_district in self._lv_grid_districts:\n cum_peak_generation += lv_grid_district.lv_grid.station(\n ).peak_generation\n return cum_peak_generation\n",
"role": "user"
},
{
"content": "peak_generation",
"role": "assistant"
}
] |
def x(self, pair):
i, j = pair
return self._nodes[i.name()], self._nodes[j.name()]
|
get_pair
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pair):\n i, j = pair\n return self._nodes[i.name()], self._nodes[j.name()]\n",
"role": "user"
},
{
"content": "get_pair",
"role": "assistant"
}
] |
def x(self):
return all([(node.route_allocation() is not None) for node in list(self
._nodes.values()) if node != self._problem.depot()])
|
is_complete
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n return all([(node.route_allocation() is not None) for node in list(self\n ._nodes.values()) if node != self._problem.depot()])\n",
"role": "user"
},
{
"content": "is_complete",
"role": "assistant"
}
] |
def x(self):
new_solution = self.__class__(self._problem, len(self._routes))
for index, r in enumerate(self._routes):
new_route = new_solution._routes[index]
for node in r.nodes():
new_node = new_solution._nodes[node]
new_route.allocate([new_node])
return new_solution
|
clone
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n new_solution = self.__class__(self._problem, len(self._routes))\n for index, r in enumerate(self._routes):\n new_route = new_solution._routes[index]\n for node in r.nodes():\n new_node = new_solution._nodes[node]\n new_route.allocate([new_node])\n return new_solution\n",
"role": "user"
},
{
"content": "clone",
"role": "assistant"
}
] |
def x(self):
length = 0
for r in self._routes:
length = length + r.length()
return length
|
length
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n length = 0\n for r in self._routes:\n length = length + r.length()\n return length\n",
"role": "user"
},
{
"content": "length",
"role": "assistant"
}
] |
def x(self, anim):
g = nx.Graph()
ntemp = []
nodes_pos = {}
demands = {}
demands_pos = {}
for no, node in self._nodes.items():
g.add_node(node)
ntemp.append(node)
coord = self._problem._coord[no]
nodes_pos[node] = tuple(coord)
demands[node] = 'd=' + str(node.demand())
demands_pos[node] = tuple([(a + b) for a, b in zip(coord, [2.5] *
len(coord))])
depot = self._nodes[self._problem._depot._name]
for r in self.routes():
n1 = r._nodes[0:len(r._nodes) - 1]
n2 = r._nodes[1:len(r._nodes)]
e = list(zip(n1, n2))
e.append((depot, r._nodes[0]))
e.append((r._nodes[-1], depot))
g.add_edges_from(e)
plt.figure()
ax = plt.gca()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
if anim is not None:
nx.draw_networkx(g, nodes_pos, with_labels=False, node_size=50)
plt.savefig(anim.file_path + anim.file_prefix + (4 - len(str(anim.
counter))) * '0' + str(anim.counter) + '.png', dpi=150,
bbox_inches='tight')
anim.counter += 1
plt.close()
else:
nx.draw_networkx(g, nodes_pos)
nx.draw_networkx_labels(g, demands_pos, labels=demands)
plt.show()
|
draw_network
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, anim):\n g = nx.Graph()\n ntemp = []\n nodes_pos = {}\n demands = {}\n demands_pos = {}\n for no, node in self._nodes.items():\n g.add_node(node)\n ntemp.append(node)\n coord = self._problem._coord[no]\n nodes_pos[node] = tuple(coord)\n demands[node] = 'd=' + str(node.demand())\n demands_pos[node] = tuple([(a + b) for a, b in zip(coord, [2.5] *\n len(coord))])\n depot = self._nodes[self._problem._depot._name]\n for r in self.routes():\n n1 = r._nodes[0:len(r._nodes) - 1]\n n2 = r._nodes[1:len(r._nodes)]\n e = list(zip(n1, n2))\n e.append((depot, r._nodes[0]))\n e.append((r._nodes[-1], depot))\n g.add_edges_from(e)\n plt.figure()\n ax = plt.gca()\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n if anim is not None:\n nx.draw_networkx(g, nodes_pos, with_labels=False, node_size=50)\n plt.savefig(anim.file_path + anim.file_prefix + (4 - len(str(anim.\n counter))) * '0' + str(anim.counter) + '.png', dpi=150,\n bbox_inches='tight')\n anim.counter += 1\n plt.close()\n else:\n nx.draw_networkx(g, nodes_pos)\n nx.draw_networkx_labels(g, demands_pos, labels=demands)\n plt.show()\n",
"role": "user"
},
{
"content": "draw_network",
"role": "assistant"
}
] |
def x(mv_grid, polygon, mode, proj):
branches = []
polygon_shp = transform(proj, polygon)
for branch in mv_grid.graph_edges():
nodes = branch['adj_nodes']
branch_shp = transform(proj, LineString([nodes[0].geo_data, nodes[1
].geo_data]))
if mode == 'intersects':
if polygon_shp.intersects(branch_shp):
branches.append(branch)
elif mode == 'contains':
if polygon_shp.contains(branch_shp):
branches.append(branch)
else:
raise ValueError('Mode is invalid!')
return branches
|
calc_geo_branches_in_polygon
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid, polygon, mode, proj):\n branches = []\n polygon_shp = transform(proj, polygon)\n for branch in mv_grid.graph_edges():\n nodes = branch['adj_nodes']\n branch_shp = transform(proj, LineString([nodes[0].geo_data, nodes[1\n ].geo_data]))\n if mode == 'intersects':\n if polygon_shp.intersects(branch_shp):\n branches.append(branch)\n elif mode == 'contains':\n if polygon_shp.contains(branch_shp):\n branches.append(branch)\n else:\n raise ValueError('Mode is invalid!')\n return branches\n",
"role": "user"
},
{
"content": "calc_geo_branches_in_polygon",
"role": "assistant"
}
] |
def x(node, mv_grid, radius, radius_inc, proj):
branches = []
while not branches:
node_shp = transform(proj, node.geo_data)
buffer_zone_shp = node_shp.buffer(radius)
for branch in mv_grid.graph_edges():
nodes = branch['adj_nodes']
branch_shp = transform(proj, LineString([nodes[0].geo_data,
nodes[1].geo_data]))
if buffer_zone_shp.intersects(branch_shp):
branches.append(branch)
radius += radius_inc
return branches
|
calc_geo_branches_in_buffer
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, mv_grid, radius, radius_inc, proj):\n branches = []\n while not branches:\n node_shp = transform(proj, node.geo_data)\n buffer_zone_shp = node_shp.buffer(radius)\n for branch in mv_grid.graph_edges():\n nodes = branch['adj_nodes']\n branch_shp = transform(proj, LineString([nodes[0].geo_data,\n nodes[1].geo_data]))\n if buffer_zone_shp.intersects(branch_shp):\n branches.append(branch)\n radius += radius_inc\n return branches\n",
"role": "user"
},
{
"content": "calc_geo_branches_in_buffer",
"role": "assistant"
}
] |
def x(node_source, node_target):
branch_detour_factor = cfg_ding0.get('assumptions', 'branch_detour_factor')
branch_length = branch_detour_factor * vincenty((node_source.geo_data.y,
node_source.geo_data.x), (node_target.geo_data.y, node_target.
geo_data.x)).m
if branch_length == 0:
branch_length = 1
logger.warning(
"Geo distance is zero, check objects' positions. Distance is set to 1m"
)
return branch_length
|
calc_geo_dist_vincenty
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node_source, node_target):\n branch_detour_factor = cfg_ding0.get('assumptions', 'branch_detour_factor')\n branch_length = branch_detour_factor * vincenty((node_source.geo_data.y,\n node_source.geo_data.x), (node_target.geo_data.y, node_target.\n geo_data.x)).m\n if branch_length == 0:\n branch_length = 1\n logger.warning(\n \"Geo distance is zero, check objects' positions. Distance is set to 1m\"\n )\n return branch_length\n",
"role": "user"
},
{
"content": "calc_geo_dist_vincenty",
"role": "assistant"
}
] |
def x(nodes_pos):
branch_detour_factor = cfg_ding0.get('assumptions', 'branch_detour_factor')
matrix = {}
for i in nodes_pos:
pos_origin = tuple(nodes_pos[i])
matrix[i] = {}
for j in nodes_pos:
pos_dest = tuple(nodes_pos[j])
distance = branch_detour_factor * vincenty(tuple(reversed(
pos_origin)), tuple(reversed(pos_dest))).km
matrix[i][j] = distance
return matrix
|
calc_geo_dist_matrix_vincenty
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(nodes_pos):\n branch_detour_factor = cfg_ding0.get('assumptions', 'branch_detour_factor')\n matrix = {}\n for i in nodes_pos:\n pos_origin = tuple(nodes_pos[i])\n matrix[i] = {}\n for j in nodes_pos:\n pos_dest = tuple(nodes_pos[j])\n distance = branch_detour_factor * vincenty(tuple(reversed(\n pos_origin)), tuple(reversed(pos_dest))).km\n matrix[i][j] = distance\n return matrix\n",
"role": "user"
},
{
"content": "calc_geo_dist_matrix_vincenty",
"role": "assistant"
}
] |
def x(node_source, node_target):
proj_source = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'),
pyproj.Proj(init='epsg:3035'))
proj_target = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'),
pyproj.Proj(init='epsg:4326'))
branch_shp = transform(proj_source, LineString([node_source.geo_data,
node_target.geo_data]))
distance = vincenty((node_source.geo_data.y, node_source.geo_data.x), (
node_target.geo_data.y, node_target.geo_data.x)).m
centre_point_shp = transform(proj_target, branch_shp.interpolate(
distance / 2))
return centre_point_shp
|
calc_geo_centre_point
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node_source, node_target):\n proj_source = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'),\n pyproj.Proj(init='epsg:3035'))\n proj_target = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'),\n pyproj.Proj(init='epsg:4326'))\n branch_shp = transform(proj_source, LineString([node_source.geo_data,\n node_target.geo_data]))\n distance = vincenty((node_source.geo_data.y, node_source.geo_data.x), (\n node_target.geo_data.y, node_target.geo_data.x)).m\n centre_point_shp = transform(proj_target, branch_shp.interpolate(\n distance / 2))\n return centre_point_shp\n",
"role": "user"
},
{
"content": "calc_geo_centre_point",
"role": "assistant"
}
] |
def x(node_shp, branches, proj, conn_dist_weight, debug, branches_only=False):
conn_diff_tolerance = cfg_ding0.get('mv_routing', 'conn_diff_tolerance')
conn_objects_min_stack = []
for branch in branches:
stations = branch['adj_nodes']
station1_shp = transform(proj, stations[0].geo_data)
station2_shp = transform(proj, stations[1].geo_data)
line_shp = LineString([station1_shp, station2_shp])
if not branches_only:
conn_objects = {'s1': {'obj': stations[0], 'shp': station1_shp,
'dist': node_shp.distance(station1_shp) * conn_dist_weight *
0.999}, 's2': {'obj': stations[1], 'shp': station2_shp,
'dist': node_shp.distance(station2_shp) * conn_dist_weight *
0.999}, 'b': {'obj': branch, 'shp': line_shp, 'dist':
node_shp.distance(line_shp)}}
if abs(conn_objects['s1']['dist'] - conn_objects['b']['dist']
) < conn_diff_tolerance or abs(conn_objects['s2']['dist'] -
conn_objects['b']['dist']) < conn_diff_tolerance:
del conn_objects['b']
if isinstance(conn_objects['s1']['obj'], MVStationDing0):
del conn_objects['s1']
elif isinstance(conn_objects['s2']['obj'], MVStationDing0):
del conn_objects['s2']
else:
conn_objects = {'b': {'obj': branch, 'shp': line_shp, 'dist':
node_shp.distance(line_shp)}}
conn_objects_min = min(conn_objects.values(), key=lambda v: v['dist'])
conn_objects_min_stack.append(conn_objects_min)
conn_objects_min_stack = [_ for _ in sorted(conn_objects_min_stack, key
=lambda x: x['dist'])]
if debug:
logger.debug('Stack length: {}'.format(len(conn_objects_min_stack)))
return conn_objects_min_stack
|
find_nearest_conn_objects
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node_shp, branches, proj, conn_dist_weight, debug, branches_only=False):\n conn_diff_tolerance = cfg_ding0.get('mv_routing', 'conn_diff_tolerance')\n conn_objects_min_stack = []\n for branch in branches:\n stations = branch['adj_nodes']\n station1_shp = transform(proj, stations[0].geo_data)\n station2_shp = transform(proj, stations[1].geo_data)\n line_shp = LineString([station1_shp, station2_shp])\n if not branches_only:\n conn_objects = {'s1': {'obj': stations[0], 'shp': station1_shp,\n 'dist': node_shp.distance(station1_shp) * conn_dist_weight *\n 0.999}, 's2': {'obj': stations[1], 'shp': station2_shp,\n 'dist': node_shp.distance(station2_shp) * conn_dist_weight *\n 0.999}, 'b': {'obj': branch, 'shp': line_shp, 'dist':\n node_shp.distance(line_shp)}}\n if abs(conn_objects['s1']['dist'] - conn_objects['b']['dist']\n ) < conn_diff_tolerance or abs(conn_objects['s2']['dist'] -\n conn_objects['b']['dist']) < conn_diff_tolerance:\n del conn_objects['b']\n if isinstance(conn_objects['s1']['obj'], MVStationDing0):\n del conn_objects['s1']\n elif isinstance(conn_objects['s2']['obj'], MVStationDing0):\n del conn_objects['s2']\n else:\n conn_objects = {'b': {'obj': branch, 'shp': line_shp, 'dist':\n node_shp.distance(line_shp)}}\n conn_objects_min = min(conn_objects.values(), key=lambda v: v['dist'])\n conn_objects_min_stack.append(conn_objects_min)\n conn_objects_min_stack = [_ for _ in sorted(conn_objects_min_stack, key\n =lambda x: x['dist'])]\n if debug:\n logger.debug('Stack length: {}'.format(len(conn_objects_min_stack)))\n return conn_objects_min_stack\n",
"role": "user"
},
{
"content": "find_nearest_conn_objects",
"role": "assistant"
}
] |
def x(node, node_shp, graph, proj, conn_objects_min_stack,
conn_dist_ring_mod, debug):
node_connected = False
for dist_min_obj in conn_objects_min_stack:
nodes_are_members_of_ring = False
if isinstance(dist_min_obj['shp'], LineString):
node1 = dist_min_obj['obj']['adj_nodes'][0]
node2 = dist_min_obj['obj']['adj_nodes'][1]
lv_load_area_group = get_lv_load_area_group_from_node_pair(node1,
node2)
nodes_are_members_of_ring = any(node1 in ring and node2 in ring for
ring in node.grid.rings_nodes())
branch_ring = dist_min_obj['obj']['branch'].ring
elif isinstance(dist_min_obj['obj'], MVCableDistributorDing0):
lv_load_area_group = dist_min_obj['obj'].lv_load_area_group
else:
lv_load_area_group = dist_min_obj['obj'
].lv_load_area.lv_load_area_group
if not lv_load_area_group:
target_obj_result = connect_node(node, node_shp, node.
lv_load_area.mv_grid_district.mv_grid, dist_min_obj, proj,
graph, conn_dist_ring_mod, debug)
if (target_obj_result is not None and target_obj_result !=
're-routed'):
lv_load_area_group = LoadAreaGroupDing0(mv_grid_district=
node.lv_load_area.mv_grid_district, root_node=
target_obj_result)
lv_load_area_group.add_lv_load_area(lv_load_area=node.
lv_load_area)
node.lv_load_area.lv_load_area_group = lv_load_area_group
node.lv_load_area.mv_grid_district.add_lv_load_area_group(
lv_load_area_group)
if debug:
logger.debug('New LV load_area group {} created!'.
format(lv_load_area_group))
node_connected = True
break
elif target_obj_result == 're-routed':
if nodes_are_members_of_ring:
node.lv_load_area.is_satellite = False
node.lv_load_area.ring = branch_ring
node_connected = True
break
else:
target_obj_result = connect_node(node, node_shp, node.
lv_load_area.mv_grid_district.mv_grid, dist_min_obj, proj,
graph, conn_dist_ring_mod, debug)
if (target_obj_result is not None and target_obj_result !=
're-routed'):
if lv_load_area_group.can_add_lv_load_area(node=node):
lv_load_area_group.add_lv_load_area(lv_load_area=node.
lv_load_area)
node.lv_load_area.lv_load_area_group = lv_load_area_group
if isinstance(target_obj_result, MVCableDistributorDing0):
lv_load_area_group.add_lv_load_area(lv_load_area=
target_obj_result)
target_obj_result.lv_load_area_group = (
lv_load_area_group)
if debug:
logger.debug('LV load_area group {} joined!'.format
(lv_load_area_group))
node_connected = True
break
else:
if debug:
logger.debug(
'Node {0} could not be added to load_area group {1}'
.format(node, lv_load_area_group))
disconnect_node(node, target_obj_result, graph, debug)
continue
elif target_obj_result == 're-routed':
lv_load_area_group.add_lv_load_area(lv_load_area=node.
lv_load_area)
node.lv_load_area.lv_load_area_group = lv_load_area_group
if nodes_are_members_of_ring:
node.lv_load_area.is_satellite = False
node.lv_load_area.ring = branch_ring
node_connected = True
break
if not node_connected and debug:
logger.debug(
'Node {} could not be connected, try to increase the parameter `load_area_sat_buffer_radius` in config file `config_calc.cfg` to gain more possible connection points.'
.format(node))
|
find_connection_point
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, node_shp, graph, proj, conn_objects_min_stack,\n conn_dist_ring_mod, debug):\n node_connected = False\n for dist_min_obj in conn_objects_min_stack:\n nodes_are_members_of_ring = False\n if isinstance(dist_min_obj['shp'], LineString):\n node1 = dist_min_obj['obj']['adj_nodes'][0]\n node2 = dist_min_obj['obj']['adj_nodes'][1]\n lv_load_area_group = get_lv_load_area_group_from_node_pair(node1,\n node2)\n nodes_are_members_of_ring = any(node1 in ring and node2 in ring for\n ring in node.grid.rings_nodes())\n branch_ring = dist_min_obj['obj']['branch'].ring\n elif isinstance(dist_min_obj['obj'], MVCableDistributorDing0):\n lv_load_area_group = dist_min_obj['obj'].lv_load_area_group\n else:\n lv_load_area_group = dist_min_obj['obj'\n ].lv_load_area.lv_load_area_group\n if not lv_load_area_group:\n target_obj_result = connect_node(node, node_shp, node.\n lv_load_area.mv_grid_district.mv_grid, dist_min_obj, proj,\n graph, conn_dist_ring_mod, debug)\n if (target_obj_result is not None and target_obj_result !=\n 're-routed'):\n lv_load_area_group = LoadAreaGroupDing0(mv_grid_district=\n node.lv_load_area.mv_grid_district, root_node=\n target_obj_result)\n lv_load_area_group.add_lv_load_area(lv_load_area=node.\n lv_load_area)\n node.lv_load_area.lv_load_area_group = lv_load_area_group\n node.lv_load_area.mv_grid_district.add_lv_load_area_group(\n lv_load_area_group)\n if debug:\n logger.debug('New LV load_area group {} created!'.\n format(lv_load_area_group))\n node_connected = True\n break\n elif target_obj_result == 're-routed':\n if nodes_are_members_of_ring:\n node.lv_load_area.is_satellite = False\n node.lv_load_area.ring = branch_ring\n node_connected = True\n break\n else:\n target_obj_result = connect_node(node, node_shp, node.\n lv_load_area.mv_grid_district.mv_grid, dist_min_obj, proj,\n graph, conn_dist_ring_mod, debug)\n if (target_obj_result is not None and target_obj_result !=\n 're-routed'):\n if lv_load_area_group.can_add_lv_load_area(node=node):\n lv_load_area_group.add_lv_load_area(lv_load_area=node.\n lv_load_area)\n node.lv_load_area.lv_load_area_group = lv_load_area_group\n if isinstance(target_obj_result, MVCableDistributorDing0):\n lv_load_area_group.add_lv_load_area(lv_load_area=\n target_obj_result)\n target_obj_result.lv_load_area_group = (\n lv_load_area_group)\n if debug:\n logger.debug('LV load_area group {} joined!'.format\n (lv_load_area_group))\n node_connected = True\n break\n else:\n if debug:\n logger.debug(\n 'Node {0} could not be added to load_area group {1}'\n .format(node, lv_load_area_group))\n disconnect_node(node, target_obj_result, graph, debug)\n continue\n elif target_obj_result == 're-routed':\n lv_load_area_group.add_lv_load_area(lv_load_area=node.\n lv_load_area)\n node.lv_load_area.lv_load_area_group = lv_load_area_group\n if nodes_are_members_of_ring:\n node.lv_load_area.is_satellite = False\n node.lv_load_area.ring = branch_ring\n node_connected = True\n break\n if not node_connected and debug:\n logger.debug(\n 'Node {} could not be connected, try to increase the parameter `load_area_sat_buffer_radius` in config file `config_calc.cfg` to gain more possible connection points.'\n .format(node))\n",
"role": "user"
},
{
"content": "find_connection_point",
"role": "assistant"
}
] |
def x(node, node_shp, mv_grid, target_obj, proj, graph, conn_dist_ring_mod,
debug):
target_obj_result = None
if isinstance(target_obj['shp'], LineString):
adj_node1 = target_obj['obj']['adj_nodes'][0]
adj_node2 = target_obj['obj']['adj_nodes'][1]
conn_point_shp = target_obj['shp'].interpolate(target_obj['shp'].
project(node_shp))
conn_point_shp = transform(proj, conn_point_shp)
if not target_obj['obj']['branch'].connects_aggregated:
if target_obj['dist'] < conn_dist_ring_mod:
branch_type = graph.adj[adj_node1][adj_node2]['branch'].type
branch_kind = graph.adj[adj_node1][adj_node2]['branch'].kind
branch_ring = graph.adj[adj_node1][adj_node2]['branch'].ring
circ_breaker = graph.adj[adj_node1][adj_node2]['branch'
].circuit_breaker
if circ_breaker is not None:
circ_breaker.geo_data = calc_geo_centre_point(adj_node1,
node)
graph.remove_edge(adj_node1, adj_node2)
branch_length = calc_geo_dist_vincenty(adj_node1, node)
branch = BranchDing0(length=branch_length, circuit_breaker=
circ_breaker, kind=branch_kind, type=branch_type, ring=
branch_ring)
if circ_breaker is not None:
circ_breaker.branch = branch
graph.add_edge(adj_node1, node, branch=branch)
branch_length = calc_geo_dist_vincenty(adj_node2, node)
graph.add_edge(adj_node2, node, branch=BranchDing0(length=
branch_length, kind=branch_kind, type=branch_type, ring
=branch_ring))
target_obj_result = 're-routed'
if debug:
logger.debug('Ring main route modified to include node {}'
.format(node))
else:
cable_dist = MVCableDistributorDing0(geo_data=
conn_point_shp, grid=mv_grid)
mv_grid.add_cable_distributor(cable_dist)
circ_breaker = graph.adj[adj_node1][adj_node2]['branch'
].circuit_breaker
if circ_breaker is not None:
circ_breaker.geo_data = calc_geo_centre_point(adj_node1,
cable_dist)
branch_kind = graph.adj[adj_node1][adj_node2]['branch'].kind
branch_type = graph.adj[adj_node1][adj_node2]['branch'].type
branch_ring = graph.adj[adj_node1][adj_node2]['branch'].ring
graph.remove_edge(adj_node1, adj_node2)
branch_length = calc_geo_dist_vincenty(adj_node1, cable_dist)
branch = BranchDing0(length=branch_length, circuit_breaker=
circ_breaker, kind=branch_kind, type=branch_type, ring=
branch_ring)
if circ_breaker is not None:
circ_breaker.branch = branch
graph.add_edge(adj_node1, cable_dist, branch=branch)
branch_length = calc_geo_dist_vincenty(adj_node2, cable_dist)
graph.add_edge(adj_node2, cable_dist, branch=BranchDing0(
length=branch_length, kind=branch_kind, type=
branch_type, ring=branch_ring))
branch_kind = mv_grid.default_branch_kind
branch_type = mv_grid.default_branch_type
branch_length = calc_geo_dist_vincenty(node, cable_dist)
graph.add_edge(node, cable_dist, branch=BranchDing0(length=
branch_length, kind=branch_kind, type=branch_type, ring
=branch_ring))
target_obj_result = cable_dist
if debug:
logger.debug(
'Nearest connection point for object {0} is branch {1} (distance={2} m)'
.format(node, target_obj['obj']['adj_nodes'],
target_obj['dist']))
else:
if isinstance(node, LVLoadAreaCentreDing0):
valid_conn_objects = LVLoadAreaCentreDing0
elif isinstance(node, LVStationDing0):
valid_conn_objects = (LVLoadAreaCentreDing0, LVStationDing0,
MVCableDistributorDing0)
elif isinstance(node, GeneratorDing0):
valid_conn_objects = (LVLoadAreaCentreDing0, LVStationDing0,
MVCableDistributorDing0, GeneratorDing0)
else:
raise ValueError(
'Oops, the node you are trying to connect is not a valid connection object'
)
if isinstance(target_obj['obj'], (LVLoadAreaCentreDing0,
LVStationDing0)):
target_is_aggregated = target_obj['obj'].lv_load_area.is_aggregated
else:
target_is_aggregated = False
if isinstance(target_obj['obj'], valid_conn_objects
) and not target_is_aggregated:
branch_kind = mv_grid.default_branch_kind
branch_type = mv_grid.default_branch_type
branch_ring = mv_grid.get_ring_from_node(target_obj['obj'])
branch_length = calc_geo_dist_vincenty(node, target_obj['obj'])
graph.add_edge(node, target_obj['obj'], branch=BranchDing0(
length=branch_length, kind=branch_kind, type=branch_type,
ring=branch_ring))
target_obj_result = target_obj['obj']
if debug:
logger.debug(
'Nearest connection point for object {0} is station {1} (distance={2} m)'
.format(node, target_obj['obj'], target_obj['dist']))
return target_obj_result
|
connect_node
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, node_shp, mv_grid, target_obj, proj, graph, conn_dist_ring_mod,\n debug):\n target_obj_result = None\n if isinstance(target_obj['shp'], LineString):\n adj_node1 = target_obj['obj']['adj_nodes'][0]\n adj_node2 = target_obj['obj']['adj_nodes'][1]\n conn_point_shp = target_obj['shp'].interpolate(target_obj['shp'].\n project(node_shp))\n conn_point_shp = transform(proj, conn_point_shp)\n if not target_obj['obj']['branch'].connects_aggregated:\n if target_obj['dist'] < conn_dist_ring_mod:\n branch_type = graph.adj[adj_node1][adj_node2]['branch'].type\n branch_kind = graph.adj[adj_node1][adj_node2]['branch'].kind\n branch_ring = graph.adj[adj_node1][adj_node2]['branch'].ring\n circ_breaker = graph.adj[adj_node1][adj_node2]['branch'\n ].circuit_breaker\n if circ_breaker is not None:\n circ_breaker.geo_data = calc_geo_centre_point(adj_node1,\n node)\n graph.remove_edge(adj_node1, adj_node2)\n branch_length = calc_geo_dist_vincenty(adj_node1, node)\n branch = BranchDing0(length=branch_length, circuit_breaker=\n circ_breaker, kind=branch_kind, type=branch_type, ring=\n branch_ring)\n if circ_breaker is not None:\n circ_breaker.branch = branch\n graph.add_edge(adj_node1, node, branch=branch)\n branch_length = calc_geo_dist_vincenty(adj_node2, node)\n graph.add_edge(adj_node2, node, branch=BranchDing0(length=\n branch_length, kind=branch_kind, type=branch_type, ring\n =branch_ring))\n target_obj_result = 're-routed'\n if debug:\n logger.debug('Ring main route modified to include node {}'\n .format(node))\n else:\n cable_dist = MVCableDistributorDing0(geo_data=\n conn_point_shp, grid=mv_grid)\n mv_grid.add_cable_distributor(cable_dist)\n circ_breaker = graph.adj[adj_node1][adj_node2]['branch'\n ].circuit_breaker\n if circ_breaker is not None:\n circ_breaker.geo_data = calc_geo_centre_point(adj_node1,\n cable_dist)\n branch_kind = graph.adj[adj_node1][adj_node2]['branch'].kind\n branch_type = graph.adj[adj_node1][adj_node2]['branch'].type\n branch_ring = graph.adj[adj_node1][adj_node2]['branch'].ring\n graph.remove_edge(adj_node1, adj_node2)\n branch_length = calc_geo_dist_vincenty(adj_node1, cable_dist)\n branch = BranchDing0(length=branch_length, circuit_breaker=\n circ_breaker, kind=branch_kind, type=branch_type, ring=\n branch_ring)\n if circ_breaker is not None:\n circ_breaker.branch = branch\n graph.add_edge(adj_node1, cable_dist, branch=branch)\n branch_length = calc_geo_dist_vincenty(adj_node2, cable_dist)\n graph.add_edge(adj_node2, cable_dist, branch=BranchDing0(\n length=branch_length, kind=branch_kind, type=\n branch_type, ring=branch_ring))\n branch_kind = mv_grid.default_branch_kind\n branch_type = mv_grid.default_branch_type\n branch_length = calc_geo_dist_vincenty(node, cable_dist)\n graph.add_edge(node, cable_dist, branch=BranchDing0(length=\n branch_length, kind=branch_kind, type=branch_type, ring\n =branch_ring))\n target_obj_result = cable_dist\n if debug:\n logger.debug(\n 'Nearest connection point for object {0} is branch {1} (distance={2} m)'\n .format(node, target_obj['obj']['adj_nodes'],\n target_obj['dist']))\n else:\n if isinstance(node, LVLoadAreaCentreDing0):\n valid_conn_objects = LVLoadAreaCentreDing0\n elif isinstance(node, LVStationDing0):\n valid_conn_objects = (LVLoadAreaCentreDing0, LVStationDing0,\n MVCableDistributorDing0)\n elif isinstance(node, GeneratorDing0):\n valid_conn_objects = (LVLoadAreaCentreDing0, LVStationDing0,\n MVCableDistributorDing0, GeneratorDing0)\n else:\n raise ValueError(\n 'Oops, the node you are trying to connect is 
not a valid connection object'\n )\n if isinstance(target_obj['obj'], (LVLoadAreaCentreDing0,\n LVStationDing0)):\n target_is_aggregated = target_obj['obj'].lv_load_area.is_aggregated\n else:\n target_is_aggregated = False\n if isinstance(target_obj['obj'], valid_conn_objects\n ) and not target_is_aggregated:\n branch_kind = mv_grid.default_branch_kind\n branch_type = mv_grid.default_branch_type\n branch_ring = mv_grid.get_ring_from_node(target_obj['obj'])\n branch_length = calc_geo_dist_vincenty(node, target_obj['obj'])\n graph.add_edge(node, target_obj['obj'], branch=BranchDing0(\n length=branch_length, kind=branch_kind, type=branch_type,\n ring=branch_ring))\n target_obj_result = target_obj['obj']\n if debug:\n logger.debug(\n 'Nearest connection point for object {0} is station {1} (distance={2} m)'\n .format(node, target_obj['obj'], target_obj['dist']))\n return target_obj_result\n",
"role": "user"
},
{
"content": "connect_node",
"role": "assistant"
}
] |
def x(node, target_obj_result, graph, debug):
branch_kind = graph.adj[node][target_obj_result]['branch'].kind
branch_type = graph.adj[node][target_obj_result]['branch'].type
branch_ring = graph.adj[node][target_obj_result]['branch'].ring
graph.remove_edge(node, target_obj_result)
if isinstance(target_obj_result, MVCableDistributorDing0):
neighbor_nodes = list(graph.neighbors(target_obj_result))
if len(neighbor_nodes) == 2:
node.grid.remove_cable_distributor(target_obj_result)
branch_length = calc_geo_dist_vincenty(neighbor_nodes[0],
neighbor_nodes[1])
graph.add_edge(neighbor_nodes[0], neighbor_nodes[1], branch=
BranchDing0(length=branch_length, kind=branch_kind, type=
branch_type, ring=branch_ring))
if debug:
logger.debug('disconnect edge {0}-{1}'.format(node, target_obj_result))
|
disconnect_node
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(node, target_obj_result, graph, debug):\n branch_kind = graph.adj[node][target_obj_result]['branch'].kind\n branch_type = graph.adj[node][target_obj_result]['branch'].type\n branch_ring = graph.adj[node][target_obj_result]['branch'].ring\n graph.remove_edge(node, target_obj_result)\n if isinstance(target_obj_result, MVCableDistributorDing0):\n neighbor_nodes = list(graph.neighbors(target_obj_result))\n if len(neighbor_nodes) == 2:\n node.grid.remove_cable_distributor(target_obj_result)\n branch_length = calc_geo_dist_vincenty(neighbor_nodes[0],\n neighbor_nodes[1])\n graph.add_edge(neighbor_nodes[0], neighbor_nodes[1], branch=\n BranchDing0(length=branch_length, kind=branch_kind, type=\n branch_type, ring=branch_ring))\n if debug:\n logger.debug('disconnect edge {0}-{1}'.format(node, target_obj_result))\n",
"role": "user"
},
{
"content": "disconnect_node",
"role": "assistant"
}
] |
def x(mv_grid):
for branch in mv_grid.graph_edges():
if branch['branch'].kind is None:
branch['branch'].kind = mv_grid.default_branch_kind
if branch['branch'].type is None:
branch['branch'].type = mv_grid.default_branch_type
|
parametrize_lines
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid):\n for branch in mv_grid.graph_edges():\n if branch['branch'].kind is None:\n branch['branch'].kind = mv_grid.default_branch_kind\n if branch['branch'].type is None:\n branch['branch'].type = mv_grid.default_branch_type\n",
"role": "user"
},
{
"content": "parametrize_lines",
"role": "assistant"
}
] |
def x(mv_grid, graph, mode='normal', debug=False):
conn_dist_weight = cfg_ding0.get('mv_connect',
'load_area_sat_conn_dist_weight')
conn_dist_ring_mod = cfg_ding0.get('mv_connect',
'load_area_sat_conn_dist_ring_mod')
load_area_sat_buffer_radius = cfg_ding0.get('mv_connect',
'load_area_sat_buffer_radius')
load_area_sat_buffer_radius_inc = cfg_ding0.get('mv_connect',
'load_area_sat_buffer_radius_inc')
start = time.time()
proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj
.Proj(init='epsg:3035'))
proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj
.Proj(init='epsg:4326'))
if mode == 'normal':
nodes = mv_grid.graph_isolated_nodes()
elif mode == 'isolated':
nodes = mv_grid.graph_isolated_nodes()
else:
raise ValueError("'mode' is invalid.")
for node in nodes:
if isinstance(node, LVLoadAreaCentreDing0):
if node.lv_load_area.is_satellite:
node_shp = transform(proj1, node.geo_data)
if mode == 'normal':
branches = calc_geo_branches_in_buffer(node, mv_grid,
load_area_sat_buffer_radius,
load_area_sat_buffer_radius_inc, proj1)
elif mode == 'isolated':
nodes = set()
[nodes.update(ring_nodes) for ring_nodes in list(
mv_grid.rings_nodes(include_root_node=True))]
nodes = list(nodes)
branches = []
[branches.append(mv_grid.graph_branches_from_node(
node_branches)) for node_branches in nodes]
branches = [_ for _ in list(mv_grid.graph_edges()) if _
['adj_nodes'][0] in nodes and _['adj_nodes'][1] in
nodes]
conn_objects_min_stack = find_nearest_conn_objects(node_shp,
branches, proj1, conn_dist_weight, debug, branches_only
=False)
find_connection_point(node, node_shp, graph, proj2,
conn_objects_min_stack, conn_dist_ring_mod, debug)
parametrize_lines(mv_grid)
if debug:
logger.debug('Elapsed time (mv_connect): {}'.format(time.time() -
start))
return graph
|
mv_connect_satellites
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid, graph, mode='normal', debug=False):\n conn_dist_weight = cfg_ding0.get('mv_connect',\n 'load_area_sat_conn_dist_weight')\n conn_dist_ring_mod = cfg_ding0.get('mv_connect',\n 'load_area_sat_conn_dist_ring_mod')\n load_area_sat_buffer_radius = cfg_ding0.get('mv_connect',\n 'load_area_sat_buffer_radius')\n load_area_sat_buffer_radius_inc = cfg_ding0.get('mv_connect',\n 'load_area_sat_buffer_radius_inc')\n start = time.time()\n proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj\n .Proj(init='epsg:3035'))\n proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj\n .Proj(init='epsg:4326'))\n if mode == 'normal':\n nodes = mv_grid.graph_isolated_nodes()\n elif mode == 'isolated':\n nodes = mv_grid.graph_isolated_nodes()\n else:\n raise ValueError(\"'mode' is invalid.\")\n for node in nodes:\n if isinstance(node, LVLoadAreaCentreDing0):\n if node.lv_load_area.is_satellite:\n node_shp = transform(proj1, node.geo_data)\n if mode == 'normal':\n branches = calc_geo_branches_in_buffer(node, mv_grid,\n load_area_sat_buffer_radius,\n load_area_sat_buffer_radius_inc, proj1)\n elif mode == 'isolated':\n nodes = set()\n [nodes.update(ring_nodes) for ring_nodes in list(\n mv_grid.rings_nodes(include_root_node=True))]\n nodes = list(nodes)\n branches = []\n [branches.append(mv_grid.graph_branches_from_node(\n node_branches)) for node_branches in nodes]\n branches = [_ for _ in list(mv_grid.graph_edges()) if _\n ['adj_nodes'][0] in nodes and _['adj_nodes'][1] in\n nodes]\n conn_objects_min_stack = find_nearest_conn_objects(node_shp,\n branches, proj1, conn_dist_weight, debug, branches_only\n =False)\n find_connection_point(node, node_shp, graph, proj2,\n conn_objects_min_stack, conn_dist_ring_mod, debug)\n parametrize_lines(mv_grid)\n if debug:\n logger.debug('Elapsed time (mv_connect): {}'.format(time.time() -\n start))\n return graph\n",
"role": "user"
},
{
"content": "mv_connect_satellites",
"role": "assistant"
}
] |
def x(mv_grid_district, graph, debug=False):
proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj
.Proj(init='epsg:3035'))
proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj
.Proj(init='epsg:4326'))
conn_dist_weight = cfg_ding0.get('mv_connect',
'load_area_sat_conn_dist_weight')
conn_dist_ring_mod = cfg_ding0.get('mv_connect',
'load_area_stat_conn_dist_ring_mod')
for lv_load_area in mv_grid_district.lv_load_areas():
if (not lv_load_area.is_aggregated and lv_load_area.
lv_load_area_centre not in mv_grid_district.mv_grid.
graph_isolated_nodes()):
lv_load_area_centre = lv_load_area.lv_load_area_centre
if lv_load_area.lv_grid_districts_count() == 1:
lv_station = list(lv_load_area.lv_grid_districts())[0
].lv_grid.station()
branches = mv_grid_district.mv_grid.graph_branches_from_node(
lv_load_area_centre)
for node, branch in branches:
branch_kind = branch['branch'].kind
branch_type = branch['branch'].type
branch_ring = branch['branch'].ring
circ_breaker = branch['branch'].circuit_breaker
if circ_breaker is not None:
branch['branch'
].circuit_breaker.geo_data = calc_geo_centre_point(
lv_station, node)
graph.remove_edge(lv_load_area_centre, node)
branch_length = calc_geo_dist_vincenty(lv_station, node)
branch = BranchDing0(length=branch_length,
circuit_breaker=circ_breaker, kind=branch_kind,
type=branch_type, ring=branch_ring)
if circ_breaker is not None:
circ_breaker.branch = branch
graph.add_edge(lv_station, node, branch=branch)
graph.remove_node(lv_load_area_centre)
else:
for lv_grid_district in lv_load_area.lv_grid_districts():
branches = calc_geo_branches_in_polygon(mv_grid_district
.mv_grid, lv_load_area.geo_area, mode='intersects',
proj=proj1)
if not lv_load_area.is_satellite:
branches_valid = []
for branch in branches:
node1 = branch['adj_nodes'][0]
node2 = branch['adj_nodes'][1]
lv_load_area_group = (
get_lv_load_area_group_from_node_pair(node1,
node2))
if lv_load_area_group is None and branch['branch'
].ring is lv_load_area.ring:
branches_valid.append(branch)
branches = branches_valid
lv_station = lv_grid_district.lv_grid.station()
lv_station_shp = transform(proj1, lv_station.geo_data)
conn_objects_min_stack = find_nearest_conn_objects(
lv_station_shp, branches, proj1, conn_dist_weight,
debug, branches_only=False)
connect_node(lv_station, lv_station_shp,
mv_grid_district.mv_grid, conn_objects_min_stack[0],
proj2, graph, conn_dist_ring_mod, debug)
cable_dist = MVCableDistributorDing0(geo_data=
lv_load_area_centre.geo_data, grid=mv_grid_district.mv_grid
)
mv_grid_district.mv_grid.add_cable_distributor(cable_dist)
branches = mv_grid_district.mv_grid.graph_branches_from_node(
lv_load_area_centre)
for node, branch in branches:
branch_kind = branch['branch'].kind
branch_type = branch['branch'].type
branch_ring = branch['branch'].ring
circ_breaker = branch['branch'].circuit_breaker
if circ_breaker is not None:
branch['branch'
].circuit_breaker.geo_data = calc_geo_centre_point(
cable_dist, node)
graph.remove_edge(lv_load_area_centre, node)
branch_length = calc_geo_dist_vincenty(cable_dist, node)
branch = BranchDing0(length=branch_length,
circuit_breaker=circ_breaker, kind=branch_kind,
type=branch_type, ring=branch_ring)
if circ_breaker is not None:
circ_breaker.branch = branch
graph.add_edge(cable_dist, node, branch=branch)
graph.remove_node(lv_load_area_centre)
if mv_grid_district.mv_grid.default_branch_kind == 'line':
branches = calc_geo_branches_in_polygon(mv_grid_district.
mv_grid, lv_load_area.geo_area, mode='contains', proj=proj1
)
for branch in branches:
branch['branch'].kind = (mv_grid_district.mv_grid.
default_branch_kind_settle)
branch['branch'].type = (mv_grid_district.mv_grid.
default_branch_type_settle)
return graph
|
mv_connect_stations
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid_district, graph, debug=False):\n proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj\n .Proj(init='epsg:3035'))\n proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj\n .Proj(init='epsg:4326'))\n conn_dist_weight = cfg_ding0.get('mv_connect',\n 'load_area_sat_conn_dist_weight')\n conn_dist_ring_mod = cfg_ding0.get('mv_connect',\n 'load_area_stat_conn_dist_ring_mod')\n for lv_load_area in mv_grid_district.lv_load_areas():\n if (not lv_load_area.is_aggregated and lv_load_area.\n lv_load_area_centre not in mv_grid_district.mv_grid.\n graph_isolated_nodes()):\n lv_load_area_centre = lv_load_area.lv_load_area_centre\n if lv_load_area.lv_grid_districts_count() == 1:\n lv_station = list(lv_load_area.lv_grid_districts())[0\n ].lv_grid.station()\n branches = mv_grid_district.mv_grid.graph_branches_from_node(\n lv_load_area_centre)\n for node, branch in branches:\n branch_kind = branch['branch'].kind\n branch_type = branch['branch'].type\n branch_ring = branch['branch'].ring\n circ_breaker = branch['branch'].circuit_breaker\n if circ_breaker is not None:\n branch['branch'\n ].circuit_breaker.geo_data = calc_geo_centre_point(\n lv_station, node)\n graph.remove_edge(lv_load_area_centre, node)\n branch_length = calc_geo_dist_vincenty(lv_station, node)\n branch = BranchDing0(length=branch_length,\n circuit_breaker=circ_breaker, kind=branch_kind,\n type=branch_type, ring=branch_ring)\n if circ_breaker is not None:\n circ_breaker.branch = branch\n graph.add_edge(lv_station, node, branch=branch)\n graph.remove_node(lv_load_area_centre)\n else:\n for lv_grid_district in lv_load_area.lv_grid_districts():\n branches = calc_geo_branches_in_polygon(mv_grid_district\n .mv_grid, lv_load_area.geo_area, mode='intersects',\n proj=proj1)\n if not lv_load_area.is_satellite:\n branches_valid = []\n for branch in branches:\n node1 = branch['adj_nodes'][0]\n node2 = branch['adj_nodes'][1]\n lv_load_area_group = (\n get_lv_load_area_group_from_node_pair(node1,\n node2))\n if lv_load_area_group is None and branch['branch'\n ].ring is lv_load_area.ring:\n branches_valid.append(branch)\n branches = branches_valid\n lv_station = lv_grid_district.lv_grid.station()\n lv_station_shp = transform(proj1, lv_station.geo_data)\n conn_objects_min_stack = find_nearest_conn_objects(\n lv_station_shp, branches, proj1, conn_dist_weight,\n debug, branches_only=False)\n connect_node(lv_station, lv_station_shp,\n mv_grid_district.mv_grid, conn_objects_min_stack[0],\n proj2, graph, conn_dist_ring_mod, debug)\n cable_dist = MVCableDistributorDing0(geo_data=\n lv_load_area_centre.geo_data, grid=mv_grid_district.mv_grid\n )\n mv_grid_district.mv_grid.add_cable_distributor(cable_dist)\n branches = mv_grid_district.mv_grid.graph_branches_from_node(\n lv_load_area_centre)\n for node, branch in branches:\n branch_kind = branch['branch'].kind\n branch_type = branch['branch'].type\n branch_ring = branch['branch'].ring\n circ_breaker = branch['branch'].circuit_breaker\n if circ_breaker is not None:\n branch['branch'\n ].circuit_breaker.geo_data = calc_geo_centre_point(\n cable_dist, node)\n graph.remove_edge(lv_load_area_centre, node)\n branch_length = calc_geo_dist_vincenty(cable_dist, node)\n branch = BranchDing0(length=branch_length,\n circuit_breaker=circ_breaker, kind=branch_kind,\n type=branch_type, ring=branch_ring)\n if circ_breaker is not None:\n circ_breaker.branch = branch\n graph.add_edge(cable_dist, node, branch=branch)\n graph.remove_node(lv_load_area_centre)\n if 
mv_grid_district.mv_grid.default_branch_kind == 'line':\n branches = calc_geo_branches_in_polygon(mv_grid_district.\n mv_grid, lv_load_area.geo_area, mode='contains', proj=proj1\n )\n for branch in branches:\n branch['branch'].kind = (mv_grid_district.mv_grid.\n default_branch_kind_settle)\n branch['branch'].type = (mv_grid_district.mv_grid.\n default_branch_type_settle)\n return graph\n",
"role": "user"
},
{
"content": "mv_connect_stations",
"role": "assistant"
}
] |
def x(mv_grid_district, graph, debug=False):
generator_buffer_radius = cfg_ding0.get('mv_connect',
'generator_buffer_radius')
generator_buffer_radius_inc = cfg_ding0.get('mv_connect',
'generator_buffer_radius_inc')
proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj
.Proj(init='epsg:3035'))
proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj
.Proj(init='epsg:4326'))
for generator in sorted(mv_grid_district.mv_grid.generators(), key=lambda
x: repr(x)):
if generator.v_level == 4:
mv_station = mv_grid_district.mv_grid.station()
branch_length = calc_geo_dist_vincenty(generator, mv_station)
branch_kind = mv_grid_district.mv_grid.default_branch_kind
branch_type = mv_grid_district.mv_grid.default_branch_type
branch = BranchDing0(length=branch_length, kind=branch_kind,
type=branch_type, ring=None)
graph.add_edge(generator, mv_station, branch=branch)
if debug:
logger.debug('Generator {0} was connected to {1}'.format(
generator, mv_station))
elif generator.v_level == 5:
generator_shp = transform(proj1, generator.geo_data)
branches = calc_geo_branches_in_buffer(generator,
mv_grid_district.mv_grid, generator_buffer_radius,
generator_buffer_radius_inc, proj1)
conn_objects_min_stack = find_nearest_conn_objects(generator_shp,
branches, proj1, conn_dist_weight=1, debug=debug,
branches_only=False)
generator_connected = False
for dist_min_obj in conn_objects_min_stack:
target_obj_result = connect_node(generator, generator_shp,
mv_grid_district.mv_grid, dist_min_obj, proj2, graph,
conn_dist_ring_mod=0, debug=debug)
if target_obj_result is not None:
if debug:
logger.debug('Generator {0} was connected to {1}'.
format(generator, target_obj_result))
generator_connected = True
break
if not generator_connected and debug:
logger.debug(
'Generator {0} could not be connected, try to increase the parameter `generator_buffer_radius` in config file `config_calc.cfg` to gain more possible connection points.'
.format(generator))
return graph
|
mv_connect_generators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid_district, graph, debug=False):\n generator_buffer_radius = cfg_ding0.get('mv_connect',\n 'generator_buffer_radius')\n generator_buffer_radius_inc = cfg_ding0.get('mv_connect',\n 'generator_buffer_radius_inc')\n proj1 = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'), pyproj\n .Proj(init='epsg:3035'))\n proj2 = partial(pyproj.transform, pyproj.Proj(init='epsg:3035'), pyproj\n .Proj(init='epsg:4326'))\n for generator in sorted(mv_grid_district.mv_grid.generators(), key=lambda\n x: repr(x)):\n if generator.v_level == 4:\n mv_station = mv_grid_district.mv_grid.station()\n branch_length = calc_geo_dist_vincenty(generator, mv_station)\n branch_kind = mv_grid_district.mv_grid.default_branch_kind\n branch_type = mv_grid_district.mv_grid.default_branch_type\n branch = BranchDing0(length=branch_length, kind=branch_kind,\n type=branch_type, ring=None)\n graph.add_edge(generator, mv_station, branch=branch)\n if debug:\n logger.debug('Generator {0} was connected to {1}'.format(\n generator, mv_station))\n elif generator.v_level == 5:\n generator_shp = transform(proj1, generator.geo_data)\n branches = calc_geo_branches_in_buffer(generator,\n mv_grid_district.mv_grid, generator_buffer_radius,\n generator_buffer_radius_inc, proj1)\n conn_objects_min_stack = find_nearest_conn_objects(generator_shp,\n branches, proj1, conn_dist_weight=1, debug=debug,\n branches_only=False)\n generator_connected = False\n for dist_min_obj in conn_objects_min_stack:\n target_obj_result = connect_node(generator, generator_shp,\n mv_grid_district.mv_grid, dist_min_obj, proj2, graph,\n conn_dist_ring_mod=0, debug=debug)\n if target_obj_result is not None:\n if debug:\n logger.debug('Generator {0} was connected to {1}'.\n format(generator, target_obj_result))\n generator_connected = True\n break\n if not generator_connected and debug:\n logger.debug(\n 'Generator {0} could not be connected, try to increase the parameter `generator_buffer_radius` in config file `config_calc.cfg` to gain more possible connection points.'\n .format(generator))\n return graph\n",
"role": "user"
},
{
"content": "mv_connect_generators",
"role": "assistant"
}
] |
def x(graph):
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
specs = {}
nodes_demands = {}
nodes_pos = {}
nodes_agg = {}
satellites_only = True
for node in graph.nodes():
if isinstance(node, LVLoadAreaCentreDing0):
if (not node.lv_load_area.is_satellite and not node.
lv_load_area.is_aggregated):
satellites_only = False
for node in graph.nodes():
if isinstance(node, LVLoadAreaCentreDing0):
if not node.lv_load_area.is_satellite or satellites_only:
nodes_demands[str(node)] = int(node.lv_load_area.peak_load /
cos_phi_load)
nodes_pos[str(node)] = node.geo_data.x, node.geo_data.y
if node.lv_load_area.is_aggregated:
nodes_agg[str(node)] = True
else:
nodes_agg[str(node)] = False
elif isinstance(node, MVStationDing0):
nodes_demands[str(node)] = 0
nodes_pos[str(node)] = node.geo_data.x, node.geo_data.y
specs['DEPOT'] = str(node)
specs['BRANCH_KIND'] = node.grid.default_branch_kind
specs['BRANCH_TYPE'] = node.grid.default_branch_type
specs['V_LEVEL'] = node.grid.v_level
specs['NODE_COORD_SECTION'] = nodes_pos
specs['DEMAND'] = nodes_demands
specs['MATRIX'] = calc_geo_dist_matrix_vincenty(nodes_pos)
specs['IS_AGGREGATED'] = nodes_agg
return specs
|
ding0_graph_to_routing_specs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(graph):\n cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')\n specs = {}\n nodes_demands = {}\n nodes_pos = {}\n nodes_agg = {}\n satellites_only = True\n for node in graph.nodes():\n if isinstance(node, LVLoadAreaCentreDing0):\n if (not node.lv_load_area.is_satellite and not node.\n lv_load_area.is_aggregated):\n satellites_only = False\n for node in graph.nodes():\n if isinstance(node, LVLoadAreaCentreDing0):\n if not node.lv_load_area.is_satellite or satellites_only:\n nodes_demands[str(node)] = int(node.lv_load_area.peak_load /\n cos_phi_load)\n nodes_pos[str(node)] = node.geo_data.x, node.geo_data.y\n if node.lv_load_area.is_aggregated:\n nodes_agg[str(node)] = True\n else:\n nodes_agg[str(node)] = False\n elif isinstance(node, MVStationDing0):\n nodes_demands[str(node)] = 0\n nodes_pos[str(node)] = node.geo_data.x, node.geo_data.y\n specs['DEPOT'] = str(node)\n specs['BRANCH_KIND'] = node.grid.default_branch_kind\n specs['BRANCH_TYPE'] = node.grid.default_branch_type\n specs['V_LEVEL'] = node.grid.v_level\n specs['NODE_COORD_SECTION'] = nodes_pos\n specs['DEMAND'] = nodes_demands\n specs['MATRIX'] = calc_geo_dist_matrix_vincenty(nodes_pos)\n specs['IS_AGGREGATED'] = nodes_agg\n return specs\n",
"role": "user"
},
{
"content": "ding0_graph_to_routing_specs",
"role": "assistant"
}
] |
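
The demand entries built above convert active peak load (kW) into apparent power (kVA) via the load power factor, then truncate to an integer. A one-line check with illustrative numbers (0.97 is an assumed cos phi here, not the configured value from cfg_ding0):

cos_phi_load = 0.97   # assumed power factor; the real value comes from cfg_ding0
peak_load = 100.0     # illustrative peak load in kW
demand = int(peak_load / cos_phi_load)
print(demand)         # -> 103 (kVA, truncated as in the function above)
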
def x(graph, solution):
node_list = {str(n): n for n in graph.nodes()}
try:
depot = solution._nodes[solution._problem._depot.name()]
depot_node = node_list[depot.name()]
for r in solution.routes():
circ_breaker_pos = None
if len(r._nodes) == 1:
if not solution._problem._is_aggregated[r._nodes[0]._name]:
cable_dist = MVCableDistributorDing0(geo_data=node_list
[r._nodes[0]._name].geo_data, grid=depot_node.grid)
depot_node.grid.add_cable_distributor(cable_dist)
r.allocate([Node(name=repr(cable_dist), demand=0)])
node_list[str(cable_dist)] = cable_dist
solution._problem._is_aggregated[str(cable_dist)] = False
circ_breaker_pos = 1
n1 = r._nodes[0:len(r._nodes) - 1]
n2 = r._nodes[1:len(r._nodes)]
edges = list(zip(n1, n2))
edges.append((depot, r._nodes[0]))
edges.append((r._nodes[-1], depot))
mv_branches = [BranchDing0() for _ in edges]
edges_with_branches = list(zip(edges, mv_branches))
if not circ_breaker_pos:
circ_breaker_pos = r.calc_circuit_breaker_position()
node1 = node_list[edges[circ_breaker_pos - 1][0].name()]
node2 = node_list[edges[circ_breaker_pos - 1][1].name()]
if not (node1 == depot_node and solution._problem.
_is_aggregated[edges[circ_breaker_pos - 1][1].name()] or
node2 == depot_node and solution._problem._is_aggregated[
edges[circ_breaker_pos - 1][0].name()]):
branch = mv_branches[circ_breaker_pos - 1]
circ_breaker = CircuitBreakerDing0(grid=depot_node.grid,
branch=branch, geo_data=calc_geo_centre_point(node1, node2)
)
branch.circuit_breaker = circ_breaker
ring = RingDing0(grid=depot_node.grid)
edges_graph = []
for (n1, n2), b in edges_with_branches:
node1 = node_list[n1.name()]
node2 = node_list[n2.name()]
b.ring = ring
if isinstance(node1, LVLoadAreaCentreDing0):
node1.lv_load_area.ring = ring
b.length = calc_geo_dist_vincenty(node1, node2)
b.kind = depot_node.grid.default_branch_kind
b.type = depot_node.grid.default_branch_type
if node1 == depot_node and solution._problem._is_aggregated[n2
.name()]:
b.connects_aggregated = True
b.kind = depot_node.grid.default_branch_kind_aggregated
b.type = depot_node.grid.default_branch_type_aggregated
elif node2 == depot_node and solution._problem._is_aggregated[
n1.name()]:
b.connects_aggregated = True
b.kind = depot_node.grid.default_branch_kind_aggregated
b.type = depot_node.grid.default_branch_type_aggregated
edges_graph.append((node1, node2, dict(branch=b)))
graph.add_edges_from(edges_graph)
except:
logger.exception(
'unexpected error while converting routing solution to DING0 graph (NetworkX).'
)
return graph
|
routing_solution_to_ding0_graph
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(graph, solution):\n node_list = {str(n): n for n in graph.nodes()}\n try:\n depot = solution._nodes[solution._problem._depot.name()]\n depot_node = node_list[depot.name()]\n for r in solution.routes():\n circ_breaker_pos = None\n if len(r._nodes) == 1:\n if not solution._problem._is_aggregated[r._nodes[0]._name]:\n cable_dist = MVCableDistributorDing0(geo_data=node_list\n [r._nodes[0]._name].geo_data, grid=depot_node.grid)\n depot_node.grid.add_cable_distributor(cable_dist)\n r.allocate([Node(name=repr(cable_dist), demand=0)])\n node_list[str(cable_dist)] = cable_dist\n solution._problem._is_aggregated[str(cable_dist)] = False\n circ_breaker_pos = 1\n n1 = r._nodes[0:len(r._nodes) - 1]\n n2 = r._nodes[1:len(r._nodes)]\n edges = list(zip(n1, n2))\n edges.append((depot, r._nodes[0]))\n edges.append((r._nodes[-1], depot))\n mv_branches = [BranchDing0() for _ in edges]\n edges_with_branches = list(zip(edges, mv_branches))\n if not circ_breaker_pos:\n circ_breaker_pos = r.calc_circuit_breaker_position()\n node1 = node_list[edges[circ_breaker_pos - 1][0].name()]\n node2 = node_list[edges[circ_breaker_pos - 1][1].name()]\n if not (node1 == depot_node and solution._problem.\n _is_aggregated[edges[circ_breaker_pos - 1][1].name()] or \n node2 == depot_node and solution._problem._is_aggregated[\n edges[circ_breaker_pos - 1][0].name()]):\n branch = mv_branches[circ_breaker_pos - 1]\n circ_breaker = CircuitBreakerDing0(grid=depot_node.grid,\n branch=branch, geo_data=calc_geo_centre_point(node1, node2)\n )\n branch.circuit_breaker = circ_breaker\n ring = RingDing0(grid=depot_node.grid)\n edges_graph = []\n for (n1, n2), b in edges_with_branches:\n node1 = node_list[n1.name()]\n node2 = node_list[n2.name()]\n b.ring = ring\n if isinstance(node1, LVLoadAreaCentreDing0):\n node1.lv_load_area.ring = ring\n b.length = calc_geo_dist_vincenty(node1, node2)\n b.kind = depot_node.grid.default_branch_kind\n b.type = depot_node.grid.default_branch_type\n if node1 == depot_node and solution._problem._is_aggregated[n2\n .name()]:\n b.connects_aggregated = True\n b.kind = depot_node.grid.default_branch_kind_aggregated\n b.type = depot_node.grid.default_branch_type_aggregated\n elif node2 == depot_node and solution._problem._is_aggregated[\n n1.name()]:\n b.connects_aggregated = True\n b.kind = depot_node.grid.default_branch_kind_aggregated\n b.type = depot_node.grid.default_branch_type_aggregated\n edges_graph.append((node1, node2, dict(branch=b)))\n graph.add_edges_from(edges_graph)\n except:\n logger.exception(\n 'unexpected error while converting routing solution to DING0 graph (NetworkX).'\n )\n return graph\n",
"role": "user"
},
{
"content": "routing_solution_to_ding0_graph",
"role": "assistant"
}
] |
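
The edge list above is built by pairing consecutive route nodes and then appending the two depot connections that close the ring. A minimal standalone illustration of that pairing (node names are placeholders):

route_nodes = ['A', 'B', 'C']   # hypothetical route, depot excluded
depot = 'DEPOT'
n1 = route_nodes[0:len(route_nodes) - 1]
n2 = route_nodes[1:len(route_nodes)]
edges = list(zip(n1, n2))
edges.append((depot, route_nodes[0]))    # ring starts at the depot
edges.append((route_nodes[-1], depot))   # ring ends back at the depot
print(edges)  # [('A', 'B'), ('B', 'C'), ('DEPOT', 'A'), ('C', 'DEPOT')]
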
def x(graph, debug=False, anim=None):
specs = ding0_graph_to_routing_specs(graph)
RoutingGraph = Graph(specs)
timeout = 30000
savings_solver = savings.ClarkeWrightSolver()
local_search_solver = local_search.LocalSearchSolver()
start = time.time()
savings_solution = savings_solver.solve(RoutingGraph, timeout, debug, anim)
if debug:
logger.debug('ClarkeWrightSolver solution:')
util.print_solution(savings_solution)
logger.debug('Elapsed time (seconds): {}'.format(time.time() - start))
local_search_solution = local_search_solver.solve(RoutingGraph,
savings_solution, timeout, debug, anim)
if debug:
logger.debug('Local Search solution:')
util.print_solution(local_search_solution)
logger.debug('Elapsed time (seconds): {}'.format(time.time() - start))
return routing_solution_to_ding0_graph(graph, local_search_solution)
|
solve
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(graph, debug=False, anim=None):\n specs = ding0_graph_to_routing_specs(graph)\n RoutingGraph = Graph(specs)\n timeout = 30000\n savings_solver = savings.ClarkeWrightSolver()\n local_search_solver = local_search.LocalSearchSolver()\n start = time.time()\n savings_solution = savings_solver.solve(RoutingGraph, timeout, debug, anim)\n if debug:\n logger.debug('ClarkeWrightSolver solution:')\n util.print_solution(savings_solution)\n logger.debug('Elapsed time (seconds): {}'.format(time.time() - start))\n local_search_solution = local_search_solver.solve(RoutingGraph,\n savings_solution, timeout, debug, anim)\n if debug:\n logger.debug('Local Search solution:')\n util.print_solution(local_search_solution)\n logger.debug('Elapsed time (seconds): {}'.format(time.time() - start))\n return routing_solution_to_ding0_graph(graph, local_search_solution)\n",
"role": "user"
},
{
"content": "solve",
"role": "assistant"
}
] |
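
The two-stage pattern above (a constructive solver followed by a local-search improvement pass) is common to most routing heuristics. As a rough, self-contained analogy, and not a reproduction of ClarkeWrightSolver or LocalSearchSolver, here is nearest-neighbour construction followed by 2-opt improvement on a toy distance matrix:

import itertools

# Toy symmetric distance matrix (illustrative); node 0 plays the depot role.
D = [[0, 2, 9, 10],
     [2, 0, 6, 4],
     [9, 6, 0, 3],
     [10, 4, 3, 0]]

def tour_length(tour):
    return sum(D[a][b] for a, b in zip(tour, tour[1:] + tour[:1]))

# Construction phase: greedy nearest neighbour from the depot.
tour, rest = [0], {1, 2, 3}
while rest:
    nxt = min(rest, key=lambda j: D[tour[-1]][j])
    tour.append(nxt)
    rest.remove(nxt)

# Improvement phase: reverse segments (2-opt) until no move shortens the tour.
improved = True
while improved:
    improved = False
    for i, j in itertools.combinations(range(1, len(tour)), 2):
        cand = tour[:i] + tour[i:j + 1][::-1] + tour[j + 1:]
        if tour_length(cand) < tour_length(tour):
            tour, improved = cand, True

print(tour, tour_length(tour))  # e.g. [0, 1, 3, 2] with length 18
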
def x(nom_power, nom_voltage, avail_cables):
I_max_load = nom_power / (3 ** 0.5 * nom_voltage)
suitable_cables = avail_cables[avail_cables['I_max_th'] > I_max_load]
if not suitable_cables.empty:
cable_type = suitable_cables.loc[suitable_cables['I_max_th'].idxmin
(), :]
else:
cable_type = avail_cables.loc[avail_cables['I_max_th'].idxmax(), :]
return cable_type
|
cable_type
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(nom_power, nom_voltage, avail_cables):\n I_max_load = nom_power / (3 ** 0.5 * nom_voltage)\n suitable_cables = avail_cables[avail_cables['I_max_th'] > I_max_load]\n if not suitable_cables.empty:\n cable_type = suitable_cables.loc[suitable_cables['I_max_th'].idxmin\n (), :]\n else:\n cable_type = avail_cables.loc[avail_cables['I_max_th'].idxmax(), :]\n return cable_type\n",
"role": "user"
},
{
"content": "cable_type",
"role": "assistant"
}
] |
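
A usage sketch of the selection rule above: compute the load current I = S / (sqrt(3) * U), then take the smallest cable whose thermal limit still exceeds it, falling back to the largest cable otherwise. The cable table here is made up for illustration; real data comes from static_data['LV_cables']:

import pandas as pd

# Hypothetical cable table; 'I_max_th' is the thermal current limit in A.
avail_cables = pd.DataFrame({'name': ['NAYY 4x50', 'NAYY 4x150', 'NAYY 4x240'],
                             'I_max_th': [144, 275, 364]})

nom_power = 150    # kVA (apparent power)
nom_voltage = 0.4  # kV, so the current comes out in A

I_max_load = nom_power / (3 ** 0.5 * nom_voltage)  # ~216.5 A

# Smallest sufficient cable -> the 275 A one.
suitable = avail_cables[avail_cables['I_max_th'] > I_max_load]
print(suitable.loc[suitable['I_max_th'].idxmin(), 'name'])  # -> NAYY 4x150
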
def x(lv_grid_district, graph, debug=False):
cable_lf = cfg_ding0.get('assumptions', 'load_factor_lv_cable_fc_normal')
cos_phi_gen = cfg_ding0.get('assumptions', 'cos_phi_gen')
lv_loads_res = sorted(lv_grid_district.lv_grid.loads_sector(sector=
'res'), key=lambda _: repr(_))
if len(lv_loads_res) > 0:
lv_loads_res_rnd = set(random.sample(lv_loads_res, len(lv_loads_res)))
else:
lv_loads_res_rnd = None
lv_loads_ria = sorted(lv_grid_district.lv_grid.loads_sector(sector=
'ria'), key=lambda _: repr(_))
if len(lv_loads_ria) > 0:
lv_loads_ria_rnd = set(random.sample(lv_loads_ria, len(lv_loads_ria)))
else:
lv_loads_ria_rnd = None
for generator in sorted(lv_grid_district.lv_grid.generators(), key=lambda
x: repr(x)):
if generator.v_level == 6:
lv_station = lv_grid_district.lv_grid.station()
branch_length = calc_geo_dist_vincenty(generator, lv_station)
branch_type = cable_type(generator.capacity / (cable_lf *
cos_phi_gen), 0.4, lv_grid_district.lv_grid.network.
static_data['LV_cables'])
branch = BranchDing0(length=branch_length, kind='cable', type=
branch_type)
graph.add_edge(generator, lv_station, branch=branch)
elif generator.v_level == 7:
if generator.capacity <= 30 and lv_loads_res_rnd is not None:
if len(lv_loads_res_rnd) > 0:
lv_load = lv_loads_res_rnd.pop()
else:
lv_loads_res_rnd = set(random.sample(lv_loads_res, len(
lv_loads_res)))
lv_load = lv_loads_res_rnd.pop()
lv_conn_target = list(graph.neighbors(lv_load))[0]
elif generator.capacity > 30 and lv_loads_ria_rnd is not None:
if len(lv_loads_ria_rnd) > 0:
lv_load = lv_loads_ria_rnd.pop()
else:
lv_loads_ria_rnd = set(random.sample(lv_loads_ria, len(
lv_loads_ria)))
lv_load = lv_loads_ria_rnd.pop()
lv_conn_target = list(graph.neighbors(lv_load))[0]
else:
lv_conn_target = lv_grid_district.lv_grid.station()
logger.warning(
                'No valid conn. target found for {}. Connected to {}.'.
format(repr(generator), repr(lv_conn_target)))
branch_type = cable_type(generator.capacity / (cable_lf *
cos_phi_gen), 0.4, lv_grid_district.lv_grid.network.
static_data['LV_cables'])
branch = BranchDing0(length=1, kind='cable', type=branch_type)
graph.add_edge(generator, lv_conn_target, branch=branch)
return graph
|
lv_connect_generators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(lv_grid_district, graph, debug=False):\n cable_lf = cfg_ding0.get('assumptions', 'load_factor_lv_cable_fc_normal')\n cos_phi_gen = cfg_ding0.get('assumptions', 'cos_phi_gen')\n lv_loads_res = sorted(lv_grid_district.lv_grid.loads_sector(sector=\n 'res'), key=lambda _: repr(_))\n if len(lv_loads_res) > 0:\n lv_loads_res_rnd = set(random.sample(lv_loads_res, len(lv_loads_res)))\n else:\n lv_loads_res_rnd = None\n lv_loads_ria = sorted(lv_grid_district.lv_grid.loads_sector(sector=\n 'ria'), key=lambda _: repr(_))\n if len(lv_loads_ria) > 0:\n lv_loads_ria_rnd = set(random.sample(lv_loads_ria, len(lv_loads_ria)))\n else:\n lv_loads_ria_rnd = None\n for generator in sorted(lv_grid_district.lv_grid.generators(), key=lambda\n x: repr(x)):\n if generator.v_level == 6:\n lv_station = lv_grid_district.lv_grid.station()\n branch_length = calc_geo_dist_vincenty(generator, lv_station)\n branch_type = cable_type(generator.capacity / (cable_lf *\n cos_phi_gen), 0.4, lv_grid_district.lv_grid.network.\n static_data['LV_cables'])\n branch = BranchDing0(length=branch_length, kind='cable', type=\n branch_type)\n graph.add_edge(generator, lv_station, branch=branch)\n elif generator.v_level == 7:\n if generator.capacity <= 30 and lv_loads_res_rnd is not None:\n if len(lv_loads_res_rnd) > 0:\n lv_load = lv_loads_res_rnd.pop()\n else:\n lv_loads_res_rnd = set(random.sample(lv_loads_res, len(\n lv_loads_res)))\n lv_load = lv_loads_res_rnd.pop()\n lv_conn_target = list(graph.neighbors(lv_load))[0]\n elif generator.capacity > 30 and lv_loads_ria_rnd is not None:\n if len(lv_loads_ria_rnd) > 0:\n lv_load = lv_loads_ria_rnd.pop()\n else:\n lv_loads_ria_rnd = set(random.sample(lv_loads_ria, len(\n lv_loads_ria)))\n lv_load = lv_loads_ria_rnd.pop()\n lv_conn_target = list(graph.neighbors(lv_load))[0]\n else:\n lv_conn_target = lv_grid_district.lv_grid.station()\n logger.warning(\n 'No valid conn. target found for {}.Connected to {}.'.\n format(repr(generator), repr(lv_conn_target)))\n branch_type = cable_type(generator.capacity / (cable_lf *\n cos_phi_gen), 0.4, lv_grid_district.lv_grid.network.\n static_data['LV_cables'])\n branch = BranchDing0(length=1, kind='cable', type=branch_type)\n graph.add_edge(generator, lv_conn_target, branch=branch)\n return graph\n",
"role": "user"
},
{
"content": "lv_connect_generators",
"role": "assistant"
}
] |
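
The residential/retail load pools above are consumed with pop() and refilled by re-shuffling once empty, so connection targets cycle through all loads in random order before any load is reused. The refill pattern in isolation (load names are placeholders):

import random

loads = ['load_1', 'load_2', 'load_3']           # hypothetical LV loads
pool = set(random.sample(loads, len(loads)))     # shuffled working copy

targets = []
for _ in range(5):                               # more generators than loads
    if len(pool) == 0:                           # refill once exhausted, as above
        pool = set(random.sample(loads, len(loads)))
    targets.append(pool.pop())
print(targets)  # each load appears once before any load appears twice
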
def x(mv_grid, mode='load', debug=False):
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')
for ring, circ_breaker in zip(mv_grid.rings_nodes(include_root_node=
False), mv_grid.circuit_breakers()):
nodes_peak_load = []
nodes_peak_generation = []
for node in ring:
if isinstance(node, LVStationDing0):
nodes_peak_load.append(node.peak_load / cos_phi_load)
nodes_peak_generation.append(node.peak_generation /
cos_phi_feedin)
elif isinstance(node, CableDistributorDing0):
nodes_subtree = mv_grid.graph_nodes_from_subtree(node)
nodes_subtree_peak_load = 0
nodes_subtree_peak_generation = 0
for node_subtree in nodes_subtree:
if isinstance(node_subtree, LVStationDing0):
nodes_subtree_peak_load += (node_subtree.peak_load /
cos_phi_load)
nodes_subtree_peak_generation += (node_subtree.
peak_generation / cos_phi_feedin)
if isinstance(node_subtree, GeneratorDing0):
nodes_subtree_peak_generation += (node_subtree.
capacity / cos_phi_feedin)
nodes_peak_load.append(nodes_subtree_peak_load)
nodes_peak_generation.append(nodes_subtree_peak_generation)
else:
                raise ValueError('Ring node has an invalid type.')
if mode == 'load':
node_peak_data = nodes_peak_load
elif mode == 'loadgen':
if sum(nodes_peak_load) > sum(nodes_peak_generation):
node_peak_data = nodes_peak_load
else:
node_peak_data = nodes_peak_generation
else:
raise ValueError("parameter 'mode' is invalid!")
diff_min = 10000000.0
for ctr in range(len(node_peak_data)):
route_data_part1 = sum(node_peak_data[0:ctr])
route_data_part2 = sum(node_peak_data[ctr:len(node_peak_data)])
diff = abs(route_data_part1 - route_data_part2)
if diff <= diff_min:
diff_min = diff
position = ctr
else:
break
node1 = ring[position - 1]
node2 = ring[position]
circ_breaker.branch = mv_grid._graph.adj[node1][node2]['branch']
circ_breaker.branch_nodes = node1, node2
circ_breaker.branch.circuit_breaker = circ_breaker
circ_breaker.geo_data = calc_geo_centre_point(node1, node2)
if debug:
logger.debug('Ring: {}'.format(ring))
logger.debug(
'Circuit breaker {0} was relocated to edge {1}-{2} (position on route={3})'
.format(circ_breaker, node1, node2, position))
logger.debug('Peak load sum: {}'.format(sum(nodes_peak_load)))
logger.debug('Peak loads: {}'.format(nodes_peak_load))
|
set_circuit_breakers
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mv_grid, mode='load', debug=False):\n cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')\n cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')\n for ring, circ_breaker in zip(mv_grid.rings_nodes(include_root_node=\n False), mv_grid.circuit_breakers()):\n nodes_peak_load = []\n nodes_peak_generation = []\n for node in ring:\n if isinstance(node, LVStationDing0):\n nodes_peak_load.append(node.peak_load / cos_phi_load)\n nodes_peak_generation.append(node.peak_generation /\n cos_phi_feedin)\n elif isinstance(node, CableDistributorDing0):\n nodes_subtree = mv_grid.graph_nodes_from_subtree(node)\n nodes_subtree_peak_load = 0\n nodes_subtree_peak_generation = 0\n for node_subtree in nodes_subtree:\n if isinstance(node_subtree, LVStationDing0):\n nodes_subtree_peak_load += (node_subtree.peak_load /\n cos_phi_load)\n nodes_subtree_peak_generation += (node_subtree.\n peak_generation / cos_phi_feedin)\n if isinstance(node_subtree, GeneratorDing0):\n nodes_subtree_peak_generation += (node_subtree.\n capacity / cos_phi_feedin)\n nodes_peak_load.append(nodes_subtree_peak_load)\n nodes_peak_generation.append(nodes_subtree_peak_generation)\n else:\n raise ValueError('Ring node has got invalid type.')\n if mode == 'load':\n node_peak_data = nodes_peak_load\n elif mode == 'loadgen':\n if sum(nodes_peak_load) > sum(nodes_peak_generation):\n node_peak_data = nodes_peak_load\n else:\n node_peak_data = nodes_peak_generation\n else:\n raise ValueError(\"parameter 'mode' is invalid!\")\n diff_min = 10000000.0\n for ctr in range(len(node_peak_data)):\n route_data_part1 = sum(node_peak_data[0:ctr])\n route_data_part2 = sum(node_peak_data[ctr:len(node_peak_data)])\n diff = abs(route_data_part1 - route_data_part2)\n if diff <= diff_min:\n diff_min = diff\n position = ctr\n else:\n break\n node1 = ring[position - 1]\n node2 = ring[position]\n circ_breaker.branch = mv_grid._graph.adj[node1][node2]['branch']\n circ_breaker.branch_nodes = node1, node2\n circ_breaker.branch.circuit_breaker = circ_breaker\n circ_breaker.geo_data = calc_geo_centre_point(node1, node2)\n if debug:\n logger.debug('Ring: {}'.format(ring))\n logger.debug(\n 'Circuit breaker {0} was relocated to edge {1}-{2} (position on route={3})'\n .format(circ_breaker, node1, node2, position))\n logger.debug('Peak load sum: {}'.format(sum(nodes_peak_load)))\n logger.debug('Peak loads: {}'.format(nodes_peak_load))\n",
"role": "user"
},
{
"content": "set_circuit_breakers",
"role": "assistant"
}
] |
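
The placement loop above walks the ring and stops at the index where the two half-ring load sums are closest; with non-negative loads the difference can only grow past that point, hence the early break. The search in isolation, with illustrative peak loads:

node_peak_data = [120, 80, 60, 100, 90]  # illustrative peak loads along one ring

diff_min = float('inf')
position = 0
for ctr in range(len(node_peak_data)):
    route_data_part1 = sum(node_peak_data[0:ctr])   # load on one ring half
    route_data_part2 = sum(node_peak_data[ctr:])    # load on the other half
    diff = abs(route_data_part1 - route_data_part2)
    if diff <= diff_min:
        diff_min = diff
        position = ctr
    else:
        break  # the sums only diverge further along the ring

print(position, diff_min)  # -> 2 50: breaker sits between ring[1] and ring[2]
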
def x(file_path):
logging.info('Handling duplications for "%s"', file_path)
f = open_strings_file(file_path, 'r+')
header_comment_key_value_tuples = (
extract_header_comment_key_value_tuples_from_file(f))
file_elements = []
section_file_elements = []
keys_to_objects = {}
duplicates_found = []
for header_comment, comments, key, value in header_comment_key_value_tuples:
if len(header_comment) > 0:
for elem in sorted(section_file_elements, key=lambda x: x.
comments[0]):
file_elements.append(elem)
section_file_elements = []
file_elements.append(Comment(header_comment))
if key in keys_to_objects:
keys_to_objects[key].add_comments(comments)
duplicates_found.append(key)
else:
loc_obj = LocalizationEntry(comments, key, value)
keys_to_objects[key] = loc_obj
section_file_elements.append(loc_obj)
for elem in sorted(section_file_elements, key=lambda x: x.comments[0]):
file_elements.append(elem)
f.seek(0)
for element in file_elements:
f.write(unicode(element))
f.write(u'\n')
f.truncate()
f.close()
logging.info('Omitted %d duplicates (%s)' % (len(duplicates_found), ','
.join(duplicates_found)))
logging.info('Finished handling duplications for "%s"', file_path)
|
handle_duplications
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path):\n logging.info('Handling duplications for \"%s\"', file_path)\n f = open_strings_file(file_path, 'r+')\n header_comment_key_value_tuples = (\n extract_header_comment_key_value_tuples_from_file(f))\n file_elements = []\n section_file_elements = []\n keys_to_objects = {}\n duplicates_found = []\n for header_comment, comments, key, value in header_comment_key_value_tuples:\n if len(header_comment) > 0:\n for elem in sorted(section_file_elements, key=lambda x: x.\n comments[0]):\n file_elements.append(elem)\n section_file_elements = []\n file_elements.append(Comment(header_comment))\n if key in keys_to_objects:\n keys_to_objects[key].add_comments(comments)\n duplicates_found.append(key)\n else:\n loc_obj = LocalizationEntry(comments, key, value)\n keys_to_objects[key] = loc_obj\n section_file_elements.append(loc_obj)\n for elem in sorted(section_file_elements, key=lambda x: x.comments[0]):\n file_elements.append(elem)\n f.seek(0)\n for element in file_elements:\n f.write(unicode(element))\n f.write(u'\\n')\n f.truncate()\n f.close()\n logging.info('Omitted %d duplicates (%s)' % (len(duplicates_found), ','\n .join(duplicates_found)))\n logging.info('Finished handling duplications for \"%s\"', file_path)\n",
"role": "user"
},
{
"content": "handle_duplications",
"role": "assistant"
}
] |
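
At its core the duplicate handling above is an order-preserving de-duplication keyed on the localization key, with later duplicates merged into the first occurrence. The bare pattern, stripped of the file and comment handling:

entries = [('greeting', 'Hello'), ('farewell', 'Bye'), ('greeting', 'Hi')]

keys_to_objects = {}
duplicates_found = []
for key, value in entries:
    if key in keys_to_objects:
        duplicates_found.append(key)   # merged into the first occurrence
    else:
        keys_to_objects[key] = value

print(keys_to_objects)    # {'greeting': 'Hello', 'farewell': 'Bye'}
print(duplicates_found)   # ['greeting']
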
def x(self):
for resource in self._client._resources:
potion_resource = self._client._resources[resource]
try:
oc_cls = _model_lookup[resource]
oc_cls._api = self
oc_cls._resource = potion_resource
setattr(self, oc_cls.__name__, oc_cls)
except KeyError:
pass
|
copy_resources
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for resource in self._client._resources:\n potion_resource = self._client._resources[resource]\n try:\n oc_cls = _model_lookup[resource]\n oc_cls._api = self\n oc_cls._resource = potion_resource\n setattr(self, oc_cls.__name__, oc_cls)\n except KeyError:\n pass\n",
"role": "user"
},
{
"content": "copy_resources",
"role": "assistant"
}
] |
def x(server, api_key=None):
username = click.prompt('Please enter your One Codex (email)')
if api_key is not None:
return username, api_key
password = click.prompt(
'Please enter your password (typing will be hidden)', hide_input=True)
api_key = fetch_api_key_from_uname(username, password, server)
return username, api_key
|
login_uname_pwd
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(server, api_key=None):\n username = click.prompt('Please enter your One Codex (email)')\n if api_key is not None:\n return username, api_key\n password = click.prompt(\n 'Please enter your password (typing will be hidden)', hide_input=True)\n api_key = fetch_api_key_from_uname(username, password, server)\n return username, api_key\n",
"role": "user"
},
{
"content": "login_uname_pwd",
"role": "assistant"
}
] |
def x(server, creds_file=None, api_key=None, silent=False):
if server[-1] != '/':
server = server + '/'
if creds_file is None:
creds_file = os.path.expanduser('~/.onecodex')
if not os.path.exists(creds_file):
if silent:
return None
creds = {}
elif not os.access(creds_file, os.R_OK):
click.echo('Please check the permissions on {}'.format(
collapse_user(creds_file)), err=True)
sys.exit(1)
else:
with open(creds_file, 'r') as fp:
try:
creds = json.load(fp)
except ValueError:
click.echo(
'Your ~/.onecodex credentials file appears to be corrupted. Please delete it and re-authorize.'
, err=True)
sys.exit(1)
last_update = creds.get('updated_at') or creds.get('saved_at')
last_update = last_update if last_update else datetime.datetime.now(
).strftime(DATE_FORMAT)
diff = datetime.datetime.now() - datetime.datetime.strptime(last_update
, DATE_FORMAT)
if diff.days >= 1:
upgrade_required, msg = check_version(__version__, server)
creds['updated_at'] = datetime.datetime.now().strftime(DATE_FORMAT)
try:
json.dump(creds, open(creds_file, 'w'))
except Exception as e:
if e.errno == errno.EACCES:
click.echo('Please check the permissions on {}'.format(
collapse_user(creds_file)), err=True)
sys.exit(1)
else:
raise
if upgrade_required:
click.echo('\nWARNING: {}\n'.format(msg), err=True)
if silent:
return creds.get('api_key', None)
click.echo('Credentials file already exists ({}). Logout first.'.
format(collapse_user(creds_file)), err=True)
return creds.get('email', None)
email, api_key = login_uname_pwd(server, api_key=api_key)
if api_key is None:
click.echo(
'We could not verify your credentials. Either you mistyped your email or password, or your account does not currently have API access. Please contact help@onecodex.com if you continue to experience problems.'
)
sys.exit(1)
creds.update({'api_key': api_key, 'saved_at': datetime.datetime.now().
strftime(DATE_FORMAT), 'updated_at': None, 'email': email})
try:
json.dump(creds, open(creds_file, 'w'))
except Exception as e:
if e.errno == errno.EACCES:
click.echo('Please check the permissions on {}'.format(
creds_file), err=True)
sys.exit(1)
else:
raise
click.echo('Your ~/.onecodex credentials file was successfully created.',
err=True)
return email
|
login
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(server, creds_file=None, api_key=None, silent=False):\n if server[-1] != '/':\n server = server + '/'\n if creds_file is None:\n creds_file = os.path.expanduser('~/.onecodex')\n if not os.path.exists(creds_file):\n if silent:\n return None\n creds = {}\n elif not os.access(creds_file, os.R_OK):\n click.echo('Please check the permissions on {}'.format(\n collapse_user(creds_file)), err=True)\n sys.exit(1)\n else:\n with open(creds_file, 'r') as fp:\n try:\n creds = json.load(fp)\n except ValueError:\n click.echo(\n 'Your ~/.onecodex credentials file appears to be corrupted. Please delete it and re-authorize.'\n , err=True)\n sys.exit(1)\n last_update = creds.get('updated_at') or creds.get('saved_at')\n last_update = last_update if last_update else datetime.datetime.now(\n ).strftime(DATE_FORMAT)\n diff = datetime.datetime.now() - datetime.datetime.strptime(last_update\n , DATE_FORMAT)\n if diff.days >= 1:\n upgrade_required, msg = check_version(__version__, server)\n creds['updated_at'] = datetime.datetime.now().strftime(DATE_FORMAT)\n try:\n json.dump(creds, open(creds_file, 'w'))\n except Exception as e:\n if e.errno == errno.EACCES:\n click.echo('Please check the permissions on {}'.format(\n collapse_user(creds_file)), err=True)\n sys.exit(1)\n else:\n raise\n if upgrade_required:\n click.echo('\\nWARNING: {}\\n'.format(msg), err=True)\n if silent:\n return creds.get('api_key', None)\n click.echo('Credentials file already exists ({}). Logout first.'.\n format(collapse_user(creds_file)), err=True)\n return creds.get('email', None)\n email, api_key = login_uname_pwd(server, api_key=api_key)\n if api_key is None:\n click.echo(\n 'We could not verify your credentials. Either you mistyped your email or password, or your account does not currently have API access. Please contact help@onecodex.com if you continue to experience problems.'\n )\n sys.exit(1)\n creds.update({'api_key': api_key, 'saved_at': datetime.datetime.now().\n strftime(DATE_FORMAT), 'updated_at': None, 'email': email})\n try:\n json.dump(creds, open(creds_file, 'w'))\n except Exception as e:\n if e.errno == errno.EACCES:\n click.echo('Please check the permissions on {}'.format(\n creds_file), err=True)\n sys.exit(1)\n else:\n raise\n click.echo('Your ~/.onecodex credentials file was successfully created.',\n err=True)\n return email\n",
"role": "user"
},
{
"content": "login",
"role": "assistant"
}
] |
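
The staleness check above prefers updated_at over saved_at and triggers a version re-check once the stored timestamp is at least a day old. A self-contained sketch; the DATE_FORMAT value here is an assumption for the example, the real constant lives elsewhere in the module:

import datetime

DATE_FORMAT = '%Y-%m-%d %H:%M:%S'   # assumed format for this sketch

creds = {'saved_at': '2024-01-01 00:00:00', 'updated_at': None}
last_update = creds.get('updated_at') or creds.get('saved_at')
diff = datetime.datetime.now() - datetime.datetime.strptime(last_update, DATE_FORMAT)
print(diff.days >= 1)  # True once the stored key is a day old -> version re-check runs
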
def x(creds_file=None):
if creds_file is None:
creds_file = os.path.expanduser('~/.onecodex')
try:
os.remove(creds_file)
except Exception as e:
if e.errno == errno.ENOENT:
return False
elif e.errno == errno.EACCES:
click.echo('Please check the permissions on {}'.format(
collapse_user(creds_file)), err=True)
sys.exit(1)
else:
raise
return True
|
remove_creds
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(creds_file=None):\n if creds_file is None:\n creds_file = os.path.expanduser('~/.onecodex')\n try:\n os.remove(creds_file)\n except Exception as e:\n if e.errno == errno.ENOENT:\n return False\n elif e.errno == errno.EACCES:\n click.echo('Please check the permissions on {}'.format(\n collapse_user(creds_file)), err=True)\n sys.exit(1)\n else:\n raise\n return True\n",
"role": "user"
},
{
"content": "remove_creds",
"role": "assistant"
}
] |
def x(creds_file=None):
if _remove_creds(creds_file=creds_file):
click.echo('Successfully removed One Codex credentials.', err=True)
sys.exit(0)
else:
click.echo('No One Codex API keys found.', err=True)
sys.exit(1)
|
logout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(creds_file=None):\n if _remove_creds(creds_file=creds_file):\n click.echo('Successfully removed One Codex credentials.', err=True)\n sys.exit(0)\n else:\n click.echo('No One Codex API keys found.', err=True)\n sys.exit(1)\n",
"role": "user"
},
{
"content": "logout",
"role": "assistant"
}
] |
def x(fn):
@wraps(fn)
def x(ctx, *args, **kwargs):
base_url = os.environ.get('ONE_CODEX_API_BASE',
'https://app.onecodex.com')
api_kwargs = {'telemetry': ctx.obj['TELEMETRY']}
api_key_prior_login = ctx.obj.get('API_KEY')
bearer_token_env = os.environ.get('ONE_CODEX_BEARER_TOKEN')
api_key_env = os.environ.get('ONE_CODEX_API_KEY')
api_key_creds_file = _login(base_url, silent=True)
if api_key_prior_login is not None:
api_kwargs['api_key'] = api_key_prior_login
elif bearer_token_env is not None:
api_kwargs['bearer_token'] = bearer_token_env
elif api_key_env is not None:
api_kwargs['api_key'] = api_key_env
elif api_key_creds_file is not None:
api_kwargs['api_key'] = api_key_creds_file
else:
click.echo(
'The command you specified requires authentication. Please login first.\n'
, err=True)
ctx.exit()
ctx.obj['API'] = Api(**api_kwargs)
return fn(ctx, *args, **kwargs)
return login_wrapper
|
login_required
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fn):\n\n @wraps(fn)\n def x(ctx, *args, **kwargs):\n base_url = os.environ.get('ONE_CODEX_API_BASE',\n 'https://app.onecodex.com')\n api_kwargs = {'telemetry': ctx.obj['TELEMETRY']}\n api_key_prior_login = ctx.obj.get('API_KEY')\n bearer_token_env = os.environ.get('ONE_CODEX_BEARER_TOKEN')\n api_key_env = os.environ.get('ONE_CODEX_API_KEY')\n api_key_creds_file = _login(base_url, silent=True)\n if api_key_prior_login is not None:\n api_kwargs['api_key'] = api_key_prior_login\n elif bearer_token_env is not None:\n api_kwargs['bearer_token'] = bearer_token_env\n elif api_key_env is not None:\n api_kwargs['api_key'] = api_key_env\n elif api_key_creds_file is not None:\n api_kwargs['api_key'] = api_key_creds_file\n else:\n click.echo(\n 'The command you specified requires authentication. Please login first.\\n'\n , err=True)\n ctx.exit()\n ctx.obj['API'] = Api(**api_kwargs)\n return fn(ctx, *args, **kwargs)\n return login_wrapper\n",
"role": "user"
},
{
"content": "login_required",
"role": "assistant"
}
] |
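
The decorator above resolves credentials in a fixed precedence order: a key from the CLI context, then a bearer token from the environment, then an API key from the environment, then the credentials file. The same ladder as a standalone helper (the function name is hypothetical):

import os

def resolve_api_credentials(ctx_api_key=None, creds_file_key=None):
    """Mirror the precedence order used by the login decorator (sketch)."""
    if ctx_api_key is not None:                        # 1) explicit CLI key wins
        return {'api_key': ctx_api_key}
    token = os.environ.get('ONE_CODEX_BEARER_TOKEN')   # 2) bearer token env var
    if token is not None:
        return {'bearer_token': token}
    key = os.environ.get('ONE_CODEX_API_KEY')          # 3) API key env var
    if key is not None:
        return {'api_key': key}
    if creds_file_key is not None:                     # 4) ~/.onecodex file
        return {'api_key': creds_file_key}
    return None                                        # caller must prompt a login

print(resolve_api_credentials('abc123'))  # -> {'api_key': 'abc123'}
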
def x(self, json=True):
if json is True:
return self._results()
else:
return self._table()
|
results
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, json=True):\n if json is True:\n return self._results()\n else:\n return self._table()\n",
"role": "user"
},
{
"content": "results",
"role": "assistant"
}
] |
def x(self, ids=None):
if ids is None:
return self.table()
else:
res = self.table()
return res[res['tax_id'].isin(ids)]
|
abundances
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, ids=None):\n if ids is None:\n return self.table()\n else:\n res = self.table()\n return res[res['tax_id'].isin(ids)]\n",
"role": "user"
},
{
"content": "abundances",
"role": "assistant"
}
] |
def x(self):
super(Samples, self).save()
if self.metadata is not None:
self.metadata.save()
|
save
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n super(Samples, self).save()\n if self.metadata is not None:\n self.metadata.save()\n",
"role": "user"
},
{
"content": "save",
"role": "assistant"
}
] |
def x(cls, files, metadata=None, tags=None, project=None, coerce_ascii=
False, progressbar=None):
res = cls._resource
if not isinstance(files, string_types) and not isinstance(files, tuple):
raise OneCodexException(
            'Please pass a string or a tuple of forward and reverse filepaths.')
if not isinstance(project, Projects) and project is not None:
project_search = Projects.get(project)
if not project_search:
project_search = Projects.where(name=project)
if not project_search:
try:
project_search = Projects.where(project_name=project)
except HTTPError:
project_search = None
if not project_search:
raise OneCodexException('{} is not a valid project UUID'.format
(project))
if isinstance(project_search, list):
project = project_search[0]
sample_id = upload_sequence(files, res._client.session, res, metadata=
metadata, tags=tags, project=project, coerce_ascii=coerce_ascii,
progressbar=progressbar)
return cls.get(sample_id)
|
upload
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, files, metadata=None, tags=None, project=None, coerce_ascii=\n False, progressbar=None):\n res = cls._resource\n if not isinstance(files, string_types) and not isinstance(files, tuple):\n raise OneCodexException(\n 'Please pass a string or tuple or forward and reverse filepaths.')\n if not isinstance(project, Projects) and project is not None:\n project_search = Projects.get(project)\n if not project_search:\n project_search = Projects.where(name=project)\n if not project_search:\n try:\n project_search = Projects.where(project_name=project)\n except HTTPError:\n project_search = None\n if not project_search:\n raise OneCodexException('{} is not a valid project UUID'.format\n (project))\n if isinstance(project_search, list):\n project = project_search[0]\n sample_id = upload_sequence(files, res._client.session, res, metadata=\n metadata, tags=tags, project=project, coerce_ascii=coerce_ascii,\n progressbar=progressbar)\n return cls.get(sample_id)\n",
"role": "user"
},
{
"content": "upload",
"role": "assistant"
}
] |
def x(self, comments):
for comment in comments:
if comment not in self.comments and len(comment) > 0:
self.comments.append(comment)
if len(self.comments[0]) == 0:
self.comments.pop(0)
|
add_comments
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, comments):\n for comment in comments:\n if comment not in self.comments and len(comment) > 0:\n self.comments.append(comment)\n if len(self.comments[0]) == 0:\n self.comments.pop(0)\n",
"role": "user"
},
{
"content": "add_comments",
"role": "assistant"
}
] |
def x(localization_bundle_path):
logging.info('Merging translations')
for lang_dir in os.listdir(localization_bundle_path):
if lang_dir == DEFAULT_LANGUAGE_DIRECTORY_NAME:
continue
for translated_path in glob.glob(os.path.join(
localization_bundle_path, lang_dir, '*' + TRANSLATED_SUFFIX)):
strings_path = translated_path[:-1 * len(TRANSLATED_SUFFIX)]
localizable_path = os.path.join(localization_bundle_path,
DEFAULT_LANGUAGE_DIRECTORY_NAME, os.path.basename(strings_path)
)
localization_merge_back(localizable_path, strings_path,
translated_path, strings_path)
|
merge_translations
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(localization_bundle_path):\n logging.info('Merging translations')\n for lang_dir in os.listdir(localization_bundle_path):\n if lang_dir == DEFAULT_LANGUAGE_DIRECTORY_NAME:\n continue\n for translated_path in glob.glob(os.path.join(\n localization_bundle_path, lang_dir, '*' + TRANSLATED_SUFFIX)):\n strings_path = translated_path[:-1 * len(TRANSLATED_SUFFIX)]\n localizable_path = os.path.join(localization_bundle_path,\n DEFAULT_LANGUAGE_DIRECTORY_NAME, os.path.basename(strings_path)\n )\n localization_merge_back(localizable_path, strings_path,\n translated_path, strings_path)\n",
"role": "user"
},
{
"content": "merge_translations",
"role": "assistant"
}
] |
def x(self, rank='auto', normalize='auto', top_n='auto', threshold='auto',
title=None, xlabel=None, ylabel=None, tooltip=None, return_chart=False,
haxis=None, legend='auto', label=None):
if rank is None:
raise OneCodexException(
"Please specify a rank or 'auto' to choose automatically")
if not (threshold or top_n):
raise OneCodexException(
'Please specify at least one of: threshold, top_n')
if top_n == 'auto' and threshold == 'auto':
top_n = 10
threshold = None
elif top_n == 'auto' and threshold != 'auto':
top_n = None
elif top_n != 'auto' and threshold == 'auto':
threshold = None
if legend == 'auto':
legend = self._field
df = self.to_df(rank=rank, normalize=normalize, top_n=top_n, threshold=
threshold, table_format='long')
if tooltip:
if not isinstance(tooltip, list):
tooltip = [tooltip]
else:
tooltip = []
if haxis:
tooltip.append(haxis)
tooltip.insert(0, 'Label')
magic_metadata, magic_fields = self._metadata_fetch(tooltip, label=label)
if haxis:
sort_order = magic_metadata.sort_values(magic_fields[haxis]
).index.tolist()
for sort_num, sort_class_id in enumerate(sort_order):
magic_metadata.loc[sort_class_id, 'sort_order'] = sort_num
df['sort_order'] = magic_metadata['sort_order'][df['classification_id']
].tolist()
sort_order = alt.EncodingSortField(field='sort_order', op='mean')
else:
sort_order = None
for f in tooltip:
df[magic_fields[f]] = magic_metadata[magic_fields[f]][df[
'classification_id']].tolist()
df['tax_name'] = [('{} ({})'.format(self.taxonomy['name'][t], t) if t in
self.taxonomy['name'] else t) for t in df['tax_id']]
ylabel = self._field if ylabel is None else ylabel
xlabel = '' if xlabel is None else xlabel
tooltip_for_altair = [magic_fields[f] for f in tooltip]
tooltip_for_altair.insert(1, 'tax_name')
tooltip_for_altair.insert(2, '{}:Q'.format(self._field))
dfs_to_plot = []
if haxis:
blank_df = df.iloc[:1].copy()
blank_df[self._field] = 0
dfs_to_plot.append(blank_df)
for md_val in magic_metadata[magic_fields[haxis]].unique():
plot_df = df.where(df[magic_fields[haxis]] == md_val).dropna()
if magic_metadata[magic_fields[haxis]].dtype == 'bool':
plot_df[magic_fields[haxis]] = plot_df[magic_fields[haxis]
].astype(bool)
dfs_to_plot.append(plot_df)
else:
dfs_to_plot.append(df)
charts = []
for plot_num, plot_df in enumerate(dfs_to_plot):
chart = alt.Chart(plot_df).mark_bar().encode(x=alt.X('Label', axis=
alt.Axis(title=xlabel), sort=sort_order), y=alt.Y(self._field,
axis=alt.Axis(title=ylabel), scale=alt.Scale(domain=[0, 1],
zero=True, nice=False)), color=alt.Color('tax_name', legend=alt
.Legend(title=legend)), tooltip=tooltip_for_altair, href='url:N')
if haxis:
if plot_num == 0:
chart.encoding.x.axis = None
elif plot_num > 0:
chart.encoding.y.axis = None
chart.title = str(plot_df[magic_fields[haxis]].tolist()[0])
charts.append(chart)
final_chart = charts[0]
if len(charts) > 1:
for chart in charts[1:]:
final_chart |= chart
final_chart = final_chart.properties(title=title) if title else final_chart
return final_chart if return_chart else final_chart.display()
|
plot_bargraph
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rank='auto', normalize='auto', top_n='auto', threshold='auto',\n title=None, xlabel=None, ylabel=None, tooltip=None, return_chart=False,\n haxis=None, legend='auto', label=None):\n if rank is None:\n raise OneCodexException(\n \"Please specify a rank or 'auto' to choose automatically\")\n if not (threshold or top_n):\n raise OneCodexException(\n 'Please specify at least one of: threshold, top_n')\n if top_n == 'auto' and threshold == 'auto':\n top_n = 10\n threshold = None\n elif top_n == 'auto' and threshold != 'auto':\n top_n = None\n elif top_n != 'auto' and threshold == 'auto':\n threshold = None\n if legend == 'auto':\n legend = self._field\n df = self.to_df(rank=rank, normalize=normalize, top_n=top_n, threshold=\n threshold, table_format='long')\n if tooltip:\n if not isinstance(tooltip, list):\n tooltip = [tooltip]\n else:\n tooltip = []\n if haxis:\n tooltip.append(haxis)\n tooltip.insert(0, 'Label')\n magic_metadata, magic_fields = self._metadata_fetch(tooltip, label=label)\n if haxis:\n sort_order = magic_metadata.sort_values(magic_fields[haxis]\n ).index.tolist()\n for sort_num, sort_class_id in enumerate(sort_order):\n magic_metadata.loc[sort_class_id, 'sort_order'] = sort_num\n df['sort_order'] = magic_metadata['sort_order'][df['classification_id']\n ].tolist()\n sort_order = alt.EncodingSortField(field='sort_order', op='mean')\n else:\n sort_order = None\n for f in tooltip:\n df[magic_fields[f]] = magic_metadata[magic_fields[f]][df[\n 'classification_id']].tolist()\n df['tax_name'] = [('{} ({})'.format(self.taxonomy['name'][t], t) if t in\n self.taxonomy['name'] else t) for t in df['tax_id']]\n ylabel = self._field if ylabel is None else ylabel\n xlabel = '' if xlabel is None else xlabel\n tooltip_for_altair = [magic_fields[f] for f in tooltip]\n tooltip_for_altair.insert(1, 'tax_name')\n tooltip_for_altair.insert(2, '{}:Q'.format(self._field))\n dfs_to_plot = []\n if haxis:\n blank_df = df.iloc[:1].copy()\n blank_df[self._field] = 0\n dfs_to_plot.append(blank_df)\n for md_val in magic_metadata[magic_fields[haxis]].unique():\n plot_df = df.where(df[magic_fields[haxis]] == md_val).dropna()\n if magic_metadata[magic_fields[haxis]].dtype == 'bool':\n plot_df[magic_fields[haxis]] = plot_df[magic_fields[haxis]\n ].astype(bool)\n dfs_to_plot.append(plot_df)\n else:\n dfs_to_plot.append(df)\n charts = []\n for plot_num, plot_df in enumerate(dfs_to_plot):\n chart = alt.Chart(plot_df).mark_bar().encode(x=alt.X('Label', axis=\n alt.Axis(title=xlabel), sort=sort_order), y=alt.Y(self._field,\n axis=alt.Axis(title=ylabel), scale=alt.Scale(domain=[0, 1],\n zero=True, nice=False)), color=alt.Color('tax_name', legend=alt\n .Legend(title=legend)), tooltip=tooltip_for_altair, href='url:N')\n if haxis:\n if plot_num == 0:\n chart.encoding.x.axis = None\n elif plot_num > 0:\n chart.encoding.y.axis = None\n chart.title = str(plot_df[magic_fields[haxis]].tolist()[0])\n charts.append(chart)\n final_chart = charts[0]\n if len(charts) > 1:\n for chart in charts[1:]:\n final_chart |= chart\n final_chart = final_chart.properties(title=title) if title else final_chart\n return final_chart if return_chart else final_chart.display()\n",
"role": "user"
},
{
"content": "plot_bargraph",
"role": "assistant"
}
] |
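
The 'auto' handling at the top of plot_bargraph reduces to a small resolution table: with no explicit choice, the ten most abundant taxa are kept and no threshold applies; an explicit value for either parameter disables the other default. Extracted as a helper (the function name is hypothetical):

def resolve_top_n_threshold(top_n='auto', threshold='auto'):
    """Replicate the 'auto' defaulting used by the plot above (sketch)."""
    if top_n == 'auto' and threshold == 'auto':
        return 10, None            # default: ten most abundant taxa
    if top_n == 'auto':
        return None, threshold     # explicit threshold disables top_n
    if threshold == 'auto':
        return top_n, None         # explicit top_n disables threshold
    return top_n, threshold

print(resolve_top_n_threshold())                # (10, None)
print(resolve_top_n_threshold(top_n=5))         # (5, None)
print(resolve_top_n_threshold(threshold=0.01))  # (None, 0.01)
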
def x(resp, state=None):
error_code = resp.status_code
if error_code == 402:
error_message = (
'Please add a payment method to upload more samples. If you continue to experience problems, contact us at help@onecodex.com for assistance.'
)
elif error_code == 403:
error_message = (
'Please login to your One Codex account or pass the appropriate API key.'
)
else:
try:
error_json = resp.json()
except ValueError:
error_json = {}
if 'msg' in error_json:
error_message = error_json['msg'].rstrip('.')
elif 'message' in error_json:
error_message = error_json['message'].rstrip('.')
else:
error_message = None
if state == 'init' and not error_message:
error_message = (
'Could not initialize upload. Are you logged in? If this problem continues, please contact help@onecodex.com for assistance.'
)
elif state == 'upload' and not error_message:
error_message = (
'File could not be uploaded. If this problem continues, please contact help@onecodex.com for assistance.'
)
elif state == 'callback' and not error_message:
error_message = (
'Callback could not be completed. If this problem continues, please contact help@onecodex.com for assistance.'
)
if error_message is None:
error_message = (
'Upload failed. Please contact help@onecodex.com for assistance.')
raise UploadException(error_message)
|
raise_api_error
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(resp, state=None):\n error_code = resp.status_code\n if error_code == 402:\n error_message = (\n 'Please add a payment method to upload more samples. If you continue to experience problems, contact us at help@onecodex.com for assistance.'\n )\n elif error_code == 403:\n error_message = (\n 'Please login to your One Codex account or pass the appropriate API key.'\n )\n else:\n try:\n error_json = resp.json()\n except ValueError:\n error_json = {}\n if 'msg' in error_json:\n error_message = error_json['msg'].rstrip('.')\n elif 'message' in error_json:\n error_message = error_json['message'].rstrip('.')\n else:\n error_message = None\n if state == 'init' and not error_message:\n error_message = (\n 'Could not initialize upload. Are you logged in? If this problem continues, please contact help@onecodex.com for assistance.'\n )\n elif state == 'upload' and not error_message:\n error_message = (\n 'File could not be uploaded. If this problem continues, please contact help@onecodex.com for assistance.'\n )\n elif state == 'callback' and not error_message:\n error_message = (\n 'Callback could not be completed. If this problem continues, please contact help@onecodex.com for assistance.'\n )\n if error_message is None:\n error_message = (\n 'Upload failed. Please contact help@onecodex.com for assistance.')\n raise UploadException(error_message)\n",
"role": "user"
},
{
"content": "raise_api_error",
"role": "assistant"
}
] |
def x(self, rank='auto', normalize='auto', org_vectors=0, org_vectors_scale
=None, title=None, xlabel=None, ylabel=None, color=None, size=None,
tooltip=None, return_chart=False, label=None):
if rank is None:
raise OneCodexException(
"Please specify a rank or 'auto' to choose automatically")
if len(self._results) < 2:
raise OneCodexException(
'`plot_pca` requires 2 or more valid classification results.')
df = self.to_df(rank=rank, normalize=normalize)
if len(df.columns) < 2:
raise OneCodexException(
'Too few taxa in results. Need at least 2 for PCA.')
if tooltip:
if not isinstance(tooltip, list):
tooltip = [tooltip]
else:
tooltip = []
tooltip.insert(0, 'Label')
if color and color not in tooltip:
tooltip.insert(1, color)
if size and size not in tooltip:
tooltip.insert(2, size)
magic_metadata, magic_fields = self._metadata_fetch(tooltip, label=label)
pca = PCA()
pca_vals = pca.fit(df.values).transform(df.values)
pca_vals = pd.DataFrame(pca_vals, index=df.index)
pca_vals.rename(columns=lambda x: 'PC{}'.format(x + 1), inplace=True)
if xlabel is None:
xlabel = 'PC1 ({}%)'.format(round(pca.explained_variance_ratio_[0] *
100, 2))
if ylabel is None:
ylabel = 'PC2 ({}%)'.format(round(pca.explained_variance_ratio_[1] *
100, 2))
plot_data = pd.concat([pca_vals.loc[:, ('PC1', 'PC2')], magic_metadata],
axis=1).reset_index()
alt_kwargs = dict(x=alt.X('PC1', axis=alt.Axis(title=xlabel)), y=alt.Y(
'PC2', axis=alt.Axis(title=ylabel)), tooltip=[magic_fields[t] for t in
tooltip], href='url:N', url=
'https://app.onecodex.com/classification/' + alt.datum.
classification_id)
if color:
alt_kwargs['color'] = magic_fields[color]
if size:
alt_kwargs['size'] = magic_fields[size]
chart = alt.Chart(plot_data).transform_calculate(url=alt_kwargs.pop('url')
).mark_circle().encode(**alt_kwargs)
if title:
chart = chart.properties(title=title)
if org_vectors > 0:
plot_data = {'x': [], 'y': [], 'o': [], 'Eigenvectors': []}
magnitudes = np.sqrt(pca.components_[0] ** 2 + pca.components_[1] ** 2)
magnitudes.sort()
cutoff = magnitudes[-1 * org_vectors]
if org_vectors_scale is None:
org_vectors_scale = 0.8 * np.max(pca_vals.abs().values)
for tax_id, var1, var2 in zip(df.columns.values, pca.components_[0,
:], pca.components_[1, :]):
if np.sqrt(var1 ** 2 + var2 ** 2) >= cutoff:
plot_data['x'].extend([0, var1 * float(org_vectors_scale)])
plot_data['y'].extend([0, var2 * float(org_vectors_scale)])
plot_data['o'].extend([0, 1])
plot_data['Eigenvectors'].extend([self.taxonomy['name'][
tax_id]] * 2)
org_vectors -= 1
if org_vectors == 0:
break
plot_data = pd.DataFrame(plot_data)
vector_chart = alt.Chart(plot_data).mark_line(point=False).encode(x
=alt.X('x', axis=None), y=alt.Y('y', axis=None), order='o',
color='Eigenvectors')
chart += vector_chart
if return_chart:
return chart
else:
chart.interactive().display()
|
plot_pca
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rank='auto', normalize='auto', org_vectors=0, org_vectors_scale\n =None, title=None, xlabel=None, ylabel=None, color=None, size=None,\n tooltip=None, return_chart=False, label=None):\n if rank is None:\n raise OneCodexException(\n \"Please specify a rank or 'auto' to choose automatically\")\n if len(self._results) < 2:\n raise OneCodexException(\n '`plot_pca` requires 2 or more valid classification results.')\n df = self.to_df(rank=rank, normalize=normalize)\n if len(df.columns) < 2:\n raise OneCodexException(\n 'Too few taxa in results. Need at least 2 for PCA.')\n if tooltip:\n if not isinstance(tooltip, list):\n tooltip = [tooltip]\n else:\n tooltip = []\n tooltip.insert(0, 'Label')\n if color and color not in tooltip:\n tooltip.insert(1, color)\n if size and size not in tooltip:\n tooltip.insert(2, size)\n magic_metadata, magic_fields = self._metadata_fetch(tooltip, label=label)\n pca = PCA()\n pca_vals = pca.fit(df.values).transform(df.values)\n pca_vals = pd.DataFrame(pca_vals, index=df.index)\n pca_vals.rename(columns=lambda x: 'PC{}'.format(x + 1), inplace=True)\n if xlabel is None:\n xlabel = 'PC1 ({}%)'.format(round(pca.explained_variance_ratio_[0] *\n 100, 2))\n if ylabel is None:\n ylabel = 'PC2 ({}%)'.format(round(pca.explained_variance_ratio_[1] *\n 100, 2))\n plot_data = pd.concat([pca_vals.loc[:, ('PC1', 'PC2')], magic_metadata],\n axis=1).reset_index()\n alt_kwargs = dict(x=alt.X('PC1', axis=alt.Axis(title=xlabel)), y=alt.Y(\n 'PC2', axis=alt.Axis(title=ylabel)), tooltip=[magic_fields[t] for t in\n tooltip], href='url:N', url=\n 'https://app.onecodex.com/classification/' + alt.datum.\n classification_id)\n if color:\n alt_kwargs['color'] = magic_fields[color]\n if size:\n alt_kwargs['size'] = magic_fields[size]\n chart = alt.Chart(plot_data).transform_calculate(url=alt_kwargs.pop('url')\n ).mark_circle().encode(**alt_kwargs)\n if title:\n chart = chart.properties(title=title)\n if org_vectors > 0:\n plot_data = {'x': [], 'y': [], 'o': [], 'Eigenvectors': []}\n magnitudes = np.sqrt(pca.components_[0] ** 2 + pca.components_[1] ** 2)\n magnitudes.sort()\n cutoff = magnitudes[-1 * org_vectors]\n if org_vectors_scale is None:\n org_vectors_scale = 0.8 * np.max(pca_vals.abs().values)\n for tax_id, var1, var2 in zip(df.columns.values, pca.components_[0,\n :], pca.components_[1, :]):\n if np.sqrt(var1 ** 2 + var2 ** 2) >= cutoff:\n plot_data['x'].extend([0, var1 * float(org_vectors_scale)])\n plot_data['y'].extend([0, var2 * float(org_vectors_scale)])\n plot_data['o'].extend([0, 1])\n plot_data['Eigenvectors'].extend([self.taxonomy['name'][\n tax_id]] * 2)\n org_vectors -= 1\n if org_vectors == 0:\n break\n plot_data = pd.DataFrame(plot_data)\n vector_chart = alt.Chart(plot_data).mark_line(point=False).encode(x\n =alt.X('x', axis=None), y=alt.Y('y', axis=None), order='o',\n color='Eigenvectors')\n chart += vector_chart\n if return_chart:\n return chart\n else:\n chart.interactive().display()\n",
"role": "user"
},
{
"content": "plot_pca",
"role": "assistant"
}
] |
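
The axis labels above embed the explained-variance ratio of the first two principal components. A minimal check of that computation (requires scikit-learn) on a made-up abundance matrix; the values are illustrative only:

import numpy as np
from sklearn.decomposition import PCA

X = np.array([[0.7, 0.2, 0.1],    # 4 samples x 3 taxa, illustrative only
              [0.6, 0.3, 0.1],
              [0.1, 0.2, 0.7],
              [0.2, 0.2, 0.6]])

pca = PCA()
pca_vals = pca.fit(X).transform(X)
xlabel = 'PC1 ({}%)'.format(round(pca.explained_variance_ratio_[0] * 100, 2))
ylabel = 'PC2 ({}%)'.format(round(pca.explained_variance_ratio_[1] * 100, 2))
print(xlabel, ylabel)   # PC1 carries nearly all variance for this toy matrix
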
def x(err_json):
if len(err_json) == 1 and 'validationOf' in err_json[0]:
required_fields = ', '.join(err_json[0]['validationOf']['required'])
return 'Validation error. Requires properties: {}.'.format(
required_fields)
msg = '; '.join(err.get('message', '') for err in err_json)
if not msg:
msg = 'Bad request.'
return msg
|
pretty_print_error
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(err_json):\n if len(err_json) == 1 and 'validationOf' in err_json[0]:\n required_fields = ', '.join(err_json[0]['validationOf']['required'])\n return 'Validation error. Requires properties: {}.'.format(\n required_fields)\n msg = '; '.join(err.get('message', '') for err in err_json)\n if not msg:\n msg = 'Bad request.'\n return msg\n",
"role": "user"
},
{
"content": "pretty_print_error",
"role": "assistant"
}
] |
def x(self, include_references=True):
if include_references:
return json.dumps(self._resource._properties, cls=PotionJSONEncoder)
else:
return json.dumps({k: v for k, v in self._resource._properties.
items() if not isinstance(v, Resource) and not k.startswith('$'
)}, cls=PotionJSONEncoder)
|
to_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, include_references=True):\n if include_references:\n return json.dumps(self._resource._properties, cls=PotionJSONEncoder)\n else:\n return json.dumps({k: v for k, v in self._resource._properties.\n items() if not isinstance(v, Resource) and not k.startswith('$'\n )}, cls=PotionJSONEncoder)\n",
"role": "user"
},
{
"content": "to_json",
"role": "assistant"
}
] |
def x(cls, sort=None, limit=None):
return cls.where(sort=sort, limit=limit)
|
all
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, sort=None, limit=None):\n return cls.where(sort=sort, limit=limit)\n",
"role": "user"
},
{
"content": "all",
"role": "assistant"
}
] |
def x(cls, *filters, **keyword_filters):
check_bind(cls)
filter_func = keyword_filters.pop('filter', None)
public = False
if any(x['rel'] == 'instances_public' for x in cls._resource._schema[
'links']):
public = keyword_filters.pop('public', False)
instances_route = keyword_filters.pop('_instances', 'instances' if not
public else 'instances_public')
schema = next(l for l in cls._resource._schema['links'] if l['rel'] ==
instances_route)
sort_schema = schema['schema']['properties']['sort']['properties']
where_schema = schema['schema']['properties']['where']['properties']
sort = generate_potion_sort_clause(keyword_filters.pop('sort', None),
sort_schema)
limit = keyword_filters.pop('limit', None if not public else 1000)
where = {}
if len(filters) > 0:
if len(filters) == 1 and isinstance(filters[0], dict):
where = filters[0]
elif all(isinstance(f, six.string_types) for f in filters):
where = {'$uri': {'$in': [cls._convert_id_to_uri(f) for f in
filters]}}
else:
raise NotImplementedError(
"Advanced filtering hasn't been implemented yet")
if len(keyword_filters) > 0:
for k, v in generate_potion_keyword_where(keyword_filters,
where_schema, cls).items():
if k in where:
raise AttributeError('Multiple definitions for same field {}'
.format(k))
where[k] = v
cursor = getattr(cls._resource, instances_route)(where=where, sort=sort,
per_page=DEFAULT_PAGE_SIZE)
if limit is not None:
cursor = itertools.islice(cursor, limit)
wrapped = [cls(_resource=r) for r in cursor]
if filter_func:
if callable(filter_func):
wrapped = [obj for obj in wrapped if filter_func(obj) is True]
else:
raise OneCodexException('Expected callable for filter, got: {}'
.format(type(filter_func).__name__))
return wrapped
|
where
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, *filters, **keyword_filters):\n check_bind(cls)\n filter_func = keyword_filters.pop('filter', None)\n public = False\n if any(x['rel'] == 'instances_public' for x in cls._resource._schema[\n 'links']):\n public = keyword_filters.pop('public', False)\n instances_route = keyword_filters.pop('_instances', 'instances' if not\n public else 'instances_public')\n schema = next(l for l in cls._resource._schema['links'] if l['rel'] ==\n instances_route)\n sort_schema = schema['schema']['properties']['sort']['properties']\n where_schema = schema['schema']['properties']['where']['properties']\n sort = generate_potion_sort_clause(keyword_filters.pop('sort', None),\n sort_schema)\n limit = keyword_filters.pop('limit', None if not public else 1000)\n where = {}\n if len(filters) > 0:\n if len(filters) == 1 and isinstance(filters[0], dict):\n where = filters[0]\n elif all(isinstance(f, six.string_types) for f in filters):\n where = {'$uri': {'$in': [cls._convert_id_to_uri(f) for f in\n filters]}}\n else:\n raise NotImplementedError(\n \"Advanced filtering hasn't been implemented yet\")\n if len(keyword_filters) > 0:\n for k, v in generate_potion_keyword_where(keyword_filters,\n where_schema, cls).items():\n if k in where:\n raise AttributeError('Multiple definitions for same field {}'\n .format(k))\n where[k] = v\n cursor = getattr(cls._resource, instances_route)(where=where, sort=sort,\n per_page=DEFAULT_PAGE_SIZE)\n if limit is not None:\n cursor = itertools.islice(cursor, limit)\n wrapped = [cls(_resource=r) for r in cursor]\n if filter_func:\n if callable(filter_func):\n wrapped = [obj for obj in wrapped if filter_func(obj) is True]\n else:\n raise OneCodexException('Expected callable for filter, got: {}'\n .format(type(filter_func).__name__))\n return wrapped\n",
"role": "user"
},
{
"content": "where",
"role": "assistant"
}
] |
def x(cls, uuid):
check_bind(cls)
try:
resource = cls._resource.fetch(uuid)
if isinstance(resource, list):
if len(resource) == 1:
resource = resource[0]
else:
raise TypeError('Potion-Client error in fetching resource')
except HTTPError as e:
if e.response.status_code == 404:
return None
else:
raise e
return cls(_resource=resource)
|
get
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, uuid):\n check_bind(cls)\n try:\n resource = cls._resource.fetch(uuid)\n if isinstance(resource, list):\n if len(resource) == 1:\n resource = resource[0]\n else:\n raise TypeError('Potion-Client error in fetching resource')\n except HTTPError as e:\n if e.response.status_code == 404:\n return None\n else:\n raise e\n return cls(_resource=resource)\n",
"role": "user"
},
{
"content": "get",
"role": "assistant"
}
] |
def x(self):
check_bind(self)
if self.id is None:
raise ServerError('{} object does not exist yet'.format(self.
            __class__.__name__))
elif not self.__class__._has_schema_method('destroy'):
raise MethodNotSupported('{} do not support deletion.'.format(self.
__class__.__name__))
try:
self._resource.delete()
except HTTPError as e:
if e.response.status_code == 403:
raise PermissionDenied('')
else:
raise e
|
delete
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n check_bind(self)\n if self.id is None:\n raise ServerError('{} object does not exist yet'.format(self.\n __class__.name))\n elif not self.__class__._has_schema_method('destroy'):\n raise MethodNotSupported('{} do not support deletion.'.format(self.\n __class__.__name__))\n try:\n self._resource.delete()\n except HTTPError as e:\n if e.response.status_code == 403:\n raise PermissionDenied('')\n else:\n raise e\n",
"role": "user"
},
{
"content": "delete",
"role": "assistant"
}
] |
def x(self):
check_bind(self)
creating = self.id is None
if creating and not self.__class__._has_schema_method('create'):
raise MethodNotSupported('{} do not support creating.'.format(self.
__class__.__name__))
if not creating and not self.__class__._has_schema_method('update'):
raise MethodNotSupported('{} do not support updating.'.format(self.
__class__.__name__))
try:
self._resource.save()
except HTTPError as e:
if e.response.status_code == 400:
err_json = e.response.json().get('errors', [])
msg = pretty_print_error(err_json)
raise ServerError(msg)
elif e.response.status_code == 404:
action = 'creating' if creating else 'updating'
raise MethodNotSupported('{} do not support {}.'.format(self.
__class__.__name__, action))
elif e.response.status_code == 409:
raise ServerError('This {} object already exists'.format(self.
__class__.__name__))
else:
raise e
|
save
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n check_bind(self)\n creating = self.id is None\n if creating and not self.__class__._has_schema_method('create'):\n raise MethodNotSupported('{} do not support creating.'.format(self.\n __class__.__name__))\n if not creating and not self.__class__._has_schema_method('update'):\n raise MethodNotSupported('{} do not support updating.'.format(self.\n __class__.__name__))\n try:\n self._resource.save()\n except HTTPError as e:\n if e.response.status_code == 400:\n err_json = e.response.json().get('errors', [])\n msg = pretty_print_error(err_json)\n raise ServerError(msg)\n elif e.response.status_code == 404:\n action = 'creating' if creating else 'updating'\n raise MethodNotSupported('{} do not support {}.'.format(self.\n __class__.__name__, action))\n elif e.response.status_code == 409:\n raise ServerError('This {} object already exists'.format(self.\n __class__.__name__))\n else:\n raise e\n",
"role": "user"
},
{
"content": "save",
"role": "assistant"
}
] |
def x(file_path):
if not isinstance(file_path, tuple):
raise OneCodexException(
'Cannot get the interleaved filename without a tuple.')
if re.match('.*[._][Rr][12][_.].*', file_path[0]):
return re.sub('[._][Rr][12]', '', file_path[0])
else:
warnings.warn(
'Paired-end filenames do not match--are you sure they are correct?'
)
return file_path[0]
|
interleaved_filename
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path):\n if not isinstance(file_path, tuple):\n raise OneCodexException(\n 'Cannot get the interleaved filename without a tuple.')\n if re.match('.*[._][Rr][12][_.].*', file_path[0]):\n return re.sub('[._][Rr][12]', '', file_path[0])\n else:\n warnings.warn(\n 'Paired-end filenames do not match--are you sure they are correct?'\n )\n return file_path[0]\n",
"role": "user"
},
{
"content": "interleaved_filename",
"role": "assistant"
}
] |
def x(file_path, uncompressed=False):
_, ext = os.path.splitext(file_path)
if uncompressed:
if ext in {'.gz', '.gzip'}:
with gzip.GzipFile(file_path, mode='rb') as fp:
try:
fp.seek(0, os.SEEK_END)
return fp.tell()
except ValueError:
fp.seek(0)
while len(fp.read(8192)) != 0:
pass
return fp.tell()
elif ext in {'.bz', '.bz2', '.bzip', '.bzip2'}:
with bz2.BZ2File(file_path, mode='rb') as fp:
fp.seek(0, os.SEEK_END)
return fp.tell()
return os.path.getsize(file_path)
|
file_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, uncompressed=False):\n _, ext = os.path.splitext(file_path)\n if uncompressed:\n if ext in {'.gz', '.gzip'}:\n with gzip.GzipFile(file_path, mode='rb') as fp:\n try:\n fp.seek(0, os.SEEK_END)\n return fp.tell()\n except ValueError:\n fp.seek(0)\n while len(fp.read(8192)) != 0:\n pass\n return fp.tell()\n elif ext in {'.bz', '.bz2', '.bzip', '.bzip2'}:\n with bz2.BZ2File(file_path, mode='rb') as fp:\n fp.seek(0, os.SEEK_END)\n return fp.tell()\n return os.path.getsize(file_path)\n",
"role": "user"
},
{
"content": "file_size",
"role": "assistant"
}
] |
def x(file_path, enforce_fastx=True):
if isinstance(file_path, tuple):
assert len(file_path) == 2
file_size = sum(_file_size(f, uncompressed=True) for f in file_path)
file_path = interleaved_filename(file_path)
paired = True
else:
file_size = _file_size(file_path, uncompressed=False)
paired = False
new_filename, ext = os.path.splitext(os.path.basename(file_path))
if ext in {'.gz', '.gzip', '.bz', '.bz2', '.bzip'}:
compressed = ext
new_filename, ext = os.path.splitext(new_filename)
else:
compressed = ''
if paired and compressed:
final_filename = new_filename + ext
else:
final_filename = new_filename + ext + compressed
if enforce_fastx:
if ext in {'.fa', '.fna', '.fasta'}:
file_format = 'fasta'
elif ext in {'.fq', '.fastq'}:
file_format = 'fastq'
else:
raise UploadException(
'{}: extension must be one of .fa, .fna, .fasta, .fq, .fastq'
.format(final_filename))
else:
file_format = None
if file_size == 0:
raise UploadException('{}: empty files can not be uploaded'.format(
final_filename))
return final_filename, file_size, file_format
|
file_stats
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, enforce_fastx=True):\n if isinstance(file_path, tuple):\n assert len(file_path) == 2\n file_size = sum(_file_size(f, uncompressed=True) for f in file_path)\n file_path = interleaved_filename(file_path)\n paired = True\n else:\n file_size = _file_size(file_path, uncompressed=False)\n paired = False\n new_filename, ext = os.path.splitext(os.path.basename(file_path))\n if ext in {'.gz', '.gzip', '.bz', '.bz2', '.bzip'}:\n compressed = ext\n new_filename, ext = os.path.splitext(new_filename)\n else:\n compressed = ''\n if paired and compressed:\n final_filename = new_filename + ext\n else:\n final_filename = new_filename + ext + compressed\n if enforce_fastx:\n if ext in {'.fa', '.fna', '.fasta'}:\n file_format = 'fasta'\n elif ext in {'.fq', '.fastq'}:\n file_format = 'fastq'\n else:\n raise UploadException(\n '{}: extension must be one of .fa, .fna, .fasta, .fq, .fastq'\n .format(final_filename))\n else:\n file_format = None\n if file_size == 0:\n raise UploadException('{}: empty files can not be uploaded'.format(\n final_filename))\n return final_filename, file_size, file_format\n",
"role": "user"
},
{
"content": "file_stats",
"role": "assistant"
}
] |
def x(file_name, file_size, metadata, tags, project, samples_resource):
upload_args = {'filename': file_name, 'size': file_size, 'upload_type':
'standard'}
if metadata:
new_metadata = {}
for md_key, md_val in metadata.items():
new_metadata[snake_case(md_key)] = md_val
upload_args['metadata'] = new_metadata
if tags:
upload_args['tags'] = tags
if project:
upload_args['project'] = getattr(project, 'id', project)
try:
upload_info = samples_resource.init_upload(upload_args)
except requests.exceptions.HTTPError as e:
raise_api_error(e.response, state='init')
except requests.exceptions.ConnectionError:
raise_connectivity_error(file_name)
return upload_info
|
call_init_upload
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_name, file_size, metadata, tags, project, samples_resource):\n upload_args = {'filename': file_name, 'size': file_size, 'upload_type':\n 'standard'}\n if metadata:\n new_metadata = {}\n for md_key, md_val in metadata.items():\n new_metadata[snake_case(md_key)] = md_val\n upload_args['metadata'] = new_metadata\n if tags:\n upload_args['tags'] = tags\n if project:\n upload_args['project'] = getattr(project, 'id', project)\n try:\n upload_info = samples_resource.init_upload(upload_args)\n except requests.exceptions.HTTPError as e:\n raise_api_error(e.response, state='init')\n except requests.exceptions.ConnectionError:\n raise_connectivity_error(file_name)\n return upload_info\n",
"role": "user"
},
{
"content": "call_init_upload",
"role": "assistant"
}
] |
def x(file_name, metadata, tags, project):
upload_args = {'filename': file_name}
if metadata:
new_metadata = {}
for md_key, md_val in metadata.items():
new_metadata[snake_case(md_key)] = md_val
upload_args['metadata'] = new_metadata
if tags:
upload_args['tags'] = tags
if project:
upload_args['project'] = getattr(project, 'id', project)
return upload_args
|
make_retry_fields
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_name, metadata, tags, project):\n upload_args = {'filename': file_name}\n if metadata:\n new_metadata = {}\n for md_key, md_val in metadata.items():\n new_metadata[snake_case(md_key)] = md_val\n upload_args['metadata'] = new_metadata\n if tags:\n upload_args['tags'] = tags\n if project:\n upload_args['project'] = getattr(project, 'id', project)\n return upload_args\n",
"role": "user"
},
{
"content": "make_retry_fields",
"role": "assistant"
}
] |
def x(files, session, samples_resource, metadata=None, tags=None, project=
None, coerce_ascii=False, progressbar=None):
filename, file_size, file_format = _file_stats(files)
try:
ascii_fname = unidecode(unicode(filename))
except NameError:
ascii_fname = unidecode(filename)
if filename != ascii_fname:
if coerce_ascii:
logging.warn('Renaming {} to {}, must be ASCII\n'.format(
filename.encode('utf-8'), ascii_fname))
filename = ascii_fname
else:
raise OneCodexException(
'Filenames must be ascii. Try using --coerce-ascii')
if not progressbar:
progressbar = FakeProgressBar()
with progressbar as bar:
if isinstance(files, tuple):
fobj = FASTXInterleave(files, file_size, file_format, bar)
else:
fobj = FilePassthru(files, file_size, bar)
fields = _call_init_upload(filename, file_size, metadata, tags,
project, samples_resource)
def x():
bar.canceled = True
bar.update(1)
logging.info('Canceled upload for sample: {}'.format(fields[
'sample_id']))
samples_resource.cancel_upload({'sample_id': fields['sample_id']})
atexit_register(cancel_atexit)
retry_fields = _make_retry_fields(filename, metadata, tags, project)
try:
sample_id = upload_sequence_fileobj(fobj, filename, fields,
retry_fields, session, samples_resource)
atexit_unregister(cancel_atexit)
return sample_id
except KeyboardInterrupt:
cancel_atexit()
atexit_unregister(cancel_atexit)
raise
|
upload_sequence
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(files, session, samples_resource, metadata=None, tags=None, project=\n None, coerce_ascii=False, progressbar=None):\n filename, file_size, file_format = _file_stats(files)\n try:\n ascii_fname = unidecode(unicode(filename))\n except NameError:\n ascii_fname = unidecode(filename)\n if filename != ascii_fname:\n if coerce_ascii:\n logging.warn('Renaming {} to {}, must be ASCII\\n'.format(\n filename.encode('utf-8'), ascii_fname))\n filename = ascii_fname\n else:\n raise OneCodexException(\n 'Filenames must be ascii. Try using --coerce-ascii')\n if not progressbar:\n progressbar = FakeProgressBar()\n with progressbar as bar:\n if isinstance(files, tuple):\n fobj = FASTXInterleave(files, file_size, file_format, bar)\n else:\n fobj = FilePassthru(files, file_size, bar)\n fields = _call_init_upload(filename, file_size, metadata, tags,\n project, samples_resource)\n\n def x():\n bar.canceled = True\n bar.update(1)\n logging.info('Canceled upload for sample: {}'.format(fields[\n 'sample_id']))\n samples_resource.cancel_upload({'sample_id': fields['sample_id']})\n atexit_register(cancel_atexit)\n retry_fields = _make_retry_fields(filename, metadata, tags, project)\n try:\n sample_id = upload_sequence_fileobj(fobj, filename, fields,\n retry_fields, session, samples_resource)\n atexit_unregister(cancel_atexit)\n return sample_id\n except KeyboardInterrupt:\n cancel_atexit()\n atexit_unregister(cancel_atexit)\n raise\n",
"role": "user"
},
{
"content": "upload_sequence",
"role": "assistant"
}
] |
def x(file_obj, file_name, fields, session, samples_resource):
multipart_fields = OrderedDict()
for k, v in fields['additional_fields'].items():
multipart_fields[str(k)] = str(v)
mime_type = getattr(file_obj, 'mime_type', 'text/plain')
multipart_fields['file'] = file_name, file_obj, mime_type
encoder = MultipartEncoder(multipart_fields)
upload_request = None
try:
upload_request = session.post(fields['upload_url'], data=encoder,
headers={'Content-Type': encoder.content_type}, auth={})
except requests.exceptions.ConnectionError:
pass
if 'status_url' in fields['additional_fields']:
now = time.time()
while time.time() < now + 60 * 60 * 4:
try:
resp = session.post(fields['additional_fields'][
'status_url'], json={'sample_id': fields['sample_id']})
resp.raise_for_status()
except (ValueError, requests.exceptions.RequestException) as e:
logging.debug('Retrying due to error: {}'.format(e))
raise RetryableUploadException(
'Unexpected failure of direct upload proxy. Retrying...')
if resp.json() and resp.json().get('complete', True) is False:
logging.debug(
'Blocking on waiting for proxy to complete (in progress)...'
)
time.sleep(30)
else:
break
if resp.json().get('code') in [200, 201]:
file_obj.close()
return
elif resp.json().get('code') == 500:
logging.debug('Retrying due to 500 from proxy...')
raise RetryableUploadException(
'Unexpected issue with direct upload proxy. Retrying...')
else:
raise_api_error(resp, state='upload')
else:
file_obj.close()
if upload_request.status_code not in [200, 201]:
raise RetryableUploadException(
'Unknown connectivity issue with proxy upload.')
try:
if not fields['additional_fields'].get('callback_url'):
samples_resource.confirm_upload({'sample_id': fields[
'sample_id'], 'upload_type': 'standard'})
except requests.exceptions.HTTPError as e:
raise_api_error(e.response, state='callback')
except requests.exceptions.ConnectionError:
raise_connectivity_error()
|
direct_upload
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_obj, file_name, fields, session, samples_resource):\n multipart_fields = OrderedDict()\n for k, v in fields['additional_fields'].items():\n multipart_fields[str(k)] = str(v)\n mime_type = getattr(file_obj, 'mime_type', 'text/plain')\n multipart_fields['file'] = file_name, file_obj, mime_type\n encoder = MultipartEncoder(multipart_fields)\n upload_request = None\n try:\n upload_request = session.post(fields['upload_url'], data=encoder,\n headers={'Content-Type': encoder.content_type}, auth={})\n except requests.exceptions.ConnectionError:\n pass\n if 'status_url' in fields['additional_fields']:\n now = time.time()\n while time.time() < now + 60 * 60 * 4:\n try:\n resp = session.post(fields['additional_fields'][\n 'status_url'], json={'sample_id': fields['sample_id']})\n resp.raise_for_status()\n except (ValueError, requests.exceptions.RequestException) as e:\n logging.debug('Retrying due to error: {}'.format(e))\n raise RetryableUploadException(\n 'Unexpected failure of direct upload proxy. Retrying...')\n if resp.json() and resp.json().get('complete', True) is False:\n logging.debug(\n 'Blocking on waiting for proxy to complete (in progress)...'\n )\n time.sleep(30)\n else:\n break\n if resp.json().get('code') in [200, 201]:\n file_obj.close()\n return\n elif resp.json().get('code') == 500:\n logging.debug('Retrying due to 500 from proxy...')\n raise RetryableUploadException(\n 'Unexpected issue with direct upload proxy. Retrying...')\n else:\n raise_api_error(resp, state='upload')\n else:\n file_obj.close()\n if upload_request.status_code not in [200, 201]:\n raise RetryableUploadException(\n 'Unknown connectivity issue with proxy upload.')\n try:\n if not fields['additional_fields'].get('callback_url'):\n samples_resource.confirm_upload({'sample_id': fields[\n 'sample_id'], 'upload_type': 'standard'})\n except requests.exceptions.HTTPError as e:\n raise_api_error(e.response, state='callback')\n except requests.exceptions.ConnectionError:\n raise_connectivity_error()\n",
"role": "user"
},
{
"content": "direct_upload",
"role": "assistant"
}
] |
def x(file_obj, file_name, fields, retry_fields, session, samples_resource):
try:
_direct_upload(file_obj, file_name, fields, session, samples_resource)
sample_id = fields['sample_id']
except RetryableUploadException:
logging.error('{}: Connectivity issue, trying direct upload...'.
format(file_name))
file_obj.seek(0)
try:
retry_fields = samples_resource.init_multipart_upload(retry_fields)
except requests.exceptions.HTTPError as e:
raise_api_error(e.response, state='init')
except requests.exceptions.ConnectionError:
raise_connectivity_error(file_name)
s3_upload = _s3_intermediate_upload(file_obj, file_name,
retry_fields, session, samples_resource._client._root_url +
retry_fields['callback_url'])
sample_id = s3_upload.get('sample_id', '<UUID not yet assigned>')
logging.info('{}: finished as sample {}'.format(file_name, sample_id))
return sample_id
|
upload_sequence_fileobj
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_obj, file_name, fields, retry_fields, session, samples_resource):\n try:\n _direct_upload(file_obj, file_name, fields, session, samples_resource)\n sample_id = fields['sample_id']\n except RetryableUploadException:\n logging.error('{}: Connectivity issue, trying direct upload...'.\n format(file_name))\n file_obj.seek(0)\n try:\n retry_fields = samples_resource.init_multipart_upload(retry_fields)\n except requests.exceptions.HTTPError as e:\n raise_api_error(e.response, state='init')\n except requests.exceptions.ConnectionError:\n raise_connectivity_error(file_name)\n s3_upload = _s3_intermediate_upload(file_obj, file_name,\n retry_fields, session, samples_resource._client._root_url +\n retry_fields['callback_url'])\n sample_id = s3_upload.get('sample_id', '<UUID not yet assigned>')\n logging.info('{}: finished as sample {}'.format(file_name, sample_id))\n return sample_id\n",
"role": "user"
},
{
"content": "upload_sequence_fileobj",
"role": "assistant"
}
] |
def x(file_path, session, documents_resource, progressbar=None):
if not isinstance(file_path, six.string_types):
raise ValueError('Expected file_path to be a string, got {}'.format
(type(file_path).__name__))
file_name, file_size, _ = _file_stats(file_path, enforce_fastx=False)
if not progressbar:
progressbar = FakeProgressBar()
with progressbar as bar:
fobj = FilePassthru(file_path, file_size, bar)
document_id = upload_document_fileobj(fobj, file_name, session,
documents_resource)
bar.finish()
return document_id
|
upload_document
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, session, documents_resource, progressbar=None):\n if not isinstance(file_path, six.string_types):\n raise ValueError('Expected file_path to be a string, got {}'.format\n (type(file_path).__name__))\n file_name, file_size, _ = _file_stats(file_path, enforce_fastx=False)\n if not progressbar:\n progressbar = FakeProgressBar()\n with progressbar as bar:\n fobj = FilePassthru(file_path, file_size, bar)\n document_id = upload_document_fileobj(fobj, file_name, session,\n documents_resource)\n bar.finish()\n return document_id\n",
"role": "user"
},
{
"content": "upload_document",
"role": "assistant"
}
] |
def x(file_obj, file_name, session, documents_resource, log=None):
try:
fields = documents_resource.init_multipart_upload()
except requests.exceptions.HTTPError as e:
raise_api_error(e.response, state='init')
except requests.exceptions.ConnectionError:
raise_connectivity_error(file_name)
s3_upload = _s3_intermediate_upload(file_obj, file_name, fields,
session, documents_resource._client._root_url + fields['callback_url'])
document_id = s3_upload.get('document_id', '<UUID not yet assigned>')
logging.info('{}: finished as document {}'.format(file_name, document_id))
return document_id
|
upload_document_fileobj
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_obj, file_name, session, documents_resource, log=None):\n try:\n fields = documents_resource.init_multipart_upload()\n except requests.exceptions.HTTPError as e:\n raise_api_error(e.response, state='init')\n except requests.exceptions.ConnectionError:\n raise_connectivity_error(file_name)\n s3_upload = _s3_intermediate_upload(file_obj, file_name, fields,\n session, documents_resource._client._root_url + fields['callback_url'])\n document_id = s3_upload.get('document_id', '<UUID not yet assigned>')\n logging.info('{}: finished as document {}'.format(file_name, document_id))\n return document_id\n",
"role": "user"
},
{
"content": "upload_document_fileobj",
"role": "assistant"
}
] |
def x(file_obj, file_name, fields, session, callback_url):
import boto3
from boto3.s3.transfer import TransferConfig
from boto3.exceptions import S3UploadFailedError
client = boto3.client('s3', aws_access_key_id=fields[
'upload_aws_access_key_id'], aws_secret_access_key=fields[
'upload_aws_secret_access_key'])
config = TransferConfig(use_threads=False)
boto_kwargs = {}
if hasattr(file_obj, 'progressbar'):
boto_kwargs['Callback'] = file_obj.progressbar.update
file_obj.progressbar = None
try:
client.upload_fileobj(file_obj, fields['s3_bucket'], fields[
'file_id'], ExtraArgs={'ServerSideEncryption': 'AES256'},
Config=config, **boto_kwargs)
except S3UploadFailedError:
raise_connectivity_error(file_name)
try:
resp = session.post(callback_url, json={'s3_path': 's3://{}/{}'.
format(fields['s3_bucket'], fields['file_id']), 'filename':
file_name, 'import_as_document': fields.get(
'import_as_document', False)})
except requests.exceptions.ConnectionError:
raise_connectivity_error(file_name)
if resp.status_code != 200:
raise_connectivity_error(file_name)
try:
return resp.json()
except ValueError:
return {}
|
s3_intermediate_upload
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_obj, file_name, fields, session, callback_url):\n import boto3\n from boto3.s3.transfer import TransferConfig\n from boto3.exceptions import S3UploadFailedError\n client = boto3.client('s3', aws_access_key_id=fields[\n 'upload_aws_access_key_id'], aws_secret_access_key=fields[\n 'upload_aws_secret_access_key'])\n config = TransferConfig(use_threads=False)\n boto_kwargs = {}\n if hasattr(file_obj, 'progressbar'):\n boto_kwargs['Callback'] = file_obj.progressbar.update\n file_obj.progressbar = None\n try:\n client.upload_fileobj(file_obj, fields['s3_bucket'], fields[\n 'file_id'], ExtraArgs={'ServerSideEncryption': 'AES256'},\n Config=config, **boto_kwargs)\n except S3UploadFailedError:\n raise_connectivity_error(file_name)\n try:\n resp = session.post(callback_url, json={'s3_path': 's3://{}/{}'.\n format(fields['s3_bucket'], fields['file_id']), 'filename':\n file_name, 'import_as_document': fields.get(\n 'import_as_document', False)})\n except requests.exceptions.ConnectionError:\n raise_connectivity_error(file_name)\n if resp.status_code != 200:\n raise_connectivity_error(file_name)\n try:\n return resp.json()\n except ValueError:\n return {}\n",
"role": "user"
},
{
"content": "s3_intermediate_upload",
"role": "assistant"
}
] |
def x(self, loc):
assert loc == 0
if self.progressbar:
self.progressbar.update(-self._tell)
self._fp_left.seek(loc)
self._fp_right.seek(loc)
self._tell = loc
self._buf = Buffer()
|
seek
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, loc):\n assert loc == 0\n if self.progressbar:\n self.progressbar.update(-self._tell)\n self._fp_left.seek(loc)\n self._fp_right.seek(loc)\n self._tell = loc\n self._buf = Buffer()\n",
"role": "user"
},
{
"content": "seek",
"role": "assistant"
}
] |
def x(self, loc):
assert loc == 0
if self.progressbar:
self.progressbar.update(-self._fp.tell())
self._fp.seek(loc)
|
seek
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, loc):\n assert loc == 0\n if self.progressbar:\n self.progressbar.update(-self._fp.tell())\n self._fp.seek(loc)\n",
"role": "user"
},
{
"content": "seek",
"role": "assistant"
}
] |
def x(old_strings_file, new_strings_file):
old_localizable_dict = (
generate_localization_key_to_entry_dictionary_from_file(
old_strings_file))
output_file_elements = []
f = open_strings_file(new_strings_file, 'r+')
for header_comment, comments, key, value in extract_header_comment_key_value_tuples_from_file(
f):
if len(header_comment) > 0:
output_file_elements.append(Comment(header_comment))
localize_value = value
if key in old_localizable_dict:
localize_value = old_localizable_dict[key].value
output_file_elements.append(LocalizationEntry(comments, key,
localize_value))
f.close()
write_file_elements_to_strings_file(old_strings_file, output_file_elements)
|
merge_strings_files
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(old_strings_file, new_strings_file):\n old_localizable_dict = (\n generate_localization_key_to_entry_dictionary_from_file(\n old_strings_file))\n output_file_elements = []\n f = open_strings_file(new_strings_file, 'r+')\n for header_comment, comments, key, value in extract_header_comment_key_value_tuples_from_file(\n f):\n if len(header_comment) > 0:\n output_file_elements.append(Comment(header_comment))\n localize_value = value\n if key in old_localizable_dict:\n localize_value = old_localizable_dict[key].value\n output_file_elements.append(LocalizationEntry(comments, key,\n localize_value))\n f.close()\n write_file_elements_to_strings_file(old_strings_file, output_file_elements)\n",
"role": "user"
},
{
"content": "merge_strings_files",
"role": "assistant"
}
] |
def x(self, parser):
parser.add_argument('--log_path', default='', help='The log file path')
parser.add_argument('--verbose', help='Increase logging verbosity',
action='store_true')
|
configure_parser
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, parser):\n parser.add_argument('--log_path', default='', help='The log file path')\n parser.add_argument('--verbose', help='Increase logging verbosity',\n action='store_true')\n",
"role": "user"
},
{
"content": "configure_parser",
"role": "assistant"
}
] |
def x(self):
parser = argparse.ArgumentParser(description=self.description())
self.configure_parser(parser)
self.run(parser.parse_args())
|
run_with_standalone_parser
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n parser = argparse.ArgumentParser(description=self.description())\n self.configure_parser(parser)\n self.run(parser.parse_args())\n",
"role": "user"
},
{
"content": "run_with_standalone_parser",
"role": "assistant"
}
] |
def x(self, metric='simpson', rank='auto'):
if metric not in ('simpson', 'chao1', 'shannon'):
raise OneCodexException(
'For alpha diversity, metric must be one of: simpson, chao1, shannon'
)
if self._guess_normalized():
raise OneCodexException(
'Alpha diversity requires unnormalized read counts.')
df = self.to_df(rank=rank, normalize=False)
output = {'classification_id': [], metric: []}
for c_id in df.index:
output['classification_id'].append(c_id)
output[metric].append(skbio.diversity.alpha_diversity(metric, df.
loc[c_id].tolist(), [c_id]).values[0])
return pd.DataFrame(output).set_index('classification_id')
|
alpha_diversity
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, metric='simpson', rank='auto'):\n if metric not in ('simpson', 'chao1', 'shannon'):\n raise OneCodexException(\n 'For alpha diversity, metric must be one of: simpson, chao1, shannon'\n )\n if self._guess_normalized():\n raise OneCodexException(\n 'Alpha diversity requires unnormalized read counts.')\n df = self.to_df(rank=rank, normalize=False)\n output = {'classification_id': [], metric: []}\n for c_id in df.index:\n output['classification_id'].append(c_id)\n output[metric].append(skbio.diversity.alpha_diversity(metric, df.\n loc[c_id].tolist(), [c_id]).values[0])\n return pd.DataFrame(output).set_index('classification_id')\n",
"role": "user"
},
{
"content": "alpha_diversity",
"role": "assistant"
}
] |
def x(self, metric='braycurtis', rank='auto'):
if metric not in ('jaccard', 'braycurtis', 'cityblock'):
raise OneCodexException(
'For beta diversity, metric must be one of: jaccard, braycurtis, cityblock'
)
if self._guess_normalized():
raise OneCodexException(
'Beta diversity requires unnormalized read counts.')
df = self.to_df(rank=rank, normalize=False)
counts = []
for c_id in df.index:
counts.append(df.loc[c_id].tolist())
return skbio.diversity.beta_diversity(metric, counts, df.index.tolist())
|
beta_diversity
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, metric='braycurtis', rank='auto'):\n if metric not in ('jaccard', 'braycurtis', 'cityblock'):\n raise OneCodexException(\n 'For beta diversity, metric must be one of: jaccard, braycurtis, cityblock'\n )\n if self._guess_normalized():\n raise OneCodexException(\n 'Beta diversity requires unnormalized read counts.')\n df = self.to_df(rank=rank, normalize=False)\n counts = []\n for c_id in df.index:\n counts.append(df.loc[c_id].tolist())\n return skbio.diversity.beta_diversity(metric, counts, df.index.tolist())\n",
"role": "user"
},
{
"content": "beta_diversity",
"role": "assistant"
}
] |
def x(self, weighted=True, rank='auto'):
if self._guess_normalized():
raise OneCodexException('UniFrac requires unnormalized read counts.')
df = self.to_df(rank=rank, normalize=False)
counts = []
for c_id in df.index:
counts.append(df.loc[c_id].tolist())
tax_ids = df.keys().tolist()
tree = self.tree_build()
tree = self.tree_prune_rank(tree, rank=df.ocx_rank)
from skbio.tree import TreeNode
new_tree = TreeNode(name='fake root')
new_tree.rank = 'no rank'
new_tree.append(tree)
if weighted:
return skbio.diversity.beta_diversity('weighted_unifrac', counts,
df.index.tolist(), tree=new_tree, otu_ids=tax_ids)
else:
return skbio.diversity.beta_diversity('unweighted_unifrac', counts,
df.index.tolist(), tree=new_tree, otu_ids=tax_ids)
|
unifrac
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, weighted=True, rank='auto'):\n if self._guess_normalized():\n raise OneCodexException('UniFrac requires unnormalized read counts.')\n df = self.to_df(rank=rank, normalize=False)\n counts = []\n for c_id in df.index:\n counts.append(df.loc[c_id].tolist())\n tax_ids = df.keys().tolist()\n tree = self.tree_build()\n tree = self.tree_prune_rank(tree, rank=df.ocx_rank)\n from skbio.tree import TreeNode\n new_tree = TreeNode(name='fake root')\n new_tree.rank = 'no rank'\n new_tree.append(tree)\n if weighted:\n return skbio.diversity.beta_diversity('weighted_unifrac', counts,\n df.index.tolist(), tree=new_tree, otu_ids=tax_ids)\n else:\n return skbio.diversity.beta_diversity('unweighted_unifrac', counts,\n df.index.tolist(), tree=new_tree, otu_ids=tax_ids)\n",
"role": "user"
},
{
"content": "unifrac",
"role": "assistant"
}
] |
def x(username, password, server_url):
with requests.Session() as session:
text = session.get(server_url + 'login').text
csrf = re.search('type="hidden" value="([^"]+)"', text).group(1)
login_data = {'email': username, 'password': password, 'csrf_token':
csrf, 'next': '/api/get_token'}
page = session.post(server_url + 'login', data=login_data)
try:
key = page.json()['key']
except (ValueError, KeyError):
key = None
return key
|
fetch_api_key_from_uname
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(username, password, server_url):\n with requests.Session() as session:\n text = session.get(server_url + 'login').text\n csrf = re.search('type=\"hidden\" value=\"([^\"]+)\"', text).group(1)\n login_data = {'email': username, 'password': password, 'csrf_token':\n csrf, 'next': '/api/get_token'}\n page = session.post(server_url + 'login', data=login_data)\n try:\n key = page.json()['key']\n except (ValueError, KeyError):\n key = None\n return key\n",
"role": "user"
},
{
"content": "fetch_api_key_from_uname",
"role": "assistant"
}
] |
def x(version, server):
def x(client_version, server_version):
client_version = tuple([int(x) for x in client_version.split('-')[0
].split('.')])
server_version = tuple([int(x) for x in server_version.split('.')])
return client_version < server_version
data = requests.post(server + 'api/v0/check_for_cli_update', data={
'version': version})
if data.status_code != 200:
return False, 'Error connecting to server'
data = data.json()
latest_version = data['latest_version']
if version_inadequate(version, latest_version):
return (True,
'Please upgrade your client to the latest version (v{}) using the command `pip install --upgrade onecodex`'
.format(latest_version))
return False, None
|
check_version
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(version, server):\n\n def x(client_version, server_version):\n client_version = tuple([int(x) for x in client_version.split('-')[0\n ].split('.')])\n server_version = tuple([int(x) for x in server_version.split('.')])\n return client_version < server_version\n data = requests.post(server + 'api/v0/check_for_cli_update', data={\n 'version': version})\n if data.status_code != 200:\n return False, 'Error connecting to server'\n data = data.json()\n latest_version = data['latest_version']\n if version_inadequate(version, latest_version):\n return (True,\n 'Please upgrade your client to the latest version (v{}) using the command `pip install --upgrade onecodex`'\n .format(latest_version))\n return False, None\n",
"role": "user"
},
{
"content": "check_version",
"role": "assistant"
}
] |
def x(ctx, param, value):
if value is not None and len(value) != 32:
raise click.BadParameter('API Key must be 32 characters long, not {}'
.format(str(len(value))))
else:
return value
|
valid_api_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ctx, param, value):\n if value is not None and len(value) != 32:\n raise click.BadParameter('API Key must be 32 characters long, not {}'\n .format(str(len(value))))\n else:\n return value\n",
"role": "user"
},
{
"content": "valid_api_key",
"role": "assistant"
}
] |
def x(j, no_pretty):
if not no_pretty:
click.echo(json.dumps(j, cls=PotionJSONEncoder, sort_keys=True,
indent=4, separators=(',', ': ')))
else:
click.echo(j)
|
pprint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(j, no_pretty):\n if not no_pretty:\n click.echo(json.dumps(j, cls=PotionJSONEncoder, sort_keys=True,\n indent=4, separators=(',', ': ')))\n else:\n click.echo(j)\n",
"role": "user"
},
{
"content": "pprint",
"role": "assistant"
}
] |
def x():
v = sys.version_info
if v.major == 3:
return False
if v.major == 2 and v.minor == 7 and v.micro >= 9:
return False
try:
import OpenSSL
import ndg
import pyasn1
except ImportError:
pass
return True
|
is_insecure_platform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n v = sys.version_info\n if v.major == 3:\n return False\n if v.major == 2 and v.minor == 7 and v.micro >= 9:\n return False\n try:\n import OpenSSL\n import ndg\n import pyasn1\n except ImportError:\n pass\n return True\n",
"role": "user"
},
{
"content": "is_insecure_platform",
"role": "assistant"
}
] |
def x():
m = """
######################################################################################
# #
# Your version of Python appears to be out of date and lack important security #
# features. Please update to Python >= 2.7.9 or `pip install requests[security]`. #
# #
# InsecurePlatformWarning: A true SSLContext object is not available. This #
# prevents urllib3 from configuring SSL appropriately and may cause certain #
# SSL connections to fail. For more information, see #
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning. #
# #
######################################################################################
"""
if is_insecure_platform():
click.echo(m, err=True)
return True
else:
cli_log.debug('Python SSLContext passed')
return False
|
warn_if_insecure_platform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n m = \"\"\"\n######################################################################################\n# #\n# Your version of Python appears to be out of date and lack important security #\n# features. Please update to Python >= 2.7.9 or `pip install requests[security]`. #\n# #\n# InsecurePlatformWarning: A true SSLContext object is not available. This #\n# prevents urllib3 from configuring SSL appropriately and may cause certain #\n# SSL connections to fail. For more information, see #\n# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning. #\n# #\n######################################################################################\n\"\"\"\n if is_insecure_platform():\n click.echo(m, err=True)\n return True\n else:\n cli_log.debug('Python SSLContext passed')\n return False\n",
"role": "user"
},
{
"content": "warn_if_insecure_platform",
"role": "assistant"
}
] |
def x(url, input_path):
r = requests.get(url, stream=True)
if r.status_code != 200:
cli_log.error('Failed to download file: %s' % r.json()['message'])
local_full_path = get_download_dest(input_path, r.url)
original_filename = os.path.split(local_full_path)[-1]
with open(local_full_path, 'wb') as f:
click.echo('Downloading {}'.format(original_filename), err=True)
for chunk in r.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
pprint('Successfully downloaded %s to %s' % (original_filename,
local_full_path), True)
|
download_file_helper
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(url, input_path):\n r = requests.get(url, stream=True)\n if r.status_code != 200:\n cli_log.error('Failed to download file: %s' % r.json()['message'])\n local_full_path = get_download_dest(input_path, r.url)\n original_filename = os.path.split(local_full_path)[-1]\n with open(local_full_path, 'wb') as f:\n click.echo('Downloading {}'.format(original_filename), err=True)\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)\n f.flush()\n pprint('Successfully downloaded %s to %s' % (original_filename,\n local_full_path), True)\n",
"role": "user"
},
{
"content": "download_file_helper",
"role": "assistant"
}
] |
def x(f):
for ext in SUPPORTED_EXTENSIONS:
if f.endswith(ext):
return True
log.error('Failed upload: Not an allowed file extension: %s', f)
raise SystemExit
|
check_for_allowed_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(f):\n for ext in SUPPORTED_EXTENSIONS:\n if f.endswith(ext):\n return True\n log.error('Failed upload: Not an allowed file extension: %s', f)\n raise SystemExit\n",
"role": "user"
},
{
"content": "check_for_allowed_file",
"role": "assistant"
}
] |
def x(fp):
home_dir = os.path.expanduser('~')
abs_path = os.path.abspath(fp)
return abs_path.replace(home_dir, '~')
|
collapse_user
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fp):\n home_dir = os.path.expanduser('~')\n abs_path = os.path.abspath(fp)\n return abs_path.replace(home_dir, '~')\n",
"role": "user"
},
{
"content": "collapse_user",
"role": "assistant"
}
] |
def x(fn):
@wraps(fn)
def x(*args, **kwargs):
client = None
if len(args) > 0 and isinstance(args[0], click.Context):
ctx = args[0]
if ctx.obj and ctx.obj.get('API') is not None:
client = ctx.obj['API']._raven_client
elif ctx.params.get('telemetry', False):
client = get_raven_client()
elif ctx.obj and ctx.obj.get('TELEMETRY', False):
client = get_raven_client()
try:
return fn(*args, **kwargs)
except Exception as e:
if client:
client.captureException()
client.context.clear()
sys.stdout = StringIO()
raise e
return telemetry_wrapper
|
telemetry
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fn):\n\n @wraps(fn)\n def x(*args, **kwargs):\n client = None\n if len(args) > 0 and isinstance(args[0], click.Context):\n ctx = args[0]\n if ctx.obj and ctx.obj.get('API') is not None:\n client = ctx.obj['API']._raven_client\n elif ctx.params.get('telemetry', False):\n client = get_raven_client()\n elif ctx.obj and ctx.obj.get('TELEMETRY', False):\n client = get_raven_client()\n try:\n return fn(*args, **kwargs)\n except Exception as e:\n if client:\n client.captureException()\n client.context.clear()\n sys.stdout = StringIO()\n raise e\n return telemetry_wrapper\n",
"role": "user"
},
{
"content": "telemetry",
"role": "assistant"
}
] |
def x(fn):
@wraps(fn)
def x(*args, **kwargs):
try:
fn(*args, **kwargs)
except (OneCodexException, UploadException) as e:
sys.stderr.write('\nERROR: {}'.format(e))
sys.stderr.write(
'\nPlease feel free to contact us for help at help@onecodex.com\n\n'
)
sys.exit(1)
return pretty_errors_wrapper
|
pretty_errors
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fn):\n\n @wraps(fn)\n def x(*args, **kwargs):\n try:\n fn(*args, **kwargs)\n except (OneCodexException, UploadException) as e:\n sys.stderr.write('\\nERROR: {}'.format(e))\n sys.stderr.write(\n '\\nPlease feel free to contact us for help at help@onecodex.com\\n\\n'\n )\n sys.exit(1)\n return pretty_errors_wrapper\n",
"role": "user"
},
{
"content": "pretty_errors",
"role": "assistant"
}
] |
def x(func, *args, **kwargs):
try:
atexit.unregister(func, *args, **kwargs)
except AttributeError:
for i in range(len(atexit._exithandlers)):
if atexit._exithandlers[i] == (func, args, kwargs):
atexit._exithandlers[i] = lambda : None, [], {}
break
|
atexit_unregister
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(func, *args, **kwargs):\n try:\n atexit.unregister(func, *args, **kwargs)\n except AttributeError:\n for i in range(len(atexit._exithandlers)):\n if atexit._exithandlers[i] == (func, args, kwargs):\n atexit._exithandlers[i] = lambda : None, [], {}\n break\n",
"role": "user"
},
{
"content": "atexit_unregister",
"role": "assistant"
}
] |
def x(self):
from skbio.tree import TreeNode
nodes = {}
for tax_id in self.taxonomy.index:
node = TreeNode(name=tax_id, length=1)
node.tax_name = self.taxonomy['name'][tax_id]
node.rank = self.taxonomy['rank'][tax_id]
node.parent_tax_id = self.taxonomy['parent_tax_id'][tax_id]
nodes[tax_id] = node
for tax_id in self.taxonomy.index:
try:
parent = nodes[nodes[tax_id].parent_tax_id]
except KeyError:
if tax_id != '1':
warnings.warn(
'tax_id={} has parent_tax_id={} which is not in tree'.
format(tax_id, nodes[tax_id].parent_tax_id))
continue
parent.append(nodes[tax_id])
return nodes['1']
|
tree_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n from skbio.tree import TreeNode\n nodes = {}\n for tax_id in self.taxonomy.index:\n node = TreeNode(name=tax_id, length=1)\n node.tax_name = self.taxonomy['name'][tax_id]\n node.rank = self.taxonomy['rank'][tax_id]\n node.parent_tax_id = self.taxonomy['parent_tax_id'][tax_id]\n nodes[tax_id] = node\n for tax_id in self.taxonomy.index:\n try:\n parent = nodes[nodes[tax_id].parent_tax_id]\n except KeyError:\n if tax_id != '1':\n warnings.warn(\n 'tax_id={} has parent_tax_id={} which is not in tree'.\n format(tax_id, nodes[tax_id].parent_tax_id))\n continue\n parent.append(nodes[tax_id])\n return nodes['1']\n",
"role": "user"
},
{
"content": "tree_build",
"role": "assistant"
}
] |
def x(self, tree, tax_ids):
tax_ids_to_keep = []
for tax_id in tax_ids:
tax_ids_to_keep.append(tax_id)
tax_ids_to_keep.extend([x.name for x in tree.find(tax_id).ancestors()])
tree = tree.copy()
tree.remove_deleted(lambda n: n.name not in tax_ids_to_keep)
return tree
|
tree_prune_tax_ids
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tree, tax_ids):\n tax_ids_to_keep = []\n for tax_id in tax_ids:\n tax_ids_to_keep.append(tax_id)\n tax_ids_to_keep.extend([x.name for x in tree.find(tax_id).ancestors()])\n tree = tree.copy()\n tree.remove_deleted(lambda n: n.name not in tax_ids_to_keep)\n return tree\n",
"role": "user"
},
{
"content": "tree_prune_tax_ids",
"role": "assistant"
}
] |
def x(self, tree, rank='species'):
if rank is None:
return tree.copy()
tree = tree.copy()
for node in tree.postorder():
if node.rank == rank:
node._above_rank = True
elif any([getattr(n, '_above_rank', False) for n in node.children]):
node._above_rank = True
else:
node._above_rank = False
tree.remove_deleted(lambda n: not getattr(n, '_above_rank', False))
return tree
|
tree_prune_rank
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, tree, rank='species'):\n if rank is None:\n return tree.copy()\n tree = tree.copy()\n for node in tree.postorder():\n if node.rank == rank:\n node._above_rank = True\n elif any([getattr(n, '_above_rank', False) for n in node.children]):\n node._above_rank = True\n else:\n node._above_rank = False\n tree.remove_deleted(lambda n: not getattr(n, '_above_rank', False))\n return tree\n",
"role": "user"
},
{
"content": "tree_prune_rank",
"role": "assistant"
}
] |
def x(file_path, file_elements):
f = open_strings_file(file_path, 'w')
for element in file_elements:
f.write(unicode(element))
f.write(u'\n')
f.close()
|
write_file_elements_to_strings_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, file_elements):\n f = open_strings_file(file_path, 'w')\n for element in file_elements:\n f.write(unicode(element))\n f.write(u'\\n')\n f.close()\n",
"role": "user"
},
{
"content": "write_file_elements_to_strings_file",
"role": "assistant"
}
] |
def x(args=None):
logging_level = logging.WARNING
if args is not None and args.verbose:
logging_level = logging.INFO
config = {'level': logging_level, 'format': 'jtlocalize:%(message)s'}
if args is not None and args.log_path != '':
config['filename'] = args.log_path
logging.basicConfig(**config)
|
setup_logging
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(args=None):\n logging_level = logging.WARNING\n if args is not None and args.verbose:\n logging_level = logging.INFO\n config = {'level': logging_level, 'format': 'jtlocalize:%(message)s'}\n if args is not None and args.log_path != '':\n config['filename'] = args.log_path\n logging.basicConfig(**config)\n",
"role": "user"
},
{
"content": "setup_logging",
"role": "assistant"
}
] |
def x(file_path, localization_entry_attribute_name_for_key):
localization_dictionary = {}
f = open_strings_file(file_path, 'r+')
header_comment_key_value_tuples = (
extract_header_comment_key_value_tuples_from_file(f))
if len(header_comment_key_value_tuples) == 0:
logging.warning(
"Couldn't find any strings in file '%s'. Check encoding and format."
% file_path)
for header_comment, comments, key, value in header_comment_key_value_tuples:
localization_entry = LocalizationEntry(comments, key, value)
localization_dictionary[localization_entry.__getattribute__(
localization_entry_attribute_name_for_key)] = localization_entry
f.close()
return localization_dictionary
|
generate_localization_dictionary_from_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_path, localization_entry_attribute_name_for_key):\n localization_dictionary = {}\n f = open_strings_file(file_path, 'r+')\n header_comment_key_value_tuples = (\n extract_header_comment_key_value_tuples_from_file(f))\n if len(header_comment_key_value_tuples) == 0:\n logging.warning(\n \"Couldn't find any strings in file '%s'. Check encoding and format.\"\n % file_path)\n for header_comment, comments, key, value in header_comment_key_value_tuples:\n localization_entry = LocalizationEntry(comments, key, value)\n localization_dictionary[localization_entry.__getattribute__(\n localization_entry_attribute_name_for_key)] = localization_entry\n f.close()\n return localization_dictionary\n",
"role": "user"
},
{
"content": "generate_localization_dictionary_from_file",
"role": "assistant"
}
] |
def x(file_descriptor):
file_data = file_descriptor.read()
findall_result = re.findall(HEADER_COMMENT_KEY_VALUE_TUPLES_REGEX,
file_data, re.MULTILINE | re.DOTALL)
returned_list = []
for header_comment, _ignored, raw_comments, key, value in findall_result:
comments = re.findall('/\\* (.*?) \\*/', raw_comments)
if len(comments) == 0:
comments = [u'']
returned_list.append((header_comment, comments, key, value))
return returned_list
|
extract_header_comment_key_value_tuples_from_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_descriptor):\n file_data = file_descriptor.read()\n findall_result = re.findall(HEADER_COMMENT_KEY_VALUE_TUPLES_REGEX,\n file_data, re.MULTILINE | re.DOTALL)\n returned_list = []\n for header_comment, _ignored, raw_comments, key, value in findall_result:\n comments = re.findall('/\\\\* (.*?) \\\\*/', raw_comments)\n if len(comments) == 0:\n comments = [u'']\n returned_list.append((header_comment, comments, key, value))\n return returned_list\n",
"role": "user"
},
{
"content": "extract_header_comment_key_value_tuples_from_file",
"role": "assistant"
}
] |
def x(results_dict, file_path):
result_pairs = re.findall(JTL_REGEX, open(file_path).read())
for result_key, result_comment in result_pairs:
results_dict[result_key] = result_comment
return results_dict
|
extract_jtl_string_pairs_from_text_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(results_dict, file_path):\n result_pairs = re.findall(JTL_REGEX, open(file_path).read())\n for result_key, result_comment in result_pairs:\n results_dict[result_key] = result_comment\n return results_dict\n",
"role": "user"
},
{
"content": "extract_jtl_string_pairs_from_text_file",
"role": "assistant"
}
] |
def x(file_descriptor, entry_comment, entry_key):
escaped_key = re.sub('([^\\\\])"', '\\1\\"', entry_key)
file_descriptor.write(u'/* %s */\n' % entry_comment)
file_descriptor.write(u'"%s" = "%s";\n' % (escaped_key, escaped_key))
|
write_entry_to_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_descriptor, entry_comment, entry_key):\n escaped_key = re.sub('([^\\\\\\\\])\"', '\\\\1\\\\\"', entry_key)\n file_descriptor.write(u'/* %s */\\n' % entry_comment)\n file_descriptor.write(u'\"%s\" = \"%s\";\\n' % (escaped_key, escaped_key))\n",
"role": "user"
},
{
"content": "write_entry_to_file",
"role": "assistant"
}
] |
def x(localization_key_to_comment, file_path, section_name):
output_file = open_strings_file(file_path, 'a')
write_section_header_to_file(output_file, section_name)
for entry_key, entry_comment in sorted(localization_key_to_comment.
iteritems(), key=operator.itemgetter(1)):
output_file.write(u'\n')
write_entry_to_file(output_file, entry_comment, entry_key)
output_file.close()
|
append_dictionary_to_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(localization_key_to_comment, file_path, section_name):\n output_file = open_strings_file(file_path, 'a')\n write_section_header_to_file(output_file, section_name)\n for entry_key, entry_comment in sorted(localization_key_to_comment.\n iteritems(), key=operator.itemgetter(1)):\n output_file.write(u'\\n')\n write_entry_to_file(output_file, entry_comment, entry_key)\n output_file.close()\n",
"role": "user"
},
{
"content": "append_dictionary_to_file",
"role": "assistant"
}
] |
def x(file_name, localization_key_to_comment):
output_file_descriptor = open_strings_file(file_name, 'w')
for entry_key, entry_comment in sorted(localization_key_to_comment.
iteritems(), key=operator.itemgetter(1)):
write_entry_to_file(output_file_descriptor, entry_comment, entry_key)
output_file_descriptor.write(u'\n')
output_file_descriptor.close()
|
write_dict_to_new_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(file_name, localization_key_to_comment):\n output_file_descriptor = open_strings_file(file_name, 'w')\n for entry_key, entry_comment in sorted(localization_key_to_comment.\n iteritems(), key=operator.itemgetter(1)):\n write_entry_to_file(output_file_descriptor, entry_comment, entry_key)\n output_file_descriptor.write(u'\\n')\n output_file_descriptor.close()\n",
"role": "user"
},
{
"content": "write_dict_to_new_file",
"role": "assistant"
}
] |