Dataset schema: query (string, 9 to 9.05k characters), document (string, 10 to 222k characters), negatives (list of 19 to 20 items), metadata (dict).
Loads MIXD volume meshes.
def load_volume_mixd(dim, fname=None, mxyz=None, mien=None, hexa=False):
    vertices, elements = mixd_load_(fname, mxyz, mien)
    mesh = Mesh()
    mesh.vertices = vertices.reshape(-1, dim)
    if hexa:
        mesh.elements = elements.reshape(-1, 8)
    else:
        mesh.elements = elements.reshape(-1, 4)
    re...
[ "def load_volume_mesh(fname):\n fname = abs_fname_(fname)\n\n m = meshio.read(fname)\n mesh = Mesh()\n mesh.vertices = m.points\n\n for i, c in enumerate(m.cells):\n if i == 0:\n elements = c.data\n else:\n elements = np.vstack((elements, c.data))\n\n mesh.eleme...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Loads spline files with extension `.iges`, `.xml`, or `.itd`.
def load_splines(fname):
    fname = str(fname)
    fname = abs_fname_(fname)
    sr = splinelibpy.Reader()
    ext = os.path.splitext(fname)[1]
    if ext == ".iges":
        loaded_splines = sr.read_iges(fname)
    elif ext == ".xml":
        loaded_splines = sr.read_xml(fname)
    elif ext == ".itd":
        ...
[ "def load_and_interpolate_data(path, gauge_data):\n files = sorted(list(Path(path).glob(\"*.HDF5\")))\n datasets = []\n for filename in files:\n data = ImergFile(filename).to_xarray_dataset(roi=ROI)\n datasets.append(\n data.interp(\n {\n \"latitud...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if fname is absolute. If not, turns it into an abspath. Tilde safe.
def abs_fname_(fname):
    if os.path.isabs(fname):
        pass
    elif '~' in fname:
        fname = os.path.expanduser(fname)
    else:
        fname = os.path.abspath(fname)
    return fname
[ "def _makeAbsolute(fname):\n if fname[0] != '/':\n return os.path.join(os.getcwd(), fname)\n else:\n return fname", "def absolute_file_path(file_path):\n if type(file_path) is not str:\n return file_path\n else:\n return os.path.abspath(os.path.expanduser(file_path))", "d...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
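The three branches of abs_fname_ above cover absolute, tilde-prefixed, and plain relative paths. A minimal sanity-check sketch of that behavior; the sample paths are illustrative only:

import os

# absolute input is returned unchanged
assert os.path.isabs(abs_fname_('/tmp/mesh.xns'))
# '~' is expanded against the user's home directory
assert '~' not in abs_fname_('~/mesh.xns')
# a bare relative name is resolved against the current working directory
assert os.path.isabs(abs_fname_('mesh.xns'))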
Checks to see if the user is a librarian for certain routes
def librarian(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if current_user.lflag == 0:
            flash("You are not a librarian! Please sign in to a librarian account")
            return redirect(url_for('main'))
        return f(*args, **kwargs)
    return decorated_function
[ "def admin_check(self, path=\"\"):\r\n\r\n try:\r\n usr = self.get_user().name.lower()\r\n except:\r\n usr = ''\r\n\r\n if str(usr.lower()) in admins:\r\n if path:\r\n self.render(path)\r\n else:\r\n return True\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Instantiate a model from local directory or remote model repo. Note that when loading from remote, the model revision can be specified.
def from_pretrained(cls,
                    model_name_or_path: str,
                    revision: Optional[str] = DEFAULT_MODEL_REVISION,
                    cfg_dict: Config = None,
                    device: str = None,
                    **kwargs):
    prefetched = kwargs.get('model_prefe...
[ "def _create_model(self):\n # load moduĺe\n model_module = importlib.import_module(self.model_name)\n\n # import model\n model = model_module.getModel()\n\n return model", "def _load_from(cls, model_state: dict) -> AbstractModel:\n return cls(model=model_state.get('model'...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generates the trading instance objects from their class types. This method attaches all of the trading objects (DataHandler, Strategy, Portfolio, and ExecutionHandler) to various internal members. This ties together all the other classes to the Backtester object.
def _generate_trading_instances(self):
    print("Creating DataHandler, Strategy, Portfolio, and ExecutionHandler for")
    # Set internal data members equal to the classes we passed in earlier, along with necessary parameters.
    # https://softwareengineering.stackexchange.com/questions/131403/what-is-th...
[ "def _generate_trading_instances(self, data_feed):\r\n print \"Creating DataHandler, Strategy, Portfolio, and ExecutionHandler...\"\r\n \r\n if self.data_feed == 1: #HistoricCSVDataHandler\r\n self.data_handler = self.data_handler_cls(self.events, self.csv_dir, self.symbol_list, self...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Executes the backtest. This is where the signal handling of the Backtesting engine is carried out. There are two while loops: the outer loop (heartbeat) and the nested inner loop, which checks if there is an event in the Event Queue object. The inner loop acts on the Event by calling the appropriate method.
def _run_backtest(self):
    i = 0
    while True:
        i += 1
        print(i)
        # Update the market bars
        if self.data_handler.continue_backtest == True:
            self.data_handler.update_bars()
        else:
            break
        ...
[ "def _run_backtest(self):\n i = 0\n\n while True:\n i += 1\n print(i)\n\n # Update the market bars\n if self.data_handler.continue_backtest == True:\n self.data_handler.update_bars()\n else:\n break\n\n # H...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generates the trading instance objects from their class types. This method attaches all of the trading objects (DataHandler, Strategy, Portfolio, and ExecutionHandler) to various internal members. This ties together all the other classes to the Backtester object.
def _generate_trading_instances(self, strategy_params_dict):
    print("Creating DataHandler, Strategy, Portfolio, and ExecutionHandler for")
    print("strategy parameter list: %s..." % strategy_params_dict)
    # Set internal data members equal to the classes we passed in earlier, along with necessary pa...
[ "def _generate_trading_instances(self):\n print(\"Creating DataHandler, Strategy, Portfolio, and ExecutionHandler for\")\n\n # Set internal data members equal to the classes we passed in earlier, along with necessary parameters.\n # https://softwareengineering.stackexchange.com/questions/131403...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Executes the backtest. This is where the signal handling of the Backtesting engine is carried out. There are two while loops: the outer loop (heartbeat) and the nested inner loop, which checks if there is an event in the Event Queue object. The inner loop acts on the Event by calling the appropriate method.
def _run_backtest(self):
    i = 0
    while True:
        i += 1
        print(i)
        # Update the market bars
        if self.data_handler.continue_backtest == True:
            self.data_handler.update_bars()
        else:
            break
        # Handle the Events
        ...
[ "def _run_backtest(self):\r\n i = 0\r\n while True:\r\n i += 1\r\n #Update the market bars\r\n if self.data_handler.continue_backtest == True:\r\n self.data_handler.update_bars()\r\n self.data_handler.update_symbol_list(min_bars=self.strat...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Outputs the strategy performance and other metrics from the backtest.
def _output_performance(self):
    self.portfolio.create_equity_curve_dataframe()
    print("Creating summary statistics...")
    stats = self.portfolio.output_summary_stats()
    print("Creating equity curve...")
    print(self.portfolio.equity_curve.tail(10))
    pprint.pprint(stats)
    ...
[ "def _output_performance(self):\r\n\r\n self.portfolio.create_equity_curve_dataframe()\r\n self.portfolio.create_positioning_dataframe()\r\n \r\n print \"Creating summary stats...\"\r\n stats = self.portfolio.output_summary_stats()\r\n \r\n print \"Creating the equit...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Simulates the backtest and outputs portfolio performance. Loops over all variants of strategy parameters of a space generated by a cartesian product of hyperparameter values. Generates new instances of all the data handlers, event queues, and portfolio objects upon each iteration, in order to ensure a "clean slate" for...
def simulate_trading(self):
    # Create the file output stream
    posix_now = datetime.datetime.timestamp(datetime.datetime.now())
    out_path = os.getcwd() + "/OutputResults/backtest_{}".format(posix_now)[:-7:] + ".csv"
    out = open(out_path, "w+")
    spl = len(self.strat_params_list)
    ...
[ "def _executeStrategies(self):\n \n for i in self.portfolio.strategies:\n \n #i.initialise()\n \n #print \"Tickers are:\", i.tickers\n \n for idx, ticker in enumerate(i.tickers):\n # Setup the strategy for each ins...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Anonymous users can make `whoami` requests. They receive a 401 response confirming they are not logged in.
def test_whoami_by_anonymous_user(self):
    response = self.client.get("/api/users/whoami/")
    self.assertEqual(response.status_code, 401)
[ "def test_anonymous_access(self):\n self.send_get(self.anonymous_client, expected_status=401)\n self.send_patch(self.anonymous_client, {}, expected_status=401)", "def test_get_anonymous_user(self):\n user = self.get_anonymous_user()\n self.assertFalse(user.is_authenticated())\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Fetch node data using k8s API
def get_node_data(cluster_id):
    try:
        # fetching the token from secret of the namespace 'dashboard'
        _TOKEN = [base64.b64decode(secret_item.data['token']).decode('UTF-8')
                  for secret_item in client.CoreV1Api().list_namespaced_secret('dashboard').items
                  if base64.b64decode(secret_item.data['names...
[ "def api_node_read(request):\n\n return api_read_by_params(request, 'Node')", "def describe_kubernetes_resource_on_node(node, nspace):\n ssh = SSH()\n ssh.connect(node)\n\n nspace = '-n {nspace}'.format(nspace=nspace) if nspace else ''\n\n cmd = 'kubectl describe {nspace} all'.forma...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Fetch compute cell data
def get_compute_cell_data(cluster_id=None, namespace_id=None):
    cells_info = client.CustomObjectsApi().list_cluster_custom_object('kiyot.elotl.co', 'v1beta1', 'cells')
    return cells_info
[ "def compute(self, data):", "def fetch_data():\n data.fetch_data()\n data.start_updating()", "def get_nypd_complaint_results():\n data = None\n blob = BUCKET.blob(NYPD_COMPLAINT_FNAME)\n if blob.exists():\n blob.reload(client=STORAGE_CLIENT)\n if blob.time_created.strftime(\"%Y-%m-%...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get count of resources for requested cluster and namespace
def get_resource_count(cluster_id, namespace_id=None):
    # fetching namespaced resource count
    if namespace_id:
        # Deployment count
        deployment_count = len(client.AppsV1beta2Api().list_namespaced_deployment(namespace_id).items)
        # Pod count
        pod_items = client.CoreV1Api().list_namespace...
[ "def count_resources(self):\n total = 0\n for dataset in self.datasets:\n distribution = dataset.get('distribution', [])\n total += len(distribution)\n return total", "def get_cluster_count(self) -> int:\n return len(self.get_all_cluster_ids())", "def resource_c...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get cluster capacity from node detail
def get_cluster_capacity_info(cluster_id):
    cpu_capacity_info = get_node_data(cluster_id)
    cpu_capacity_in_cores = round(unit_conversion(sum(
        [int(''.join(filter(str.isdigit, str(item['status']['allocatable']['cpu']))))
         for item in cpu_capacity_info]), 'm'), 2)
    memory_capacity_in_gib = round(sum( ...
[ "def GetCapacity(ctx):\n \"\"\"The fields returned from this method can be used to calculate the efficiency rates that are displayed in the Element User Interface.\"\"\"\n if ctx.element is None:\n ctx.logger.error(\"You must establish at least one connection and specify which you intend to use.\")\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get resource usage information from pod usage.
def get_cluster_usage_info(cluster_id, kind, namespace_id=None, pods_list=None):
    if pods_list is None:
        pods_list = []
    else:
        logger.info('pod list not none')
    if pods_list == 'no_pod_resource':
        return {'cpu': 0, 'memory': 0}
    else:
        logger.info('resources no 0')
        ...
[ "def resources_metrics(self, pod_name, namespace, command=COMMAND, process=\"python\"):\n cpu = None\n memory = None\n gpu_util_memory = []\n try:\n resp = stream(self.k8s_coreapi.connect_get_namespaced_pod_exec, pod_name, namespace,\n command=command,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Provides random mock values for resource capacity and usage.
def randomise(mock_info):
    mock_info["resource_info"]["usage"]["cpu"] = round(random.uniform(0, 1), 2)
    mock_info["resource_info"]["usage"]["cpu_percentage"] = round(random.uniform(0, 1), 2)
    mock_info["resource_info"]["usage"]["memory"] = round(random.uniform(0, 1), 2)
    mock_info["resource_info"]["usage"][...
[ "def random_usage(self):\n return randrange(0, 101)", "def test_capacity(self):\n\t\tself.assertEqual(self.settings.knapsack_cap, DEFAULT_KS_CAPACITY)", "def test_glass_capacity__has_expected_default_value():\n glass = moet.create_glass(\"A\")\n assert glass.capacity == 250", "def test_capacity(s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns N samples from the prior.
def sample_from_prior(self, n_samples):
    pass
[ "def sample_prior(self, n_samples):\n return np.random.normal(size=[n_samples, self.n_z])", "def sample_from_prior(self, n_samples):\n\n p0 = self.min + self.rng.rand(n_samples) * (self.max - self.min)\n return p0[:, np.newaxis]", "def sample_from_prior(self):\n raise NotImplementedE...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Computes the gradient of the prior with respect to theta.
def gradient(self, theta):
    pass
[ "def grad_loss(self, *args):\n theta,X,y = args\n m,dim = X.shape\n grad = np.zeros((dim,))\n\n ##########################################################################\n # Compute the gradient of the loss function for unregularized logistic #\n # regression ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns N samples from the prior.
def sample_from_prior(self, n_samples):
    p0 = self.min + self.rng.rand(n_samples) * (self.max - self.min)
    return p0[:, np.newaxis]
[ "def sample_from_prior(self, n_samples):\n pass", "def sample_prior(self, n_samples):\n return np.random.normal(size=[n_samples, self.n_z])", "def sample_from_prior(self):\n raise NotImplementedError", "def get(self, n):\n return random.sample(self.samples, n)", "def sample_from_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
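For context, a self-contained sketch of the uniform-prior sampling shown above: scale uniform draws in [0, 1) to [min, max) and return them as a column vector. The class name, bounds, and seed here are hypothetical:

import numpy as np

class UniformPrior:
    def __init__(self, lo, hi, seed=0):
        self.min = lo
        self.max = hi
        self.rng = np.random.RandomState(seed)

    def sample_from_prior(self, n_samples):
        # stretch uniform draws in [0, 1) onto the interval [min, max)
        p0 = self.min + self.rng.rand(n_samples) * (self.max - self.min)
        # shape (n_samples, 1): one sample per row
        return p0[:, np.newaxis]

samples = UniformPrior(-2.0, 3.0).sample_from_prior(5)
print(samples.shape)  # (5, 1)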
Computes the gradient of the prior with respect to theta.
def gradient(self, theta):
    pass
[ "def grad_loss(self, *args):\n theta,X,y = args\n m,dim = X.shape\n grad = np.zeros((dim,))\n\n ##########################################################################\n # Compute the gradient of the loss function for unregularized logistic #\n # regression ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Find a square that forms a bracket with `square` for `player` in the given `direction`. Returns None if no such square exists. Returns the index of the bracketing square if found.
def find_bracket(self, square, player, board, direction):
    curr = square + direction
    opp = self.opponent(player)
    if board[curr] != opp:
        return None
    while self.is_valid(curr) and board[curr] == opp:
        curr += direction
    if self.is_valid(curr) and board[curr] ...
[ "def find_bracket(square, player, board, direction):\n bracket = square + direction\n if board[bracket] == player:\n return None\n opp = Othello.opponent(player)\n while board[bracket] == opp:\n bracket += direction\n return None if board[bracket] in (OUTER, ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Flip pieces in the given direction as a result of the move by player.
def make_flips(self, move, player, board, direction):
    curr = move + direction
    opp = self.opponent(player)
    while board[curr] == opp:
        board[curr] = player
        curr += direction
    #return board
[ "def flip_in_direction(direction):\n row, col = increment_row_col(action.row, action.col, direction)\n if promising(row, col):\n flips = []\n while promising(row, col):\n flips.append([row, col])\n row, col = increment_row_col...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Can player make any moves? Returns a boolean.
def any_legal_move(self, player, board):
    moves = self.legal_moves(player, board)
    #print(moves)
    return len(moves) != 0
[ "def valid_move(self, player, move):\n return (True)", "def is_move_possible(self, player: Player, move: str):\n if move == 'skip': return True, 'Just chill out man'\n\n if move[:4] == 'move' or move in ['w', 's', 'a', 'd']:\n direction = move[5:]\n if move == 'w' or dir...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Which player should move next? Returns None if no legal moves exist.
def next_player(self, board, prev_player):
    opp = self.opponent(prev_player)
    isOpp = self.any_legal_move(opp, board)
    isPrev = self.any_legal_move(prev_player, board)
    if isOpp == False and isPrev == False:
        return None
    elif isOpp == False and isPrev == True:
        ...
[ "def next_player(board, prev_player):\n opp = Othello.opponent(prev_player)\n if Othello.any_legal_move(opp, board):\n return opp\n elif Othello.any_legal_move(prev_player, board):\n return prev_player\n return None", "def decide_next_player(self):\n move_c...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute player's score (number of player's pieces minus opponent's).
def score(self, player, board):
    numPlayer = 0
    numOpp = 0
    for i in self.squares():
        if board[i] == player:
            numPlayer += SQUARE_WEIGHTS[i]
        else:
            numOpp += SQUARE_WEIGHTS[i]
    return numPlayer - numOpp
[ "def score2(self,player, board):\r\n numPlayer = 0\r\n numOpp = 0\r\n for i in self.squares():\r\n if board[i] == player:\r\n numPlayer+= 1\r\n else:\r\n numOpp+=1\r\n return numPlayer-numOpp", "def calculate_winning_score(self) -> in...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute player's score (number of player's pieces minus opponent's).
def score2(self, player, board):
    numPlayer = 0
    numOpp = 0
    for i in self.squares():
        if board[i] == player:
            numPlayer += 1
        else:
            numOpp += 1
    return numPlayer - numOpp
[ "def score(self,player, board):\r\n numPlayer = 0\r\n numOpp = 0\r\n for i in self.squares():\r\n if board[i] == player:\r\n numPlayer+= SQUARE_WEIGHTS[i]\r\n else:\r\n numOpp+=SQUARE_WEIGHTS[i]\r\n return numPlayer-numOpp", "def calc...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Clip the values of x to the range [eps, 1-eps] and renormalize them so that they sum to 1.
def clip_and_renorm(x, eps=1e-5):
    x = np.clip(x, eps, 1 - eps)
    return x / x.sum()
[ "def threshold_and_normalize_pixels(x, eps=1e-2):\n x = torch.clamp(x, min=eps)\n x = x / torch.sum(x, dim=1, keepdim=True)\n return x", "def normalize(x):\n sumx = sum(x)\n y = []\n for xi in x:\n xi = xi*(1./sumx)\n y.append(xi)\n return y", "def scale_to_start(x):\n x = ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
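A worked example of the clip-and-renormalize step above: a hard 0/1 distribution is pulled eps away from the boundary and rescaled so it sums to 1 again.

import numpy as np

def clip_and_renorm(x, eps=1e-5):
    # keep every probability strictly inside (0, 1)
    x = np.clip(x, eps, 1 - eps)
    # rescale so the clipped vector is a distribution again
    return x / x.sum()

p = np.array([1.0, 0.0, 0.0])
print(clip_and_renorm(p))
# -> approximately [0.99998, 1e-05, 1e-05]; strictly inside (0, 1) and summing to 1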
Run the sum-product belief propagation for a single ray, accumulating the occupancy-to-ray messages in log space and producing the new ray-to-occupancy messages.
def single_ray_belief_propagation(ray_voxel_indices,
                                  ray_to_occupancy_accumulated_pon,
                                  ray_to_occupancy_pon,
                                  s):
    # Create an index that when passed to a numpy array will return the voxels
    # that this ray passes through
    # TODO: Remove this c...
[ "def belief_propagation(\n S,\n ray_voxel_indices,\n ray_voxel_count,\n ray_to_occupancy_messages_pon,\n grid_shape,\n gamma=0.05,\n bp_iterations=3,\n progress_callback=lambda *args: None\n):\n # Extract the number of rays\n N, M = S.shape\n\n # Initialize the ray to occupancy mess...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run the belief propagation for a set of rays
def belief_propagation(
    S,
    ray_voxel_indices,
    ray_voxel_count,
    ray_to_occupancy_messages_pon,
    grid_shape,
    gamma=0.05,
    bp_iterations=3,
    progress_callback=lambda *args: None
):
    # Extract the number of rays
    N, M = S.shape
    # Initialize the ray to occupancy messages to uniform
    ...
[ "def _run_belief_propagation(self, uncalibrated_jt) -> nx.DiGraph:\n raise NotImplementedError(\"Not implemented by student\")", "def propagate_rays(self, optical_elements = []):\r\n for optical_element in optical_elements:\r\n for i in self.__ray_list:\r\n optical_element....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Plot stats for an optimization run property specified by opt_run_property. It is possible to plot a histogram or a line plot. In a line plot, on the x axis are the numbers of the multistarts, where the multistarts are ordered with respect to a function value. On the y axis of the line plot the value of the correspondin...
def optimization_run_property_per_multistart(
    results: Union[Result, Sequence[Result]],
    opt_run_property: str,
    axes: Optional[matplotlib.axes.Axes] = None,
    size: Tuple[float, float] = (18.5, 10.5),
    start_indices: Optional[Union[int, Iterable[int]]] = None,
    colors: Optional[Union[List[float], Lis...
[ "def plot_stats(values, path='', experiment='', run_type='', x_var_name='', plot_agg=True, plot_runs=True, smth_wnd=10,\n\t\t\t show=True, save=True):\n\n\tif experiment is not None or experiment != '':\n\t\texperiment = '_' + experiment\n\n\tif path != '' and path[-1] != '/':\n\t\tpath = path + '/'\n\n\tfig = pl...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks a row & peg combination to see if it refers to a real place in the triangle.
def is_valid(row, peg):
    return (
        (row < TRI_SIZE) and
        (row >= 0) and
        (peg < TRI_SIZE) and
        (peg >= 0) and
        (peg <= row)
    )
[ "def is_row_echelon(a):\n n = len(a)\n p = -1\n for i in range(n):\n j = 0\n while j <= n:\n if j == n:\n p = n\n break\n elif a[i][j] == 0:\n j += 1\n elif j > p:\n p = j\n break\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a copy of the triangle (faster than deepcopy).
def copy_triangle(tri):
    return [[peg for peg in row] for row in tri]
[ "def copy(self):\n return Triangle2D(self._points[0], self._points[1], self._points[2])", "def clone(self):\n\t\t\n\t\treturn Triangle([ point[:] for point in self.points ])", "def copy(self):\n return vertex(self.x, self.y, self.z)", "def flipped(self):\n return Triangle([self[0], self[2...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Performs a jump between an occupied (row, peg) tuple A and an unoccupied C, passing over B. If anything is bad with the jump, returns False; otherwise returns True.
def jump(tri, A, B, C):
    start_row, start_peg = A
    mid_row, mid_peg = B
    end_row, end_peg = C
    # Check to make sure A is occupied and B is clear
    if tri[start_row][start_peg] == False:
        return False
    if tri[end_row][end_peg]:
        return False
    # Make sure we're jumping over an occupied space.
    if t...
[ "def valid_jump(a, b):\n return (a[0] != b[0] and a[1] != b[1] and a[0] - a[1] != b[0] - b[1]\n and a[0] + a[1] != b[0] + b[1])", "def op_jump_postconditions(self,oldPieceCoords,newPieceCoords):\n\n # Start of new state constrution\n next_gs_board = Board.from_binary_matrix(sel...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a (mid_row, mid_peg) tuple between (start_row, start_peg) and (end_row, end_peg).
def mid(start_row, start_peg, end_row, end_peg):
    if start_row + 2 == end_row:
        mid_row = start_row + 1
    elif start_row == end_row + 2:
        mid_row = start_row - 1
    elif start_row == end_row:
        mid_row = start_row
    if start_peg + 2 == end_peg:
        mid_peg = start_peg + 1
    elif start...
[ "def startAndEnd(self):\n upperRow = 0\n upperCol = 0\n lowerRow = 0\n lowerCol = 0\n if self.selectionMode == kSelectionNone:\n upperRow = self.penRow\n upperCol = self.penCol\n lowerRow = self.penRow\n lowerCol = self.penCol\n e...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
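Since a legal jump moves 0 or +/-2 in each coordinate, the branchy midpoint computation above reduces to a coordinate average. A compact, hypothetical equivalent (mid_avg is not in the original) under that assumption:

def mid_avg(start_row, start_peg, end_row, end_peg):
    # with offsets of 0 or +/-2, the midpoint is the plain average
    return ((start_row + end_row) // 2, (start_peg + end_peg) // 2)

assert mid_avg(2, 0, 4, 2) == (3, 1)  # diagonal jump passes over (3, 1)
assert mid_avg(4, 2, 4, 0) == (4, 1)  # horizontal jump passes over (4, 1)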
Searches, using recursive backtracking.
def search(tri, history=[]):
    count = 0
    children = []
    for start_row in range(len(tri)):
        for start_peg in range(len(tri[start_row])):
            if tri[start_row][start_peg] == True:
                count += 1
                for end_row, end_peg in jump_lookup[(start_row, start_peg)]:
                    ...
[ "def find_recurse (search, indices) :\n assert type(indices) is dict\n assert not search.tag == ''\n if (debug) :\n print search.tag\n if (verbose) :\n print indices\n\n for index in indices.keys() :\n if (debug) :\n print str(index) + '\\t' + str(indices[index...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create a Redis connection from a URI.
def connect_redis(uri):
    puri = urlparse.urlparse(uri)
    host = puri.hostname
    port = puri.port
    password = puri.password if puri.password else ''
    db_name = puri.path.split('/')[1]
    r = redis.Redis(host=host, port=port, password=password, db=db_name)
    assert r.ping()
    return r
[ "def conn_redis(host, port, db=0):\r\n r = redis.Redis(host=host, port=port, db=db)\r\n return r", "def connect_to_redis():\n return Redis(host=redis_host, port=redis_port, db=0)", "def _redis_from_dsn(self, dsn):\r\n import redis\r\n parts = urlparse(dsn)\r\n _, _, netloc = parts....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
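Hypothetical usage of connect_redis above; the password, host, port, and db index in the URI are made up, and map onto the hostname, port, password, and path fields parsed by urlparse:

# URI layout: redis://:password@host:port/db
r = connect_redis('redis://:s3cret@localhost:6379/0')
r.set('greeting', 'hello')
print(r.get('greeting'))  # b'hello'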
Updates next_waypoint based on base_waypoints and current_pose. Returns True if a valid waypoint has been updated, False otherwise.
def _update_next_waypoint(self):
    if not self.base_waypoints:
        #rospy.logwarn("Waypoints not updated: base_waypoints not available yet.")
        return False
    if not self.current_pose:
        #rospy.logwarn("Waypoints not updated: current_pose not available yet.")
        return ...
[ "def update_final_waypoints(self):\n #rospy.logdebug('WaypointUpdater::update_final_waypoints (enter)')\n theta = get_yaw(self.pose_stamped)\n next_wp_index = self.next_waypoint(get_position(self.pose_stamped), theta)\n # start with an empty list to publish\n final_waypoints = []\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates next_waypoint based on current_pose and base_waypoints, generates the list of the next LOOKAHEAD_WPS waypoints, updates their velocities, and publishes them to "/final_waypoints".
def update_and_publish(self):
    # 1. Find next_waypoint based on ego position & orientation
    if self._update_next_waypoint():
        # 2. Generate the list of next LOOKAHEAD_WPS waypoints
        num_base_wp = len(self.base_waypoints)
        last_base_wp = num_base_wp - 1
        waypoint_...
[ "def update_final_waypoints(self):\n #rospy.logdebug('WaypointUpdater::update_final_waypoints (enter)')\n theta = get_yaw(self.pose_stamped)\n next_wp_index = self.next_waypoint(get_position(self.pose_stamped), theta)\n # start with an empty list to publish\n final_waypoints = []\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Restore original velocities of points
def restore_velocities(self, indexes):
    for idx in indexes:
        self.set_waypoint_velocity(self.base_waypoints, idx, self.base_wp_orig_v[idx])
[ "def set_velocities(self):\r\n self.wx = np.copy(Turbine.wzero)\r\n self.wy = np.copy(Turbine.wzero)", "def reset_velocity(self):\n self.current_vel[0] = 0\n self.current_vel[1] = 0", "def new_velocity(self):\n self.velocity = self.vafter", "def revolver(self):\r\n\t\tself._...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Decelerates a list of waypoints so that they stop at stop_index.
def decelerate(self, waypoints, stop_index, stop_distance):
    if stop_index <= 0:
        return
    dist = self.distance(waypoints, 0, stop_index)
    step = dist / stop_index
    # Generate waypoint velocity by traversing the waypoint list backwards:
    # - Everything beyond stop_index wil...
[ "def plan_stop(wps, idx, min_decel, max_decel, speed_limit):\n\n if idx < 0:\n return []\n\n wps = wps[0: idx+1]\n\n # Calculate the acceleration needed to stop the car at the last waypoint in wps\n path_length = distance(wps, 0, len(wps)-1)\n a = -wps[0].twist.twist.linear.x**2/(2*path_length...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compare two waypoints to see whether they are the same (within 0.5 m and 0.5 m/s)
def is_same_waypoint(self, wp1, wp2, max_d=0.5, max_v=0.5):
    dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
    ddif = dl(wp1.pose.pose.position, wp2.pose.pose.position)
    if ddif < max_d:
        return True
    return False
[ "def coords_equal(wp1, wp2, approx=True):\n if approx:\n d = gps_dist(wp1, wp2)\n if d >= GPS_ERROR:\n rospy.logdebug(\"Lists are not the same because WPs are %2.6f apart\"%d)\n return d < GPS_ERROR\n else:\n return wp1.lat==wp2.lat and wp1.lon==wp2.lon and wp1.alt==wp2....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensures that a capital can only belong to one country
def test_capital_unicity(self):
    # Get Bangkok
    bangkok = Country.objects.get(iso3="THA").capital
    # Get United States
    united_states = Country.objects.get(iso3="USA")
    # Initialize assertRaises block
    with self.assertRaises(IntegrityError):
        # Set the capital of...
[ "def validate_country_field(**kwargs):\n valid_countries = [country.value for country in CountryType]\n country_name = kwargs['country']\n if country_name not in valid_countries:\n raise AttributeError(\"Not a valid country\")", "def validate(s):\n s=s.upper()\n if not Area.objects.filter(tw...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensures that the cleaning of UN member status behaves as expected
def test_un_member_status(self):
    # Get Hong Kong
    hong_kong = Country.objects.get(iso3="HKG")
    # Assert that is_un_member_at is None
    self.assertEqual(hong_kong.is_un_member_at, None)
    # Initialize assertRaises block
    with self.assertRaises(ValidationError):
        # ...
[ "def clean(self, uid, states=None):\n\n # doesn't change status", "def _clean( self ):\n\t\tself.__is_dirty = False", "def clean_up(self):\n ...", "def clean(self):\n # Perform the standard ACE cleaning\n max_status = mm_ace.clean(self)\n\n # Replace bad values with NaN and remove times...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Removes all erroneous colors, replacing each with the color most commonly found in its direct neighborhood.
def clean(data, out, npcolors):
    prev_err = 0
    new_err = 0
    old = data.copy()
    for r in range(data.shape[0]):
        for c in range(data.shape[1]):
            found = -1
            for i, col in enumerate(npcolors):
                if data[r, c] == col:
                    found = i
            if fo...
[ "def checkToRemoveColours(self):\n found_colours = []\n for b in self.board_bubbles:\n if b != 0:\n found_colours.append(b.colour)\n\n # print(found_colours)\n index_to_pop = []\n for i in range(0, len(self.future_bubbles)):\n if self.future_bu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Implements calling an sklearn metric on a dataset.
def __call__(self, dataset: 'SklearnCompatible', dropna: bool = False) -> float:
    assert hasattr(dataset, 'target'), 'Dataset should have target to calculate metric'
    if self.one_dim:
        assert dataset.shape[1] == 1, 'Dataset should have single column if metric is one_dim'
    # TODO: maybe r...
[ "def evaluate(self, dataset):\n\t\tpass", "def compute(cls, dataset):\n return dataset", "def evaluate(self, dataset, *args, **kwargs):\n\n losses = []\n for sample in dataset:\n output = self.predict(sample, *args, **kwargs)\n losses.append(self.metric_loss(output, sa...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a metric for a dataset: gets a LAMLMetric that is called on the dataset.
def get_dataset_metric(self) -> LAMLMetric:
    # for now - case of sklearn metric only
    one_dim = self.name in _one_dim_output_tasks
    dataset_metric = SkMetric(self.metric_func, name=self.metric_name,
                              one_dim=one_dim, greater_is_better=self.greater_is_better)
    ...
[ "def create_metric(self) -> EvalMetric:\n pass", "def _create_metric(self, metric_class, name, description):\n return metric_class(\n name, description, namespace=self.namespace, labelnames=self.labels, registry=self.registry\n ).labels(*self.label_values)", "def create_metric(se...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Collects entries in rootdir's basedir directory, which is always relative to rootdir.
def _collect_entries(rootdir: str, basedir: str):
    files = []
    dirs = []
    for entry in os.listdir(os.path.join(rootdir, basedir)):
        rel_path = os.path.join(basedir, entry)
        full_path = os.path.join(rootdir, rel_path)
        isdir = os.path.isdir(full_path)
        if isdir and (rel_path in ('....
[ "def _directories_in_root(self):\n\n if self._root_directory_listing is None:\n self._root_directory_listing = []\n for root, _, _ in os.walk(self._root, topdown = False):\n self._root_directory_listing.append(root)\n\n return self._root_directory_listing", "def ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the MD5 hash's hexdigest based on the non-git, non-pycache entries of root_dir. The purpose is to check whether two directories are identical apart from modification dates, for instance when they sit on different machines and file transfer would be costly.
def python_repo_hash_md5(root_dir: str, *, verbose: bool = False):
    m = hashlib.md5()
    for e in _collect_entries(root_dir, '.'):
        if verbose:
            log_info('Processing e', e)
        m.update(
            f"path={e['path']}\tisdir={e['isdir']}\tsize={e['size']}\tmode={e['mode']:03o}\tmtime={e['mtime...
[ "def calculate_md5_of_dir(self, verbose=0):\n directory = self.cfg['sharing_path']\n if verbose:\n start = time.time()\n md5Hash = hashlib.md5()\n if not os.path.exists(directory):\n self.stop(1, 'Error during calculate md5! Impossible to find \"{}\" in user folder'...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
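The idea above is to fold one canonical metadata line per entry into a single digest, so that identical trees hash identically wherever they live. A simplified, self-contained sketch (without the git/__pycache__ filtering of _collect_entries, and deliberately leaving mtime out of the line so copies compare equal):

import hashlib
import os

def dir_metadata_md5(root_dir):
    m = hashlib.md5()
    for dirpath, dirnames, filenames in os.walk(root_dir):
        dirnames.sort()  # make the walk order deterministic
        for name in sorted(filenames):
            full = os.path.join(dirpath, name)
            rel = os.path.relpath(full, root_dir)
            st = os.stat(full)
            # one canonical line per file: path, size, permission bits
            m.update(f"path={rel}\tsize={st.st_size}\tmode={st.st_mode & 0o777:03o}\n".encode())
    return m.hexdigest()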
Computes the overall style cost from several chosen layers
def compute_style_cost(model, STYLE_LAYERS):
    # initialize the overall style cost
    J_style = 0
    for layer_name, coeff in STYLE_LAYERS:
        # Select the output tensor of the currently selected layer
        out = model[layer_name]
        # Set a_S to be the hidden layer activation from the layer we have...
[ "def compute_style_cost(sess, model, style_layers):\n J_style = 0\n\n for layer_name, coeff in style_layers:\n out = model[layer_name]\n a_S = sess.run(out)\n a_G = out\n J_style_layer = compute_layer_style_cost(a_S, a_G)\n J_style += coeff * J_style_layer\n return J_styl...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deactivate an ApiOAuth2Application. Does not delete the database record, but revokes all tokens and sets a flag that hides this instance from the API.
def deactivate(self, save=False):
    client = cas.get_client()
    # Will raise a CasHttpError if deletion fails, which will also stop setting of active=False.
    resp = client.revoke_application_tokens(self.client_id, self.client_secret)  # noqa
    self.is_active = False
    if save:
        ...
[ "def deactivate(self):\n api_args = {'deactivate': True}\n response = DynectSession.get_session().execute(self.uri, 'PUT',\n api_args)\n self._build(response['data'])", "def revoke_api_access(application):\n try:\n file = open(PA...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reset the secret of an ApiOAuth2Application. Revokes all tokens.
def reset_secret(self, save=False):
    client = cas.get_client()
    client.revoke_application_tokens(self.client_id, self.client_secret)
    self.client_secret = generate_client_secret()
    if save:
        self.save()
    return True
[ "def reset_secret_key(self):\n self.secret_key = utils.generate_uuid(3)\n self.save()", "def refresh_token():\n try:\n deserialized_message = peek_app_token()\n app_id = deserialized_message.get('app_id')\n installation_id = deserialized_message.get('installation_id')\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deactivate an ApiOAuth2PersonalToken. Does not delete the database record, but hides this instance from the API.
def deactivate(self, save=False):
    client = cas.get_client()
    # Will raise a CasHttpError if deletion fails for any reason other than the token
    # not yet being created. This will also stop setting of active=False.
    try:
        resp = client.revoke_tokens({'token': self.token_id})  # no...
[ "def deactivate(self, save=False):\n client = cas.get_client()\n # Will raise a CasHttpError if deletion fails, which will also stop setting of active=False.\n resp = client.revoke_application_tokens(self.client_id, self.client_secret) # noqa\n\n self.is_active = False\n\n if sav...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calls the csvfileUsage method to start parsing.
def __call__(self):
    return self.csvfileUsage()
[ "def concavity_from_csv():\n\tpass\n\t# COPYPASTED FROM OTHER FILE, NEED TO ADAPT\n\t# default_param = AGPD.get_common_default_param()\n\t# default_param[\"already_preprocessed\"] = False\n\t# default_param[\"X_source\"] = None\n\t# default_param[\"Y_source\"] = None\n\t# default_param[\"min_elevation\"] = None\n\t...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks for valid CSV data.
def check_valid_csv_data(self, row):
    obj = re.match(re.compile('^[0-9]{4}\,[A-Z]{1}[a-z]{2}\,.'), ','.join(row))
    if not obj:
        raise Exception("Invalid Data String must be like `1990` `Jan` Check Sample file")
[ "def check_valid_csvformat(self, csv_path):\n with open(self.csv_path, \"rb+\") as file_obj:\n reader = csv.reader(file_obj, delimiter=',') # CSV DictReader object\n self.check_valid_csv_header(reader.next())\n self.check_valid_csv_data(reader.next())", "def verify_csv(csv...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if the CSV is in a valid format, with valid data.
def check_valid_csvformat(self, csv_path):
    with open(self.csv_path, "rb+") as file_obj:
        reader = csv.reader(file_obj, delimiter=',')  # CSV DictReader object
        self.check_valid_csv_header(reader.next())
        self.check_valid_csv_data(reader.next())
[ "def check_valid_csv_data(self, row):\n obj = re.match(re.compile('^[0-9]{4}\\,[A-Z]{1}[a-z]{2}\\,.'),\n ','.join(row))\n if not obj:\n raise Exception(\"Invalid Data String must be like `1990` `Jan` Check Sample file\")", "def validate_csv(filen...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
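The regex in check_valid_csv_data above expects each row to start with a four-digit year, then a capitalized three-letter month, then at least one more character. A few illustrative matches (same pattern, written as a raw string):

import re

pattern = re.compile(r'^[0-9]{4},[A-Z][a-z]{2},.')

print(bool(pattern.match('1990,Jan,42')))  # True
print(bool(pattern.match('1990,JAN,42')))  # False: month must look like 'Jan'
print(bool(pattern.match('90,Jan,42')))    # False: year must be four digits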
Prepare the company's data
def prepare_company_data(self, month, year, row, company_data):
    for key, value in row.items():
        if not company_data[key]:
            company_data[key] = {'year': year, 'month': month, 'value': value}
        else:
            """main operation updating the company's data per year ...
[ "def prepare_data(self):", "def load_company_data(apps,schema_editor):\n DefaultCompany = apps.get_model(\"company\", \"Company\")\n DefaultUser = apps.get_model(\"accounts\", \"User\")\n\n comp = DefaultCompany.objects.create(\n name=\"Superrecord Management System\",\n address=\"P.O.Box x...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a WARC record and returns the domain of the target URI, plus a Counter over the domains of outlinked pages if these exist.
def parse_links(record):
    try:
        page_url = record['WARC-Header-Metadata']['WARC-Target-URI']
        page_domain = urlparse.urlparse(page_url).netloc
        links = record['Payload-Metadata']['HTTP-Response-Metadata']['HTML-Metadata']['Links']
        out_links = Counter([urlparse.urlparse(url['url']).netloc...
[ "def parse_urls(record):\n url_list = []\n try:\n page_url = record['WARC-Header-Metadata']['WARC-Target-URI']\n x = urlparse.urlparse(page_url)\n if len(x.path) > 1:\n url_list += [(x.netloc, x.path, 1)]\n except:\n pass\n try: \n links = record['Payload...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a WARC record and outputs all pairs (domain, path) from URIs, if these exist. It searches both the target URI and outlinks and does not distinguish between them.
def parse_urls(record):
    url_list = []
    try:
        page_url = record['WARC-Header-Metadata']['WARC-Target-URI']
        x = urlparse.urlparse(page_url)
        url_list += [(x.netloc, x.path)]
    except:
        pass
    try:
        links = record['Payload-Metadata']['HTTP-Response-Metadata']['HTML-Metada...
[ "def parse_urls(record):\n url_list = []\n try:\n page_url = record['WARC-Header-Metadata']['WARC-Target-URI']\n x = urlparse.urlparse(page_url)\n if len(x.path) > 1:\n url_list += [(x.netloc, x.path, 1)]\n except:\n pass\n try: \n links = record['Payload...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a domain and concatenates it with path URIs, separated by newlines.
def domain_string(domain, path_set):
    out = domain + '\n' + '\n'.join(list(path_set)) + '\n\n\n'
    return out
[ "def merge_link(url_domain, url_path):\n\n # Ensure domain is not empty\n if url_domain.strip() == \"\":\n return url_path\n\n # Strip / at end of domain\n if url_domain[-1] == \"/\":\n url_domain = url_domain[0:-1]\n\n # Strip / at beginning of path\n if url_path[0] == \"/\":\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
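A small usage sketch of domain_string above; note each block ends with two blank lines, and set ordering is arbitrary:

print(domain_string('example.com', {'/a', '/b'}))
# example.com
# /a
# /b
# (followed by two blank lines separating domain blocks)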
Creates a DataFrame with polygons and IDs for all tax zones.
def createEmptyMapData():
    with open('data/taxzone.json', 'r') as f:
        taxzones = json.load(f)
    polygons_shape = [shape(feature['geometry']) for feature in taxzones['features']]
    names = [feature['properties']['id'] for feature in taxzones['features']]
    map_data = pd.DataFrame({'poly': polygons_shape...
[ "def taxa_data_frame(self):\n cols = list(self._taxa.keys())\n cols.remove(\"uid\")\n cols.remove(\"object\")\n df = DataFrame(self._taxa, columns=cols, index=self._taxa[\"uid\"])\n df.index.name = \"uid\"\n\n return df", "def _regions(self, voronoi_diagram, unique_id, id...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Appends a new column named 'field_name' to map_data. The data is read from json_file. If the flag single_point_per_zone is set True, only a single count per polygon is read.
def addJsonFileToMapData(json_file, field_name, map_data, single_point_per_zone=False):
    # Read the json file
    json_data = pd.io.json.read_json(json_file)
    json_data['points'] = json_data.apply(lambda row: Point(row.coords), axis=1)
    # Loop over all polygons in the map.
    poly_counts = []
    for polygon...
[ "def add_fields(self, data):\n\n fields = data[0].keys()\n for name in fields:\n self.shp_writer.field(name, 'C', 100)", "def write_with_native_bindings(self,tmp_name,queryset,geo_field):\n dr = ogr.GetDriverByName('ESRI Shapefile')\n ds = dr.CreateDataSource(tmp_name)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A message handler method may simply be a method with some kwargs. The kwargs will be given all incoming pipeline data, the bus, and the incoming payload.
def MessageHandlerMethod(**kwargs):
    data: dict = kwargs['data']
    bus: AbstractPikaBus = kwargs['bus']
    payload: dict = kwargs['payload']
    print(payload)
    if payload['reply']:
        payload['reply'] = False
        bus.Reply(payload=payload)
[ "def handle_message(**payload):\n handler_instance = message.MessageHandler(payload)\n handler_instance.handle()", "def _handler(self, message):\n\n data = pickle.loads(message['data'])\n\n if not data[2]:\n # empty method call; bail out\n return\n\n # call the fun...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
derivative of tanh(x) = 1 - tanh(x)^2
def d_tanh(x):
    return 1. - np.power(np.tanh(x), 2)
[ "def dtanh(x):\r\n\r\n return 1 - (math.tanh(x)**2)", "def tanh(x):\r\n\r\n return math.tanh(x)", "def grad_tanh(self):\n grad = 1 - self.tanh(self.x) ** 2\n return grad", "def grad_tanh(self):\n tanh_x = np.tanh(self.x)\n grad = 1 - (tanh_x * tanh_x)\n return grad", "def derivative(f): # ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
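For reference, the identity implemented by d_tanh follows from the quotient rule applied to tanh x = sinh x / cosh x, together with cosh^2 x - sinh^2 x = 1:

\frac{d}{dx}\tanh x = \frac{\cosh^2 x - \sinh^2 x}{\cosh^2 x} = 1 - \tanh^2 x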
Gets the operational_state of this ConnectionEndPoint.
def operational_state(self) -> str:
    return self._operational_state
[ "def get_connection_state(self):\n return self.connection_state", "def state(self):\n return pn_connection_state(self._impl)", "def connection_status(self):\n return self._connection_status", "def state(self):\n\n if self._socket is not None:\n return self._socket.state(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the operational_state of this ConnectionEndPoint.
def operational_state(self, operational_state: str):
    allowed_values = ["DISABLED", "ENABLED"]  # noqa: E501
    if operational_state not in allowed_values:
        raise ValueError(
            "Invalid value for `operational_state` ({0}), must be one of {1}"
            .format(operational_stat...
[ "def operational_status(self, operational_status):\n\n self._operational_status = operational_status", "def setOperationState(self, state):\n self.in_oper = state\n return self.in_oper", "def _set_state(self, connection_state):\r\n self._state = connection_state", "def _set_connection_stat...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the termination_direction of this ConnectionEndPoint.
def termination_direction(self) -> str:
    return self._termination_direction
[ "def termination_direction(self, termination_direction: str):\n allowed_values = [\"BIDIRECTIONAL\", \"SINK\", \"SOURCE\", \"UNDEFINED_OR_UNKNOWN\"] # noqa: E501\n if termination_direction not in allowed_values:\n raise ValueError(\n \"Invalid value for `termination_directio...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the termination_direction of this ConnectionEndPoint.
def termination_direction(self, termination_direction: str):
    allowed_values = ["BIDIRECTIONAL", "SINK", "SOURCE", "UNDEFINED_OR_UNKNOWN"]  # noqa: E501
    if termination_direction not in allowed_values:
        raise ValueError(
            "Invalid value for `termination_direction` ({0}), must be ...
[ "def set_termination(self, termination):\n # FIXME should be internally accessible only?\n self.__termination = termination", "def termination_direction(self) -> str:\n return self._termination_direction", "def direction(self, direction):\n\n self._direction = direction", "def term...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the termination_state of this ConnectionEndPoint.
def termination_state(self) -> str:
    return self._termination_state
[ "def end_state(self):\n\n endseg = self.segments[len(self.segments) - 1]\n return endseg.state.state_at(endseg.dur)", "def terminating_on(self):\n return self._terminating_on", "def get_shutdown_state(self):\n return self.shutdown", "def get_connection_state(self):\n return ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the termination_state of this ConnectionEndPoint.
def termination_state(self, termination_state: str):
    allowed_values = ["LP_CAN_NEVER_TERMINATE", "LT_NOT_TERMINATED", "TERMINATED_SERVER_TO_CLIENT_FLOW", "TERMINATED_CLIENT_TO_SERVER_FLOW", "TERMINATED_BIDIRECTIONAL", "LT_PERMENANTLY_TERMINATED", "TERMINATION_STATE_UNKNOWN"]  # noqa: E501
    if termination...
[ "def set_termination(self, termination):\n # FIXME should be internally accessible only?\n self.__termination = termination", "def terminated(self, terminated):\n\n self._terminated = terminated", "def termination(self, value):\n self._termination = value", "def terminating_on(self...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the layer_protocol_name of this ConnectionEndPoint.
def layer_protocol_name(self) -> str:
    return self._layer_protocol_name
[ "def _get_layer_name(self):\n return self.__layer_name", "def network_protocol(self):\n return self._network_protocol", "def layer_name(self):\n return self.__class__.__name__", "def get_protocol(self):\n return self._protocol", "def get_channel_layer(self):\n if self.chan...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the layer_protocol_name of this ConnectionEndPoint.
def layer_protocol_name(self, layer_protocol_name: str):
    allowed_values = ["OTSiA", "OCH", "OTU", "ODU", "ETH", "ETY", "DSR"]  # noqa: E501
    if layer_protocol_name not in allowed_values:
        raise ValueError(
            "Invalid value for `layer_protocol_name` ({0}), must be one of {1}"
            ...
[ "def layer_protocol_name(self) -> str:\n return self._layer_protocol_name", "def network_protocol(self, network_protocol):\n self._network_protocol = network_protocol", "def setlayer(self, layername, layer, force=False):\n if not force and self.layers.has_key(layername):\n raise ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the connectivity_service_end_point of this ConnectionEndPoint.
def connectivity_service_end_point(self) -> str:
    return self._connectivity_service_end_point
[ "def end_point(self) -> ConnectivityServiceEndPoint:\n return self._end_point", "def endpoint_service(self) -> ModelInstanceEndpoint:\n return self._endpoint_service", "def connectivity_service_end_point(self, connectivity_service_end_point: str):\n\n self._connectivity_service_end_point = ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the connectivity_service_end_point of this ConnectionEndPoint.
def connectivity_service_end_point(self, connectivity_service_end_point: str):
    self._connectivity_service_end_point = connectivity_service_end_point
[ "def end_point(self, end_point: ConnectivityServiceEndPoint):\n\n self._end_point = end_point", "def endpoint_service(self, endpoint_service: ModelInstanceEndpoint):\n self._endpoint_service = endpoint_service", "def connectivity_service(self, connectivity_service: List[ConnectivityService]):\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the parent_node_edge_point of this ConnectionEndPoint.
def parent_node_edge_point(self) -> List[str]:
    return self._parent_node_edge_point
[ "def getParent(self):\n return self.parent_edge", "def parent_node(self):\n if self._parent_node:\n return self._nodes.get_by_id(self._parent_node)\n return None", "def getNodeParent(self, node):\n return node.parent", "def get_parent(self, node):\n return self._p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the parent_node_edge_point of this ConnectionEndPoint.
def parent_node_edge_point(self, parent_node_edge_point: List[str]):
    self._parent_node_edge_point = parent_node_edge_point
[ "def setParent(self, edge):\n self.parent_edge = edge", "def set_parent(self, parent_node):\n self.set_parent = parent_node", "def set_parent(self, node):\n self._parent = node", "def set_parent(self, parent: \"BaseSegment\") -> None:\n self._parent = weakref.ref(parent)", "def p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the client_node_edge_point of this ConnectionEndPoint.
def client_node_edge_point(self) -> List[str]:
    return self._client_node_edge_point
[ "def client_node_edge_point(self, client_node_edge_point: List[str]):\n\n self._client_node_edge_point = client_node_edge_point", "def node_edge_point(self) -> List[str]:\n return self._node_edge_point", "def parent_node_edge_point(self) -> List[str]:\n return self._parent_node_edge_point",...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the client_node_edge_point of this ConnectionEndPoint.
def client_node_edge_point(self, client_node_edge_point: List[str]):
    self._client_node_edge_point = client_node_edge_point
[ "def node_edge_point(self, node_edge_point: List[str]):\n\n self._node_edge_point = node_edge_point", "def parent_node_edge_point(self, parent_node_edge_point: List[str]):\n\n self._parent_node_edge_point = parent_node_edge_point", "def setParent(self, edge):\n self.parent_edge = edge", "...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the connection_port_direction of this ConnectionEndPoint.
def connection_port_direction(self) -> str:
    return self._connection_port_direction
[ "def get_port_direction(self, port):\n if port == 1:\n self.__port_b_direction = self.__bus.read_byte_data(\n self.__ioaddress, self.IODIRB)\n return self.__port_b_direction\n else:\n self.__port_a_direction = self.__bus.read_byte_data(\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the connection_port_direction of this ConnectionEndPoint.
def connection_port_direction(self, connection_port_direction: str):
    allowed_values = ["BIDIRECTIONAL", "INPUT", "OUTPUT", "UNIDENTIFIED_OR_UNKNOWN"]  # noqa: E501
    if connection_port_direction not in allowed_values:
        raise ValueError(
            "Invalid value for `connection_port_direct...
[ "def connection_port_direction(self) -> str:\n return self._connection_port_direction", "def connection_port(self, connection_port):\n\n self._connection_port = connection_port", "def changePort(self, to_port, from_port=None, direction='CW'):\n if not 0 < to_port <= self.num_ports:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the connection_port_role of this ConnectionEndPoint.
def connection_port_role(self) -> str:
    return self._connection_port_role
[ "def connection_port_role(self, connection_port_role: str):\n allowed_values = [\"SYMMETRIC\", \"ROOT\", \"LEAF\", \"TRUNK\", \"UNKNOWN\"] # noqa: E501\n if connection_port_role not in allowed_values:\n raise ValueError(\n \"Invalid value for `connection_port_role` ({0}), mu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the connection_port_role of this ConnectionEndPoint.
def connection_port_role(self, connection_port_role: str):
    allowed_values = ["SYMMETRIC", "ROOT", "LEAF", "TRUNK", "UNKNOWN"]  # noqa: E501
    if connection_port_role not in allowed_values:
        raise ValueError(
            "Invalid value for `connection_port_role` ({0}), must be one of {1}"
            ...
[ "def connection_port_role(self) -> str:\n return self._connection_port_role", "def connection_port(self, connection_port):\n\n self._connection_port = connection_port", "def port_mode(self, port_mode):\n\n self._port_mode = port_mode", "def set_port(self, party_port) -> None:\n\n s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
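All the validating setters in the ConnectionEndPoint rows above follow one pattern: compare the value against a whitelist of enum strings and raise ValueError otherwise. A generic, hypothetical helper capturing that pattern (checked_enum_setter is not part of the original code):

def checked_enum_setter(name, allowed_values):
    # build a setter that only accepts values from `allowed_values`
    def setter(self, value):
        if value not in allowed_values:
            raise ValueError(
                "Invalid value for `{0}` ({1}), must be one of {2}"
                .format(name, value, allowed_values))
        setattr(self, '_' + name, value)
    return setter

class ConnectionEndPoint:
    # behaves like the hand-written connection_port_role property above
    connection_port_role = property(
        lambda self: self._connection_port_role,
        checked_enum_setter('connection_port_role',
                            ["SYMMETRIC", "ROOT", "LEAF", "TRUNK", "UNKNOWN"]))

cep = ConnectionEndPoint()
cep.connection_port_role = "ROOT"    # accepted
# cep.connection_port_role = "BOGUS"  # would raise ValueError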
Resolve the free type variables of a constructor given the argument types we instantiate the class with. This means we need to match up argument types with variables from the class layout. In the most general case this means we need to fixpoint infer all methods called from the constructor.
def infer_constructor_application(classtype, argtypes):
    # Figure out the list of argtypes
    cls = classtype.impl
    init = cls.__init__.py_func
    argtypes = fill_missing_argtypes(init, tuple(argtypes))
    # Determine __init__ argnames
    argspec = inspect.getargspec(init)
    assert not argspec.varargs
    ...
[ "def _resolve_args(self, env, args):\n pos_args, kw_args = args\n\n def check_value(v):\n if isinstance(v, AstTypeRef):\n return self._resolve_type(env, v)\n else:\n return v\n\n new_pos_args = [check_value(pos_arg) for pos_arg in pos_args]\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a new boto assignment mock class with the given fields supplied with the specified values.
def make_boto_assignment(values):
    assignment = mock.MagicMock()
    assignment.AssignmentId = str(uuid.uuid4())
    assignment.HITId = str(uuid.uuid4())
    assignment.WorkerId = str(uuid.uuid4())
    assignment.answers = [[]]
    for key, value in values.items():
        answer_mock = mock.MagicMock()
        ans...
[ "def __init__(self, **kwargs):\n default_values = {\n 'name': 'Organization Name',\n 'ubi': 'Unified Business Identifier',\n 'address_line_1': '',\n 'address_line_2': '',\n 'city': '',\n 'state': '',\n 'zipcode': '',\n 'c...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds some HTML content after the first plugin from a specific placeholder gets rendered.
def add_extra_html(instance, placeholder, rendered_content, original_context):
    html_before = getattr(placeholder, '_extra_html_before', '')
    html_after = getattr(placeholder, '_extra_html_after', '')
    if not html_before and not html_after:
        return rendered_content
    template_data = ['{{rendered_cont...
[ "def test_render_placeholder_cache(self):\n invalidate_cms_page_cache()\n ex = Example1(\n char_1='one',\n char_2='two',\n char_3='tree',\n char_4='four'\n )\n ex.save()\n ph1 = ex.placeholder\n ###\n # add the test plugin\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test task with error in command.
def test_cmd_error(self):
    task = Task("uid", False, False, "does_not_exist", None, ".")
    task._checkpoint_dir = tmp_checkpoint_dir()
    with self.assertRaisesRegexp(RuntimeError, ".*executing Task's command:.*"):
        task.run()
    task.shell = True
    with self.assertRaisesRegexp(R...
[ "def failing_task():\n raise Exception('task failed :(')", "def test_verify_error(self):\n task = Task(\"uid\", False, False, \"echo\", \"does_not_exist\", \".\", \"A\")\n task._checkpoint_dir = tmp_checkpoint_dir()\n with self.assertRaisesRegexp(RuntimeError, \".*executing Task's verifica...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test task with error in verification.
def test_verify_error(self):
    task = Task("uid", False, False, "echo", "does_not_exist", ".", "A")
    task._checkpoint_dir = tmp_checkpoint_dir()
    with self.assertRaisesRegexp(RuntimeError, ".*executing Task's verification:.*"):
        task.run()
    task.shell = True
    with self.asser...
[ "def failing_task():\n raise Exception('task failed :(')", "def test_task_add():\n pytest.fail('Not implemented yet.')", "def _test_run_with_failure(self, task_class, expected_message):\r\n task_entry = self._create_input_entry()\r\n self.define_option_problem(PROBLEM_URL_NAME)\r\n wi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
List the iDRAC configuration settings
def list_idrac_settings(self):
    return self._idrac_cfg.list_idrac_settings()
[ "def list_settings(self):\n raise", "def list_config():\n console = Console()\n _config = loadConfig()\n json_data = richJSON.from_data({**asdict(_config)})\n console.print(Panel(json_data, title=\"SubmarineCliConfig\"))", "def listAvailableSettings(db):", "def _list_settings(self, settings...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a configuration job for applying all pending changes to an iDRAC.
def commit_pending_idrac_changes(
        self, idrac_fqdd='iDRAC.Embedded.1', reboot=False,
        start_time='TIME_NOW'):
    return self._job_mgmt.create_config_job(
        resource_uri=uris.DCIM_iDRACCardService,
        cim_creation_class_name='DCIM_iDRACCardService',
        ...
[ "def apply_pending_config(node, **kwargs):\n\n raid_controller = None\n error_msgs = []\n\n # RAID controller validation\n raid_controller = drac_common.validate_required_string(kwargs,\n 'raid_controller',\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Applies all pending changes on the BIOS by creating a config job
def commit_pending_bios_changes(self, reboot=False, start_time='TIME_NOW'):
    return self._job_mgmt.create_config_job(
        resource_uri=ironic_uris.DCIM_BIOSService,
        cim_creation_class_name='DCIM_BIOSService',
        cim_name='DCIM:BIOSService',
        target=self.BIOS_DEVICE_FQDD,
        ...
[ "def apply(self) -> None:\n _ba.apply_config()", "def apply_pending_config(node, **kwargs):\n\n raid_controller = None\n error_msgs = []\n\n # RAID controller validation\n raid_controller = drac_common.validate_required_string(kwargs,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Abandon all pending changes to a NIC.
def abandon_pending_nic_changes(self, nic_id):
    self._job_mgmt.delete_pending_config(
        resource_uri=uris.DCIM_NICService,
        cim_creation_class_name='DCIM_NICService',
        cim_name='DCIM:NICService',
        target=nic_id)
[ "def abandon_bios(module, bmc):\n debug(module, \"Abandoning pending BIOS configuration changes\")\n try:\n bmc.abandon_pending_bios_changes()\n except drac_exc.BaseClientException as e:\n module.fail_json(msg=\"Failed to abandon pending BIOS jobs: %s\" %\n repr(e))", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Apply all pending changes to a NIC by creating a configuration job.
def commit_pending_nic_changes(self, nic_id, reboot=False):
    return self._job_mgmt.create_config_job(
        resource_uri=uris.DCIM_NICService,
        cim_creation_class_name='DCIM_NICService',
        cim_name='DCIM:NICService',
        target=nic_id,
        reboot=reboot)
[ "async def apply_changes(self, interface: Interface) -> None:\n inet = self.sys_dbus.network.interfaces.get(interface.name)\n\n # Update exist configuration\n if (\n inet\n and inet.settings\n and inet.settings.connection.interface_name == interface.name\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a configuration job. In CIM (Common Information Model), weak association is used to name an instance of one class in the context of an instance of another class. SystemName and SystemCreationClassName are the attributes of the scoping system, while Name and CreationClassName are the attributes of the instance o...
def create_config_job(self, resource_uri, cim_creation_class_name,
                      cim_name, target,
                      cim_system_creation_class_name='DCIM_ComputerSystem',
                      cim_system_name='DCIM:Com...
[ "def create(self, force=False):\n if self.exist is None:\n self.check_remote_project()\n\n with self.get_handler() as handler:\n if self.exist:\n if force:\n handler.delete_job(self.name)\n handler.create_job(self.name, self.co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a configuration job for applying all pending changes to a NIC.
def create_nic_config_job(
        self, nic_id, reboot=False, start_time='TIME_NOW'):
    return self._job_mgmt.create_config_job(
        resource_uri=uris.DCIM_NICService,
        cim_creation_class_name='DCIM_NICService',
        cim_name='DCIM:NICService',
        ...
[ "def commit_pending_nic_changes(self, nic_id, reboot=False):\n return self._job_mgmt.create_config_job(\n resource_uri=uris.DCIM_NICService,\n cim_creation_class_name='DCIM_NICService',\n cim_name='DCIM:NICService',\n target=nic_id,\n reboot=reboot)", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a reboot job.
def create_reboot_job(self, reboot_type='graceful_reboot_with_forced_shutdown'):
    return self._job_mgmt.create_reboot_job(reboot_type)
[ "def restart_job(id):\n db = dbutil.create_db_session()\n job = db.query(model.Job).filter_by(id=id).first()\n new_job = model.Job()\n new_job.measurement_file = job.measurement_file\n new_job.onyx_file = job.onyx_file\n new_job.status = model.Job.STATUS_LAUNCHED\n new_job.launched_time = datet...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deletes the given jobs. If no jobs are given, all jobs are deleted.
def delete_jobs(self, job_ids=['JID_CLEARALL']):
    return self._job_mgmt.delete_jobs(job_ids)
[ "def delete(jobs):\n if not isinstance(jobs, (list, tuple)):\n jobs = [jobs]\n # create delete statement for the job\n stmt = [Job.__table__.delete().where(Job.id == bindparam(\"_id\"))]\n # delete entries in the relationship tables\n for relation_table in [job_dependencies, job_pipes, job_gro...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Obtain the legacy, non-UEFI, boot protocol of a NIC.
def get_nic_legacy_boot_protocol(self, nic_id):
    return self._nic_cfg.get_nic_legacy_boot_protocol(nic_id)
[ "def set_nic_legacy_boot_protocol(self, nic_id, value):\n return self._nic_cfg.set_nic_legacy_boot_protocol(nic_id, value)", "def set_nic_legacy_boot_protocol_none(self, nic_id):\n return self._nic_cfg.set_nic_legacy_boot_protocol(nic_id, 'NONE')", "def get_sockio_ioctl(nic_name: str, ioctl: SOCKI...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Obtain the link status, up or down, of a NIC.
def get_nic_link_status(self, nic_id):
    return self._nic_mgmt.get_nic_link_status(nic_id)
[ "def get_interface_status(conn_obj, interface, device=\"dut\"):\n command = \"cat /sys/class/net/{}/operstate\".format(interface)\n if device==\"dut\":\n return utils_obj.remove_last_line_from_string(st.show(conn_obj, command, skip_tmpl=True))", "def IsLinkup(nic,timeout):\n nic = nic.strip()\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Obtain a setting of a NIC.
def get_nic_setting(self, nic_id, attribute_name):
    return self._nic_cfg.get_nic_setting(nic_id, attribute_name)
[ "def get_nic_settings(bmc):\n nic_settings = bmc.list_nics()\n return nic_settings", "def get_setting(self, setting):\n return self.do_rpc(\"get_setting\", key=key)", "def getIPForNic(self, value):\n _nic=self.getNic(value)\n _ip=\"\"\n if _nic.isDHCP():\n import soc...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }