query stringlengths 9 9.05k | document stringlengths 10 222k | negatives listlengths 19 20 | metadata dict |
|---|---|---|---|
pylag.mlfit.MLCrossSpectrum.process_fit_results(fit_result, params) Process a scipy.optimise fit result to calculate the bestfitting cross spectrum, lag spectrum and errors from the model. | def process_fit_results(self, fit_result, params):
hess = fit_result.hess_inv(fit_result.x) if callable(fit_result.hess_inv) else np.diag(fit_result.hess_inv)
self.cpsd = self.get_cpsd()
if self.cpsd_model is None:
self.cpsd_error = hess[:len(self.fbins)] ** 0.5
else:
... | [
"def process_fit_results(self, fit_result, params):\n self.psd = self.get_psd()\n if self.model is None:\n self.psd_error = self.param_error\n else:\n # calculate the error on each PSD point from the error on each parameter\n psd_deriv = self.model.eval_gradient... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
loglike, grad = pylag.mlfit.StackedMLCrossSpectrum.log_likelihood(params, eval_gradient=True) Evaluate log(marginal likelihood), as well as its gradient, for the covariance matrix defined by some set of input parameters. The log(likelihood) for the stack of light curve pairs is the sum of the log(likelihood) evaluated ... | def log_likelihood(self, params, eval_gradient=True):
if eval_gradient:
segment_loglike = [c.log_likelihood(params, eval_gradient) for c in self.mlcross_spec]
# separate and sum the likelihoods and the gradients
like = np.array([l[0] for l in segment_loglike])
gra... | [
"def log_likelihood(self, params, eval_gradient=True):\n c = self.cov_matrix(params)\n\n # add white noise along the leading diagonal\n # this should be the Poisson noise term when calculating a PSD\n if self.noise is not None:\n c += np.diag(self.noise)\n\n try:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Searches a list for a given item and returns a boolean value as to whether it is present and the processing time needed to find the item. | def sequential_search(a_list, item):
strt_time = time.time()
pos = 0
found = False
while pos < len(a_list) and not found:
if a_list[pos] == item:
found = True
else:
pos = pos + 1
end_time = time.time()
run_time = end_time - strt_time
return (run_ti... | [
"def search(self, item):\n current = self.head\n found = False\n while current is not None and not found:\n if current.get_data() is item:\n found = True\n else:\n current = current.get_next()\n return found",
"def search(self, item):... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the 4 different search algorithms by generating 100 test lists of three different sizes, then calculates the average processing time for each and returns the results in string. | def main():
samp_size = [500, 1000, 10000]
tests = {'Sequential': 0,
'Ordered': 0,
'Bin Iterative': 0,
'Bin Recursive': 0}
for smpl in samp_size:
counter = 0
while counter < 100:
test_list = list_gen(smpl)
tests['Sequential'] +... | [
"def multi_results(benchmark):\n # Read in results\n tensat_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n taso_root = os.path.join(os.path.dirname(tensat_root), \"TASO\")\n\n taso_benchmark_name = benchmark\n if benchmark == 'nasneta':\n taso_benchmark_name = 'nasnet_a'\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Make list of lists of pixels, each inner list is a row in original picture. If ratio is specified, shrink the picture by it and return smaller one. | def make_list(filename, ratio):
f = filename
make_smaller = ratio
if not f.endswith(".ppm"):
f += ".ppm"
(width, height), data = read_file(f)
width, height = int(width.strip("b'")), int(height.strip("b'").strip("\\\\n"))
final_list = []
final_small = []
print(width, height)
... | [
"def split_image(pixels, corner, square_size):\n opposite_corner = (corner[0] + square_size, corner[1] + square_size)\n\n square_rows = pixels[corner[0]:opposite_corner[0]]\n square = []\n for row in square_rows:\n square.append(row[corner[1]:opposite_corner[1]])\n\n return square",
"def get... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a short string summary of this Sequence | def summary(self) -> str:
if self.id:
id = self.id
else:
if len(self) <= 20:
id = "Sequence={}".format(str(self))
else:
id = "Sequence"
return "{};\n Alphabet={};\n Length={};\n Parent={};\n Type={}".format(
id, ... | [
"def summary(self) -> str:",
"def summary_func(self):\n return('The description of this major is' + ': ' + self.summ)",
"def summary(self):\n\n name='name:%s'%self.getName()\n damage='dégat:%s'%self.getDamage()\n ammos='munitions:%s'%self.getAmmos()\n return '\\n'.join([name, ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Raises AlphabetError if this Sequence does not conform to its alphabet | def _validate_alphabet(self):
Sequence.validate_alphabet(str(self), self.alphabet) | [
"def checkAlphabet(self, sequence):\n ok = [ch for ch in sequence if ch in self.E]\n if len(ok) < len(sequence):\n return False \n return True",
"def get_alphabet_with_probabilities(self):\n raise self._alphabet_distribution",
"def test_unicode_letter_raises(letter):\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a new Sequence corresponding to the reverse complement of this Sequence. Location on parent, if it exists, is converted appropriately. | def reverse_complement(self, new_id: str = None, new_type: str = None) -> "Sequence":
if not self.alphabet.is_nucleotide_alphabet():
raise AlphabetError("Cannot reverse complement sequence with alphabet {}".format(self.alphabet))
location = self.location_on_parent.reverse_strand() if self.lo... | [
"def reverse_complement_SeqRecord(record):\n return SeqRecord(seq = record.seq.reverse_complement(), \\\n id = record.id, description = \"reverse complement\" )",
"def get_reversed(self) -> _Node:\n parent = _NullPathNode()\n for node in _NodeIterator(self):\n paren... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the Parent object representing the closest ancestor (parent, parent of parent, etc.) of this sequence which has the given sequence type. If include_self is True and this sequence has the given type, returns a new Parent object representing this sequence. Raises NoSuchAncestorException if no ancestor with the gi... | def first_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> Parent:
if include_self and self.sequence_type == sequence_type:
return Parent(sequence=self)
if self.parent:
return self.parent.first_ancestor_of_type(sequence_type, True)
raise NoSuchAnce... | [
"def has_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> bool:\n if include_self and self.sequence_type == sequence_type:\n return True\n if self.parent:\n return self.parent.has_ancestor_of_type(sequence_type, include_self=True)\n return False",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns True if some ancestor (parent, parent of parent, etc.) of this sequence which has the given sequence type, or False otherwise. If include_self is True and this sequence has the given type, returns True. | def has_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> bool:
if include_self and self.sequence_type == sequence_type:
return True
if self.parent:
return self.parent.has_ancestor_of_type(sequence_type, include_self=True)
return False | [
"def first_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> Parent:\n if include_self and self.sequence_type == sequence_type:\n return Parent(sequence=self)\n if self.parent:\n return self.parent.first_ancestor_of_type(sequence_type, True)\n raise ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a FASTAformatted string for this sequence. These are linebroken every num_chars. | def to_fasta(self, num_chars: Optional[int] = 60) -> str:
if self.is_empty:
raise EmptySequenceFastaError("Cannot write FASTA for empty Sequence")
r = [f">{self.id}"]
for i in range(0, self._len, num_chars):
r.append(str(self)[i : i + num_chars])
return "\n".join... | [
"def pretty(self, width=2):\n ret = \"\"\n full_width = width * 10\n seq = ((\" \" * ((self.start_pos - 1) % full_width)) + \"{}\" \\\n + (\" \" * ((width - 1) - ((self.end_pos - 1) % full_width)))).format(\n self.sequence)\n sa = (self.start_pos - 1) // full_wid... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Run a job. This applies the function node, and returns a |ResultMessage| when complete. If an exception is raised in the job, the |ResultMessage| will have ``'error'`` status. | def run_job(key, node):
try:
result = node.apply()
return ResultMessage(key, 'done', result, None)
except Exception as exc:
return ResultMessage(key, 'error', None, exc) | [
"def run_job(job_name, job_config):\n\n logging.info(f\"Running job {job_name}\")\n results, query_id = execute_query(centra, query=job['query'])\n\n if 'csv' in job['output']:\n output_csv(job_name, results)\n \n if 'stdout' in job['output']:\n logging.info(f\"Displaying results for jo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculates the unbiased maximum likelihood estimation of the covariance matrix of a matrix x of shape (n_samples, n_features). | def get_mle_covariance(self, x = None, ddof = 1):
if is_none(x):
x = self.x
# small number to avoid singularities
return np.cov(x, ddof = 1, rowvar = False) + 1e-6 * np.identity(x.shape[1]) | [
"def compute_covariance_matrix(X):\n return np.cov(X, rowvar=0)",
"def gauss(mu, covar, x):\n \n n, d = x.shape\n\n j, k = covar.shape\n\n # Check that the covariance matrix is the correct dimension\n if ((j != d) or (k !=d)):\n raise Exception('Dimension of the covariance matrix and data... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculates the maximum likelihood estimation of the mean vector of a matrix x of shape (n_samples, n_features). | def get_mle_mean(self, x = None):
if is_none(x):
x = self.x
return x.mean(0) | [
"def likelihood(self, x: np.ndarray) -> np.ndarray:",
"def MVN_log_likelihood(X, model):\n return np.sum(multivariate_normal.logpdf(X.T, model.mean, model.cov))",
"def MVN_log_likelihood(X, model):\n\n return logsumexp(multivariate_normal.logpdf(X.T, mean=model.mean, cov=model.cov))",
"def mlln(self, x,... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get x, raw, cost, classes from a pandas dataframe. | def _pandas_parser(self, df):
if is_none(df):
return
# just to make sure that stuff is sorted
# supress warning as this works like intended
pd.options.mode.chained_assignment = None
df.sort_values(['functional', 'basis', 'unrestricted', 'reaction'])
pd.options... | [
"def metrics_classification(df):\n true_labels = df['True value']\n predictions = df.drop('True value', axis=1)\n\n scoreDf = pd.DataFrame(columns=[\"Model\", \"Accuracy\"])\n scoreDf.set_index(\"Model\")\n for model_name, predictions in predictions.iteritems():\n scoreDf = scoreDf.append(get_metrics_for_al... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Minimize x'Cx, where C is the covariance matrix and x is the portfolio weights. The constraints sum(x) = 1 and m'x = 0 is used, with m being the asset means. Optionally the constraint x >= 0 is used if self.positive_constraint == False. l2 regularization can be included. Also max cost constraint | def zero_mean_min_variance(self, x = None, alpha = 0):
if is_none(x):
x = self.x
# in case of mixtures
mean = np.sum((self.mixture_weights[i] * self.mean[i]
for i in range(self.n_mixtures)), axis = 0)
E_2 = np.sum((self.mixture_weights[i] * (self.mean[i][:,... | [
"def minVarPortfolio(self):\n\n try:\n self.inv_cov_matrix = self.inv_cov_matrix\n except:\n raise ValueError(\"Inverse Variance-Covariance matrix not assigned. Call var_covar function\")\n try:\n try:\n _min_variance_vector = self.inv_cov_matrix ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Determine the optimal l2 value for the min_variance_upper_mean_bound approach by n_splits x n_repeats repeated kfold cross validation | def min_variance_upper_mean_bound_cv(self):
l2 = 10**np.arange(-9, -5.01, 0.5)
return self.internal_cv(self.min_variance_upper_mean_bound, l2) | [
"def cross_validation(X, Y, sigmas, llambdas, Ntot):\n parameters = []\n random.seed(666)\n\n for i in range(len(sigmas)):\n K = laplacian_kernel(X, X, sigmas[i])\n\n for j in range(len(llambdas)):\n \n for m in range(5):\n maes = []\n split = range(Ntot)\n random.shuffle(split)\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Determines the optimal regularization parameter of the given alphas, for a given method | def internal_cv(self, method, alphas):
se = np.zeros(alphas.size)
for i, (train, test) in enumerate(self.cv_generator.split(range(self.n_samples))):
for j, v in enumerate(alphas):
weights = method(x = self.x[train], alpha = v)
se[j] += sum(np.sum(weights * s... | [
"def setRegularizationParameter(self, beta) -> None:\n ...",
"def fast_opt_svr_hyperparams(x, y, cs, epsilons, gammas, validation_method, parameter):\r\n \r\n if validation_method != 'cv' and validation_method != 'midknn':\r\n# print('\\'{0}\\' is unknown. Please check \\'validation_method\\'.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the score given a metric, where y_pred is the prediction error. | def score(y_pred, metric):
if metric == 'mae':
#np.mean(abs(y_pred-y))
return np.mean(abs(y_pred))
elif metric == 'rmsd':
#np.sqrt(np.mean((y_pred-y)**2))
return np.sqrt(np.mean((y_pred)**2))
elif metric == 'max':
#np.max(abs(y_pred-y))
return np.max(abs(y_pr... | [
"def metric_score(self, batch, y_pred):\n return -self.loss_score(batch, y_pred)",
"def performance_metric(y_true, y_predict):\n\n error = metrics.mean_squared_error(y_true,y_predict)\n return error",
"def score(y_true, y_pred):\n\n\treturn roc_auc_score(y_true, y_pred)",
"def get_metric(self, x,... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for description, mapped from YANG variable /local_routes/static_routes/static/config/description (string) | def _set_description(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/l... | [
"def set_description(self, description):\n if not isinstance(description, str):\n raise ValueError(\"Description must be a string.\")\n try:\n self._set_config_value(\n _SERVICE_INFO_SECTION_NAME, \"Description\", description\n )\n except Exceptio... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for index, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/index (string) | def _set_index(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing... | [
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/next_hop (union) | def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p... | [
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|2... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for metric, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/metric (uint32) | def _set_metric(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._... | [
"def _set_metric(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=metric.metric, is_container='container', presence=False, yang_name=\"metric\", rest_name=\"metric\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, registe... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state/next_hop (union) | def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p... | [
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|2... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for recurse, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state/recurse (boolean) | def _set_recurse(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="recurse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://opencon... | [
"def set_recursive(self, b):\n _ldns.ldns_resolver_set_recursive(self, b)\n #parameters: ldns_resolver *,bool,\n #retvals: ",
"def set_visited_right(self):\n self.visited_right = True",
"def recursion_depth(self, recursion_depth):\n\n self._recursion_depth = recursion_depth",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd/config (container) | def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_he... | [
"def _set_enable_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_he... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd/state (container) | def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper... | [
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for subinterface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/config/subinterface (leafref) | def _set_subinterface(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang... | [
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openco... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for interface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state/interface (leafref) | def _set_interface(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local... | [
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for subinterface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state/subinterface (leafref) | def _set_subinterface(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang... | [
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openco... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/config (container) | def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name="config", parent=self, path_helper=self._path... | [
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state (container) | def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_hel... | [
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for index, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/index (leafref) | def _set_index(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = Y... | [
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config (container) | def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extme... | [
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_help... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state (container) | def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethod... | [
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Getter method for enable_bfd, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd (container) | def _get_enable_bfd(self):
return self.__enable_bfd | [
"def _set_enable_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_he... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for enable_bfd, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd (container) | def _set_enable_bfd(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name="enable-bfd", parent=self, path_helper=self._pa... | [
"def _get_enable_bfd(self):\n return self.__enable_bfd",
"def enable_peer_bfd(self, **kwargs):\n rbridge_id = kwargs.pop('rbridge_id', '1')\n peer_ip = kwargs.pop('peer_ip')\n delete = kwargs.pop('delete', False)\n get = kwargs.pop('get', False)\n feature_tmp = '_neighbor{0}_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for interface_ref, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref (container) | def _set_interface_ref(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name="interface-ref", parent=self, path_hel... | [
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop (list) | def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("index",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name="next-hop", parent=self, is_container='list', user_ordered=False, p... | [
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|2... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for prefix, mapped from YANG variable /local_routes/static_routes/static/prefix (leafref) | def _set_prefix(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = ... | [
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, req... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/config (container) | def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmeth... | [
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/state (container) | def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods,... | [
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_help... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for next_hops, mapped from YANG variable /local_routes/static_routes/static/next_hops (container) | def _set_next_hops(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name="next-hops", parent=self, path_helper=self._path_helper, extmethods=s... | [
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethod... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for static, mapped from YANG variable /local_routes/static_routes/static (list) | def _set_static(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("prefix",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name="static", parent=self, is_container='list', user_ordered=False, path_helper=self._path_he... | [
"def _set_static_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extm... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for description, mapped from YANG variable /local_routes/local_aggregates/aggregate/config/description (string) | def _set_description(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/l... | [
"def set_description(self, description):\n if not isinstance(description, str):\n raise ValueError(\"Description must be a string.\")\n try:\n self._set_config_value(\n _SERVICE_INFO_SECTION_NAME, \"Description\", description\n )\n except Exceptio... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for discard, mapped from YANG variable /local_routes/local_aggregates/aggregate/state/discard (boolean) | def _set_discard(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="discard", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://opencon... | [
"def is_discarded(self):\n return self._discarded",
"def discard(self) :\n\t\tassert self._holded is not None, \\\n\t\t\t\"Discarding a configuration requires to be held\"\n\t\tself._dict = self._holded\n\t\tself._holded = None",
"def discard():\n player = current_player._get_current_object()\n if ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for prefix, mapped from YANG variable /local_routes/local_aggregates/aggregate/prefix (leafref) | def _set_prefix(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = ... | [
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, req... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for config, mapped from YANG variable /local_routes/local_aggregates/aggregate/config (container) | def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._e... | [
"def _set_aggregate(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=Fa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for state, mapped from YANG variable /local_routes/local_aggregates/aggregate/state (container) | def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extme... | [
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for aggregate, mapped from YANG variable /local_routes/local_aggregates/aggregate (list) | def _set_aggregate(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("prefix",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name="aggregate", parent=self, is_container='list', user_ordered=False, path_helpe... | [
"def _set_local_aggregates(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for static_routes, mapped from YANG variable /local_routes/static_routes (container) | def _set_static_routes(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name="static-routes", parent=self, path_helper=self._path_helper, extmethods=self._... | [
"def _set_static(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helpe... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for local_aggregates, mapped from YANG variable /local_routes/local_aggregates (container) | def _set_local_aggregates(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name="local-aggregates", parent=self, path_helper=self._path_helper, extme... | [
"def _set_aggregate(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=Fa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for local_routes, mapped from YANG variable /local_routes (container) | def _set_local_routes(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name="local-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, regis... | [
"def _set_static_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extm... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convert this instance into a dict, which includes some detailed information of the target/source build, i.e. build version and file name. | def to_dict_detail(self, target_lib, offset=0):
detail_info = asdict(self)
try:
with open(self.stdout, 'r') as fout:
detail_info['stdout'] = fout.read()
with open(self.stderr, 'r') as ferr:
detail_info['stderr'] = ferr.read()
except FileNot... | [
"def build_info(self):\n \n path='/build_info'\n res = self.client.call(path, 'GET', data='', token=self.manager.identity.token)\n self.logger.debug('Openstack build info: %s' % \\\n truncate(res))\n return res[0]",
"def _info(self, identity=None):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Insert the job_info into the database | def insert_database(self, job_info):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
INSERT INTO Jobs (ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime, Fin... | [
"def _create_job_info(self, job_dir):\n meta = self._build_job_meta(job_dir)\n\n self.logger.debug(\"Create job: %s\" % meta)\n\n job_record = JobRecord.from_json(meta)\n job_record.save()",
"def insert_jobexp(exp_args, jobman_args):\n table_name = jobman_args.get(\"table_name\", \"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the status of all jobs as a list of JobInfo | def get_status(self):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
SELECT ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime, FinishTime
FROM Jobs
... | [
"def list_jobs(self, status=True):",
"def check_job_status_all(self):\r\n\t\ttry:\r\n\t\t\t# r = requests.get(self.base_url + \"jobs\")\r\n\t\t\tr = requests.get(self.base_url + \"joboverview\")\r\n\t\t\tr.raise_for_status()\r\n\t\t\t# print('Response from check_job_status_all: %s'%(r.text))\r\n\t\t\tresponse = j... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Change the status and finish time of job in the database | def update_status(self, id, status, finish_time):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
UPDATE Jobs SET Status=(?), FinishTime=(?)
WHERE ID=(?)
""",
(status, fini... | [
"def update_final_job_logstatus():\n list = [\"deleted\", \"killed\"]\n pcjs = bm.Job.objects.filter(log_extracted='no')\n pcjs = pcjs.filter(status__in=list)\n dt = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n for job in pcjs:\n bm.Job.objects.filter(job_id=job.job_id).update(\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Enumerates quotes from a filename or a stream. filename: filename or stream. encoding: applicable only if filename is given. empty_name: replaces an empty author name. Enumerates over quotes; a quote is defined as a dictionary. | def enumerate_quotes(filename, encoding="utf-8", empty_name="Inconnu"):
if isinstance(filename, str):
with open(filename, "r", encoding=encoding) as f:
for q in enumerate_quotes(f):
yield q
else:
re1 = re.compile("chapter[{]([0-9]+)[}]")
re2 = re.compile(
... | [
"def __read_quotes(self, quote_path):\n with codecs.open(quote_path, mode=\"r\", encoding=\"utf-8\", errors='ignore') as f:\n lines = f.readlines()\n \n for line in lines:\n splitted = line.replace('\\n','').replace('\\r', '').split(': ')\n if (len(splitted) > 1):\n if (splitted[0] no... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Test that it outputs an empty dict if the type of transaction is invalid | def test_invalid_type(self):
dataDict = {
'type': 'gibberish'
}
target = __import__('')
THParse = target.TransactionHistoryParse(dataDict)
result = THParse.main()
self.assertEqual(result, {}) | [
"def test_to_dictionary(self):\n self.assertDictEqual(self.payment.to_dictionary(), {\n \"trader_id\": \"32\" * 20,\n \"transaction_number\": 2,\n \"transferred\": {\n \"amount\": 3,\n \"type\": \"BTC\"\n },\n \"payment_id\"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This decorator dumps out the arguments passed to a function before calling it. | def dump_args(func):
argnames = func.func_code.co_varnames[:func.func_code.co_argcount]
fname = func.func_name
def echo_func(*args, **kwargs):
print fname, ":", ', '.join('%s=%r' % entry
for entry in zip(argnames,args) + kwargs.items())
return func(*args, **kwarg... | [
"def log_argumrnts(logger):\n def decorator(func):\n @wraps\n def wraped_func(*args, **kwargs):\n args = inspect.getcallargs(func, *args, **kwargs)\n msg = \"call `{}` with arguments: {}\".format(func.__name__, args)\n logger.info(msg)\n return func(*args... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return base url from article row. | def get_url_base(row):
url = row['url']
domain_ending = re.compile(r'\.[a-z]{2,3}$')
domain_prefix = re.compile(r'^[a-z]+\.')
domains_to_keep = {'wsj', 'cnn', 'cbs', 'nbc', 'bbc', 'de'}
net_loc = urlparse(url).netloc
while net_loc.startswith('www.'):
net_loc = net_loc[4:]
while True... | [
"def extract_link(row):\n return row[len(row) - 4]",
"def __generateFlickrURL(self, row):\n return \"http://farm{0}.staticflickr.com/{1}/{2}_{3}_{4}.jpg\".format(\n row[0], row[1], row[2], row[3], self._image_size)",
"def _get_urls(row):\n all = row.findAll('th') + row.findAll('td')\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Count words in text from article row. | def count_words(row):
text = row['text']
return len(text.split()) | [
"def countClauseWord(self, article):\n num = 0\n wordList = article.split(\" \")\n for word in wordList:\n if word in self.clauseWordsList:\n num += 1\n return num",
"def _countWords(self, level, value):\n text = ' '.join(self.dataframe.xs(value, level=... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Print all rows in Pandas DataFrame x. | def print_full(x):
pd.set_option('display.max_rows', len(x))
print(x)
pd.reset_option('display.max_rows') | [
"def print_full(self, dataframe):\n pandas.set_option('display.max_rows', len(x))\n print(dataframe)\n pandas.reset_option('display.max_rows')",
"def print_full_df(df):\n\n pd.set_option('display.max_rows', len(df))\n pd.set_option('display.max_columns', len(df.columns))\n print(df)\n pd.rese... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculate global stats on article db. | def global_stats(articles: pd.DataFrame):
print(f'Number of articles: {len(articles):,}')
num_sources = len(pd.value_counts(articles['base_url'], sort=False))
print(f'Number of news sources: {num_sources}')
mean_wc = articles['word_count'].mean()
print(f'Global mean word count: {mean_wc:.1f}')
m... | [
"def stats():\n db = ingest.load_database()\n ingest.database_stats(db)",
"def calc_statistics(self):\n pass",
"def articles_total():",
"def init_stats(self):\n self.relation_stats = {\n \"sentences\": collections.defaultdict(int),\n \"paragraphs\": collections.defaultdict(int),\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculate aggregate word count statistics on each source's articles. | def calculate_word_count_stats(articles: pd.DataFrame):
by_source = articles.groupby(['base_url'])['word_count']
by_source = by_source.agg(['count', 'mean', 'std'])
by_source.sort_values('count', ascending=False, inplace=True)
print_full(by_source)
top_sources = by_source.head(10).index
top_cou... | [
"def calculate_stats(twitter_content, language):\r\n\tnlp = stanza.Pipeline(language)\r\n\t#initialize variables\r\n\ttoken_without_frequencies = Counter()\r\n\ttoken_frequencies = Counter()\r\n\tupos_frequencies = Counter()\r\n\tner_frequencies = Counter()\r\n\tnum_sentences = 0\r\n\tmax_sentence = 0\r\n\tmin_sent... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Display statistics on articles. | def show_stats():
articles = build_df()
global_stats(articles)
calculate_word_count_stats(articles)
calculate_missing_values(articles)
sns.kdeplot(articles['word_count'], bw=1)
sns.plt.show() | [
"def articles_total():",
"def popular_articles():\n\n results = fetch_query(\n \"\"\"select articles.title, count(log.path)\n from articles, log\n where log.path = '/article/' || articles.slug\n and log.status = '200 OK'\n group by articles.title\n order by count(log... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
An implementation of col2im based on fancy indexing and np.add.at | def col2im_indices(cols, x_shape, field_height=3, field_width=3, padding=1,stride=1):
N, C, H, W = x_shape
H_padded, W_padded = H + 2 * padding, W + 2 * padding
x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)
k, i, j = get_im2col_indices(x_shape, field_height, field_width, padding,... | [
"def col2im_indices(self, cols, x_shape, field_height=3, field_width=3, padding=1,\n stride=1):\n N, C, H, W = x_shape\n\n H_padded, W_padded = H + 2 * padding, W + 2 * padding\n x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)\n k, i, j = self._get... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Compute kernels for positions. pos = (y, x) gaussian = exp(0.5 (pos/sigma)^2). | def _compute_pos_kernels(size, sigma):
if size % 2 != 1:
raise ValueError('Kernel size must be odd')
hs = size // 2
row = -np.array(range(-hs, hs + 1), dtype=np.float32)
pos = np.zeros((size, size, 2), dtype=np.float32)
pos[:, :, 1] = np.broadcast_to(row, (size, siz... | [
"def multivariate_gaussian(self, pos):\n\n n = self.mu.shape[0]\n Sigma_det = np.linalg.det(self.sigma)\n Sigma_inv = np.linalg.inv(self.sigma)\n N = np.sqrt((2*np.pi)**n * Sigma_det)\n # This einsum call calculates (x-mu)T.Sigma-1.(x-mu) in a vectorized\n # way across all ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
takes a word as a string returns True if the word does not contain the letter 'e' | def has_no_e(word):
for letter in word:
if letter == 'e':
return False
return True | [
"def has_no_e(word):\n for c in word:\n if c == 'e':\n return False\n return True",
"def not_letter(character: str) -> bool:\n return character not in LETTERS",
"def exclude(letter):\n return letter in string.punctuation or letter in string.whitespace",
"def avoids(word, forbidde... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
execute program to create the graph | def create_graph(self):
self.my_graph = eval_or_exec(self.program)
self.parse_graph() | [
"def main():\n\n # Storing the name of the files\n network_file = sys.argv[1]\n tests_file = sys.argv[2]\n results_file = sys.argv[3]\n\n # Initiate\n digraph = Digraph()\n stations = StationCatalog()\n\n # Load stations\n stations.load(network_file)\n\n # Create Nodes\n for station... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Computes the necessary wind loads for the structure and then assigns them to the appropriate nodes | def assign_wind_loads(self):
Vr = self.mean_hourly_wind_speed
roa = 1.22 # kg/m3 - density of air in Great Britan
if self.terrain_category == 2:
# This calculates the variation of wind speed with height
Kr = 1.10 # Terrain roughness factor
Z0 = 0.01 # Terrain ... | [
"def distribute_unit_load(self, aEID, piercedElements, nPiercings):\r\n aModel = self.aeroModel\r\n sModel = self.structuralModel\r\n #print \"piercedElements = \",piercedElements\r\n nIDs = []\r\n if nPiercings == 0:\r\n #assert len(nPiercings)==1,'fix me...'\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
outputs an slf file in beam format | def create_slf_file(self):
mesh = open(self.name, 'w')
mesh.write('numel numnp nmat nmode (This is for a beam bridge)\n')
mesh.write(str(len(self.edge_list))+'\t'+str(len(self.node_list))
+ '\t'+str(len(self.beams)) + '\t0\n')
mesh.write('matl no., E mod, Poiss. Rati... | [
"def writeInputFile(beam,lattice,fname='test.in'):\n if sum(beam.multi_charge.n_particles) != beam.n_particles:\n #print('input error <- sum(beam.multi_charge.n_particles) not qual to beam.n_particles')\n if beam.multi_charge.n_states == 1:\n #print(' ... enforcing beam.multi_charge.n_particles[0] to ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
generate and show mesh stresses using bmpost | def show_mesh(self):
self.create_graph()
self.assign_load_case()
# self.assign_wind_loads()
self.apply_stresses()
self.create_slf_file()
self.test_slf_file()
self.parse_results()
self.show_analysis() | [
"def exchange_bmesh_data(msh_active,msh_paste):\r\n import bmesh\r\n bm = bmesh.new()\r\n bm.from_mesh(msh_paste)\r\n bm.to_mesh(msh_active)\r\n msh_active.update()",
"def _generate_mesh(self):\n self._mesh_points = self._make_pos()",
"def create_mesh_data(self):\n\n # if len(self.p... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a list of unique fact values | def unique_factvalues(raw_facts):
factvalues = set([])
for fact in raw_facts:
factvalues.add(fact.value)
return factvalues | [
"def get_unique_items():\r\n sheet_data = read_sheet_data(config.get(\"sheet1_title_range\"))\r\n return set(chain.from_iterable(sheet_data[\"values\"]))",
"def get_unique_elements(self, field: str) -> list:\n return self.properties.distinct(field)",
"def get_unique_values_serie(s):\r\n\r\n if s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convert the facts generator from pypuppet into a simple dict(). | def facts2dict(raw_facts):
facts = {}
for fact in raw_facts:
facts[fact.name] = fact.value
return facts | [
"def get_facts(self):\n output = self.device.facts\n\n uptime = self.device.uptime or -1\n\n interfaces = junos_views.junos_iface_table(self.device)\n interfaces.get()\n interface_list = interfaces.keys()\n\n return {\n \"vendor\": \"Juniper\",\n \"mod... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Check if the passed in query contains an OPERATOR This is used to work out if the raw input from the client is asking nfi to query puppetdb or customize how the results are passed back. | def has_operator(query):
for char in query:
if char in OPERATORS:
return True
return False | [
"def isop(tok):\n return tok in oplist",
"def isOp(s):\n return getOp(s) != None",
"def _isOperator(self, token):\n token = token.strip()\n \n if(token == \"+\"):\n return True\n\n if(token == \"*\"):\n return True\n \n return False",
"def ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convert a very simple query into AST format for puppetdb This allows the client to pass in a very simple and have it converted into the AST format for puppetdb. for example. >>> simple2AST("hostname=bob") ["=", ["fact", "hostname"], "bob"] >>> simple2AST("hostname=bob hostname=fred") ["or", ["=", ["fact", "hostname"], ... | def simple2AST(queries):
# split up strings into a list of queries
if not isinstance(queries, str):
raise Exception("simple2AST only converts a single query")
# Make sure the query is a query
if not has_operator(queries):
raise Exception("simple2AST only converts queries: '%s'" % queri... | [
"def simplify_query(query):\n query_list = []\n query = query.split()\n\n #Now that the query is split, all that needs to be done\n #is writing the desired elements to the list in order.\n query_list.append(list_of_select(query))\n query_list.append(list_of_from(query))\n\n #This conditional pr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Allow for simple query of facts. | def fact_query(db, raw_client_input=None):
if raw_client_input:
return nodes_query(db, raw_client_input)
else:
return fact_names(db) | [
"def find(self, **kwargs):\n q = self.compile_query(**kwargs)\n return [f for f in six.itervalues(self.facts) if q(f)]",
"def facts(name):\n cube = get_cube(name)\n result = cube.facts(fields=request.args.get('fields'),\n cuts=request.args.get('cut'),\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a blastn object with initialized blast_records and hsp_records | def create_blastn_object(query_genes:str, db:str, qcov=False,id=PERC_ID_CUTOFF):
blastn_object = Blastn()
stdout_xml = blastn_query1(query_genes, db, qcov=qcov, id=id)
blastn_object.create_blast_records(stdout_xml)
blastn_object.create_hsp_objects(query_genes)
return blastn_object.hsp_objects | [
"def create_blastn_bsr_object(query_genes, db):\n blastn_object = Blastn()\n stdout_xml = blastn_query1(query_genes, db, qcov=True)\n blastn_object.create_blast_records(stdout_xml)\n blastn_object.create_hsp_objects(query_genes)\n return blastn_object",
"def construct_blast_query_object(x, parser_o... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a Blastn object with initialized blast_records and hsp_records with cutoff bsr. | def create_blastn_bsr_object(query_genes, db):
blastn_object = Blastn()
stdout_xml = blastn_query1(query_genes, db, qcov=True)
blastn_object.create_blast_records(stdout_xml)
blastn_object.create_hsp_objects(query_genes)
return blastn_object | [
"def bsr(blast_object:Blastn, max_bits_dict:dict):\n\n for hsp in blast_object.hsp_objects:\n hsp.bsr = hsp.bits / max_bits_dict[hsp.name]\n\n if hsp.bsr < MIN_BSR:\n blast_object.remove_hsp_object_all(hsp)",
"def create_blastn_object(query_genes:str, db:str, qcov=False,id=PERC_ID_CUTO... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Assigns valid attributes to first and second hsp object. | def valid_strands(first_hsp_object: HSP, second_hsp_object: HSP) -> None :
if first_hsp_object.name == second_hsp_object.name:
if (first_hsp_object.strand or second_hsp_object.strand) \
and not (first_hsp_object.strand and second_hsp_object.strand):
first_hsp_object.valid = True... | [
"def set_attributes(self, model_1, obj_1, obj_2, overwrite=True):\n for (\n attr\n ) in (\n obj_2.traits()\n ): # Iterate through all attributes in obj_2. These should be the same traits as obj_1 assuming the precondition\n class_name = str(type(obj_2.traits()[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Extends any missing bp's in the hsp_object that was removed by blastn by comparing to the respective sbjct's sequence. | def extend_sbjct(hsp_object, database, primer_dict):
start = hsp_object.start
end = hsp_object.end
query_start = hsp_object.query_start
query_end = hsp_object.query_end
len_missing = len(primer_dict[hsp_object.name]) - abs(end - start)
begin_missing = query_start - 1
end_missing = abs(len(p... | [
"def test_remove_h_bonds(self):\n test_mol = self.mHBonds.generate_h_bonded_structures()[0]\n test_mol.remove_h_bonds()\n\n for i, atm1 in enumerate(test_mol.atoms):\n for j, atm2 in enumerate(test_mol.atoms):\n if j < i and test_mol.has_bond(atm1, atm2):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculate the hamming distance between two sequences | def hamming_dist(seq1, seq2):
dist = sum(x != y for x, y in zip(seq1, seq2))
return(dist) | [
"def hamming_distance(bits1: str, bits2: str) -> int:\n bits1 = [int(b) for b in bits1]\n bits2 = [int(b) for b in bits2]\n return hamming(bits1, bits2) * len(bits1)",
"def hammingDistance( s1, s2 ):\n strLen = len( s1 )\n count = 0\n for i in range( strLen ):\n if s1[i] != s2[i]:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Modifies hsp object to determine if facing end of contig and within MAX_PERC_END of the amp len from the end of the contig. | def valid_dir(hsp: HSP):
dist_end = abs((hsp.db_length + 1) - hsp.start - hsp.amp_len)
dist_start = abs(hsp.start - hsp.amp_len)
if dist_end <= (MAX_PERC_END * hsp.amp_len):
hsp.location = True
hsp.end_dist = dist_end
elif dist_start <= (MAX_PERC_END * hsp.amp_len):
hsp.location... | [
"def in_endcaps(c: Cylinder, p : np.array)->bool:\n close = np.isclose(np.array([p[2],p[2]]), np.array([c.zmin, c.zmax]), atol=1e-06)\n return close.any()",
"def H2UpperLimit(self):\n f = not self.normview\n if f:\n self.normalize()\n\n def recalcfit(self):\n self.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Determines if the missing cj0181 sequence is the chimeric primer sequence for its exception | def cj0181_missing_seq(hsp_object, primer_dict, database, chimeric_seq) -> bool:
start = hsp_object.start
end = hsp_object.end
query_start = hsp_object.query_start
query_end = hsp_object.query_end
len_missing = len(primer_dict[hsp_object.name]) - abs(end - start) - 1
begin_missing = query_start ... | [
"def check_seq(self):\n nuc_list = ['A', 'T', 'C', 'G']\n global check, error_details\n for row_index, row in self.primer_df.iterrows():\n for letter in row['Primer_seq'].strip():\n if letter not in nuc_list:\n check += 1\n error =... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Removes hsp object from blast_object if BSR is not >= MIN_BSR | def bsr(blast_object:Blastn, max_bits_dict:dict):
for hsp in blast_object.hsp_objects:
hsp.bsr = hsp.bits / max_bits_dict[hsp.name]
if hsp.bsr < MIN_BSR:
blast_object.remove_hsp_object_all(hsp) | [
"def remove_oldest(own):\n if own.capacityOf==0 or own.capacityOf<0:\n print(\"Capacity of RingBuffer is 0 or less than 1. Can't use this RingBuffer\")\n return\n return own.removeFirst()",
"def remove(self, hspl):\n\n hsplObject = hspl.findtext(\"{%s}object\" % getHSPLN... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
eHybridization when both primers are found. | def ehyb_both_prim_found(blast, f_hsp, r_hsp):
lo_hsp_ehybrid_qcov = ehyb(blast) # assigns ehybrid attributes to each hsp from amp vs db
ehybrid_qcov_pass = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == True]
ehybrid_qcov_fail = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == False]
for ... | [
"def map_hydrogens(spc_1: ARCSpecies,\n spc_2: ARCSpecies,\n backbone_map: Dict[int, int],\n ) -> Dict[int, int]:\n atom_map = backbone_map\n atoms_1, atoms_2 = spc_1.mol.atoms, spc_2.mol.atoms\n for hydrogen_1 in atoms_1:\n if hydrogen_1.is_hydroge... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the closest fingerprint match of the binary results based off of the fingerprints in cgftypes_file | def find_closest_fingerprint(bin_results:list, cgftypes_file:str) -> list:
with open(cgftypes_file, "r") as fingerprint_track:
csvReader = csv.reader(fingerprint_track)
header = next(csvReader)
cgf_type_index = header.index("cgf.type")
strain_freq_index = header.index("num.strains")... | [
"def find_matches(filename, e):\n best_matches = {}\n with open(filename) as f:\n try:\n for record in NCBIXML.parse(f):\n best = {}\n if record.alignments:\n for alignment in record.alignments:\n genome = extract_id(ali... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Prints the given state of the puzzle | def print_puzzle(state):
print('-----')
for i in range(4):
print('|', end="")
for j in range(3):
if state[i][j] == 0:
print(" |", end="")
else:
print("", state[i][j], "|", end="")
if i == 0:
bre... | [
"def print_state(self):\n p1_board = self.board[0:6]\n p2_board = self.board[7:13]\n p2_board.reverse()\n p1_purse = self.board[6]\n p2_purse = self.board[13]\n\n print('\\n')\n print(\"Player 1 Score: {}\".format(self.p1_score))\n print(\"Player 2 Score: {}\"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a string corresponding to the move between two positions of a tile | def get_move(old_i, new_i):
dx = new_i[0] - old_i[0]
dy = new_i[1] - old_i[1]
if dx > 0:
return "left"
elif dx < 0:
return "right"
elif dy > 0:
return "up"
elif dy < 0:
return "down"
else:
return "" | [
"def translate_to_tile(self, tile_x, pos_x, tile_y, pos_y):\n x = int(tile_x) * DISPLAY_SIZE['x'] + pos_x\n y = int(tile_y) * DISPLAY_SIZE['y'] + pos_y\n return x, y",
"def render_tile(tile):\n\n # each tile list has the meaning: [Visible (bool), Mine (bool), Adjacent Mines (int)]\n # visible, mine... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the path, obtained through BBS algorithm and a boolean indicating if the path terminates at the goal state | def BBS(initial_state, check_dict):
print("Implementing BBS...")
q = []
heapq.heappush(q, (initial_state[0][2], initial_state))
accomplished = False
while len(q) != 0:
path = heapq.heappop(q)[1]
if is_goal(path[-1][0]):
goal = path
... | [
"def path_walker_backtrack(self, path: list):\n trajectory = -1\n while path:\n current = path.pop()\n if self._maze.get_cell(current.x, current.y).get_flag() == 1:\n current = current.get_parent()\n if current.get_parent() is not None and self.is_en... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Merges several make configuration objects into one in the order they are specified. If several configurations contain attributes with the same names, the latter value will be kept (with respect to the order in which they were provided to this function). | def merge(*args):
d = {}
for conf in args:
for key, value in conf.__dict__.items():
d[key] = value
return from_dict(d) | [
"def mergeConfig(self, *args, **kwargs):\n other = cherrypy.lib.reprconf.Config(*args, **kwargs)\n # Top-level keys are namespaces to merge, second level should get replaced\n for k, v in other.items():\n mergeFrom = self.get(k, {})\n mergeFrom.update(v)\n self[k] = mergeFrom",
"def ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parse a caption (srt) text passed in and return a list of section numbers ordered descending by highest speech speed | def get_srt_section_ids(text: str) -> List[int]:
text = text.strip().split("\n\n")
sections = []
for line in text:
id, duration, text = line.split("\n")
start_time, end_time = duration.split("-->")
duration = (parse(end_time) - parse(start_time)).total_seconds()
sect... | [
"def get_srt_section_ids(text: str) -> List[int]:\n sections = []\n\n for section in grouper(text.strip().splitlines(), 4):\n idx, duration, caption = [sec.strip() for sec in section if sec]\n idx = int(idx)\n duration = caption_duration_from_string(duration)\n sections.append(Sect... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Triggered when another instrument is selected from the combobox. | def on_comboBox_instrument_currentIndexChanged(self, p0):
self.instrument = self.getInstrumentFromName(p0)
if self.instrument.providesTempRange:
self.groupBox_temp.setEnabled(False)
else:
self.groupBox_temp.setEnabled(True) | [
"def on_pick(self, event):\r\n pass",
"def comboBoxOccasion_SelectionChanged(self, event):\n self.SelectedItem.occasion = event.GetInt()",
"def OnSelectedItemChanged(self):\n pass",
"def onPotencialChanged(self):\n self.potencial = self.potenzialDropDown.currentIndex()",
"def ite... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
L{client._urljoin} preserves the fragment identifier from either the new path or the base URL respectively, as specified in the HTTP 1.1 bis draft. | def test_preserveFragments(self):
self.assertEqual(
client._urljoin(b"http://foo.com/bar#frag", b"/quux"),
b"http://foo.com/quux#frag",
)
self.assertEqual(
client._urljoin(b"http://foo.com/bar", b"/quux#frag2"),
b"http://foo.com/quux#frag2",
... | [
"def simple_urljoin(base, other):\n return '/'.join([base.rstrip('/'), other.lstrip('/')])",
"def unsafe_join_url_path(base, *args):\n base = \"/\" + base.lstrip(\"/\")\n for path in args:\n base = base.rstrip(\"/\") + \"/\" + path.lstrip(\"/\")\n return base",
"def BaseJoin(base, uriRef):\r\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Replace the string "HOST" in C{template} with this test's host. Byte strings Python between (and including) versions 3.0 and 3.4 cannot be formatted using C{%} or C{format} so this does a simple replace. | def makeURIString(self, template):
self.assertIsInstance(self.host, bytes)
self.assertIsInstance(self.uriHost, bytes)
self.assertIsInstance(template, bytes)
self.assertIn(b"HOST", template)
return template.replace(b"HOST", self.uriHost) | [
"def test_replace_template(self):\n template_sample = (r'a {{templatename '\n r' | accessdate={{Fecha|1993}} '\n r' |atitle=The [[real title]] }}')\n self.assertEqual(textlib.replaceExcept(template_sample, 'a', 'X',\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Assert that all of a L{client.URI}'s components match the expected values. | def assertURIEquals(
self, uri, scheme, netloc, host, port, path, params=b"", query=b"", fragment=b""
):
self.assertEqual(
(scheme, netloc, host, port, path, params, query, fragment),
(
uri.scheme,
uri.netloc,
uri.host,
... | [
"def test_check_uri(self):\n # OK\n self.assertTrue(SiteService.check_uri(\"localhost:12345\"))\n self.assertTrue(SiteService.check_uri(\"www.google.com:12345\"))\n self.assertTrue(SiteService.check_uri(\"127.0.0.1:12345\"))\n # Missing Port\n self.assertFalse(SiteService.c... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
L{client.URI.fromBytes} by default assumes port 80 for the I{http} scheme and 443 for the I{https} scheme. | def test_parseDefaultPort(self):
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST"))
self.assertEqual(80, uri.port)
# Weird (but commonly accepted) structure uses default port.
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST:"))
self.assertEqual(80, uri.por... | [
"def test_parseCustomDefaultPort(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"), defaultPort=5144)\n self.assertEqual(5144, uri.port)\n uri = client.URI.fromBytes(\n self.makeURIString(b\"https://HOST\"), defaultPort=5144\n )\n self.assertEqual... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
L{client.URI.fromBytes} accepts a C{defaultPort} parameter that overrides the normal default port logic. | def test_parseCustomDefaultPort(self):
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST"), defaultPort=5144)
self.assertEqual(5144, uri.port)
uri = client.URI.fromBytes(
self.makeURIString(b"https://HOST"), defaultPort=5144
)
self.assertEqual(5144, uri.port... | [
"def test_parseDefaultPort(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(80, uri.port)\n # Weird (but commonly accepted) structure uses default port.\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST:\"))\n self.assertEqu... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
The path of a I{URI} with an empty path is C{b'/'}. | def test_emptyPath(self):
uri = self.makeURIString(b"http://HOST/")
self.assertURIEquals(
client.URI.fromBytes(uri),
scheme=b"http",
netloc=self.uriHost,
host=self.host,
port=80,
path=b"/",
) | [
"def test_originFormEmptyPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST/\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def test_originFormNoPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(b\"/\", uri.orig... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |