Columns: query (string, lengths 9 to 9.05k), document (string, lengths 10 to 222k), negatives (list, lengths 19 to 20), metadata (dict)
Log a critical SQL error and exit
def sql_error(err): try: logger.critical('MySQL error [%d]: %s', err.args[0], err.args[1]) except IndexError: logger.critical('MySQL error: %s', err) sys.exit(-1)
[ "def sql_error(err):\n try:\n LOGGER.critical('MySQL error [%d]: %s', err.args[0], err.args[1])\n except IndexError:\n LOGGER.critical('MySQL error: %s', err)\n terminate_program(-1)", "def sql_error(err):\n try:\n LOGGER.critical('MySQL error [%d]: %s', err.args[0], err.args[1])\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
r"""Test the rotation matrix generator.
def test_rot(self): print("rot()") obs = self.fixture # rotation(0) = identity for axis in [1, 2, 3]: # theta = 0.0 rotation = obs.rot(0.0, axis) # find || eye - rot1 || diff = np.linalg.norm(np.eye(3) - rotation) self.assertA...
[ "def test_rotation_matrix(self):\n R = utils.random_rotation_matrix()\n # Matrix must be 3x3\n assert R.shape == (3, 3)\n # Make sure that det(R) is 1\n assert np.isclose(np.linalg.det(R), 1.0)\n # Check that the inverse is equal to the transpose of R\n assert np.all...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return an integer representation based on the given string representation. The Trace.traceLevels hash table is used to do the translation. The integer returned is one of the levels from the Level class. The incoming level is intended to be an integer or a Jython string.
def _coerceLevel(self,level): if (type(level) == type(0)): if (level >= Level.NONE and level <= Level.FINEST): result = level else: raise TraceSpecificationException("Unknown integer trace level: %s Valid integer trace levels: %s <= level <= %s" % (level, Level.NONE, Level.FINEST)) ...
[ "def _str_to_level(string):\n if string.lower() == DEBUG:\n return logging.DEBUG\n return logging.INFO", "def _log_level_from_string(string):\n try:\n return getattr(logging, string)\n except AttributeError:\n raise ValueError('invalid log level: %r' % string)", "def convert_to_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a Python/Jython regular expression string that represents the given pattern.
def _patternToRegEx(self,pattern): if (pattern == "*"): # special case that matches anything regex = ".*?" else: regex = pattern if (regex.find(".") >= 0): regex = regex.replace(".", "\.") #endIf asteriskIndex = regex.find("*") if (asteriskIndex < 0): ...
[ "def get_regex_from_pattern(pattern: str) -> str:\n if len(pattern) > 2 and pattern[2] == \":\" and pattern[:2] in {\"sh\", \"re\", \"id\"}:\n (style, pattern) = (pattern[:2], pattern[3:])\n else:\n (style, pattern) = (\"id\", pattern) # \"identical\" match is the default\n if style == \"sh\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return "true" if the given trace level is a valid string or integer representation of a trace level.
def _isTraceLevel(self,level): if (type(level) == type(0)): result = level >= Level.NONE and level <= Level.FINEST elif (type(level) == type("") or type(level) == type(u"")): level = level.lower() validLevel = Trace.traceLevels.get(level) # Keep in mind, trace level "none" maps t...
[ "def _coerceLevel(self,level):\n if (type(level) == type(0)):\n if (level >= Level.NONE and level <= Level.FINEST):\n result = level\n else:\n raise TraceSpecificationException(\"Unknown integer trace level: %s Valid integer trace levels: %s <= level <= %s\" % (level, Level.NONE, Level.F...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set the trace level for this instance of the trace class based on the Trace class traceSpec. If there is no trace spec that has a module pattern that matches this trace instance module name, then the trace level is not modified.
def configureThisTrace(self): for spec in Trace.traceSpec: if (spec.compiledRegex.match(self.entityName)): self.traceLevel = spec.level break #endIf #endFor
[ "def configureTrace(traceString):\n \n setTraceSpec(traceString)\n registeredModules = Trace.tracedEntities.keys()\n for module in registeredModules:\n for spec in Trace.traceSpec:\n if (spec.compiledRegex.match(module)):\n trace = Trace.tracedEntities[module]\n trace.setTraceLevel(spec.le...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a string usable for output to stdout or a log file that provides a representation of the "exception stack" and the "frame stack" from "top to bottom" (TTB). The "exception stack" captures the code tree from main to where the exception was raised and is usually the most interesting part of the stack. The "frame s...
def _exceptionStackTTB(self,methodName,exc,depth=10): stack = "" # Reconstruct the call stack from where the trace of the exception was initiated by invoking # Trace.error() or Trace.severe(). stackList = traceback.extract_stack() try: for stackData in stackList: sourcefile,line,funct...
[ "def _exceptionStackBTT(self,methodName,exc,depth=10):\n stack = \"\"\n # Reconstruct the call stack from where the trace of the exception was initiated by invoking \n # Trace.error() or Trace.severe().\n stackList = traceback.extract_stack()\n try:\n stack = \"\\tFrame stack (most recent call l...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a string usable for output to stdout or a log file that provides a representation of the "exception stack" and the "frame stack" from "bottom to top" (BTT). The "exception stack" captures the code tree from main to where the exception was raised and is usually the most interesting part of the stack. The "frame ...
def _exceptionStackBTT(self,methodName,exc,depth=10): stack = "" # Reconstruct the call stack from where the trace of the exception was initiated by invoking # Trace.error() or Trace.severe(). stackList = traceback.extract_stack() try: stack = "\tFrame stack (most recent call last):\n" ...
[ "def _exceptionStackTTB(self,methodName,exc,depth=10):\n stack = \"\"\n # Reconstruct the call stack from where the trace of the exception was initiated by invoking \n # Trace.error() or Trace.severe().\n stackList = traceback.extract_stack()\n try:\n for stackData in stackList:\n sourcef...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set the trace level for this instance of Trace to the given level. The given level may be a Jython string that is a valid trace level as determined by the _coerceLevel() method. Or the given level may be an integer constant that is one of the levels defined in the Level class.
def setTraceLevel (self,level): if (type(level) == type("") or type(level) == type(u"")): if (level): level = self._coerceLevel(level) self.traceLevel = level #endIf elif (type(level) == type(0)): if (self._isTraceLevel(level)): self.traceLevel = level else: ...
[ "def level(self, level):\n\n self._level = level", "def set_logging_level(self, level):\n if str(level) == '1':\n self.logging_level = logging.DEBUG\n elif str(level) == '2':\n self.logging_level = logging.INFO\n elif str(level) == '3':\n self.logging_l...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The configureTrace() method defined for the Trace class is a convenience wrapper around the configureTrace() method defined for the Trace module. It is often the case that a Trace class instance is readily available to use for "global" trace configuration.
def configureTrace(self,traceString): configureTrace(traceString)
[ "def configureTrace(traceString):\n \n setTraceSpec(traceString)\n registeredModules = Trace.tracedEntities.keys()\n for module in registeredModules:\n for spec in Trace.traceSpec:\n if (spec.compiledRegex.match(module)):\n trace = Trace.tracedEntities[module]\n trace.setTraceLevel(spec.le...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of TraceSpecification instances that represent a parsing of the given trace string. The returned list holds a TraceSpecification instance for each trace specification in the given trace string.
def parseTraceString(traceString): result = [] # If the given traceString is enclosed in double-quotes, # then strip the double-quotes. if (traceString[0] == '"' and traceString[-1] == '"'): traceString = traceString[1:-1] #endIf traceStrings = traceString.split(":") for trace in traceStrings: tra...
[ "def parse(str):\n\n str = util.misc.remove_whitespace(str)\n descs = []\n\n pattern = re.compile(r'experiment\\{')\n match = pattern.search(str)\n\n while match:\n\n exp_str = match.group()\n left = 1\n right = 0\n i = match.end()\n\n # consume the string until cur...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Given a trace specification string, set the module traceSpec used by all instances of Trace.
def setTraceSpec(traceString): if (not traceString): raise Exception("The traceString argument must be a non-empty string.") #endIf Trace.traceSpec = parseTraceString(traceString) Trace.traceString = traceString
[ "def configureTrace(traceString):\n \n setTraceSpec(traceString)\n registeredModules = Trace.tracedEntities.keys()\n for module in registeredModules:\n for spec in Trace.traceSpec:\n if (spec.compiledRegex.match(module)):\n trace = Trace.tracedEntities[module]\n trace.setTraceLevel(spec.le...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the module traceSpec used by all instances of Trace.
def getTraceSpec(): return Trace.traceSpec
[ "def get_testbench_specs(self, tb_type: str) -> Dict[str, Any]:\n return self._specs['testbenches'][tb_type]", "def get_required_module_descriptors(self):\r\n return []", "def spec(self):\n return getattr(self, \"_SPEC_\")", "def spec(self):\n return self._spec", "def spec(cls, f...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set the global trace specification based on the given trace string. Loop through all registered modules and set their trace class trace level based on the given trace string if the module name matches one of the module patterns in the given trace string.
def configureTrace(traceString): setTraceSpec(traceString) registeredModules = Trace.tracedEntities.keys() for module in registeredModules: for spec in Trace.traceSpec: if (spec.compiledRegex.match(module)): trace = Trace.tracedEntities[module] trace.setTraceLevel(spec.level) ...
[ "def setTraceSpec(traceString):\n \n if (not traceString):\n raise Exception(\"The traceString argument must be a non-empty string.\")\n #endIf\n \n Trace.traceSpec = parseTraceString(traceString)\n Trace.traceString = traceString", "def configureTrace(self,traceString):\n configureTrace(traceString)"...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The freshly created queue has a 'rear' value of 0 (the next available pointer in the queue buffer).
def test_queue_rear(self): queue = Queue(shape=(100, 2, 2, 2)) self.assertEqual(queue.rear, 0)
[ "def requeue(self):", "def test_that_rear_is_the_right_value_at_length_of_one(empty_queue):\n empty_queue.enqueue(9)\n assert empty_queue.front == empty_queue.rear", "def test_that_rear_is_the_right_value_at_length_greater_than_one(empty_queue):\n empty_queue.enqueue(9)\n empty_queue.enqueue(1)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert a BigBird string into a JSON object.
def bbjson(s): return BunchDict(json.loads(unquote(s)))
[ "def convert_to_json(self, string):\n return json.dumps(string)", "def string_to_obj(string_val, confix_type=None):\n intermediate_val = json.loads(string_val)\n return confix.intermediate_to_obj(intermediate_val, confix_type)", "def band_py_to_json(band):", "def loads(s):\r\n\r\n if s and isi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Map a user-provided query expression over a set of JSON objects. The query is executed for every JSON object in the input file. The JSON object is bound to the special variable '_'. In addition, all JSON attributes appear in the global namespace for the query expression.
def map(query, fh, skip_header_row, default_obj={}): # First, try the JsonRecordReader; then attempt the csv record reader reader = MetaRecordReader(default_obj) # Hack: append an 'else []' to queries that lack an else clause if " if " in query and not " else " in query: query = query + " else...
[ "def json_query(data, expr):\n\treturn jmespath.search(expr, data)", "def process_query(query_file):\n \n #create main query_dictionary where all data from query files will be placed\n query_dict = {}\n query_file.readline()\n \n \n #add SEARCH specification to query_dict\n query_dict['sea...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Execute a series of query expressions. 1) Evaluate each query over its corresponding input file. 2) If there are two inputs, join the results together. 3) If an aggregate is provided, aggregate the results. 4) If distinct is True, retain one copy of each input. 5) If order_by is provided, sort the results based on the gi...
def run_query(queries, files, default_obj, skip_header, agg, distinct, order_by, limit): its = [map(query, phile, skip_header, default_obj) for (query, phile) in zip(queries, files)] it = its[0] if len(its) == 2: it = join_op(*its) if agg is not None: it = aggregate_op(it, agg) if ...
[ "def run_query(where_clause, limit=1000):\n sql = \"SELECT * FROM catalog WHERE {} ORDER BY creators, title LIMIT {}\"\\\n .format(where_clause, limit)\n with sqlite3.connect(db_name) as db:\n results = pd.read_sql_query(sql, db)\n print_results(results)", "def exec_query(collection,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Tests the entrypoint with data passing with new-style KFP components. This test case emulates a similar scenario as testMainWithV1Producer, except that the inputs of this step are all provided by a new-style KFP component.
def testMainWithV2Producer(self): # Set mocked user function. self._import_func.return_value = main.test_func2 # Set GFile read function self._mock_gcs_read.return_value = _PRODUCER_EXECUTOR_OUTPUT entrypoint.main( executor_metadata_json_file=_OUTPUT_METADATA_JSON_LOCATION, function...
[ "def test_main(self):\n pass", "def test_start(self):\n self.producer.start()\n pass", "def test_initialized_components(self):\n m, data = add_components_and_load_data(\n prereq_modules=IMPORTED_PREREQ_MODULES,\n module_to_test=MODULE_BEING_TESTED,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run colordiff if it can be found, and plain diff otherwise.
def _run_diff(oldfile, newfile): # TODO: It may be nicer to use the internal diff engine for this. # For one, this would use the correct colors set up for hg # diff rather than the colors set up for colordiff. It's not # clear to me how this can be done though, and if it is # ...
[ "def _RunClangFormatDiff(opts, clang_diff_files, top_dir, upstream_commit):\n\n if not clang_diff_files:\n return 0\n\n # Set to 2 to signal to CheckPatchFormatted() that this patch isn't\n # formatted. This is used to block during the presubmit.\n return_value = 0\n\n # Locate the clang-format binary in th...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return all files in the working directory that match the patterns and are tracked (clean, modified or added). Ignored or unknown files are only matched when given literally. If patterns is empty, match all tracked files. Supports options['include'] and options['exclude'] which work like the include and exclude options ...
def _get_files(repo, patterns, options): ctx = repo[None] match = match_func(repo, ctx, patterns, options) try: status = ctx.status(listclean=True, listignored=True, listunknown=True) except TypeError: # Compatibility with older Mercurial versions. status = ctx.status(clean=True,...
[ "def match_files(patterns, files):\n\tall_files = files if isinstance(files, collections.Container) else list(files)\n\treturn_files = set()\n\tfor pattern in patterns:\n\t\tif pattern.include is not None:\n\t\t\tresult_files = pattern.match(all_files)\n\t\t\tif pattern.include:\n\t\t\t\treturn_files.update(result_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run uncrustify on the specified files or directories. If no files are specified, operates on the whole working directory.
def uncrustify(ui, repo, *patterns, **options): if options["diff"] and options["modify"]: raise util.Abort("cannot specify --diff and --modify at the same time") if options["diff"]: mode = "diff" elif options["modify"]: mode = "modify" else: mode = "status" no_backu...
[ "def runUnification():\n checkDirectory(unificationOptions.defaultPath,unificationOptions.logging)\n generateScript()\n generateFunctions()\n generateOptions()\n generateSettings()\n generateModCompat()", "def clean_filesystem(files=[]):\n remove_files(files + find_cache_files())", "def rem...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This creates the minimal Debian source control tree structure based on the current buildout folder. It also copies the buildout DEBIAN folder to the package.
def init_structure(self): dest = os.path.join(self.cwd, 'build', 'debian') self.mkdir_p(dest) struct = os.path.join(dest, self.cwd) self.mkdir_p(struct) # copytree_src = os.path.join(self.cwd, 'DEBIAN') # self.copytree(copytree_src, dest, symlinks=False, ignore=None) ...
[ "def release():\n # create the dist directory \n with quiet():\n local('rm -rf {}'.format(env.paths['dist']))\n local('mkdir -p {}'.format(env.paths['dist']))\n # find compiled packages\n for (dirpath, dirnames, filenames) in os.walk(env.paths['compiled']):\n files = []\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that parsing a config produces the expected ModuleConfig object.
def test_parse_config(self): user_config = {"weighted_display_name_like": "testabc [SoMeThInG]"} _, _, module_config = create_user_directory_search_module_with_config( user_config ) # Check that the generated config contains what we expect self.assertEqual( ...
[ "def test_polarion_config_parser(polarion_config):\n assert polarion_config.test_case_url() == 'https://127.0.0.1/polarion/import/testcase'\n assert polarion_config.test_run_url() == 'https://127.0.0.1/polarion/import/xunit'\n assert polarion_config.username() == 'my_user'\n assert polarion_config.passw...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Tests UserDirectorySearchModule.get_search_query_ordering return values
def test_get_search_query_ordering(self): user_config = {"weighted_display_name_like": "[Modernisation]"} module, _, _ = create_user_directory_search_module_with_config(user_config) # Check postgres # Check the generated SQL and arguments of the above config when using postgres ...
[ "def assertSearchFindsInOrder(self, query, ids):\n ctool = self.portal.portal_catalog\n result_ids = [b.getId for b in ctool.unrestrictedSearchResults(**query)]\n self.assertListEqual(result_ids, ids)", "def test_search_sorted(self):\n article = self.create_article(content=\"test\")\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Assumes x and epsilon are positive floats & epsilon < 1. Returns a y such that y*y is within epsilon of x.
def squareRootExhaustive(x, epsilon): step = epsilon**2 ans = 0.0 while abs(ans**2 - x) >= epsilon and ans*ans <= x: # The ans*ans <= is there because of floating point arithmetic I think. ans += step if ans*ans > x: raise ValueError ...
[ "def withinEpsilon(x, y, epsilon):\n return abs(x - y) <= epsilon", "def within(a, b, epsilon=1e-6):\n return abs(a - b) <= epsilon", "def approxEquals(x1, x2, epsilon = 0.005):\n return (x1 > (x2 - epsilon)) and (x1 < (x2 + epsilon))", "def _assertWithinEpsilon(self, a, b, epsilon=1.0):\n self.asse...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test fetching samples to prepare.
def test_samples_to_prepare(sample_store): # GIVEN a store with sample in a mix of states assert len(sample_store._get_query(table=Sample).all()) > 1 assert ( len( [sample for sample in sample_store._get_query(table=Sample).all() if sample.prepared_at] ) >= 1 ) #...
[ "def test_get(self):\n obs = self.tester.get('1.SKM7.640188')\n exp = PrepSample('1.SKM7.640188', self.tester)\n self.assertEqual(obs, exp)", "def test_get(self):\n\n self.driver.start_sampling()\n\n self.create_sample_data_set_dir(TEST_FILE_ONE, TELEMETERED_DIR, TEST_FILE_ONE)\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test fetching a sample by entry id.
def test_get_sample_by_entry_id(sample_store, entry_id=1): # GIVEN a store with a sample assert len(sample_store._get_query(table=Sample).all()) > 1 # WHEN finding a sample by entry id sample: Sample = sample_store.get_sample_by_entry_id(entry_id=entry_id) # THEN samples should be a list of sample...
[ "def get_sample_by_id():\n sample_id = demisto.getArg('id')\n r = req('GET', SUB_API + 'samples/' + sample_id)\n sample = sample_to_readable(r.json().get('data'))\n md = tableToMarkdown('ThreatGrid - Sample', [sample], [\n 'ID', 'Filename', 'State', 'Status', 'MD5', 'SHA1', 'SHA256', 'OS', 'Submi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test fetching a sample by internal id.
def test_get_sample_by_internal_id(sample_store, internal_id="test_internal_id"): # GIVEN a store with a sample assert len(sample_store._get_query(table=Sample).all()) > 1 # WHEN finding a sample by internal id sample: Sample = sample_store.get_sample_by_internal_id(internal_id=internal_id) # THEN...
[ "def _test_get_sample(self):\n db = mdb.get_db()\n sample = db.samples.find_one({'sample_id': \"unit_tester\"})\n if sample:\n logging.debug(\"sample id: {}, visibility: {}, owner: {}\"\n .format(sample['_id'], sample['is_public'], sample['owner']))\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test fetching samples to deliver.
def test_get_samples_to_deliver(sample_store): # GIVEN a store with a sample assert len(sample_store._get_query(table=Sample).all()) > 1 # WHEN finding samples to deliver samples = sample_store.get_samples_to_deliver() # THEN samples should be a list of samples assert isinstance(samples, list)...
[ "def test_get(self):\n\n self.driver.start_sampling()\n\n self.create_sample_data_set_dir(TEST_FILE_ONE, TELEMETERED_DIR, TEST_FILE_ONE)\n self.create_sample_data_set_dir(TEST_FILE_ONE, RECOVERED_DIR, TEST_FILE_ONE)\n\n self.assert_data(TELEMETERED_PARTICLES, TEST_FOUR_EXPECTED_RESULTS_T...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test fetching samples to invoice.
def test_get_samples_to_invoice_query(sample_store): # GIVEN a store with a sample assert len(sample_store._get_query(table=Sample).all()) > 1 # WHEN finding samples to invoice sample = sample_store.get_samples_to_invoice_query().first() # THEN samples should be a list of samples assert isinst...
[ "def test_get_all_samples(self):\n self.login()\n\n page_size = 20\n\n # hit the API endpoint for both pages\n for page in range(1, 3):\n\n data = {'page': page,\n 'page_size': page_size}\n response = self.client.get(reverse('searchsamples'), data...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test getting samples not invoiced.
def test_get_samples_not_invoiced(sample_store): # GIVEN a store with a sample assert len(sample_store._get_query(table=Sample).all()) > 1 # WHEN finding samples to invoice samples = sample_store.get_samples_not_invoiced() # THEN samples should be a list of samples assert isinstance(samples, l...
[ "def test_filterSamples(self):\r\n exp = ['PC.356', 'PC.593']\r\n self.overview_map.filterSamples(['PC.593', 'PC.356'])\r\n obs = self.overview_map.SampleIds\r\n self.assertEqual(obs, exp)\r\n\r\n self.overview_map.filterSamples([])\r\n self.assertEqual(self.overview_map.Sa...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that samples to invoice can be returned for a customer.
def test_get_samples_to_invoice_for_customer( store_with_samples_for_multiple_customers: Store, helpers: StoreHelpers, three_customer_ids: List[str], ): # GIVEN a database with samples for a customer # THEN the one customer can be retrieved customer: Customer = store_with_samples_for_multiple_c...
[ "def test_get_samples_to_invoice_query(sample_store):\n # GIVEN a store with a sample\n assert len(sample_store._get_query(table=Sample).all()) > 1\n\n # WHEN finding samples to invoice\n sample = sample_store.get_samples_to_invoice_query().first()\n\n # THEN samples should be a list of samples\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that samples can be returned for a customer.
def test_get_samples_by_customer_id_and_pattern_with_collaboration( store_with_samples_for_multiple_customers: Store, helpers: StoreHelpers, three_customer_ids: List[str], ): # GIVEN a database with samples for a customer # THEN the one customer can be retrieved customer: set[Customer] = store_...
[ "def test_get_samples_to_invoice_for_customer(\n store_with_samples_for_multiple_customers: Store,\n helpers: StoreHelpers,\n three_customer_ids: List[str],\n):\n # GIVEN a database with samples for a customer\n\n # THEN the one customer can be retrieved\n customer: Customer = store_with_samples_f...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Read in `file` and print out the frequency of words in that file.
def print_word_freq(file): text_file = open(file, 'r') contents = text_file.read() words = contents.split() def clean_text(text): text = text.lower() all_letters = "abcdefghijklmnopqrstuvwxyz" text_to_keep = "" for char in text: if char in all_letters:...
[ "def print_word_freq(file):\n with open(file) as file:\n text = file.read().lower()\n # print(repr(text[0:-1]))\n new_text_string = \"\"\n for word in text:\n if word not in punctuation:\n new_text_string += word\n # print(new_text_string)\n lis...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test the `save_one_genome` method. Pass a taxon ID in the parameters.
def test_genbank_to_genome_taxonomy(self): result = self.gfu.genbank_to_genome(self.ctx, { 'workspace_name': self.ws_name, 'generate_ids_if_needed': 'true', # why is this a string 'taxon_id': '3702', 'file': { 'path': f"{_DATA_PATH}/wigglesworthia...
[ "def test_genbank_to_genome_invalid_taxon_id(self):\n result = self.gfu.genbank_to_genome(self.ctx, {\n 'workspace_name': self.ws_name,\n 'generate_ids_if_needed': 'true', # why is this a string\n 'taxon_id': '9999999999',\n 'file': {\n 'path': f\"{...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test the `save_one_genome` method. Pass a taxon ID in the parameters.
def test_genbank_to_genome_invalid_taxon_id(self): result = self.gfu.genbank_to_genome(self.ctx, { 'workspace_name': self.ws_name, 'generate_ids_if_needed': 'true', # why is this a string 'taxon_id': '9999999999', 'file': { 'path': f"{_DATA_PATH}/...
[ "def test_genbank_to_genome_taxonomy(self):\n result = self.gfu.genbank_to_genome(self.ctx, {\n 'workspace_name': self.ws_name,\n 'generate_ids_if_needed': 'true', # why is this a string\n 'taxon_id': '3702',\n 'file': {\n 'path': f\"{_DATA_PATH}/wi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
At any point in time, the function returns whether the input consumed so far matches the given regular expression. It does so by comparing the current state with the end state `q3`. It also checks the `stopped` flag, which indicates that the FSM iteration had to be stopped due to bad input.
def does_match(self): if self.stopped: return False return self.current_state == self.q3
[ "def is_result(string):\n pattern_string = # fill in here\n pattern = re.compile(pattern_string)\n return bool(pattern.search(string))", "def match(self, s):\n # A list of previous states.\n previous = self.start.followes()\n # Loop through the string, One character at a time.\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
BSURFS (7110, 71, 588, 1, 24, 190, 198, 189, 44, 188, 197, 190, 64, 106, 189, 196, 84, 195, 188, 106, 1) BSURFS 1 24 190 198 189+ $ EID2 G1 G2 G3 EID3 G1 G2 G3 + 44 188 197 190 64 106 189 196+ $ EID4 G1 G2 G3 + 84 195 188 106
def _read_bsurfs(self, data: bytes, n: int) -> int: bsurfs
[ "def bbcalfunc(bbfile,nfreqlst): \r\n \r\n fid=file(bbfile,'r')\r\n fidlines=fid.readlines()\r\n #define the delimiter\r\n if bbfile.find('.txt')>=0:\r\n delimiter='\\t'\r\n elif bbfile.find('.csv')>=0:\r\n delimiter=','\r\n \r\n freq=[]\r\n breal=[]\r\n bimag=[]\r\n f...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Given a TTT board `b`, determine who has won and return the winner. If no one has won, return None
def winner(b): # Row of three for row in b: if row[0] == " ": # First row entry is blank; ignore! continue if row[0]==row[1] and row[1]==row[2]: return row[0] # Column of three for i in range(3): if b[0][i] == " ": # First column en...
[ "def scoreBoard(self, b):\r\n ox = self.ox \r\n if b.winsFor(ox) == True:\r\n return 100.0\r\n elif b.winsFor(self.oppCh()) == True:\r\n return 0.0\r\n else:\r\n return 50.0", "def has_won(board, player):\r\n return False", "def w(b,s):\n # t is...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Modify board b (list of lists) to account for move in string `move`. If the move is illegal, raises an exception.
def apply_move(b,player,move): move = move.strip().lower() if len(move)!=2: raise Exception("Valid move is two characters (e.g. A2 or B3)") if move[0] not in COLS: move = move[::-1] if move[0] not in COLS: raise Exception("No column spec found") j = COLS.index(move[0]) i ...
[ "def make_move(self, board):", "def set_board(self, move_string):\r\n next_checker = 'X' # we starten door een 'X' te spelen\r\n for col_char in move_string:\r\n col = int(col_char)\r\n if 0 <= col <= self.width:\r\n self.add_move(col, next_checker)\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Annotate sequence with kmer repeats.
def repeat_annotation(sequence, kmer_size): max_observed_repeats = [1 for i in range(len(sequence))] for i in range(len(sequence) - (kmer_size - 1)): kmer_count = 0 start_index = i end_index = i + (kmer_size - 1) for j in range(i, len(sequence), kmer_size): if sequenc...
[ "def setRepeat(self, repeats):\r\n assert type(repeats)==int , \\\r\n 'repeats attribute must be integer'\r\n return self._domInstance.setAttribute('repeats', str(repeats))", "def kmer_composition(k, text):\r\n # TODO: your code here\r\n d = {}\r\n for i in range(len(text)-k+1):\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a specific BokehJS deployment file
def bokehjs_file(filename): return flask.send_from_directory(bokeh_app.bokehjsdir, filename)
[ "def bokehjssrc_file(filename):\n return flask.send_from_directory(bokeh_app.bokehjssrcdir, filename)", "def get_bokeh_resources() -> TemplateResourcesData:\n template_resources = TemplateResourcesData()\n template_resources.js = CDN.js_files[0]\n template_resources.css = CDN.css_files[0]\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a specific BokehJS source code file
def bokehjssrc_file(filename): return flask.send_from_directory(bokeh_app.bokehjssrcdir, filename)
[ "def bokehjs_file(filename):\n return flask.send_from_directory(bokeh_app.bokehjsdir, filename)", "def _source_file(self):\n return 'source.c'", "def get_js_file(self):\n return 'placeholder'", "def get_source_script(self, line):\n\n script_path = line.replace(self.SOURCE_LINE, \"\").s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function should return a list containing the two highest quantities, in descending order, across all fruits.
def sorted_fruit_quantity(f): # skip the header of the file move_cursor(f) # put all the quantities into a list # expected output: [5, 10, 3, 15] # read the file line by line output = [] for line in f: line_list = line.split() # ["Apple","5"] output.append(int(line_list[1...
[ "def getHighestLookbackMultiplePriceBar(self):\n\n highestLookbackMultiplePriceBar = None\n \n graphicsItems = self.items()\n\n for item in graphicsItems:\n if isinstance(item, LookbackMultiplePriceBarGraphicsItem):\n lmpb = item.getLookbackMultiplePriceBar()\n\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Parse MBP data with given url
def parse_mbp_data(self, url): body = self.get_content(url) if body is None: return None doc_body = pq(body) content = doc_body('#main-container').html() lines = content.splitlines() record = { 'url': url, 'price': self.get_price(line...
[ "def parse_url(self, url):", "def parseMpdUrl(mpdUrl,outputMode=\"log\"):\n print(\"mpdtimeline.py::main - Get '%s'\" % (mpdUrl))\n\n r = requests.get(mpdUrl,\n verify=False)\n if r.status_code != 200:\n raise Exception(\"Failed to download manifest. %s - %s\" % (path,r.status_code,r.text))...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that reset fixtures endpoint is not available when it is not set up.
def test_reset_fixtures_url_not_found_if_not_setup(settings, caplog): caplog.set_level('WARNING') settings.ALLOW_TEST_FIXTURE_SETUP = None response = _request_reset_fixtures() assert response.status_code == status.HTTP_404_NOT_FOUND assert caplog.messages == [ 'The `reset_fixture` endpoint...
[ "def test_url_found_if_env_setup():\n response = _request_reset_fixtures()\n assert response.status_code == status.HTTP_201_CREATED", "def test_load_fixture_url_not_found_if_not_setup(settings, caplog):\n caplog.set_level('WARNING')\n\n settings.ALLOW_TEST_FIXTURE_SETUP = None\n response = _request...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that reset fixtures endpoint is available when set up.
def test_url_found_if_env_setup(): response = _request_reset_fixtures() assert response.status_code == status.HTTP_201_CREATED
[ "def test_reset_fixtures_url_not_found_if_not_setup(settings, caplog):\n caplog.set_level('WARNING')\n\n settings.ALLOW_TEST_FIXTURE_SETUP = None\n response = _request_reset_fixtures()\n assert response.status_code == status.HTTP_404_NOT_FOUND\n\n assert caplog.messages == [\n 'The `reset_fixt...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test new adviser is removed by reset fixtures.
def test_new_adviser_removed_by_reset_fixtures(): new_adviser_pk = AdviserFactory().pk _request_reset_fixtures() with pytest.raises(Advisor.DoesNotExist): Advisor.objects.get(pk=new_adviser_pk)
[ "def tearDown(self):\n self.labGroup.delete()", "def tearDown(self):\n\n del business.business_records[1]", "def tearDown(self):\n\t\tself.deck = None", "def tearDown(self):\n del self.test_dht22", "def test_delete_training_dataset(self):\n pass", "def tearDown(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that create user endpoint is not available when it is not set up.
def test_create_user_url_not_found_if_not_setup(settings, caplog): caplog.set_level('WARNING') settings.ALLOW_TEST_FIXTURE_SETUP = None response = _request_create_user(SEED_USER_DATA) assert response.status_code == status.HTTP_404_NOT_FOUND with pytest.raises(Advisor.DoesNotExist): Advisor...
[ "def test_create_user_with_no_role(setup_client):\n client = setup_client\n payload = {\n 'email': 'test@gmail.com',\n 'password': 'password',\n 'name': 'Test name'\n }\n res = client.post(CREATE_USER_URL, payload)\n assert res.status_code == status.HTTP_400_BAD_REQUEST\n user...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that created user has token in the cache.
def test_created_user_has_token_in_cache(): _request_create_user(SEED_USER_DATA) token = SEED_USER_DATA['token'] cache_key = f'access_token:{token}' expected_data = { 'email': SEED_USER_DATA['email'], 'sso_email_user_id': SEED_USER_DATA['sso_email_user_id'], } assert cache.get(c...
[ "def test_user_cache(self):\n original_token = TestExpirableToken(user=self.user)\n token = TestExpirableToken.from_key(original_token.key)\n\n def test_init_cache():\n user = original_token.user\n\n def test_user_cache():\n user = token.user\n\n self.assertN...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that load fixture endpoint is not available when it is not set up.
def test_load_fixture_url_not_found_if_not_setup(settings, caplog): caplog.set_level('WARNING') settings.ALLOW_TEST_FIXTURE_SETUP = None response = _request_load_fixture({'fixture': [ADVISER_FIXTURE]}) assert response.status_code == status.HTTP_404_NOT_FOUND with pytest.raises(Advisor.DoesNotExist...
[ "def test_reset_fixtures_url_not_found_if_not_setup(settings, caplog):\n caplog.set_level('WARNING')\n\n settings.ALLOW_TEST_FIXTURE_SETUP = None\n response = _request_reset_fixtures()\n assert response.status_code == status.HTTP_404_NOT_FOUND\n\n assert caplog.messages == [\n 'The `reset_fixt...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test load fixture endpoint.
def test_load_fixture(caplog): caplog.set_level('INFO') with pytest.raises(Advisor.DoesNotExist): Advisor.objects.get(pk=ADVISER_FIXTURE['pk']) response = _request_load_fixture({'fixture': [ADVISER_FIXTURE]}) assert response.status_code == status.HTTP_201_CREATED adviser = Advisor.object...
[ "def load_fixture(request):\n if not settings.ALLOW_TEST_FIXTURE_SETUP:\n logger.warning(\n 'The `load_fixture` endpoint is not enabled. The ALLOW_TEST_FIXTURE_SETUP environment'\n ' variable is not set.',\n )\n raise Http404\n\n fixture = request.data['fixture']\n\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draw all the objects in the scene
def draw_objects(): # Disable the turtle animation, and erase the scren. turtle.tracer(False) turtle.hideturtle() turtle.clear() # Draw all the parts of the scene. draw_ball() draw_target() draw_bounds() draw_pins() show_status() # Now show the screen, after everything ha...
[ "def draw_objects_on_screen(self):\n self.draw_ship()\n self.draw_asteroid()\n self.draw_torpedo()\n self.draw_special_torpedo()", "def draw_objects(self):\n self.draw_ship()\n self.draw_asteroids(self._asteroid_list)\n self.draw_torpedos(self._torpedos_list)", "...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draw the ball at its current position
def draw_ball(): draw_circle(ball, 'yellow')
[ "def draw_ball(ball):\n surf = pygame.Surface((2 * ball.r, 2 * ball.r))\n surf.fill(BLACK)\n surf.set_colorkey(BLACK)\n circle(surf, ball.color, (ball.r, ball.r), ball.r)\n screen.blit(surf, (ball.x - ball.r, ball.y - ball.r))", "def draw(self):\n self.ball_sprite.draw()", "def drawBall(se...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draw the bounding rectangle.
def draw_bounds(): pass
[ "def draw_bounding_box(self):\n # Gets the bounding box\n xmin, ymin, xmax, ymax = self.get_bounding_box()\n\n # Gets the actual coordinates\n width = xmax - xmin\n height = ymax - ymin\n center_x = xmin + (width)/2\n center_y = ymin + (height)/2\n\n arcade.dr...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draw all the pins.
def draw_pins(): pass
[ "def draw_pins(self, data):\n # Here we set defaults (with 'or' keyword ...)\n ax = self.ax\n plot_pins = self.plot_pins\n plot_pins_values = self.plot_pins_values\n #plot_pins_method = self.plot_pins_method or \"highlight\"\n plot_pins_colors = self.plot_pins_colors\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check if ball reached target. If it did, the game is won.
def check_ball_on_target(): pass
[ "def reachedTarget(self, target, position):\n if(target == None): return True\n xd = target[0]-position[0]\n yd = target[1]-position[1]\n dist = math.sqrt(xd*xd+yd*yd)\n return dist < TARGET_TOLERANCE_MM", "def hit_paddle(self):\n pass\n\n #Implement if collision w...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Add a pin at the current mouse location. Turtle calls this when the user clicks the mouse.
def add_pin(x, y): pass
[ "def on_mouse_press(self, x, y, button):\n\n pass", "def mouseMoveTo(point):\r\n mouse.moveTo_(point)", "def grab(self, event):\n self.ypos = event.y\n self.xpos = event.x\n self.config(cursor='fleur')", "def on_mouse_press(self, x, y, button, modifiers):\n pass", "def ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Take an input cubelist containing forecasts from different cycles and merge them into a single cube.
def process(self, cubelist: Union[List[Cube], CubeList]) -> Cube: cubelist = rebadge_forecasts_as_latest_cycle(cubelist) # Take all the realizations from all the input cube and # put in one array all_realizations = [cube.coord("realization").points for cube in cubelist] all_real...
[ "def forecast_dataframe_to_cube(\n df: DataFrame, training_dates: DatetimeIndex, forecast_period: int,\n) -> Cube:\n\n representation_type = get_forecast_representation(df)\n\n fp_point = pd.Timedelta(int(forecast_period), unit=\"seconds\")\n\n cubelist = CubeList()\n\n for adate in training_dates:\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
renders the `Dockerrun.aws.json` template
def render_template(self): apps = [{ 'name': container.name, 'image': container.image, 'environment': container.environment, 'memory': container.memory, 'portMappings': container.portmappings } for container in self.containers] t = sel...
[ "def rendernodejson():\n template = json.load(\n open(\"./chef/node.json.template\", \"r\"),\n object_pairs_hook=collections.OrderedDict\n )\n template[\"db_user_password\"] = env.DB_USER_PASSWORD\n template[\"db_host\"] = env.RDS_HOST\n template[\"aws_access_key_id\"] = env.AWS_ACCESS_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an in-memory tarfile that will be used as the docker context
def create_docker_context(self): self.tarfile = io.BytesIO() with tarfile.open(fileobj=self.tarfile, mode="w|") as tar: for f in self.files: tarinfo = tarfile.TarInfo(f['name']) tarinfo.size = len(f['content']) if 'mode' in f: ...
[ "def build_tar(self) -> BytesIO:\n fileobj = BytesIO()\n with tarfile.open(fileobj=fileobj, mode=\"w\") as tar:\n tar.add(str(self.dockerfile), arcname=\"Dockerfile\")\n for item in self.contents:\n tar.add(str(self.base_dir / item), arcname=str(item))\n fil...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
runs docker build with the tarfile context
def build(self): docker = Client(version='auto') status = docker.build( fileobj=self.tarfile, custom_context=True, tag=self.tag, pull=True, nocache=True, rm=True, ) for line in status: # This effectively blocks on ...
[ "def docker_build(ctx):\n ctx.run(\"docker build . -f docker/Dockerfile -t resource-node\")", "def build(self, tag, dockerfile: str):\n with tempfile.NamedTemporaryFile(mode='rb+') as f:\n f.file.write(dockerfile.encode())\n f.file.flush()\n f.file.seek(0)\n r...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Pull the image and create a container with host_config
def setup(self): exists = [i for i in self.client.images() if self.image in i['RepoTags']] # Only pull the image if we don't have it if not exists or self.pull: self.client.pull(self.image) self.logger.debug("Pulled {}".format(self.image)) self.container = self...
[ "def build_container(client):\n client.images.build(path=os.path.join(os.path.abspath(\"\"), \"docker\"), tag=\"scrape_light\")", "def _create_docker_host(self, host, environment, optml_subdirs, command, volumes):\n optml_volumes = self._build_optml_volumes(host, optml_subdirs)\n optml_volumes.ex...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Starts the container, and optionally executes a callback that is passed the container's info
def start(self, callback=None): self.logger.debug('Starting container {}'.format(self.image)) response = self.client.start(container=self.container['Id']) if response: self.logger.warning(response) self.logger.debug('Checking if {} service is ready'.format(self.name)) ...
[ "def start(self, container: Container):", "def test_start(self):\n an_container = models.Container.get(self.client, 'an-container')\n\n an_container.start(wait=True)", "def start(self, **kwargs):\n if self.running:\n self.logger.debug(\"Container is running\")\n return...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run the provisioner of this class for a set of services
def provision(self, services, requirements=None): if hasattr(self, 'service_provisioner'): provisioner = self.service_provisioner(services=services, container=self, requirements=requirements) ...
[ "def provision(self, services, requirements=None):\n try:\n super(SolrDockerRunner, self).provision(services=services, requirements=requirements)\n except UnknownServiceError as error:\n self.logger.warning('Skipping unknown service: {}'.format(error))\n pass", "def ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Override default provisioning behaviour to skip services that are unknown.
def provision(self, services, requirements=None): try: super(SolrDockerRunner, self).provision(services=services, requirements=requirements) except UnknownServiceError as error: self.logger.warning('Skipping unknown service: {}'.format(error)) pass
[ "def test_unknown_service(self):\n raise NotImplementedError # FIXME", "def test_no_such_conf_section_ignore_service_type(self):\n del self.oslo_config_dict['heat']\n self.assert_service_disabled(\n 'orchestration',\n \"Not in the list of requested service_types.\",\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Class factory for the docker runner. Returns a specific class for a specific type of service.
def docker_runner_factory(image): mapping = { 'gunicorn': GunicornDockerRunner, 'redis': RedisDockerRunner, 'consul': ConsulDockerRunner, 'postgres': PostgresDockerRunner, 'registrator': RegistratorDockerRunner, 'solr': SolrDockerRunner } for key in mapping:...
[ "def get_class_instance(name, collector, config):\n try:\n py_mod = importlib.import_module('.' + name, main.__name__)\n\n if hasattr(py_mod, name):\n class_inst = getattr(py_mod, name)(collector=collector,\n config=config)\n else:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Parse Amazon tracking numbers.
def parse_amazon(email): tracking_numbers = [] soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') # see if it's an shipped order email order_number_match = re.search('Your AmazonSmile order #(.*?) has shipped', email[EMAIL_ATTR_SUBJECT]) if not order_number_match: order_n...
[ "def parse_amazon_de(email):\n tracking_numbers = []\n \n soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser')\n\n # see if it's an shipped order email\n order_number_match = re.search('Order: #(.*?)\\n', email[EMAIL_ATTR_BODY])\n if not order_number_match:\n order_number_match = re.sea...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialisation of delib64.dll (WIN10); a 64-bit driver is expected. Number is the instance number used when more than one interface of a kind is configured.
def __init__(self, Interface="USB", Number=DRIVER_NUM): self.bib = CDLL("delib64") # this will NOT fail... self.interface = Interface self.number = 0 #Number self.handle = 0 self.version = 0 if self.interface == "USB": self.createModule(self.RO_USB) e...
[ "def testAbiCompatibility64(self):\n if self._dut.GetCpuAbiList(64):\n self._TestAbiCompatibility(64)\n else:\n logging.info(\"Skip the test as the device doesn't support 64-bit \"\n \"ABI.\")", "def is64bit():\r\n return platform.machine().endswith('...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command reads out the timeout status. Arguments ([Debug=1])
def analogDaTimeoutStatus(self, Debug=0): self.bib.DapiSpecialCommand.argtypes = \ [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong] self.bib.DapiSpecialCommand.restype = c_ulong timeout_status = self.bib.DapiSpecialCommand(self.handle,\ self.DAPI_SPECIAL_CMD_...
[ "def getTimeout():", "def _timeout(signum, frame):\n # Raise TimeoutException with system default timeout message\n raise TimeoutException()", "def handler(signum, frame):\n raise TimeoutError(msg)", "def pytest_timeout_cancel_timer(item):", "def __init__(self):\n\t\tself.name = 'timeout'\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command loads the default configuration of a D/A converter. The D/A converter channel is immediately set to an output voltage of 0 V. Arguments (Startchannel, [Stopchannel>=Startchannel], [Debug=1])
def analogDaSetZero(self, Startchannel, Stopchannel=None, Debug=0): self.bib.DapiSpecialCommand.argtypes = \ [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong] self.bib.DapiSpecialCommand.restype = None if Stopchannel == None or Stopchannel == Startchannel: ...
[ "def default_channel(self) -> int:\r\n ...", "def analogDaLoadVolts(self, Startchannel, Stopchannel=None, Debug=0): \n self.bib.DapiSpecialCommand.argtypes = \\\n [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong]\n self.bib.DapiSpecialCommand.restype = None\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command stores the current D/A converter setting (voltage/current value, enable/disable and D/A converter mode) in the EEPROM. These voltage values then apply to one or more channels at power-on or after a timeout of a D/A converter (EEPROM configuration). Arguments (Startchannel, ...
def analogDaSaveVolts(self, Startchannel, Stopchannel=None, Debug=0): self.bib.DapiSpecialCommand.argtypes = \ [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong] self.bib.DapiSpecialCommand.restype = None if Stopchannel == None or Stopchannel == Startchannel: ...
[ "def analogDaLoadVolts(self, Startchannel, Stopchannel=None, Debug=0): \n self.bib.DapiSpecialCommand.argtypes = \\\n [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong]\n self.bib.DapiSpecialCommand.restype = None\n if Stopchannel == None or Stopchannel == Startchan...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command sets the D/A converter using the configuration stored in the EEPROM. These voltage values then apply to one or more channels at power-on or after a timeout of a D/A converter (EEPROM configuration). Arguments (Startchannel, [Stopchannel>=Startchannel], [Debug=1])
def analogDaLoadVolts(self, Startchannel, Stopchannel=None, Debug=0): self.bib.DapiSpecialCommand.argtypes = \ [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong] self.bib.DapiSpecialCommand.restype = None if Stopchannel == None or Stopchannel == Startchannel: ...
[ "def analogDaSaveVolts(self, Startchannel, Stopchannel=None, Debug=0): \n self.bib.DapiSpecialCommand.argtypes = \\\n [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong]\n self.bib.DapiSpecialCommand.restype = None\n if Stopchannel == None or Stopchannel == Startchan...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command latches the counter values of 8 input counters simultaneously into a buffer (latch). All counter values held in the latch can then be read out one after the other. Arguments (Channels= Low, High, All, [Debug=0]) Low=0-7, High=8-15, All=0..15 counter
def counter48LatchAll(self, Channels=None, Debug=0): # void DapiSpecialCommand(ULONG handle, DAPI_SPECIAL_CMD_CNT48, # DAPI_SPECIAL_CNT48_LATCH_GROUP8, ULONG ch, 0) self.bib.DapiSpecialCommand.argtypes = \ [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong] ...
[ "def checkLongSignal(self, i):\n pass", "def test_load_channels_with_number_above_upper_bound():\n try:\n switch.load_channels(\"200\", upper_bound=99)\n assert False, \"load_channels() should have failed\"\n except Exception as error:\n assert isinstance(error, switch.LoadError)...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command reads a 48-bit counter of an input counter channel. Arguments (Channel, Width=[32|48], default=48 bit, [Debug=0])
def counter48GetCount(self, Channel, Width=None, Debug=0): #ULONGLONG DapiCnt48CounterGet48(ULONG handle, ULONG ch); self.bib.DapiCnt48CounterGet48.argtypes = [c_ulong, c_ulong] self.bib.DapiCnt48CounterGet48.restype = c_ulonglong # ULONG DapiCnt48CounterGet32(ULONG handle, ULONG ch); ...
[ "def channel_simulator(self, typeid, data):\n\n newdata = bytearray(data)\n #print \"new data:{}\".format(newdata)\n #print \"new data length:{}\".format(len(newdata))\n #print \"data:{}\".format(data)\n print \"data length:{}\".format(len(data))\n\n #Seed random number gen...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This command sets the digital outputs of 1, 8 or 16 outputs to the given values ("Data"). A number between 0 and 15 sets a SINGLE corresponding output. The designations Low, High, All set the byte or a word to H or L. If "Data" is omitted, the outputs are reset! Here the M...
def digitalDoSet(self, Startchannel_width=None, Data=0, Debug=0): # void DapiDOSet1(ULONG handle, ULONG ch, ULONG data) self.bib.DapiDOSet1.argtypes = [c_ulong, c_ulong, c_ulong] self.bib.DapiDOSet1.restype = None # void # void DapiDOSet8(ULONG handle, ULONG ch, ULONG data ...
[ "def lcdSendData(self, data):\n self.RW = 0x00 # setting the W/R to low (writing mode)\n self.RS = 0x01 # setting the register select to 1\n self.lcdMakePacket(data) # sending the highest 4 bits\n self.lcdMakePacket(data << 0x04) # ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialises an Equipo with a dictionary of attributes. The list of living pokemons is stored as tuples together with their moves.
def __init__(self, diccionario): self.numero = diccionario['numero'] self.nombre = diccionario['equipo_nombre'] self.pokmov = lectores.pokemon_y_movimiento_a_tuplas(diccionario)
[ "def elegir_oponentes_atacar(equipos,piloto,nombres):\t\t\t\t\r\n\toponentes = choice(equipos)\r\n\twhile piloto in oponentes:\r\n\t\toponentes = choice(equipos)\r\n\tgunplas_oponentes = []\r\n\tfor oponente in oponentes:\r\n\t\tgunplas_oponentes.append(oponente.get_gunpla())\t\r\n\tindice_oponente_atacar = piloto....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Receives the number of a defeated Pokémon and removes its tuple from the list of living Pokémon.
def eliminar_pokemon_derrotado(self, derrotado): for par in self.pokmov: if par[0] == derrotado.numero: self.pokmov.remove(par)
[ "def eliminar_pilotos(gunpla_enemigo,oponentes,nombres,oponente_atacar,equipos,piloto,turnos):\r\n\tif gunpla_enemigo.get_energia_restante() < 0:\r\n\t\toponentes.remove(oponente_atacar)\r\n\t\tprint(\"{} fue destruido\".format(nombres[oponente_atacar]))\r\n\t\tif len(oponentes)==0:\r\n\t\t\tequipos.remove(oponente...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Receives which team file a player wants to use. The number of the player choosing a file is passed as a parameter.
def recibir_archivo_jugador(): ingreso = '' while not os.path.exists(ingreso): ingreso = gamelib.input(MENSAJE_INGRESE_RUTA) if not ingreso: return None if os.path.exists(ingreso): return ingreso gamelib.say(MENSAJE_ERROR_RUTA)
[ "def choix_fichier(self):\n\t\tself.filename = askopenfilename(title=\"Ouvrir votre document\", filetypes=[('seulement des jpg pou le moment','.jpg')])", "def cargar_fichas_jugador():\n try:\n fichas = open(os.path.join(absolute_path,\"lib\",\"info\",\"saves\",\"fichas_jugador.json\"),\"r\",encoding='ut...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Shows add file dialog
def handleActionAdd(self): self.fDialog.show()
[ "def show_add(self):\n\n # Prevent user from adding files to an archive when one isn't open.\n try:\n print(self.zip_filename)\n except:\n self.show_msg(\"No zip file is open.\\nOpen a zip file, first.\")\n return\n\n content = AddDialog(addFiles=self.add...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Remove metadata by clicking X
def removeMeta(self, row, column): filePath = self.filesList.selectedItems()[0].text(2) metaHeader = (self.metadataList.item(row, 0)).text() logging.debug("Removing metadata " + metaHeader + " from " + str(filePath)) self.filesList.removeMeta(filePath, metaHeader, row)
[ "def remove_metadata(self, key: str) -> None:\n\t\tcore.BNBinaryViewRemoveMetadata(self.handle, key)", "def delete_metadata(self):\n import os\n os.remove(os.path.join('data', '.%s.json' % self.name))", "def handleCleanMetadataKeep(self):\n logging.debug(\"Removing all metadata found...\")\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Clean metadata but keep the original file
def handleCleanMetadataKeep(self): logging.debug("Removing all metadata found...") filePath = self.filesList.selectedItems()[0].text(2) self.filesList.removeAllMeta(filePath)
[ "def delete_metadata(self):\n import os\n os.remove(os.path.join('data', '.%s.json' % self.name))", "def __cleanup_metadata(metadata_prev, metadata):\n if not metadata_prev:\n return\n\n result_src_files = __get_result_source_files(metadata_prev)\n for plist_file, source_file in resu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
When all metadata has been cleared
def handleAllMetaClear(self, path): logging.debug("All Metadata removed, clearing the table...") self.metadataList.clear() self.metadataList.setRowCount(0) self.metadataList.setHorizontalHeaderLabels(["Metadata Header", "Value"]) self.fileNotSupported.hide() self.changeEn...
[ "def clear (self):\n\n\t\tself.meta = {}\n\t\tself.data = {}", "def clear_mutable_metadata(self):\n self.metadata_set.exclude(name__in=[\"useragent\", \"product\", \"category\"]).delete()\n self._metadata = None", "def clear_mutable_metadata(self):\n self.metadata_set.exclude(name__in=['use...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Print whatever is displayed in the second tab as a PDF to a file
def printPdfPersonalData(self, fileName, fileType, outFile): self.filesList.doBackup(fileName) self.filesList.cleanPdataMarks(fileName) if fileType != 'Pdf': outFile = AddedFile.changeExt(outFile, "pdf") #try: printer = QtGui.QPrinter() printer.set...
[ "def save_pdf(self, filename):\n from chaco.pdf_graphics_context import PdfPlotGraphicsContext\n gc = PdfPlotGraphicsContext(filename=filename)\n #pagesize = self.pagesize,\n #dest_box = self.dest_box,\n #dest_box_units = self.dest_box_units)\n gc.re...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Fired to change monitor settings
def handleMonitorSettings(self): winPos = self.mainWindow.pos() popPos = QtCore.QPoint(winPos.x() + (self.mainWindow.width() - self.settingsPopup.width()) / 2, winPos.y() + self.mainWindow.height() / 2) self.monitorPopUp.move(popPos) self.monitorPopUp.show()
[ "def on_settings(self):\n\n # Pull the current app state from the relay Observer object\n status, interval, ntfc_status, ntfc_state = settings_state.get_state()\n\n # Pass it to the Observable object in order to render the Settings window\n settings_changed, update_interval, ntfc_changed...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that some fields are numbers, and that their weighted sum with the given coefficients is zero.
def sum_is_zero( coefficients: Iterable[float], period_as_delimiter: bool = False, force_dollar_decimal: bool = False) -> Predicate: coefficients = tuple(coefficients) assert isinstance(coefficients, tuple) return sum_is_near_zero( coefficients, tolerance=0, taper=0, period_as_delimi...
[ "def qty_or_zero(self) -> Decimal:", "def _nonnegative_coefficients(x):\n if is_Polynomial(x) or is_MPolynomial(x):\n return all([ c >= 0 for c in x.coeffs() ])\n else:\n return x >= 0", "def EvaluateFields(self, *float, **kwargs):\n ...", "def test_geometric_sum_zero(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that some fields are numbers, and that their weighted sum with the given coefficients is greater than (or equal to) the given bound.
def sum_is_at_least( lower_bound: float, coefficients: Iterable[float], strict: bool = True, period_as_delimiter: bool = False, force_dollar_decimal: bool = False) -> Predicate: coefficients = tuple(coefficients) assert isinstance(coefficients, tuple) return SumIsAtLeast( name=f'sum_is_a...
[ "def lower_bound(self) -> float:\n ...", "def _constrain(self, num):\r\n return min(self.total, max(0, num))", "def EvaluateFields(self, *float, **kwargs):\n ...", "def insideSimplex(BC,bound=True):\n if bound:\n return (BC >= 0.).all(0)\n else:\n return (BC > 0.).all(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that a field is equal to the given amount, within a certain tolerance.
def is_nearly_equal_to( amount: float, tolerance: float = 0.5, taper: float = 0.5, period_as_delimiter: bool = False, force_dollar_decimal: bool = False) -> Predicate: return sum_is_approximately( amount, [1], tolerance, taper, period_as_delimiter, force_dollar_decimal)
[ "def is_equal_to(amount: float) -> Predicate:\n return is_nearly_equal_to(amount, tolerance=0, taper=0)", "def equals_exact(self, other, tolerance): # -> bool:\n ...", "def assert_within_tolerance(lval, rval, tolerance, message=None):\r\n real_message = message or \"%r !~= %r\" % (lval, rval)\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that a field is exactly equal to some constant amount.
def is_equal_to(amount: float) -> Predicate: return is_nearly_equal_to(amount, tolerance=0, taper=0)
[ "def test_var_fee_lt_1(self):\n # try with a value gt 0, should be fine\n result1 = SimpleTransaction(bill=self.bill, cash_only_var_fee=0.5)\n self.assertIsInstance(result1, SimpleTransaction)\n\n # try with a value lt 0, should raise an error\n with self.assertRaisesMessage(Asser...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that a field is greater than some constant amount.
def is_greater_than( amount: float, strict: bool = True, period_as_delimiter: bool = False, force_dollar_decimal: bool = False) -> Predicate: return sum_is_at_least( amount, [1], strict, period_as_delimiter, force_dollar_decimal)
[ "def gt(value, limit):\n return value > limit", "def greater_than(field: str, value: Any) -> Expression:\n return Expression(_criterion(field, \"greaterThan\", value))", "def set_GreaterThan(self, value):\n super(MoneyReceivedInputSet, self)._set_input('GreaterThan', value)", "def _check_amount_w...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Says that a field is less than some constant amount.
def is_less_than( amount: float, strict: bool = True, period_as_delimiter: bool = False, force_dollar_decimal: bool = False) -> Predicate: return sum_is_at_least( -amount, [-1], strict, period_as_delimiter, force_dollar_decimal)
[ "def _on_order_amount_too_low(self, _msg):\r\n self.debug(\"### Server said: 'Order amount is too low'\")\r\n self.count_submitted -= 1", "def check_less(self, x, name, val):\n if x >= val:\n raise ValueError(self.__class__.__name__ + \": \" + \"Value for parameter %s greater than ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a 3x3 pixel array representing a superposition of all current probabilities for this block. This function is called every frame for multiple blocks and single-handedly slows the program down by an enormous amount, but it's worth it for the cool animation effect
def superposition(self): superpos_array = [[0,0,0],[0,0,0],[0,0,0]] #check normalised: n = sum(self.block_weights) if n != 1: #normalise here if required self.block_weights = [x/n for x in self.block_weights] o = self.block_opts w = self.bl...
[ "def _current_frame(self):\n ret = np.array(self.board)\n # Lay over players (0.75 for opponents, 0.5 for self)\n for i in range(4):\n x,y = np.rint(self.xstate[i]).astype(int)\n ret[x:x+4, y:y+4, :] = 0.25\n ret[x:x+4, y:y+4, i] = 0.5\n return ret", "def getArray(self):\n return...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check for uncollapsed states (inefficient, to say the least)
def check_done(grid): for row in grid: for el in row: if not el.collapsed: return False else: return True
[ "def __bool__(self):\n return len(self._states_) > 0", "def getAbsorbingStates(m):\n\t\n a=[]\n for r in range(len(m)):\n if(sum(m[r])==0): a.append(r)\n return a", "def _check_collapsibles(self, blk):\n group = find_group(blk)\n for gblk in group:\n if gblk.name in...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes the particles with supplied values for charge c, mass m, and position r.
def __init__(self, charge, mass, position): self.c = charge self.m = mass self.r = position
[ "def initialize(self, particles):\n self.particles = particles", "def init_particles(self):\n self.r = np.random.rand(self.PART, 3) * 2 * (self.halfL - self.RAD) - (self.halfL - self.RAD)\n\n v_polar = np.random.random((self.PART, 2))\n\n self.v = np.zeros((self.PART, 3))\n\n se...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create dict for the delete button
def create_delete_buttons_dict(self): btns_dict = dict() for index, _ in enumerate(self.games): btns_dict[index] = { "text": "X", "on_click": self.delete, "on_click_params": [index], } return btns_dict
[ "def button_delete(self):\n # print('Delete button')\n #If no focus - return\n x = self.tree.focus()\n if x=='':\n return\n x = int(x)\n # print(int(x))\n Data.tunas.pop(x)\n if len(Data.tunas) == 0:\n #If Tunas length == 0, do like New f...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create the dict for all buttons
def create_buttons_dict(self): btns_dict = dict() for index, game in enumerate(self.games): name = game.split(".")[0] btns_dict[name] = { "text": name, "on_click": self.load, "on_click_params": [index], } return ...
[ "def create_buttons_dict(self):\n return {\n \"menu\": {\n \"text\": \"Menu\",\n \"on_click\": self.load_next_state,\n \"on_click_params\": [MENU]\n }\n }", "def create_buttons_dict(self):\n return {\n \"song\": {\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Delete a file using the num
def delete(self, num): file_name = self.games[num] file_path = path.join(self.saved_games, file_name) if path.exists(file_path): os.remove(file_path) logger.info("Remove the file %s", file_path) else: logger.error("The file %s doesn't existe", file_pat...
[ "def delete_file(filename):\n\tprint client.file_delete(filename)", "def _cleanup(self, fnum):\n while os.path.exists('%s.%s' % (self.name, fnum)):\n try:\n fname = '%s.%s' % (self.name, fnum)\n os.unlink(fname)\n # self.log.debug(\"Cleaned up file: %...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Listen for delete btns
def events_delete_btns(self): events = pg.event.get() for btn in self.delete_btns: btn.listen(events)
[ "def delete(event, context):\r\n return", "def process_IN_DELETE_SELF(self, event):", "def _notify_delete(self, cuds_object):", "def delete_button_callback(self, button):\n\t\tRPIO.del_interrupt_callback(button)", "def on_delete(self, count):\n self._logger.info(\"Delete count : %s \" % count)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Converts an image of type torch.Tensor to a numpy.ndarray for matplotlib image display
def convert_tensor_to_numpy_img(tensor_img): img = tensor_img.to('cpu').clone().detach() img = img.numpy().squeeze(0) img = img.transpose(1, 2, 0) img = img * np.array((0.229, 0.224, 0.225)) + np.array((0.485, 0.456, 0.406)) img = img.clip(0, 1) return img
[ "def _to_vis_tensor(image):\n if isinstance(image, torch.Tensor):\n return image.cpu()\n elif isinstance(image, np.ndarray) and image.dtype in (np.complex64, np.complex128):\n image = cplx.to_tensor(image)\n if cplx.is_complex(image):\n image = torch.view_as_real(image)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }