Dataset schema:
    query      string  (lengths 9 to 9.05k)
    document   string  (lengths 10 to 222k)
    negatives  list    (lengths 19 to 20)
    metadata   dict
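Each row pairs a natural-language query with its matching code document, plus hard negatives for triplet training. A minimal sketch of one row's structure (the field values here are illustrative, not taken from the data; the metadata objective is verbatim):

    row = {
        "query": "Returns view name",             # natural-language docstring
        "document": "def name(self): ...",        # the matching code snippet
        "negatives": ["def get_view_name..."],    # 19-20 non-matching code snippets
        "metadata": {"objective": {"paired": [], "self": [],
                                   "triplet": [["query", "document", "negatives"]]}},
    }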
gives us the open demand of a market. INPUT: sellingPrice (the price at which we sell energy), maxPrice (maximum price customers are willing to pay), maxDemand (total demand of a market). OUTPUT: the open demand of the market
def demand(sellingPrice, market): # by Marieke maxPrice = market.maxPrice maxDemand = market.maxDemand # if the selling price is greater than what customers want to pay, return 0 if (sellingPrice > maxPrice): return 0 # if nothing is produced for market if (sellingPrice...
[ "def get_max_profit2(stock_prices):\n\n #make sure we have at least two prices to compare\n assert len(stock_prices) > 1, \"too few prices!\"\n\n #keep running track of the cheapest previous buying price and the best\n #possible profit so far\n min_to_left = stock_prices[0]\n max_profit = stock_pr...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update the module list based on the information in the data folder.
def cli_update_module_list(): update_module_list()
[ "def refresh(self):\n self.Module_Files = []\n self.Modules = {}\n module_paths = maya.mel.eval('getenv MAYA_MODULE_PATH').split(\";\")\n for p in module_paths:\n try:\n self.Module_Files += [os.path.join(p, x).replace('\\\\', '/') for x in os.listdir(p) if\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
General density matrix split function. Splits the big density matrix of the whole system into two reduced matrices such that the matrix of the "left" system has the dimension left_mat_dim.
def split(density_mat, left_mat_dim): # Set dimensions of split matrices dim_1 = left_mat_dim if density_mat.shape[0] % dim_1 != 0: raise Warning("Wrong dimensions of split matrices") dim_2 = int(density_mat.shape[0] / dim_1) # Create matrix of block matrices o...
[ "def split_image_dmap(image, coor, resolution: str = \"256\"):\n img_dim = image.shape # (h, w, 3)\n res_dim = res[resolution] # (w, h)\n\n n_sub_img_h = int(np.floor(img_dim[0] / res_dim[1])) # times sub-image height can fit in image's height\n n_sub_img_w = int(np.floor(img_dim[1] / res_dim[0])) #...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Given a density matrix, return all singles and correlators
def reduced_matrix_measurements(density_mat): dim = density_mat.shape[0] n_spins = int(math.log2(dim)) singles = np.zeros((n_spins, 2)) # single spins correlators = np.zeros((n_spins - 1, 4)) # correlators distribution for i in range(n_spins): single_rho = ReducedM...
[ "def get_density(matrix):\n zeros = 0\n for r in matrix:\n for val in r:\n if val == 0.0 : zeros += 1\n return 1 - (zeros / float(matrix.size))", "def build_density_mat(C, n_orbital):\n\n return C[:,:n_orbital].dot(C[:,:n_orbital].T)", "def calc_dist_mat_corr(sim_mat, distance_dict...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Find the distance between 2 matrices according to singles and correlators measurements
def distance_by_measurements(singles_1, singles_2, correlators_1, correlators_2): return ((singles_1 - singles_2) ** 2).mean() + ((correlators_1 - correlators_2) ** 2).mean() # return ((singles_1 - singles_2) ** 2).mean()
[ "def grassmanian_distance(Q1,Q2):\n return np.linalg.norm(Q1 @ Q1.T - Q2 @ Q2.T,ord=2)", "def test_math_matrix_distance():\n data = [{'a': 14, 'b': 22}, {'a': 17, 'b': 3}]\n expect = [[0.0, 19.235384061671343], [19.235384061671343, 0.0]]\n assert math_util.matrix_distance(data, 'a', 'b') == expect", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns view name (needed to check if view exists in database)
def name(self): return self.instance.db_view.name
[ "def get_view_name(self):\n if self.view_name is not None:\n return self.view_name\n return 'monitoring_{0}'.format(self.model.__name__.lower())", "def test_view_name():\n view_clause1 = ViewClause(\n 'myview',\n MetaData(schema='myschema'),\n select('*')\n )\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Renders sql to create schema
def sql(self): return INIT_SCHEMA.format(schema=self._schema)
[ "def compile_create(self, blueprint, command, _):\n columns = ', '.join(self._get_columns(blueprint))\n\n sql = 'CREATE TABLE %s (%s' % (self.wrap_table(blueprint), columns)\n\n sql += self._add_foreign_keys(blueprint)\n\n sql += self._add_primary_keys(blueprint)\n\n return sql + ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over abstraction to create table sql
def tables(self): yield self.sql_create_table
[ "def iter_table(model_dict):\n for key in model_dict:\n if not db.table_exists(key):\n db.connect(reuse_if_open=True)\n db.create_tables([model_dict[key]])\n #print(f\"Created table '{key}'\")\n db.close()", "def create_table_for(self, model):", "def __to_sq...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over abstraction to create view sql
def views(self): yield self.sql_create_view
[ "def create_db_views(conn):\n sql_snpcount = \"\"\" DROP VIEW IF EXISTS snp_counts;\n CREATE VIEW snp_counts\n AS SELECT snp_id, COUNT(*) as count\n FROM genotypes GROUP BY snp_id;\n \"\"\"\n # Create each view in tur...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over abstractions to create triggers sql
def triggers(self): for nm in ('sql_trigger_on_insert', 'sql_trigger_on_update', 'sql_trigger_on_delete'): yield getattr(self, nm)
[ "def write_triggers(self, table):\n index_sql = super(PostgresDbWriter, self).write_triggers(table)\n for sql in index_sql:\n self.execute(sql)", "def _create_triggers(self):\n self._create_settings_record_trigger()", "def build_triggers(self):\n self._triggers = []\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over abstractions to create constraints sql
def constraints(self): for attr in self._entity_class.attrs: if attr.db_not_null: yield Constraint(self, attr)
[ "def write_constraints(self, table):\n constraint_sql = super(PostgresDbWriter, self).write_constraints(table)\n for sql in constraint_sql:\n self.execute(sql)", "def as_constraint(self, *args):\n return []", "def add_constraints(self, cons):\n if not cons or (not cons.per...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over ``.state_values`` to walk over own attributes
def items(self): for name in self.state_values: yield name, getattr(self, name)
[ "def device_state_attributes(self):", "def getStates():", "def allStates():", "def iterAttrs(self):\n return iter(self.requested_attributes.values())", "def get_state_data(cls, entity):\n attrs = get_domain_class_attribute_iterator(type(entity))\n return dict([(attr,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns dictionary with key/value pairs suitable for sql template for index
def sql_vars(self): return { 'index_name': self.name, 'spec': self.spec, 'db_table': self.db_table.name, }
[ "def get_datadict_template():\n return {column: \"\" for column in get_columns()}", "def index(self):\n if self.query['queryType'] == 'scan':\n if not self.query.get('columns') or '__time' in self.query['columns']:\n return ['__time']\n return []\n if self.que...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns dictionary with key/value pairs suitable for sql template for constraint
def sql_vars(self): return { 'constraint_name': self.name, 'attr': self._attr.slug, 'db_table': self.db_table.name, 'minlen': self._attr.minlen, }
[ "def sql_tmpl(self):\n if self._attr.is_int:\n return CONSTRAINT_INT\n if self._attr.is_text:\n return CONSTRAINT_TEXT\n return CONSTRAINT_NOT_NULL", "def list_constraints(self) -> Sequence[dict]:\n constraints, meta_constraints = self.cypher_query(\"SHOW CONSTRAI...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns sql template for constraint according to attribute type
def sql_tmpl(self): if self._attr.is_int: return CONSTRAINT_INT if self._attr.is_text: return CONSTRAINT_TEXT return CONSTRAINT_NOT_NULL
[ "def __to_sql(self, types):\n\n if len(types) < 1:\n logging.error(\n \"Size of dictionary equals 0, \\\n no attributes to create SQL query\"\n )\n raise KeyError\n\n sql = f\"\"\"CREATE TABLE \\\"{self.table_name}\\\" (\"\"\"\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Iterates over instances of tables registered within the registry
def tables(): for table in _registry.values(): yield table
[ "def __iter__(self):\n return iter(self._routing_tables_by_chip.values())", "def __iter__(self):\n for item in self._table:\n yield item._key # yield the key", "def _from_catalog(self):\n for tbl in self.fetch():\n self[tbl.key()] = tbl", "def ite...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the next line from standard input, without any trailing newlines.
def nextstr(): l = sys.stdin.readline() if l.endswith('\n'): l = l[:-1] return l
[ "def _next_nonempty_line(self):\n line = \"\"\n while not line:\n line = self._next_line()\n return line", "def next(self):\n # apply implicit line ending conversion\n line = self.readline()\n if line:\n return line\n else:\n raise ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the next line from standard input as a list of integers, where the input is split by ' '.
def nextints(): return [int(t) for t in nextstr().split(' ')]
[ "def _read_line() -> List[int]:\n return list(map(int, input().split()))", "def readIntegersFromConsole(self):\n try:\n line = raw_input()\n inputArray = line.split(' ')\n \n for j in range(len(inputArray)):\n inputArray[j] = int(inputArray[j])\n\n return inputArray ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Parses the arguments sent in from a webhook and returns the request body as a python object
def load_webhook_body(): import sys import json payload = "" for index in range(len(sys.argv)): payload += str(sys.argv[index]).strip() start = payload.find("RequestBody:") end = payload.find("RequestHeader:") requestBody = payload[start+12:end-1] return json.loads...
[ "def load_webhook_body():\n import sys\n import json\n\n # Read all the arguments sent in by the webhook\n payload = \"\"\n for index in range(len(sys.argv)):\n payload += str(sys.argv[index]).strip()\n\n # Get the RequestBody so we can process it\n start = payload.find(\"RequestBody:\")...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function creates entries in a window to add row values to insert into a table.
def createEntries(self, inputWindow, database): for index, (key) in enumerate(database.keys()): if "tableID" in key or "tableName" in key: continue Label(inputWindow, text=key).grid(row=index, column=0, padx=5) entry = Entry(inputWindow) entry.grid...
[ "def addTableRow(self, database):\n inputWindow = Toplevel(self.root)\n self.createEntries(inputWindow, database)\n inputBtn = Button(inputWindow, text=\"Submit Data\",\n command=lambda: self.handleAddRecord(inputWindow, database))\n inputBtn.grid(row=len(databas...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function adds rows to a selected database from self.database and also verifies if the type is correct.
def handleAddRecord(self, inputWindow, database): for (key, value) in self.entries.items(): if "tableID" in key or "tableName" in key: continue types = next( x for x in self.dataTypes if x["tableID"] == database["tableID"]) try: ...
[ "def case_add_records(self, refresh_db_before):\n new_types = [\"Type one\", \"Type two\"]\n\n for serv_type in new_types:\n ServerTypeOp.add(serv_type)\n\n check_records = ServerTypeOp.get()\n\n self.assertEqual(len(check_records), len(new_types))\n\n for record, exp_s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function verifies if the user intended to delete the row.
def verifyDeleteRow(self, inputWindow, entry, database): verificationWindow = Toplevel(inputWindow) Label(verificationWindow, text=f"Do you want to delete row with index {entry.get()}?").grid( row=0, column=0) yesBtn = Button(verificationWindow, text="Yes", command=...
[ "def deleteRow(self, *args) -> \"bool\" :\n return _core.TableCommandInput_deleteRow(self, *args)", "def validate_delete(self):\r\n pass", "def userDeleteEntry(self):\n delete_term = input(\"What do you want to delete?\")\n result = self.searchDatabase(delete_term)\n print(res...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function adds a row to a selected table from self.databases.
def addTableRow(self, database): inputWindow = Toplevel(self.root) self.createEntries(inputWindow, database) inputBtn = Button(inputWindow, text="Submit Data", command=lambda: self.handleAddRecord(inputWindow, database)) inputBtn.grid(row=len(database.items()) +...
[ "def add_database_entry(self, row):\n database = self.open_database_ab()\n pickle.dump(row, database)\n self.close_database(database)", "def addRow(self, row_info):\n pass", "def add_row(self, row):\n self.results_table_rows.append(row)", "def add_table_entry(self, table_id,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function creates window to enter row index and on button click verifies if user intended to delete a row.
def deleteTableRow(self, database): inputWindow = Toplevel(self.root) Label(inputWindow, text="Enter the index of a row to delete:").grid( row=1, column=0, padx=15, pady=10) entry = Entry(inputWindow) entry.grid(row=1, column=1, padx=15, pady=10) inputBtn = Button(i...
[ "def verifyDeleteRow(self, inputWindow, entry, database):\n verificationWindow = Toplevel(inputWindow)\n Label(verificationWindow, text=f\"Do you want to row with index {entry.get()}?\").grid(\n row=0, column=0)\n\n yesBtn = Button(verificationWindow, text=\"Yes\",\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function verifies if the user intended to delete the table.
def verifyDeleteTable(self, inputWindow, entry): verificationWindow = Toplevel(inputWindow) Label(verificationWindow, text=f"Do you want to delete table with index {entry.get()}?").grid( row=0, column=0) yesBtn = Button(verificationWindow, text="Yes", command...
[ "def validate_delete(self):\r\n pass", "def test_delete_schema(self):\n try:\n Draft4Validator.check_schema(token.TokenView2.DELETE_SCHEMA)\n schema_valid = True\n except RuntimeError:\n schema_valid = False\n\n self.assertTrue(schema_valid)", "def us...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function creates window to enter table index and on button click verifies if user intended to delete a table.
def deleteTable(self): inputWindow = Toplevel(self.root) Label(inputWindow, text="Enter the index of a table to delete:").grid( row=1, column=0, padx=15, pady=10) entry = Entry(inputWindow) entry.grid(row=2, column=1) inputBtn = Button(inputWindow, text="Submit Dat...
[ "def verifyDeleteTable(self, inputWindow, entry):\n verificationWindow = Toplevel(inputWindow)\n Label(verificationWindow, text=f\"Do you want to delete table with index {entry.get()}?\").grid(\n row=0, column=0)\n\n yesBtn = Button(verificationWindow, text=\"Yes\",\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This is the main function of the program. It creates the main view with a TreeView and appends data to it. It adds buttons to manipulate tables and rows.
def createMainInterface(self): self.root.title("Database Client") for widget in self.root.winfo_children(): widget.destroy() Label(self.root, text='Database Client', font='Helvetica 28 bold').grid( row=0, column=0, sticky="nsew", pady=10) if len(self.databases) =...
[ "def initialize_user_interface(self):\n self.parent.title(\"Canvas Test\")\n self.parent.grid_rowconfigure(1,weight=1)\n self.parent.grid_columnconfigure(1,weight=1)\n self.parent.config(background=\"lavender\")\n\n\n # Define the different GUI widgets\n #self.dose_label = ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the conversation of this ClientRemoveEventAllOfPayload.
def conversation(self, conversation): if type(conversation) is Undefined: conversation = None self.nulls.discard("conversation") elif conversation is None: self.nulls.add("conversation") else: self.nulls.discard("conversation") self._conve...
[ "def stop_conversation(self):\n if self.assistant:\n self.assistant.stop_conversation()", "def remove_conversation_from_session():\n if 'convstatus' in session:\n session.pop('convstatus', None)\n if 'activeconv' in session:\n session.pop('activeconv', None)", "def removeco...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Tear down the testserver.
def tearDownClass(cls): # type: () -> None cls.test_server.stop() # wait a bit for server to shutdown time.sleep(1)
[ "def tearDown(self):\n self.mock_server.shutdown()", "def tearDownClass(cls):\n cls.http_server.shutdown()", "def shutdown(self):\n self._server.shutdown()", "def shutdown(self):\n if self.is_running:\n self.server.server_close()\n self.server.socket.close()\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Assert command_id and target_id.
def assert_command_id(self, request, target_id): # type: (AmsPacket, int) -> None # Check the request code received by the server command_id = request.ams_header.command_id command_id = struct.unpack("<H", command_id)[0] self.assertEqual(command_id, target_id)
[ "def assert_command_id(self, request: AmsPacket, target_id: int) -> None:\n # Check the request code received by the server\n command_id = request.ams_header.command_id\n command_id = struct.unpack(\"<H\", command_id)[0]\n self.assertEqual(command_id, target_id)", "def test__put_target...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Open plc connection twice.
def test_open_twice(self): # type: () -> None self.plc.close() with self.plc: # connection should now be open self.assertTrue(self.plc.is_open) self.plc.open() # connection should now be closed self.assertFalse(self.plc.is_open)
[ "def reuse_or_reconnect(self):\n if not self.isconnected():\n self.connect()", "def reopen_connection(self):\n self.close()\n self.open_connection()", "def connect(self):\n try:\n self.logger.debug(\"connect(), opening communication at '%s'\" % self._address)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test read_by_name method with handle passed in
def test_read_by_name_with_handle(self): # type: () -> None handle_name = "TestHandle" with self.plc: handle = self.plc.get_handle(handle_name) read_value = self.plc.read_by_name( "", constants.PLCTYPE_BYTE, handle=handle ) # Retrieve ...
[ "def test_write_by_name_with_handle(self):\n # type: () -> None\n handle_name = \"TestHandle\"\n value = \"Test Value\"\n\n with self.plc:\n handle = self.plc.get_handle(handle_name)\n self.plc.write_by_name(\"\", value, constants.PLCTYPE_STRING, handle=handle)\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test write_by_name method with handle passed in
def test_write_by_name_with_handle(self): # type: () -> None handle_name = "TestHandle" value = "Test Value" with self.plc: handle = self.plc.get_handle(handle_name) self.plc.write_by_name("", value, constants.PLCTYPE_STRING, handle=handle) # Retrieve li...
[ "def test_write_structure_by_name(self):\n # type: () -> None\n\n handle_name = \"TestHandle\"\n struct_to_write = OrderedDict([(\"sVar\", \"Test Value\")])\n value = \"Test Value\"\n\n structure_def = ((\"sVar\", pyads.PLCTYPE_STRING, 1),)\n\n # test with no structure size...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test write by structure method
def test_write_structure_by_name(self): # type: () -> None handle_name = "TestHandle" struct_to_write = OrderedDict([("sVar", "Test Value")]) value = "Test Value" structure_def = (("sVar", pyads.PLCTYPE_STRING, 1),) # test with no structure size passed in with ...
[ "def test_standardise_data(self):\n\t\tassert False, \"Write Test\"", "def test_create_writable_protocol():\n f = _WritableFile()\n WeldxFile(f, tree=dict(test=\"yes\"), mode=\"rw\")\n new_file = TestWeldXFile.make_copy(f.to_wrap)\n assert WeldxFile(new_file)[\"test\"] == \"yes\"", "...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test passthrough of FILETIME value by notification decorator
def test_notification_decorator_filetime(self): # type: () -> None @self.plc.notification(timestamp_as_filetime=True) def callback(handle, name, timestamp, value): self.assertEqual(timestamp, 132223104000000000) notification = structs.SAdsNotificationHeader() notifi...
[ "def test_atime(self, mock_datetime: MagicMock):\n mock_file_stat = MagicMock()\n self.file_path._file_stat = mock_file_stat\n self.assertEqual(mock_datetime.datetime.fromtimestamp.return_value, self.file_path.atime)\n mock_datetime.datetime.fromtimestamp.assert_called_once_with(mock_fil...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test decoding of STRING value by notification decorator
def test_notification_decorator_string(self): # type: () -> None @self.plc.notification(constants.PLCTYPE_STRING) def callback(handle, name, timestamp, value): self.assertEqual(value, "Hello world!") notification = create_notification_struct(b"Hello world!\x00\x00\x00\x00")...
[ "def test_decode():\n assert one.decode(one.encode(\"Hello World\")) == \"hello world\"", "def decode_string(self, value):\n return value", "def stringReceived(self, string):\n raise NotImplementedError()", "def test_encode_and_parse():\n note_on = Message('note_on')\n assert note_on ==...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test decoding of LREAL value by notification decorator
def test_notification_decorator_lreal(self): # type: () -> None @self.plc.notification(constants.PLCTYPE_LREAL) def callback(handle, name, timestamp, value): self.assertEqual(value, 1234.56789012345) notification = create_notification_struct( struct.pack("<d", 1...
[ "def test_decode(self):\n self.lattice.decode()\n self.assertCountEqual(self.lattice.data.flatten(), self.lattice_data.flatten())\n self.assertFalse(self.lattice.is_encoded)", "def test_valid_hook_data(self):\n value = HookDataLookup.handle(\"fake_hook::result\", context=self.ctx)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test decoding of array value by notification decorator
def test_notification_decorator_array(self): # type: () -> None @self.plc.notification(constants.PLCTYPE_ARR_INT(5)) def callback(handle, name, timestamp, value): self.assertEqual(value, [0, 1, 2, 3, 4]) notification = create_notification_struct( b"\x00\x00\x01\...
[ "def test_notification_decorator_struct_array(self):\n # type: () -> None\n\n arr_type = structs.SAdsVersion * 4\n\n @self.plc.notification(arr_type)\n def callback(handle, name, timestamp, value):\n self.assertEqual(len(value), 4)\n for i in range(4):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test decoding of array of structs value by notification decorator
def test_notification_decorator_struct_array(self): # type: () -> None arr_type = structs.SAdsVersion * 4 @self.plc.notification(arr_type) def callback(handle, name, timestamp, value): self.assertEqual(len(value), 4) for i in range(4): self.asser...
[ "def test_notification_decorator_array(self):\n # type: () -> None\n\n @self.plc.notification(constants.PLCTYPE_ARR_INT(5))\n def callback(handle, name, timestamp, value):\n self.assertEqual(value, [0, 1, 2, 3, 4])\n\n notification = create_notification_struct(\n b\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test get_handle and release_handle methods
def test_get_and_release_handle(self): # type: () -> None handle_name = "TestHandle" with self.plc: handle = self.plc.get_handle(handle_name) # Retrieve list of received requests from server requests = self.test_server.request_history # Assert that the serve...
[ "def test_write_by_name_with_handle(self):\n # type: () -> None\n handle_name = \"TestHandle\"\n value = \"Test Value\"\n\n with self.plc:\n handle = self.plc.get_handle(handle_name)\n self.plc.write_by_name(\"\", value, constants.PLCTYPE_STRING, handle=handle)\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Assert command_id and target_id.
def assert_command_id(self, request: AmsPacket, target_id: int) -> None: # Check the request code received by the server command_id = request.ams_header.command_id command_id = struct.unpack("<H", command_id)[0] self.assertEqual(command_id, target_id)
[ "def assert_command_id(self, request, target_id):\n # type: (AmsPacket, int) -> None\n # Check the request code received by the server\n command_id = request.ams_header.command_id\n command_id = struct.unpack(\"<H\", command_id)[0]\n self.assertEqual(command_id, target_id)", "de...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test read by name without passing the datatype.
def test_read_by_name_without_datatype(self) -> None: # create variable on testserver self.handler.add_variable(PLCVariable("test_var", 42, constants.ADST_INT16, "INT")) with self.plc: # read twice to show caching read_value = self.plc.read_by_name("test_var") ...
[ "def test_read_by_name_with_handle(self):\n # type: () -> None\n handle_name = \"TestHandle\"\n with self.plc:\n handle = self.plc.get_handle(handle_name)\n read_value = self.plc.read_by_name(\n \"\", constants.PLCTYPE_BYTE, handle=handle\n )\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test write_list_by_name with structure definition
def test_write_list_by_name_with_structure(self): self.handler.add_variable( PLCVariable("TestStructure", b"\x01\x00", constants.ADST_INT16, symbol_type="TestStructure")) self.handler.add_variable(PLCVariable("TestVar", 0, constants.ADST_UINT8, "USINT")) variables = ["TestStructure",...
[ "def test_write_structure_by_name(self):\n # type: () -> None\n\n handle_name = \"TestHandle\"\n struct_to_write = OrderedDict([(\"sVar\", \"Test Value\")])\n value = \"Test Value\"\n\n structure_def = ((\"sVar\", pyads.PLCTYPE_STRING, 1),)\n\n # test with no structure size...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test read_device_info for AdvancedHandler.
def test_read_device_info(self): with self.plc: name, version = self.plc.read_device_info() self.assertEqual(name, "TestServer") self.assertEqual(version.build, 3)
[ "async def get_device_info(self) -> Any:\n return await self.__send_request(EP_DEVICE_INFO)", "async def test_dli_device_info(\n hass: HomeAssistant, wemo_dli_entity, device_registry: dr.DeviceRegistry\n) -> None:\n device_entries = list(device_registry.devices.values())\n\n assert device_entries[...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test read_state for AdvancedHandler.
def test_read_state(self): with self.plc: state = self.plc.read_state() self.assertEqual(state[0], constants.ADSSTATE_RUN)
[ "def readstate(self, state):\n self.__readstate = state", "async def test_switch_read_alarm_state(hass, utcnow):\n helper = await setup_test_component(hass, create_security_system_service)\n\n helper.characteristics[CURRENT_STATE].value = 0\n state = await helper.poll_and_get_state()\n assert s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test write_control for AdvancedHandler.
def test_write_control(self): with self.plc: self.plc.write_control(constants.ADSSTATE_IDLE, 0, 0, constants.PLCTYPE_INT)
[ "def char_write(self, handle, value, wait_for_response=False):\r\n pass", "def channel_control_write(self, channel, control_dict):\n control_json = json.dumps(control_dict)\n self.send(PacketType.request_send_control, channel=channel, data=control_json)", "def test_write_by_name_with_handle...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test for proper WSTRING handling
def test_read_wstring(self): # add WSTRING variable containing 'Überraschung' expected1 = "Überraschung" expected2 = "hello world" var = PLCVariable( "wstr", expected1.encode("utf-16-le") + b"\x00\x00", constants.ADST_WSTRING, "WSTRING" ) ...
[ "def _validate_strings(self):\n pass", "def test_string_direct(self):\n for source in (\"direct\", \"default\"):\n self.assertEqual(self.setting.detect_type(u\"Hello\", source), \"unicode\")\n self.assertEqual(self.setting.detect_type(\"Hello\", source), \"unicode\")", "def t...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Is called when a task is deleted. Handles changes in storypoints.
def run_before_delete_task(sender, **kwargs): new_task = kwargs['instance'] # new calculation of storypoints if storypoints changes or new task added # old_task = Task.objects.filter(pk=new_task.id) # if Task.objects.filter(pk=new_task.id).exists(): # new_task.storypoints = 0 new_task.storypo...
[ "def delete(self, *args, **kwargs):\n try:\n self.terminate_task()\n self.periodic_task.delete()\n except:\n pass\n return super(ShoalScrapeTask, self).delete(*args, **kwargs)", "def remove_task(self, task):\n with db:\n task.delete_instance(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Is called when a task changes. Handles changes in storypoints.
def run_before_saving_task(sender, **kwargs): new_task = kwargs['instance'] old_storypoints = -1 # new calculation of storypoints if storypoints changes or new task added if Task.objects.filter(pk=new_task.id).exists(): old_storypoints = Task.objects.get(pk=new_task.id).storypoints old_f...
[ "def task_added(self, task):\n pass", "def task_started(self, task):\n pass", "def task_bypassed(self, task):\n pass", "def process_task(self, task):\n raise NotImplementedError()", "def config_changed(self):\n from flexget.task import config_changed\n for task in self.tasks:\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
find total rows just by checking unique user ids that have a connection log with at least one "bytes_in" in details
def __getInOutUsageTotalRows(self, conditions): total_rows_query = "select count(distinct user_id) as count from connection_log where " + \ "%s and "%conditions + \ "connection_log_id in " + \ "(select connection_log_details.con...
[ "def __getUniqueUserIDs(self, conditions):\n total_rows_query = \"select count(distinct user_id) as count from connection_log where \" + conditions\n return db_main.getHandle().selectQuery(total_rows_query)[0][\"count\"]", "def _count_users(self, txn: LoggingTransaction, time_from: int) -> int:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
merge in_usages and out_usages and return a list in the format [[user_id, user_repr, in_usage, out_usage], ...]
def __createInOutUsageReportList(self, in_usages, out_usages): out_usage_dic = {} for user_id, out_usage in out_usages: out_usage_dic[user_id] = out_usage inout_usage = [] for user_id, in_usage in in_usages: inout_usage.append([user_id, ...
[ "def getUserReport(self):\n usage = []\n for proc in self.psInfo:\n newProc = True\n if len(usage) > 0:\n for p in usage:\n if p.user == proc.user:\n p.cpu += float(proc.cpu)\n p.mem += float(proc.mem...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the total number of unique user_ids matching "conditions" in connection_log
def __getUniqueUserIDs(self, conditions): total_rows_query = "select count(distinct user_id) as count from connection_log where " + conditions return db_main.getHandle().selectQuery(total_rows_query)[0]["count"]
[ "def __getInOutUsageTotalRows(self, conditions):\n total_rows_query = \"select count(distinct user_id) as count from connection_log where \" + \\\n \"%s and \"%conditions + \\\n \"connection_log_id in \" + \\\n \"(select connect...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This is a handler for SIGTERM and SIGINT. Other signals can be mapped here as well (SIGHUP?). Basically it just sets a global flag, and main() will exit its loop if the signal is trapped.
def signal_handler(sig_num, frame): global exit_flag if sig_num == signal.SIGINT: logger.warning( " SIGINT received from the os: program terminated w/ ctrl-c" ) exit_flag = True elif sig_num == signal.SIGTERM: logger.warning(" SIGTERM received from the os: pro...
[ "def run_term_signal_handler(sig, frame):\n # pylint: disable=unused-argument\n if _config.VERBOSE_PROCESSES_ENABLED:\n print_out(\"Run process: Received termination signal ({})\".format(sig))\n\n # This triggers the registered exit handler run_exit_handler()\n raise SystemExit(1)", "def sigter...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Finds all magic words and sends back a list of
def find_magic_words(d): files = os.listdir(d) found_words = [] for file in files: with open(d + "/" + file) as f: content = f.readlines() for index, line in enumerate(content): match = re.findall(r"magic", line) if match: ...
[ "def log_magic_words(d, before):\n\n after = [w for w in find_magic_words(d)]\n added = [w for w in after if w not in before]\n removed = [w for w in before if w not in after]\n\n for word_added in added:\n print word_added\n logger.info(''' Magic word \"{}\" found at line {} in file {}'''...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Prints all the magic words to the log
def log_magic_words(d, before): after = [w for w in find_magic_words(d)] added = [w for w in after if w not in before] removed = [w for w in before if w not in after] for word_added in added: print(word_added) logger.info(''' Magic word "{}" found at line {} in file {}''' ...
[ "def print_meta(self):\n self.logger.handle('Author: {}'.format(self.meta['author']), Logger.HEADER)\n self.logger.handle('Module name: {}, version {}'.format(self.meta['name'], self.meta['version']), Logger.HEADER)\n self.logger.handle('Description: {}'.format(self.meta['description']), Logger...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Logs if a file was added or removed
def log_files(d, before): after = [f for f in os.listdir(d)] added = [f for f in after if f not in before] removed = [f for f in before if f not in after] for file_added in added: logger.info(''' File added {}''' .format(file_added)) for file_removed in removed: ...
[ "def log_delete(filename):\r\n logging.info('FILE-DELETED: \\n \\t%s\\n', filename)", "def log_moved(filename1, filename2):\r\n logging.info('FILE-MOVED: \\n \\t%s -- to: %s\\n', filename1, filename2)", "def test_logging_file_moved(self):\n logging.info('Testing file moving')\n logging.shutd...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Attempt Tumblr gallery download
def test_gallery(self): _task, _prog, _file = mock_handler_request(self.dir, 'https://theshadowmoose.tumblr.com/post/184562233420/test-rmd-image-upload') res = tumblr.handle(_task, _prog) self.assertTrue(res, "Tumblr gallery search failed! %s" % res.failure_reason) self.assertEqual(len(res.album_urls), 2, "Hand...
[ "def downloading_all_photos(self):\n self.create_folder()\n pic_counter = 1\n for url_link in self.pic_url_list:\n print(pic_counter)\n pic_prefix_str = self.g_search_key + \"/\" + self.g_search_key + str(pic_counter)\n self.download_single_image(url_link.encode...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test embedded video download
def test_video(self): _task, _prog, _file = mock_handler_request(self.dir, 'theshadowmoose.tumblr.com/post/184562318724/another-test-post-with-video') res = tumblr.handle(_task, _prog) self.assertTrue(res, "Tumblr video download failed!") self.assertTrue(_file.exists(), "Tumblr video was not downloaded! %s" % r...
[ "def report_video_webpage_download( video_id):\n\tprint('[youtube] %s: Downloading video webpage' % video_id)", "def test_video_vob_should_return_true(self):\n\n video_name : str = \"video.vob\"\n\n is_video : bool = script.is_video(video_name, debug_function = True)\n\n self.assertTrue(is_vi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Trains TFIDF model given a dataframe
def train_tfidf_model(model, df): df = common.tokenise_pos_stemming(df) logger.info("training model...") train_data = df.as_matrix(columns=["all_tok_pos_stem"])[:, 0] train_labels = df.as_matrix(columns=["labelmax"])[:, 0] model.fit(train_data, train_labels) return model
[ "def tokenize_flow(df: pd.DataFrame, **tf_params) -> Tuple[TfidfVectorizer, csr_matrix]:\n if not 'stop_words' in tf_params:\n tf_params['stop_words'] = stopwords.words('english') + OUR_STOP_WORDS\n\n vectorizer = TfidfVectorizer(**tf_params)\n corpus = df['body']\n X = vectorizer.fit_transform(c...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the semantics associated with a concept. If you pass a parsed graph, the method does not load the RDF again.
def semantics(self, concept, parsed_graph=None): concept_semantics_uri = self.concept_base_uri+concept+"/semantics" semantics_predicate_uri = self.senticapi_base_uri+"semantics" if parsed_graph is None: g = rdflib.Graph() parsed_graph = g.parse(data=self._fix_rdf(concept...
[ "def polarity(self, concept, parsed_graph=None):\n concept_polarity_uri = self.concept_base_uri+concept+\"/polarity\"\n predicate_uri = self.senticapi_base_uri+\"polarity\"\n\n if parsed_graph is None:\n graph = rdflib.Graph()\n parsed_graph = graph.parse(data=self._fix_rd...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the sentics of a concept. If you pass a parsed graph, the method does not load the RDF again.
def sentics(self, concept, parsed_graph=None): concept_sentics_uri = self.concept_base_uri+concept+"/sentics" sentics = {"pleasantness": 0, "attention": 0, "sensitivity": 0, "aptitude": 0} if parsed_graph is None: graph = rdflib.Graph() parsed_graph = graph.parse(data=se...
[ "def semantics(self, concept, parsed_graph=None):\n concept_semantics_uri = self.concept_base_uri+concept+\"/semantics\"\n semantics_predicate_uri = self.senticapi_base_uri+\"semantics\"\n\n if parsed_graph is None:\n g = rdflib.Graph()\n parsed_graph = g.parse(data=self._...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the polarity of a concept. If you pass a parsed graph, the method does not load the RDF again.
def polarity(self, concept, parsed_graph=None): concept_polarity_uri = self.concept_base_uri+concept+"/polarity" predicate_uri = self.senticapi_base_uri+"polarity" if parsed_graph is None: graph = rdflib.Graph() parsed_graph = graph.parse(data=self._fix_rdf(concept_polar...
[ "def polarity(self) -> Polarity:\n try:\n cid = self.data[0][0][\"identification\"]\n except IndexError:\n return Polarity(\"positive\")\n return Polarity(cid.mz_references[0].detected_polarity)", "def polarity(text):\n \n vader_analyzer = SentimentIntensityAnalyze...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
plots a quick geoid undulation map
def TEST_Map_Geoid(): HC, HS = imp.Fetch_Coef("full4") lmax = 10; mins = 600; levels = 70; title = f"Map of Geoid undulation" fig = Map_Geoid(mins, levels, title, lmax, HC, HS)
[ "def plotGlobe3D():", "def PlotTomoMap(fname, dlon=0.5, dlat=0.5, title='', datatype='ph', outfname='', browseflag=False, saveflag=True):\n if title=='':\n title=fname;\n if outfname=='':\n outfname=fname;\n Inarray=np.loadtxt(fname)\n LonLst=Inarray[:,0]\n LatLst=Inarray[:,1]\n ZV...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Default heuristic used for the Overkill bot: finds the highest yield for unoccupied squares, or the sum of the effective overkill for a given opponent square
def __heuristic(self, game_map, square): if square.owner == 0 and square.strength > 0: return square.production / square.strength else: return sum(neighbor.strength for neighbor in game_map.neighbors(square) if neighbor.owner not in (...
[ "def best_hand(hand):\n\treturn max(itertools.combinations(hand, 5), key=hand_rank)", "def greedy_heuristic(protocol_obj):\n if protocol_obj.happyness == 0:\n return protocol_obj.exp_extraction * 1.15 # 15% increase\n else: \n return protocol_obj.exp_extraction", "def best_hand(hand):\n re...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check that implicit url reversing works.
def test_reverse_implicit(): assert reverse("api-1.0.0:list_events") == "/api/events"
[ "def test_swagger_docs_reverses_to_correct_url(self):\n url = reverse('swagger-docs')\n self.assertEqual(url, '/docs/')", "def test_users_reverses_to_correct_url(self):\n url = reverse('api:users')\n self.assertEqual(url, '/v1/users/')", "def test_lazy_reverse(self):\n resolve...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Apply non-reflexive borders in a wave_field
def non_reflexive_border_2d(wave_field, V, dx, dz, dt): P = wave_field # Left P[2, 0:2, :] = P[1, 0:2, :] + (V[0:2, :]*dt/dz) * ( P[1, 1:3, :] - P[1, 0:2, :]) # Right P[2, -2:, :] = P[1, -2:, :] - (V[-2:, :]*dt/dz) * ( P[1, -2:, :] - P[1, -3:-1, :]) # Bottom P[2, :, -2:] = P[1, :, -2:] - ...
[ "def set_fwdboundary_complexhull(self, outline,fs):\n print '\\n\\n set_fwdboundary_complexhull\\n\\n'\n bhull = self.barehull.hullfwd.get_finest_surface()\n bbow = self.bulb.hullbulb\n \n \n \n \n \n nvts = len(outline)#number of vertices \...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Obtaining the Shape of the network's input.
def get_input_shape(self): return self.network.inputs[self.input_blob].shape
[ "def shape(self):\n return self._input.shape", "def shape(x):\n\treturn tf.shape(x)", "def shape(input, name=None):\n return array_ops.shape(input, name=name)", "def infer_image_input_shape(self) -> Optional[Tuple[int, int]]:\n return self.output_shape", "def infer_image_input_shape(self) -...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Starting an Asynchronous Inference on the Input Image/Video Frame.
def exec_net(self, request_id, frame): self.inference_handler = self.inference_plugin.start_async(request_id=request_id, inputs={self.input_blob: frame}) return self.inference_plugin
[ "def infer_on_stream(args, client):\n # Initialize the Inference Engine\n infer_network = Network()\n\n # Set Probability threshold for detections\n prob_threshold = args.prob_threshold\n\n # Load the model through `infer_network`\n infer_network.load_model(args.model, args.device, CPU_EXTENSION, ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a training loop from the given step function and options.
def _create_train_loop_fn(train_step_fn, options: StandardTrainerOptions): if options.use_tf_while_loop: loop_fn = loop_fns.create_tf_while_loop_fn(train_step_fn) if options.use_tpu_summary_optimization: loop_fn = loop_fns.LoopFnWithSummaries(loop_fn) else: loop_fn = tf.function(loop_fn) els...
[ "def _create_train_step_node(self):\n\n with tf.name_scope(\"train\"):\n if self.opt == 'gradient_descent':\n self.train_step = tf.train.GradientDescentOptimizer(self.learning_rate).minimize(self.cost)\n\n # Below are used for debug purpose only\n # [se...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The current training dataset.
def train_dataset(self): return self._train_dataset
[ "def get_train_dataset(self) -> Dataset:\n return self.train_data", "def training_set(self) -> tf.data.Dataset:\n return self._training_set", "def get_training_data(self):\r\n\r\n if self._training_data is None:\r\n idx = int(math.ceil(\r\n self._training_split * l...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets a new training dataset, replacing the current one. Any unprocessed examples in the current dataset are discarded.
def train_dataset(self, train_dataset): self._train_dataset = train_dataset self._train_iter = None
[ "def update_train_dataset(self, extra_dataset: Dataset) -> None:\n self.train_data = ConcatDataset([self.train_data, extra_dataset])", "def training_set(self, _training_set: tf.data.Dataset) -> None:\n self._training_set = _training_set", "def dataset(self, dataset):\n if self._dataset is n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The current evaluation dataset.
def eval_dataset(self): return self._eval_dataset
[ "def getEvaluation(self):\n return self.fulldata", "def build_eval_dataset(self):\n pass", "def _make_train_eval_dataset(self):\n return self._train_dataset.take(-1) # Take all.", "def get_eval_dataset(self, stage_id: int) -> tf.data.Dataset:\n pass", "def active_dataset(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets a new eval dataset, replacing the current one. Any unprocessed examples in the current dataset are discarded.
def eval_dataset(self, eval_dataset): self._eval_dataset = eval_dataset
[ "def _make_train_eval_dataset(self):\n return self._train_dataset.take(-1) # Take all.", "def eval_dataset(self):\n return self._eval_dataset", "def build_eval_dataset(self):\n pass", "def setDataSet( self, dataSet ):\n self._dataSet = dataSet", "def _extract_and_set_eval_batches(self) ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Global Conv Block for GCN
def GlobalConvBlock(inputs, n_filters=21, size=3): net_1 = slim.conv2d(inputs, n_filters, [size, 1], activation_fn=None, normalizer_fn=None) net_1 = slim.conv2d(net_1, n_filters, [1, size], activation_fn=None, normalizer_fn=None) net_2 = slim.conv2d(inputs, n_filters, [1, size], activation_fn=None, normal...
[ "def vgg_block(layer_in, num_filters, num_conv):\n\n\tfor _ in range(num_conv):\n\t\tlayer_in = Conv2D(num_filters, (3,3), padding=\"same\", activation=\"relu\")(layer_in)\n\n\tlayer_in = MaxPooling2D((2,2), strides=(2,2))(layer_in)\n\treturn layer_in", "def __init__(self, input_size, input_dim, hidden_dim, kerne...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Replaces the existing LUNs with lun_list.
def replace_lun(self, *lun_list): lun_add = self._prepare_luns_add(lun_list) lun_remove = self._prepare_luns_remove(lun_list, False) return self.modify(lun_add=lun_add, lun_remove=lun_remove)
[ "def init_lun_list(self):\n\n self.luns = []\n if not os.path.exists(self.device_dir):\n return\n\n unsorted_luns = []\n\n pattern = os.path.join(\n BASE_SYSFS_SCSI_DEVICES_DIR, ('%d:[0-9]*:[0-9]*:[0-9]*' % (self.host_id)))\n\n if self.verbose > 2:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates the LUNs in CG, adding the ones in `add_luns` and removing the ones in `remove_luns`
def update_lun(self, add_luns=None, remove_luns=None): if not add_luns and not remove_luns: log.debug("Empty add_luns and remove_luns passed in, " "skip update_lun.") return RESP_OK lun_add = self._prepare_luns_add(add_luns) lun_remove = self._prepar...
[ "def map_luns(self, *params):\n if not params or len(params)==0:\n raise TypeError(\"map_luns takes at lease 1 argument 0 given.\")\n elif params and len(params)>2:\n raise TypeError(\"map_luns takes at lease 1 argument %u given.\" %(len(params)))\n disk=params[0]\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Attaches all member LUNs to the specified host.
def attach_to(self, host=None, access_mask=HostLUNAccessEnum.BOTH): raise NotImplementedError()
[ "def hostAdd(self, host):\n h = self.addHost(host[0], ip=host[1], mac=host[3])\n\n self.addLink(host[4], h, host[5], 1)", "def install(self, host, otherhosts):\n # TODO: Implement this.\n raise Exception( \"Not implemented\" )", "def put_all_hsm_hosts_to_maintenance(request, ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Detaches all members from host; if host is None, detaches from all hosts.
def detach_from(self, host=None): raise NotImplementedError()
[ "def detach_and_delete_ports(self):\n detach_and_delete_ports(self._connection, self._node,\n self.created_ports, self.attached_ports)", "def ex_balancer_detach_members(self, balancer, members):\r\n accepted = self.ex_balancer_detach_members_no_poll(balancer, members)\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute covariance matrix with respect to given weights
def var_cov_matrix(df, weigths): sigma = np.cov(np.array(df).T, ddof=0) var = (np.array(weigths) * sigma * np.array(weigths).T).sum() return var
[ "def cov_matrix(self, params):\n # if no model is specified, the PSD model is just the PSD value in each frequency bin\n # note the factor of 2 to integrate over the negative frequencies too!\n if self.model is None:\n psd = np.exp(np.array([params[p].value for p in params])) * self....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
this function is used to get the average time for every student activity per day
def getAverageTime(pathName): dfAcitivity = pd.read_csv(pathName) # rename the columns for better usage dfAcitivity.columns = ['timestamp', 'activity_inference'] totalTimeStationary, totalTimeWalking, totalTimeRunning, unknownTime = 0, 0, 0, 0 # record every record and find the total time for thre...
[ "def getAveragedHourActivity(self):\n averagedHourActivity = [0] * 24\n weekAct = self.weekdayActivity\n firstAvailableWeekDay = 0\n\n for weekDay in weekAct:\n firstAvailableWeekDay = weekDay\n break\n\n numDays = weekAct[firstAvailableWeekDay][1] * 7\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines a decorator for accessing API calls. Use it by specifying the app, the API method, followed by the permissions necessary to execute the method.
def rpc(app, command, method="POST", permission_required=None): def _decorator(func): @app.route('/' + command, "POST") @app.route('/' + command, "OPTIONS") @app.route('/' + command, method) def _wrapper(argument=None): response.content_type = 'application/json; charset=u...
[ "def api_action(**unscoped_kwargs):\n\n def _api_handler(func):\n\n @functools.wraps(func)\n async def wrapper(self, *args, **kwargs):\n try:\n return await func(self, *args, *kwargs)\n except Exception as ex:\n return web.json_response(data={\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an instance of HyperedgeAggregator layer.
def __init__(self, dimensions, attention_aggregator, dropout=0.5): super(HyperedgeAggregator, self).__init__() self.dimensions = dimensions self.linear = torch.nn.Linear(self.dimensions, self.dimensions) self.attention_aggregator = attention_aggregator if self.attention_aggregato...
[ "def __init__(\n self, dimensions, aggregate_method,\n link_pred_method='hadamard-product'):\n super(HyperedgePoolingLayer, self).__init__()\n self.dimensions = dimensions\n self.aggregate_method = aggregate_method\n self.link_pred_method = link_pred_method\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an instance of NodeDistributor layer.
def __init__(self, dimensions, dropout=0.5): super(NodeDistributor, self).__init__() self.dimensions = dimensions self.dropout = dropout self.linear = torch.nn.Linear(self.dimensions, self.dimensions)
[ "def __init__(self, nodes=None, replicas=3, distribution_points=3):\n self.distribution_points = distribution_points\n self.replicas = replicas\n\n self.ring = dict()\n self._sorted_keys = []\n\n if nodes:\n for node in nodes:\n self.add_node(node)", "d...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reinitializes manually created torch parameters.
def _reset_parameters(self): torch.nn.init.xavier_normal_(self.initial_embeddings)
[ "def reset_parameters(self):\n logger.info('===== Initialize %s with Xavier uniform distribution =====' % self.__class__.__name__)\n # see https://github.com/pytorch/fairseq/blob/master/fairseq/models/transformer.py\n # embedding\n nn.init.normal_(self.embed.weight, mean=0., std=self.d_m...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
set the default position of the item
def set_default_position(self, position): # default position is the starting position self.default_position = position # position is the current position of the object self.position = position
[ "def user32_SetMenuDefaultItem(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"hMenu\", \"uItem\", \"fByPos\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)", "def set_fake_position(self, pos):\n raise NotImplementedError()", "def set_current(sel...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
make the bird fly; if up is true, it will fly up, otherwise fly down
def fly(self, up, game_display): if up: self.up(game_display) # reset the downward speed after fly up because downwards speed is 0 after flying up self.down_v = 0 else: self.down(game_display)
[ "def fly(self) -> None:", "def Fly_movement(self):\n num = len(self.points)\n if self.points[self.i][0] == self.points[(self.i+1) % num][0] and self.points[self.i][1] < self.points[(self.i+1) % num][1]: # down\n if self.hit_box.y > self.points[(self.i+1) % num][1]:\n self.i...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
game intro in shell, this will also set the difficulty
def shell_intro(self): print(''' ---------------------------------------------------------- Welcome to Flappy Bird. Below are the game controls: Fly the bird: Press Space or Up Arrow key Quit: Click the exit button or press Q ----------------------------------------...
[ "def run():\n\n clear_shell()\n play(choose_difficulty())", "def intro(level):\n os.system('clear')\n print(\"\\nLEVEL \", level)\n if level == 1:\n print(\"\"\"\n You are poor farmer but you have a dream.\n You want to sail the ship. Someone in city sell tickets.\n But last night y...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a dictionary of each word and the no. of times they appear. You can assume that `phrase` does not contain any punctuation and that each word is separated with a space.
def count_words(phrase): word_counts = {} for word in phrase.split(' '): if word in word_counts: word_counts[word] += 1 else: word_counts[word] = 1 return word_counts
[ "def word_count(self):\n\n # Split by non-alphanumerical boundaires\n split_text = re.split('\\W',self.text.lower())\n\n # Count occurences\n counts = {}\n for word in split_text:\n if word:\n if word in counts:\n counts[word] += 1\n else:\n counts[word] = 1\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the list of melons that cost `price`.
def print_melon_at_price(price):
    melon_prices = {
        2.50: ['Cantaloupe', 'Honeydew'],
        2.95: ['Watermelon'],
        3.25: ['Musk', 'Crenshaw'],
        14.25: ['Christmas']
    }
    if price not in melon_prices:
        return
    return sorted(melon_prices[price])
[ "def compex_by_price(self, price):\n complexs = [i for i in self if 'комплексные обеды' in i['category']]\n for c in complexs:\n if c['price'] == price:\n return c", "def scrape_prices(self) -> list:\r\n cars = self.__cars\r\n prices = []\r\n for car i...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
assign canvas to tool
def setCanvas(self,c): self['canvas'] = c
[ "def canvas_changed(self):", "def canvas(self, canv: tkinter.Canvas, x: int, y: int, tag: str) -> None:\n if self._pos_type is not None and self._pos_type is not GeoManager.CANVAS:\n raise ValueError(\"Can't add already positioned slot!\")\n obj_id = canv.create_window(\n x, y,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get position of canvas
def canvasPos(self):
    pos = pos_minus(local.mouse.pos, self['screenpos'])
    pos = SafeSize(pos)
    pos = min(pos[0], self['size'][0] - 1), min(pos[1], self['size'][1] - 1)
    return pos
[ "def canvas_pos(self, canv: tkinter.Canvas) -> Tuple[int, int]:\n if self._canv_info is not None:\n _, x, y = self._canv_info\n return x, y\n raise ValueError('Not on a canvas!')", "def get_canvas_coords(self, **kwargs):\r\n return CanvasCoords(self.sc, **kwargs)", "de...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
set canvas to all the tools
def setCanvas(self,c): self['canvas'] = c
[ "def set_current_canvas(canvas):\n # Notify glir \n canvas.context._do_CURRENT_command = True\n # Try to be quick\n if canvasses and canvasses[-1]() is canvas:\n return\n # Make this the current\n cc = [c() for c in canvasses if c() is not None]\n while canvas in cc:\n cc.remove(c...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
open all the mines
def openAll(self):
    for n, m in self.getMines():
        if m['state'] == 'flag' and m['flag'] != 'mine':
            m['state'] = 'wrongflag'
        elif m['flag'] == 'mine' and m['state'] != 'touchmine':
            m['state'] = 'mine'
        elif m['state'] != 'touchmine':
            m['state'] = m['flag']
[ "def open_maze(self):\n with open(\"maze.txt\", \"r\") as read_maze:\n self.p_maze.extend(read_maze.readlines())\n for i, elt in enumerate(self.p_maze):\n self.p_maze[i] = self.p_maze[i].strip(\"\\n\")", "def startAll(self):\n for core in self.core_list:\n try...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the sensor value. Returns -999 if the request fails
def get_sensor_value_from_pin(pin):
    try:
        response = mybolt.analogRead(pin)
        data = json.loads(response)
        if data["success"] != 1:
            print("Request not successful")
            print("This is the response->", data)
            return -999
        sensor_value = int(data["value"])
        ...
[ "def get_sensor_value(self, sensor_name):\n yield self._client.until_synced()\n response = yield self._client.req.sensor_value(sensor_name)\n if not response.reply.reply_ok():\n raise WorkerRequestError(response.reply.arguments[1])\n raise Return(response.informs[0].arguments[...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Turn a file name into a module name
def file_to_module(filename): return filename[:-3].replace("/", ".")
[ "def nonColonizedName_to_moduleName(name):\r\n return re.sub('\\.', '_', name)", "def _modulenamemangle(self, modfilename):\n if not self.source:\n return modfilename\n return os.path.splitext(os.path.basename(modfilename))[0]", "def _module_fullname_to_path(fullname):\n return fu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Turn a directory name into a module
def dir_to_module(filename): return filename.replace("/", ".")
[ "def nonColonizedName_to_moduleName(name):\r\n return re.sub('\\.', '_', name)", "def _module_fullname_to_path(fullname):\n return fullname.replace(\".\", os.sep) + \".py\"", "def get_module_dir_by_obj_name(obj_name: str) -> str:\n return os.path.dirname(get_module_file_by_obj_name(obj_name))", "def ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Assign colors by module goal
def pick_color(goal):
    if goal[-3:] == ".py":
        goal = file_to_module(goal)
    for name, clr in zip(NAMES, COLORS):
        if goal.startswith(name):
            return clr
    return "green"
[ "def mcolor(method):\n if method == \"simpson\":\n return \"red\"\n elif method == \"center\":\n return \"blue\"\n elif method == \"trapeze\":\n return \"green\"", "def obt_color_pieza(self):\n pass", "def base_color(self):\n ...", "def switch_color(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the minimum and maximum of the data in parallel.
def min_and_max(data, block_shape=None, n_threads=None, mask=None, verbose=False, roi=None):
    n_threads = multiprocessing.cpu_count() if n_threads is None else n_threads
    blocking = get_blocking(data, block_shape, roi, n_threads)
    n_blocks = blocking.numberOfBlocks

    @threadpool_limits.wrap(limits=1)
    # res...
[ "def get_min_max_values(self):\n\n min_val = min(self.array_like)\n max_val = max(self.array_like)\n\n self.min_max_val = (min_val, max_val)", "def calc_minmax(data):\n channels = data[0].shape[1]\n \n # find minimum of each channel\n mins = np.ones(channels) * 10000\n maxs = n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that extractions can be made for custom span group names
def test_custom_span_group_name(self):
    # let's make sure that this pipe has been initialized
    # At least for MacOS and Linux which are currently supported...
    # allow default QuickUMLS (very small sample data) to be loaded
    nlp = spacy.blank("en")
    custom_span_group_name = "my_ow...
[ "def test_grouping_attribute() -> None:\n g = Grouping()\n assert g._groups == []", "def test_invalid_span_exception(self):\n control_data = control_dict_for_testing(\n \"\"\"\n classic-tag-block:\n not-a-span:\n artist: Some Artist\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns True if n is prime.
def _isPrimeN(n):
    if n == 1:
        return False
    if n == 2:
        return True
    if n == 3:
        return True
    if n % 2 == 0:
        return False
    if n % 3 == 0:
        return False
    i = 5
    w = 2
    while i * i <= n:
        if n % i == 0:
            return False
        ...
[ "def primep(n):\n return fermat_test(n) and miller_rabin_test(n)", "def isprime(n):\n _validate_int(n)\n # Deal with trivial cases first.\n if n < 2:\n return False\n elif n == 2:\n return True\n elif n%2 == 0:\n return False\n elif n <= 7: # 3, 5, 7\n return True...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }