Dataset schema (one record per row below):
- query (string, 9 to 9.05k chars): natural-language description of a code snippet
- document (string, 10 to 222k chars): the matching (positive) code snippet
- negatives (list of 19-20 strings): hard-negative code snippets
- metadata (dict): the training objective for the record
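The four columns line up as (anchor, positive, negatives) triplets for retrieval training. Below is a minimal sketch of loading and inspecting such a dataset with the Hugging Face `datasets` library; the repository id is a placeholder, since this dump does not name the dataset.

```python
from datasets import load_dataset

# Hypothetical repository id -- substitute the real one for this dump.
ds = load_dataset("your-org/code-retrieval-triplets", split="train")

for row in ds.select(range(2)):
    print(row["query"])           # natural-language docstring
    print(row["document"][:80])   # positive code snippet (can be very long)
    print(len(row["negatives"]))  # 19-20 hard-negative snippets
    print(row["metadata"])        # {"objective": {"triplet": [...]}}
```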
Create an Azure Cosmos DB MongoDB database
def cli_cosmosdb_mongodb_database_create(client, resource_group_name, account_name, database_name, throughput=None, ...
[ "def cli_cosmosdb_database_create(client, database_id, throughput=None):\n return client.CreateDatabase({'id': database_id}, {'offerThroughput': throughput})", "def create_db_collections():\n db_name = container[Configuration]['mongodb']['db_name'].get()\n typer.echo(f\"Creating mongodb collections in {d...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB MongoDB database exists
def cli_cosmosdb_mongodb_database_exists(client, resource_group_name, account_name, database_name):
    try:
        client.get_mongo_db_database(resource_group_name, account_name, database_name) ...
[ "def checkExistence_DB(self):\n DBlist = self.client.list_database_names()\n if self.DB_NAME in DBlist:\n # print(f\"DB: '{self.DB_NAME}' exists\")\n return True\n # print(f\"DB: '{self.DB_NAME}' not yet present OR no collection is present in the DB\")\n return Fals...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create an Azure Cosmos DB MongoDB collection
def cli_cosmosdb_mongodb_collection_create(client, resource_group_name, account_name, database_name, collection_name, ...
[ "def create_db_collections():\n db_name = container[Configuration]['mongodb']['db_name'].get()\n typer.echo(f\"Creating mongodb collections in {db_name} database\")\n event_loop = container[EventLoopBase]\n with click_spinner.spinner():\n event_loop.run(container[AsyncMongoDBUtils].create_indexes...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates an Azure Cosmos DB MongoDB collection
def cli_cosmosdb_mongodb_collection_update(client, resource_group_name, account_name, database_name, collection_name, ...
[ "def update_collection(self, collection, doc):\n\n\t\ttry:\t\n\t\t\tself.db[collection].update({'_id' : ObjectId(doc['_id'])},\n\t\t\t\t\t\t\t\t\tdoc,\n\t\t\t\t\t\t\t\t\tupsert = False)\n\t\texcept Exception as e:\n\t\t\tlogging.error(\"[{}] : {}\".format(sys._getframe().f_code.co_name,e))\n\t\t\texit(1)", "def u...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB MongoDB collection exists
def cli_cosmosdb_mongodb_collection_exists(client, resource_group_name, account_name, database_name, collection_name):
    try:
        client.get_m...
[ "def cli_cosmosdb_mongodb_database_exists(client,\n resource_group_name,\n account_name,\n database_name):\n try:\n client.get_mongo_db_database(resource_group_name, account_name, databa...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB Cassandra keyspace exists
def cli_cosmosdb_cassandra_keyspace_exists(client, resource_group_name, account_name, keyspace_name):
    try:
        client.get_cassandra_keyspace(resource_group_name, account_name, keyspac...
[ "def cli_cosmosdb_cassandra_table_exists(client,\n resource_group_name,\n account_name,\n keyspace_name,\n table_name):\n try:\n client.get_cassandra_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create an Azure Cosmos DB Cassandra table
def cli_cosmosdb_cassandra_table_create(client, resource_group_name, account_name, keyspace_name, table_name, schema, ...
[ "def createAzureTable(self):\n self.table_service = TableService(self.account_name, self.account_key)\n self.table_service.create_table(self.table_name)", "def create_cass_keyspace(cls):\n connection.setup(TESTING_IPS, INITIAL_KEYSPACE_NAME)\n # Make sure the keyspace is empty\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB Cassandra table
def cli_cosmosdb_cassandra_table_update(client, resource_group_name, account_name, keyspace_name, table_name, default_tt...
[ "def cli_cosmosdb_cassandra_table_throughput_update(client,\n resource_group_name,\n account_name,\n keyspace_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB Cassandra table exists
def cli_cosmosdb_cassandra_table_exists(client, resource_group_name, account_name, keyspace_name, table_name):
    try:
        client.get_cassandra_table(resou...
[ "def cli_cosmosdb_table_exists(client,\n resource_group_name,\n account_name,\n table_name):\n try:\n client.get_table(resource_group_name, account_name, table_name)\n except HttpResponseError as ex:\n return ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB table exists
def cli_cosmosdb_table_exists(client, resource_group_name, account_name, table_name):
    try:
        client.get_table(resource_group_name, account_name, table_name)
    except HttpResponseError as ex:
        return _handle_exis...
[ "def cli_cosmosdb_cassandra_table_exists(client,\n resource_group_name,\n account_name,\n keyspace_name,\n table_name):\n try:\n client.get_cassandra_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB SQL database throughput
def cli_cosmosdb_sql_database_throughput_update(client, resource_group_name, account_name, database_name, throughput=None, ...
[ "def cli_cosmosdb_mongodb_database_throughput_update(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB SQL container throughput
def cli_cosmosdb_sql_container_throughput_update(client, resource_group_name, account_name, database_name, container_name, ...
[ "def cli_cosmosdb_sql_container_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n con...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB SQL container throughput
def cli_cosmosdb_sql_container_throughput_migrate(client, resource_group_name, account_name, database_name, container_na...
[ "def cli_cosmosdb_mongodb_database_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB MongoDB database throughput
def cli_cosmosdb_mongodb_database_throughput_update(client, resource_group_name, account_name, database_name, th...
[ "def cli_cosmosdb_mongodb_collection_throughput_update(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB MongoDB database throughput
def cli_cosmosdb_mongodb_database_throughput_migrate(client, resource_group_name, account_name, database_name, ...
[ "def cli_cosmosdb_mongodb_collection_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB MongoDB collection throughput
def cli_cosmosdb_mongodb_collection_throughput_update(client, resource_group_name, account_name, database_name, ...
[ "def cli_cosmosdb_mongodb_database_throughput_update(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB MongoDB collection throughput
def cli_cosmosdb_mongodb_collection_throughput_migrate(client, resource_group_name, account_name, database_name, ...
[ "def cli_cosmosdb_mongodb_database_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB Cassandra keyspace throughput
def cli_cosmosdb_cassandra_keyspace_throughput_update(client, resource_group_name, account_name, keyspace_name, ...
[ "def cli_cosmosdb_cassandra_table_throughput_update(client,\n resource_group_name,\n account_name,\n keyspace_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB Cassandra keyspace throughput
def cli_cosmosdb_cassandra_keyspace_throughput_migrate(client, resource_group_name, account_name, keyspace_name, ...
[ "def cli_cosmosdb_cassandra_table_throughput_migrate(client,\n resource_group_name,\n account_name,\n keyspace_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB Cassandra table throughput
def cli_cosmosdb_cassandra_table_throughput_update(client, resource_group_name, account_name, keyspace_name, table_n...
[ "def __update_throughput(table_name, key_name, read_units, write_units):\r\n try:\r\n current_ru = dynamodb.get_provisioned_table_read_units(table_name)\r\n current_wu = dynamodb.get_provisioned_table_write_units(table_name)\r\n except JSONResponseError:\r\n raise\r\n\r\n # Check table...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB Cassandra table throughput
def cli_cosmosdb_cassandra_table_throughput_migrate(client, resource_group_name, account_name, keyspace_name, ta...
[ "def cli_cosmosdb_table_throughput_migrate(client,\n resource_group_name,\n account_name,\n table_name,\n throughput_type):\n if throughput_type == \...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB Gremlin database throughput
def cli_cosmosdb_gremlin_database_throughput_update(client, resource_group_name, account_name, database_name, th...
[ "def cli_cosmosdb_gremlin_graph_throughput_update(client,\n resource_group_name,\n account_name,\n database_name,\n graph_na...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB Gremlin database throughput
def cli_cosmosdb_gremlin_database_throughput_migrate(client, resource_group_name, account_name, database_name, ...
[ "def cli_cosmosdb_gremlin_graph_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n gra...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB Gremlin graph throughput
def cli_cosmosdb_gremlin_graph_throughput_update(client, resource_group_name, account_name, database_name, graph_name, ...
[ "def cli_cosmosdb_gremlin_database_throughput_update(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB Gremlin graph throughput
def cli_cosmosdb_gremlin_graph_throughput_migrate(client, resource_group_name, account_name, database_name, graph_name, ...
[ "def cli_cosmosdb_gremlin_database_throughput_migrate(client,\n resource_group_name,\n account_name,\n database_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update an Azure Cosmos DB table throughput
def cli_cosmosdb_table_throughput_update(client, resource_group_name, account_name, table_name, throughput=None, ma...
[ "def __update_throughput(table_name, key_name, read_units, write_units):\r\n try:\r\n current_ru = dynamodb.get_provisioned_table_read_units(table_name)\r\n current_wu = dynamodb.get_provisioned_table_write_units(table_name)\r\n except JSONResponseError:\r\n raise\r\n\r\n # Check table...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Migrate an Azure Cosmos DB table throughput
def cli_cosmosdb_table_throughput_migrate(client, resource_group_name, account_name, table_name, throughput_type):
    if throughput_type == "autoscale"...
[ "def cli_cosmosdb_cassandra_table_throughput_migrate(client,\n resource_group_name,\n account_name,\n keyspace_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Show the identity associated with a Cosmos DB account
def cli_cosmosdb_identity_show(client, resource_group_name, account_name):
    cosmos_db_account = client.get(resource_group_name, account_name)
    return cosmos_db_account.identity
[ "def identity(self):\n return self.data['identity']", "def _get_account_id(self) -> str:\n return self._post(\n DEXCOM_AUTHENTICATE_ENDPOINT,\n json={\n \"accountName\": self._username,\n \"password\": self._password,\n \"application...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update the identities associated with a Cosmos DB account
def cli_cosmosdb_identity_assign(client, resource_group_name, account_name, identities=None):
    existing = client.get(resource_group_name, account_name)
    SYSTEM_ID = '[system]'
    enable_system = identities is Non...
[ "def cli_cosmosdb_identity_remove(client,\n resource_group_name,\n account_name,\n identities=None):\n\n existing = client.get(resource_group_name, account_name)\n\n SYSTEM_ID = '[system]'\n remove_system_assign...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Remove the identities associated with a Cosmos DB account
def cli_cosmosdb_identity_remove(client, resource_group_name, account_name, identities=None):
    existing = client.get(resource_group_name, account_name)
    SYSTEM_ID = '[system]'
    remove_system_assigned_identity =...
[ "def remove_identity(self, identity):\n del self.identities[identity.uuid]\n identity.universe = None", "def delete_credentials(credentials):\n\tcredentials.delete_credentials()", "def test_v1_supervision_identities_id_delete(self):\n pass", "def delete_connected_accounts(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a virtual network rule to an existing Cosmos DB database account
def cli_cosmosdb_network_rule_add(cmd, client, resource_group_name, account_name, subnet, virtual_network=None, igno...
[ "def cli_cosmosdb_network_rule_remove(cmd,\n client,\n resource_group_name,\n account_name,\n subnet,\n virtual_network=None):\n subn...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Remove a virtual network rule from an existing Cosmos DB database account
def cli_cosmosdb_network_rule_remove(cmd, client, resource_group_name, account_name, subnet, virtual_network=None):
    subnet = _get_v...
[ "def cli_cosmosdb_mongocluster_firewall_rule_delete(client, resource_group_name, cluster_name, rule_name):\r\n\r\n return client.begin_delete_firewall_rule(resource_group_name, cluster_name, rule_name)", "def removeNetwork(conn):\n try:\n net = conn.networkLookupByName('vauto')\n except libvirt.l...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Approve a private endpoint connection request for Azure Cosmos DB.
def approve_private_endpoint_connection(client, resource_group_name, account_name, private_endpoint_connection_name, description=None):
    return _update_private_endpoint_connection_status(
        client, resource_group_name, account_name, private_endpoint_connection_name, is_...
[ "def reject_private_endpoint_connection(client, resource_group_name, account_name, private_endpoint_connection_name,\n description=None):\n\n return _update_private_endpoint_connection_status(\n client, resource_group_name, account_name, private_endpoint_connection_na...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reject a private endpoint connection request for Azure Cosmos DB.
def reject_private_endpoint_connection(client, resource_group_name, account_name, private_endpoint_connection_name, description=None):
    return _update_private_endpoint_connection_status(
        client, resource_group_name, account_name, private_endpoint_connection_name, is_ap...
[ "def reject_invitation(GraphArn=None):\n pass", "def LeaseRevoke(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def reject(principal_id, resource_arn):\n return _...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Shows an Azure Cosmos DB database
def cli_cosmosdb_database_show(client, database_id): return client.ReadDatabase(_get_database_link(database_id))
[ "def cli_cosmosdb_database_list(client):\n return list(client.ReadDatabases())", "def cli_cosmosdb_collection_show(client, database_id, collection_id):\n collection = client.ReadContainer(_get_collection_link(database_id, collection_id))\n offer = _find_offer(client, collection['_self'])\n return {'co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists all Azure Cosmos DB databases
def cli_cosmosdb_database_list(client): return list(client.ReadDatabases())
[ "def list_database(db=None):\n if db is None:\n return CONNECTION.get_connection().database_names()\n return CONNECTION.get_connection()[db].collection_names()", "def listDB(self):\n # Responses: list of db names\n return self.get(\"/_all_dbs\", descr='listDB').addCallback(\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an Azure Cosmos DB database
def cli_cosmosdb_database_create(client, database_id, throughput=None): return client.CreateDatabase({'id': database_id}, {'offerThroughput': throughput})
[ "def create_db(glue_client, account_id, database_name, description):\n try:\n glue_client.get_database(\n CatalogId=account_id,\n Name=database_name\n )\n except glue_client.exceptions.EntityNotFoundException:\n print(\"Creating database: %s\" % database_name)\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deletes an Azure Cosmos DB database
def cli_cosmosdb_database_delete(client, database_id): client.DeleteDatabase(_get_database_link(database_id))
[ "def delete_database(ctx, app_name, yes, database_id):\n logging.getLogger(\"gigalixir-cli\").info(\"WARNING: Deleting your database will destroy all your data and backups.\")\n logging.getLogger(\"gigalixir-cli\").info(\"WARNING: This can not be undone.\")\n logging.getLogger(\"gigalixir-cli\").info(\"WAR...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Shows an Azure Cosmos DB collection and its offer
def cli_cosmosdb_collection_show(client, database_id, collection_id):
    collection = client.ReadContainer(_get_collection_link(database_id, collection_id))
    offer = _find_offer(client, collection['_self'])
    return {'collection': collection, 'offer': offer}
[ "def test_get_collection(self):\n \n coluri = 'http://localhost:3000/catalog/cooee'\n \n meta = self.api.get_collection(coluri)\n \n \n self.assertEqual(coluri, meta['collection_url'])\n self.assertEqual('COOEE', meta['collection_name'])\n self.assertEq...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists all Azure Cosmos DB collections
def cli_cosmosdb_collection_list(client, database_id): return list(client.ReadContainers(_get_database_link(database_id)))
[ "def list_collections():\n\n try:\n collections = facade.list_collections(kind='document')\n return collections, 200\n except gmap_exc.DatabaseNotExist as err:\n return err.message, 400\n except Exception as err:\n return str(err), 500", "def get_all_collections(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deletes an Azure Cosmos DB collection
def cli_cosmosdb_collection_delete(client, database_id, collection_id): client.DeleteContainer(_get_collection_link(database_id, collection_id))
[ "def delete_collection(collection):\r\n collection.delete_many({})", "def delete_db_collection(self):\n self.conn.drop_collection(self.colname)", "def clear_collection(collection):\n for doc in collection.stream():\n doc.reference.delete()", "def drop_collection(self, db, col):\n se...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an Azure Cosmos DB collection
def cli_cosmosdb_collection_create(client, database_id, collection_id, throughput=None, partition_key_path=None, default_ttl=None, ...
[ "def cli_cosmosdb_mongodb_collection_create(client,\n resource_group_name,\n account_name,\n database_name,\n collection_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB Mongo Role Definition exists
def cli_cosmosdb_mongo_role_definition_exists(client, resource_group_name, account_name, mongo_role_definition_id):
    try:
        client.get_mongo_role_definition(mongo_role_defin...
[ "def cli_cosmosdb_mongo_role_definition_exists(client,\r\n resource_group_name,\r\n account_name,\r\n mongo_role_definition_id):\r\n try:\r\n client.get_mongo_role_definit...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB Mongo User Definition exists
def cli_cosmosdb_mongo_user_definition_exists(client, resource_group_name, account_name, mongo_user_definition_id):
    try:
        client.get_mongo_user_definition(mongo_user_defin...
[ "def cli_cosmosdb_mongo_user_definition_exists(client,\r\n resource_group_name,\r\n account_name,\r\n mongo_user_definition_id):\r\n try:\r\n client.get_mongo_user_definit...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB SQL Role Definition exists
def cli_cosmosdb_sql_role_definition_exists(client, resource_group_name, account_name, role_definition_id):
    try:
        client.get_sql_role_definition(role_definition_id, resource_gro...
[ "def cli_cosmosdb_mongo_role_definition_exists(client,\n resource_group_name,\n account_name,\n mongo_role_definition_id):\n try:\n client.get_mongo_role_definition(mongo_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if an Azure Cosmos DB SQL Role Assignment exists
def cli_cosmosdb_sql_role_assignment_exists(client, resource_group_name, account_name, role_assignment_id):
    try:
        client.get_sql_role_assignment(role_assignment_id, resource_gro...
[ "def exists(self, role_name):\r\n for role in self.all():\r\n if role.name.lower() == role_name.lower():\r\n return True\r\n return False", "def cli_cosmosdb_mongo_role_definition_exists(client,\n resource_group_name,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an Azure Managed Cassandra Cluster
def cli_cosmosdb_managed_cassandra_cluster_create(client, resource_group_name, cluster_name, location, delegated_managem...
[ "def cli_cosmosdb_managed_cassandra_cluster_create(client,\r\n resource_group_name,\r\n cluster_name,\r\n location,\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates an Azure Managed Cassandra Cluster
def cli_cosmosdb_managed_cassandra_cluster_update(client, resource_group_name, cluster_name, tags=None, identity_type=No...
[ "def cli_cosmosdb_managed_cassandra_cluster_update(client,\r\n resource_group_name,\r\n cluster_name,\r\n tags=None,\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
List Azure Managed Cassandra Clusters by resource group and subscription.
def cli_cosmosdb_managed_cassandra_cluster_list(client, resource_group_name=None):
    if resource_group_name is None:
        return client.list_by_subscription()
    return client.list_by_resource_group(resource_group_name)
[ "def cli_cosmosdb_managed_cassandra_cluster_list(client,\r\n resource_group_name=None):\r\n\r\n if resource_group_name is None:\r\n return client.list_by_subscription()\r\n\r\n return client.list_by_resource_group(resource_group_name)", "def cli_cosmosdb...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Invokes a command on an Azure Managed Cassandra Cluster host
def cli_cosmosdb_managed_cassandra_cluster_invoke_command(client, resource_group_name, cluster_name, command_name, ...
[ "def execute(self, cluster, commands):\n raise NotImplementedError", "def execute(cluster_yaml, cmd):\n config = load_config(cluster_yaml)\n head_updater = get_head_updater(config)\n head_updater.ssh_cmd(\" \".join(cmd), verbose=True)", "def cluster_execute(cluster_id, command):\n # TODO chec...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get Azure Managed Cassandra Cluster Node Status
def cli_cosmosdb_managed_cassandra_cluster_status(client, resource_group_name, cluster_name): return client.status(resource_group_name, cluster_name)
[ "def cluster_status():\n cluster_json = H2OConnection.get_json(\"Cloud?skip_ticks=true\")\n\n print(\"Version: {0}\".format(cluster_json['version']))\n print(\"Cloud name: {0}\".format(cluster_json['cloud_name']))\n print(\"Cloud size: {0}\".format(cluster_json['cloud_size']))\n if cluster_json['locked']: prin...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deallocate Azure Managed Cassandra Cluster
def cli_cosmosdb_managed_cassandra_cluster_deallocate(client, resource_group_name, cluster_name): return client.begin_deallocate(resource_group_name, cluster_name)
[ "def destroy_cluster(self, cluster_id):", "def host_cluster_delete(context, cluster_id, host_name):\n # If we weren't given a session, then we need to create a new one\n session = nova_db_sa_api.get_session()\n # Create a Transaction around the delete in the Database\n with session.begin():\n q...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Start Azure Managed Cassandra Cluster
def cli_cosmosdb_managed_cassandra_cluster_start(client, resource_group_name, cluster_name): return client.begin_start(resource_group_name, cluster_name)
[ "def __cassandra_connect(self):\n for i in range(10):\n try:\n self.cluster = Cluster()#['panoptes-cassandra.zooniverse.org'],protocol_version = 3)\n self.cassandra_session = self.cluster.connect('zooniverse')\n return\n except cassandra.clus...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates an Azure Managed Cassandra DataCenter
def cli_cosmosdb_managed_cassandra_datacenter_create(client, resource_group_name, cluster_name, data_center_name, ...
[ "def cli_cosmosdb_managed_cassandra_datacenter_create(client,\r\n resource_group_name,\r\n cluster_name,\r\n data_center_name,\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates an Azure Managed Cassandra DataCenter
def cli_cosmosdb_managed_cassandra_datacenter_update(client, resource_group_name, cluster_name, data_center_name, ...
[ "def cli_cosmosdb_managed_cassandra_datacenter_update(client,\r\n resource_group_name,\r\n cluster_name,\r\n data_center_name,\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Clone my dotfiles and set them up.
def clone_dotfiles():
    GIT_REPO_DOTFILES = ''
    if env.git_repo_dotfiles != '':
        GIT_REPO_DOTFILES = '%(git_repo_dotfiles)s' % env
    else:
        hr(symbol='+', width=80)
        entry = raw_input(red('Please enter the repository to your dotfiles: '))
        GIT_REPO_DOTFILES = entry
    hr(width=8...
[ "def dotfiles():\n PROJECT_ROOT = os.path.split(__file__)[0]\n HOME = os.environ[\"HOME\"]\n sources = (\n run(\n 'find \"{}\" -name \".[^.]*\" -maxdepth 1'.format(PROJECT_ROOT),\n hide='out'\n )\n .stdout\n .strip()\n .split(\"\\n\")\n )\n\n for source in sources:\n if sour...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set up database, create user, create database, set privileges.
def _set_up_database():
    print red('Start set up mysql:')
    _mysql_execute(env.db_create_user, env.db_root, env.db_root_pass)
    _mysql_execute(env.db_create_database, env.db_root, env.db_root_pass)
    _mysql_execute(env.db_grant_all_privileges, env.db_root, env.db_root_pass)
    _mysql_execute(env.db_flush_priv...
[ "def setupAllDB():\n createDatabase(CONFIG_DB['db_name'])\n runMigrations()\n setupJobTrackerDB()\n setupErrorDB()\n setupUserDB()\n setupJobQueueDB()\n setupValidationDB()", "def create_database(self):\n self._create_tables()\n self._create_functions()\n self._create_tri...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Executes the passed SQL command using the mysql shell.
def _mysql_execute(sql, user=None, password=None):
    user = user or env.conf.DB_USER
    if user == 'root' and password is None:
        password = _get_root_password()
    elif password is None:
        password = env.conf.DB_PASSWORD
    sql = sql.replace("'", r'\"')
    return run("echo '%s' | mysql --user='...
[ "def execute(self, sql, params=[]):\n #print(\"schema:\", sql)\n # Log the command we're running, then run it\n logger.debug(\"%s; (params %r)\" % (sql, params))\n if self.collect_sql:\n self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + \";\")\n e...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Restart both web servers: Nginx and Green Unicorn.
def restart_webservers():
    hr()
    print magenta('Restart Web Servers')
    hr()
    print magenta('Restart Green Unicorn..')
    sudo('stop atrend_shop_app; start atrend_shop_app')
    print magenta('Restart Nginx..')
    sudo('service nginx restart')
    hr()
    print magenta('[DONE] Web Servers is up.')
[ "def restart_webserver():\n sudo(\"stop joshgachnang\")\n # Give uwsgi time to shut down cleanly\n time.sleep(2)\n sudo(\"start joshgachnang\")\n \n \n sudo(\"/etc/init.d/nginx reload\")", "def restart_webserver():\n require('service_name')\n sudo('service nginx reload')\n try:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check whether the input variable is a Parameter.
def is_parameter(var): return isinstance(var, Parameter)
[ "def is_parameter(self, ):\n\t\tpass", "def is_param(obj):\n return isParameter(obj)", "def is_parameter(self, obj):\r\n return isinstance(obj, Tensor) and obj.is_parameter", "def is_parameter(name):\n return name.startswith('par-')", "def has_parameter(self, a_name):\n return a_name in ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Save variables to a directory using the executor.
def save_vars(executor, dirname, main_program=None, vars=None, predicate=None, save_file_name=None):
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Progr...
[ "def save_params(executor, dirname, main_program=None, save_file_name=None):\n save_vars(\n executor,\n dirname=dirname,\n main_program=main_program,\n vars=None,\n predicate=is_parameter,\n save_file_name=save_file_name)", "def save_data_dir(self):\n kill_all_p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Save all parameters to a directory using the executor.
def save_params(executor, dirname, main_program=None, save_file_name=None):
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_parameter,
        save_file_name=save_file_name)
[ "def save_params(self):\n self.autoencoder.save_parameters('/Users/wenqin/Documents/GitHub/grade-12-assignments-wenqinYe/Culminating/parameters/encoder')", "def _save(self):\n conf = {}\n for param in self.params:\n name, value = param.read()\n conf[name] = value\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load variables from a directory using the executor.
def load_vars(executor, dirname, main_program=None, vars=None, predicate=None, load_file_name=None):
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Progr...
[ "def load_persistables(executor, dirname, main_program=None,\n load_file_name=None):\n load_vars(\n executor,\n dirname=dirname,\n main_program=main_program,\n predicate=is_persistable,\n load_file_name=load_file_name)", "def load_params(executor, dirname...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load all parameters from a directory using the executor.
def load_params(executor, dirname, main_program=None, load_file_name=None):
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_parameter,
        load_file_name=load_file_name)
[ "def loadParameters(self, filepath) -> retval:\n ...", "def load(self, parts: dict, directory = None, *args, **kwargs): \n if directory is not None and '_directory' in self.basemap:\n self.basemap['_directory'] = kwargs['_directory']\n elif directory is None and '_dir...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load all persistables from a directory using the executor.
def load_persistables(executor, dirname, main_program=None, load_file_name=None):
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_persistable,
        load_file_name=load_file_name)
[ "def start_persistence(self):\n self.tasks.start_persistence()", "async def start_persistence(self):\n await self.tasks.start_persistence()", "def load(self):\n for driver_name in STORAGE:\n driver = importlib.import_module('nazurin.storage.' +\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load inference model from a directory
def load_inference_model(dirname, executor, load_file_name=None):
    if not os.path.isdir(dirname):
        raise ValueError("There is no directory named '%s'" % dirname)
    model_file_name = dirname + "/__model__"
    with open(model_file_name, "rb") as f:
        program_desc_str = f.read()
    program = Program.p...
[ "def inference(path, model_inf):\n inference_dataset = ImageDetectionDataset()\n inference_dataset.load_inference_classes()\n class_names = inference_dataset.get_class_names()\n\n define_path(path, model_inf, class_names)", "def load_model(model_dir):\n return spacy.load(model_dir)", "def load_mo...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Classify a MaxMind string as one of { null, English, non-English }.
def is_en(s):
    if s == 'nan':
        return 0
    else:
        ans = re.search(r"[a-zA-Z\']+$", s)
        return 1 if ans else 2
[ "def detect_yes_no_dk(input_str):", "def valid_lang(x: str) -> bool:\n return x in LANGS", "def non_eng(self):\n return self.raw.get(\"tags\", {\"language\": \"eng\"}).get(\"language\", \"eng\").lower() != \"eng\"", "def test_nonlatin_word_is_0_percent_latin(self):\n l = logic.Logic(\"tamasan...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Merge lat&lon to data.
def merge_geoID_data(geoID, data="../data/GeoIP2-City-CSV/GeoIP2-City-CSV_20190625/GeoIP2-City-Locations-en.csv"):
    data = pd.read_csv(data, encoding='utf-8')
    print('merging...')
    res = pd.merge(data, geoID, on='geoname_id')
    res.to_csv('../fileout/GeoIP2-City-Locations-abnormal-vsLatLot.csv')
    return r...
[ "def airdata_combine(data1,data2):\n\n lon = numpy.ma.concatenate((data1[\"lon\"],data2[\"lon\"]))\n lat = numpy.ma.concatenate((data1[\"lat\"],data2[\"lat\"]))\n alt = numpy.ma.concatenate((data1[\"alt\"],data2[\"alt\"]))\n jday = numpy.ma.concatenate((data1[\"jday\"],data2[\"jday\"]))\n data = nump...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Batch reverse geocoding.
def batch_geo_reverse_coding(data):
    # Modify default params using functools.partial
    georeverse = functools.partial(geolocator.reverse, language='en')
    location = RateLimiter(georeverse, min_delay_seconds=1)
    data['latitude'] = data['latitude'].astype(str)
    data['longitude'] = data['longitude'].astype...
[ "def cube2latlon_preprocess(x, y, xi, yi):", "def collate_fn(batch):\n # Remember batch size for later reference\n batch_size = torch.tensor(len(batch), dtype=torch.int16)\n # Prepare the list of labels\n labels = []\n # Prepare empty arrays for indices and values.\n indi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a list of integers (numbers of gpus)
def get_gpus(gpus): return list(map(int, gpus.split(',')))
[ "def count_nvidia_gpus() -> int:\n\n # I don't have nvidia-smi, but cwltool knows how to do this, so we do what\n # they do:\n # <https://github.com/common-workflow-language/cwltool/blob/6f29c59fb1b5426ef6f2891605e8fa2d08f1a8da/cwltool/cuda.py>\n # Some example output is here: <https://gist.github.com/l...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a compiled Keras LSTM model ready for training; the embedding layer comes from get_pretrained_embedding(top_words, sequence_length, word_index, pretrained). Works best with epochs=3, batch_size=256.
def lstm(top_words, sequence_length, word_index, gpus, pretrained=None):
    units = 256
    inputs = Input(shape=(sequence_length, ), dtype='int32')
    x = get_pretrained_embedding(top_words, sequence_length, word_index, pretrained)(inputs)
    x = SpatialDropout1D(0.2)(x)
    # For m...
[ "def load_pretrained_lm(vocab) :\n lm = get_language_model(AWD_LSTM, len(vocab))\n model_path = untar_data('https://s3.amazonaws.com/fast-ai-modelzoo/wt103-1', data=False)\n fnames = [list(model_path.glob(f'*.{ext}'))[0] for ext in ['pth', 'pkl']]\n old_itos = pickle.load(open(fnames[1], 'rb'))\n old...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Dictionary defining the settings that this plugin expects to receive through the settings parameter in the accept, validate, publish and finalize methods.
def settings(self): return {}
[ "def get_plugin_settings(self):\n pass", "def settings(self):\n\n opts = Expando({})\n try:\n opts = getattr(self.site.config, self.plugin_name)\n except AttributeError:\n pass\n return opts", "def get_settings(self):\r\n\r\n settings = {'serial_de...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Executes the publish logic for the given item and settings.
def publish(self, settings, item):
    publisher = self.parent
    # get the path in a normalized state. no trailing separator, separators
    # are appropriate for current os, no double separators, etc.
    path = sgtk.util.ShotgunPath.normalize(_session_path())
    # ensure the session is saved...
[ "def publish(nodeIdentifier, items, requestor):", "def publish_items(self, request, queryset):\n # We should exclude any draft copies: these can only be published \n # through merging.\n original_length = len(queryset)\n rows_updated = queryset.filter(copy_of__exact=None).update(\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Try to extract and return a version number for the supplied path.
def _get_version_number(self, path, item):
    publisher = self.parent
    version_number = None
    work_template = item.properties.get("work_template")
    if work_template:
        if work_template.validate(path):
            self.logger.debug(
                "Using work template to de...
[ "def version_from_path(self):\n try:\n self.version_label = self.path.split(\"/\")[1]\n (self.major, self.minor, self.revision) = [\n int(s) for s in self.version_label.lstrip(\"v\").split(\".\")\n ]\n except (IndexError, ValueError):\n return...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the path to the current session
def _session_path():
    path = FarmAPI.GetKatanaFileName()
    if isinstance(path, unicode):
        path = path.encode("utf-8")
    return path
[ "def _get_session_path(self, client_pid):\n return f'{self._RUN_PATH}/session-{client_pid}.json'", "def makeSessionPath(self):\n try:\n pth = os.path.join(self.getSessionsPath(), self.__uid)\n if not os.access(pth, os.F_OK):\n os.makedirs(pth)\n return...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Save the current session to the supplied path.
def _save_session(path):
    # Maya can choose the wrong file type so we should set it here
    # explicitly based on the extension
    KatanaFile.Save(path)
[ "def save(self, path):", "def save(self, must_create=False):\n self._session_key = self._get_session_key()\n self.modified = True", "def setSessionPath(self, inputSessionPath=None):\n self.__sessionPath = inputSessionPath", "def save(self, session):\n expire = datetime.now() + time...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Simple helper for returning a log action to show version docs
def _get_version_docs_action():
    return {
        "action_open_url": {
            "label": "Version Docs",
            "tooltip": "Show docs for version formats",
            "url": "https://support.shotgunsoftware.com/hc/en-us/articles/115000068574-User-Guide-WIP-#What%20happens%20when%20you%20publish"
        }
    ...
[ "def shortlog(web):\n return changelog(web, shortlog=True)", "def _cmd_help_version(self, ident, _from, to, msg, cmd):\n cinfo = self.init_cmd(ident, _from, to, msg)\n access = \"all\"\n\n if cmds[cmd][CMD_LEVEL] == 4:\n access = \"root\"\n elif cmds[cmd][CMD_LEVEL] == ir...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes |experiment| in the database by creating the experiment entity. Warning: you probably should not use this method when connected to anything other than a throwaway SQLite DB. Most of this code is copied from dispatcher.py, which usually has the job of setting up an experiment.
def _initialize_db():
    # TODO(metzman): Most of the strings in this function should probably be
    # configurable.
    db_utils.initialize()
    # One time set up for any db used by FuzzBench.
    models.Base.metadata.create_all(db_utils.engine)
    # Now set up the experiment.
    with db_utils.session_scope() a...
[ "async def add_experiment(data: Experiment) -> Experiment:\n db_connect = DBConnect()\n collection = await db_connect.get_collection(COLLECTION_NAME)\n experiment_id = await db_connect.get_next_id(COLLECTION_NAME, PREFIX)\n data.id = experiment_id\n timestamp = await get_timestamp()\n data.creatio...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Constructor. The Job is created with status JobStatus.CREATED. jobExecutionStrategy: a JobExecutionStrategy instance defining how the job must be executed.
def __init__(self, jobExecutionStrategy=None):
    if not jobExecutionStrategy:
        jobExecutionStrategy = JobExecutionStrategy()
        self._jobExecutionStrategy = jobExecutionStrategy
    else:
        self._jobExecutionStrategy = jobExecutionStrategy
    self.setStatus(Job.JobStatus.CR...
[ "def createJob(self):\n # Create job\n job = author.Job()\n job.title = self.jobTitle\n if self.paused:\n job.paused = True\n job.projects = self.projects\n job.priority = self.priority\n if self.maxActiveTgl:\n job.maxactive = self.maxActive\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Executes this job with the job strategy passed in the constructor
def execute(self): self._jobExecutionStrategy.execute(job=self)
[ "def run_job():", "def __init__(self, job):\n self._job = job", "def __init__(self, jobExecutionStrategy=None):\n if not jobExecutionStrategy:\n jobExecutionStrategy = JobExecutionStrategy()\n self._jobExecutionStrategy = jobExecutionStrategy\n else:\n self....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executePreJobs(...) will be executed.
def willExecutePreJobs(self): pass
[ "def didExecutePreJobs(self):\n pass", "def willExecuteJobs(self):\n pass", "def pre_execute(self):\r\n if self.do_before:\r\n if type(self.do_before) is list:\r\n for action in self.do_before:\r\n action(self)\r\n else:\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executePreJobs(...) did execute.
def didExecutePreJobs(self): pass
[ "def willExecutePreJobs(self):\n pass", "def didExecuteJobs(self):\n pass", "def didExecutePostJobs(self):\n pass", "def willExecuteJobs(self):\n pass", "def willExecutePostJobs(self):\n pass", "def _on_pre_execute(self):\n pass", "def on_pre_execution(action_lo...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executePostJobs(...) will be executed.
def willExecutePostJobs(self): pass
[ "def didExecutePostJobs(self):\n pass", "def didExecuteJobs(self):\n pass", "def on_post_execution(action_log_params: ActionLogParams) -> None:\n LOGGER.debug(\"Calling callbacks: {}\".format(__post_exec_callbacks))\n for call_back_function in __post_exec_callbacks:\n try:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executePostJobs(...) did execute.
def didExecutePostJobs(self): pass
[ "def willExecutePostJobs(self):\n pass", "def didExecuteJobs(self):\n pass", "def willExecuteJobs(self):\n pass", "def didExecutePreJobs(self):\n pass", "def on_post_execution(action_log_params: ActionLogParams) -> None:\n LOGGER.debug(\"Calling callbacks: {}\".format(__post_e...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executeJobs(...) will be executed.
def willExecuteJobs(self): pass
[ "def didExecuteJobs(self):\n pass", "def willExecutePostJobs(self):\n pass", "def execute(self):\n self._jobExecutionStrategy.execute(job=self)", "def didExecutePostJobs(self):\n pass", "def willExecutePreJobs(self):\n pass", "def didExecutePreJobs(self):\n pass",...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates that executeJobs(...) did execute.
def didExecuteJobs(self): pass
[ "def didExecutePostJobs(self):\n pass", "def willExecuteJobs(self):\n pass", "def willExecutePostJobs(self):\n pass", "def didExecutePreJobs(self):\n pass", "def jobs_finished(self, jobs_finished):\n\n self._jobs_finished = jobs_finished", "def job_completed(self, build_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Counts the number of pre jobs.
def preJobsCount(self): return len(self._preJobs)
[ "def count(self, args=None):\n jobs_url = \"%s/api/json\" % self.url\n req = requests.get(\n jobs_url, verify=False,\n auth=HTTPBasicAuth(self.user, self.token))\n jobs = req.json()[\"jobs\"]\n LOG.info(\"Number of jobs: {}\".format(len(jobs)))", "def count_type(s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Counts the number of post jobs.
def postJobsCount(self): return len(self._postJobs)
[ "def count(self, args=None):\n jobs_url = \"%s/api/json\" % self.url\n req = requests.get(\n jobs_url, verify=False,\n auth=HTTPBasicAuth(self.user, self.token))\n jobs = req.json()[\"jobs\"]\n LOG.info(\"Number of jobs: {}\".format(len(jobs)))", "def count_posts(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the list of pre jobs.
def getPreJobs(self): return self._preJobs
[ "def get_jobs_list(self):\n return [j['job'] for j in self._running_jobs.values()]", "def jobs(self):\n return self.get_jobs()", "def preorder_list(self)->list:\n #---- to do ----\n # complete this method by calling bst.preorder_list()\n # return a list of BST keys representin...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the list of post jobs.
def getPostJobs(self): return self._postJobs
[ "def get_jobs_list(self):\n return [j['job'] for j in self._running_jobs.values()]", "def jobpost_recent_posts(limit=5):\n return list(JobPost.objects.published()[:limit])", "def jobs(self):\n return self.get_jobs()", "def jobs(self):\n return self.job_set.all()", "def getJobs(self, ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Appends a list of jobs (i.e. a list of Job instances) to the current job list
def addJobs(self, jobs=None):
    if jobs:
        self._jobs.extend(jobs)
[ "def add_jobs(self, jobs):\n for j in jobs:\n self.add_job(j)", "def add(self, job):\n jobList = self.makelist(job)\n self.newjobs.extend(jobList)\n return", "def add_list(args):\n joblist = args.joblist\n print('Adding jobs from', joblist)\n\n assert os.path.exis...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a job to pre jobs list
def addPreJob(self, job=None):
    if job:
        self._preJobs.append(job)
[ "def add_job(self, job: QuantumInspireJob) -> None:\n self.jobs.append(job)", "def add(self, job):\n jobList = self.makelist(job)\n self.newjobs.extend(jobList)\n return", "def add_sub_job(self, job):\n job_id = job.id\n self.jobs[job_id] = job", "def add_jobs(self, j...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a job to post jobs list
def addPostJob(self, job=None):
    if job:
        self._postJobs.append(job)
[ "def addPostJobs(self, jobs=None):\n if jobs:\n self._postJobs.extend(jobs)", "def add(self, job):\n jobList = self.makelist(job)\n self.newjobs.extend(jobList)\n return", "def add_job(self, job: QuantumInspireJob) -> None:\n self.jobs.append(job)", "def put_job(s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Appends a list of jobs (i.e. a list of Job instances) to the current post jobs list
def addPostJobs(self, jobs=None):
    if jobs:
        self._postJobs.extend(jobs)
[ "def add_jobs(self, jobs):\n for j in jobs:\n self.add_job(j)", "def addJobs(self, jobs=None):\n if jobs:\n self._jobs.extend(jobs)", "def commit(self):\n self.jobs.extend(self.newjobs)\n self.newjobs = []", "def add(self, job):\n jobList = self.makelis...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Fix the corrupted account.
def fix_account(self, account):
    try:
        acc = self._get_account(account)
    except Exception:
        return False
    acc.value = 0
[ "def fix_account(self, account):\n\t\taccount.id = self.account.index(account) + 1\n\t\treturn False if self.check_corruption(account) else True", "def reset_after_failed_account_creation_or_login(self) -> None:\n self.cryptocompare.db = None", "def resetAccounts():\n \n if debug: print(\"resetAcco...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Send request to SMSC.ru service.
async def request_smsc(method, login, password, payload):
    if method not in ['send', 'status']:
        raise SmscApiError(f'unknown {method=}')
    if not payload['phones']:
        raise SmscApiError(f'unknown phones')
    url = get_url(method)
    payload.update({'login': login, 'psw': password, 'fmt': JSON_FMT})...
[ "async def send_request(session):\n client_id = randint(0, CLIENTS)\n logging.info('sending request to %s/?clientId=%d', SERVER_URL, client_id)\n async with session.get(SERVER_URL, params=[('clientId', client_id)]) as _:\n pass\n return TaskType.REQUEST", "async def send_request(self, action: U...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load categorization from the subjects Mongo table.
def __init__(self, abbr):
    self.abbr = abbr
    self.categorizer = defaultdict(set)
    subs = db.subjects.find({"abbr": abbr})
    for sub in subs:
        self.categorizer[sub['remote']] = sub['normal']
[ "def load_categories(self):\n\n # Connect to the database\n conn = sqlite3.connect(config.cfg['db_location'])\n crsr = conn.cursor()\n\n # Retrieve list of all tags from SQL database\n crsr.execute(\"SELECT id, category \"\n \"FROM Categories;\")\n\n # W...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PBXProject name, the root node.
def get_proj_root(self):
    pbxproject_ptn = re_compile('(?<=PBXProject ").*(?=")')
    with open(self.xcode_pbxproj_path) as pbxproj_file:
        for line in pbxproj_file:
            # project.pbxproj is an utf-8 encoded file
            line = decoded_string(line, 'utf-8')
            resul...
[ "def __creatProjectTree(self):\n # get last project\n prj = self.lastProject()\n # creat substractions\n subs = {\n 'PROJECTNAME': prj.name,\n 'PROJECTDESCRIPTION': prj.description\n }\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Store input sizes in case cursor needs to be reopened.
def setinputsizes(self, sizes): self._inputsize = sizes
[ "def _setsizes(self, cursor=None):\n\t\tif cursor is None:\n\t\t\tcursor = self._cursor\n\t\tif self._inputsize is not None:\n\t\t\tcursor.setinputsizes(self._inputsize)\n\t\tif self._outputsize is not None:\n\t\t\tfor column, size in self._outputsize:\n\t\t\t\tif column is None:\n\t\t\t\t\tcursor.setoutputsize(siz...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Store output sizes in case cursor needs to be reopened.
def setoutputsize(self, size, column=None):
    if self._outputsize is None or column is None:
        self._outputsize = [(column, size)]
    else:
        self._outputsize.append((column, size))
[ "def _setsizes(self, cursor=None):\n\t\tif cursor is None:\n\t\t\tcursor = self._cursor\n\t\tif self._inputsize is not None:\n\t\t\tcursor.setinputsizes(self._inputsize)\n\t\tif self._outputsize is not None:\n\t\t\tfor column, size in self._outputsize:\n\t\t\t\tif column is None:\n\t\t\t\t\tcursor.setoutputsize(siz...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }