query stringlengths 9 9.05k | document stringlengths 10 222k | negatives listlengths 19 20 | metadata dict |
|---|---|---|---|
Tests the neutron_utils.create_neutron_subnet() function for an Exception when the subnet CIDR value is empty | def test_create_subnet_empty_cidr(self):
self.net_config.network_settings.subnet_settings[0].cidr = ''
with self.assertRaises(Exception):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings) | [
"def test_create_subnet_null_cidr(self):\n self.net_config.network_settings.subnet_settings[0].cidr = None\n with self.assertRaises(Exception):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)",
"def test_create... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() with an IPv6 subnet where DHCP is True and IPv6 modes are slaac | def test_create_network_slaac(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1:0:0:0:0:0:0/64',
ip_version=6, dns_nameservers=['2620:0:ccc:0:0:0:0:2'],
gateway_ip='1:1:0:0:0:0:0:1', start='1:1::ff', end='1:1::ffff',
enable_dhcp=True, ipv6_r... | [
"def test_create_network_no_dhcp_slaac(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1:0:0:0:0:0:0/64',\n ip_version=6, dns_nameservers=['2620:0:ccc:0:0:0:0:2'],\n gateway_ip='1:1:0:0:0:0:0:1', start='1:1::ff', end='1:1::ffff',\n enable... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() with an IPv6 subnet where DHCP is True and IPv6 modes are stateful | def test_create_network_stateful(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1:0:0:0:0:0:0/64',
ip_version=6, dns_nameservers=['2620:0:ccc:0:0:0:0:2'],
gateway_ip='1:1:0:0:0:0:0:1', start='1:1::ff', end='1:1::ffff',
enable_dhcp=True, ipv... | [
"def test_create_port_with_multiple_ipv4_and_ipv6_subnets(self):\n res = self._create_network(fmt=self.fmt, name='net',\n admin_state_up=True)\n network = self.deserialize(self.fmt, res)\n sub_dicts = [\n {'gateway': '10.0.0.1', 'cidr': '10.0.0.0/24'... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() for a BadRequest when DHCP is not enabled and the RA and address modes are both 'slaac' | def test_create_network_no_dhcp_slaac(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1:0:0:0:0:0:0/64',
ip_version=6, dns_nameservers=['2620:0:ccc:0:0:0:0:2'],
gateway_ip='1:1:0:0:0:0:0:1', start='1:1::ff', end='1:1::ffff',
enable_dhcp=Fals... | [
"def test_create_network_slaac(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1:0:0:0:0:0:0/64',\n ip_version=6, dns_nameservers=['2620:0:ccc:0:0:0:0:2'],\n gateway_ip='1:1:0:0:0:0:0:1', start='1:1::ff', end='1:1::ffff',\n enable_dhcp=Tr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() that contains one IPv6 subnet with an invalid start IP to ensure Neutron assigns it the smallest IP possible | def test_create_network_invalid_start_ip(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,
start='foo')
self.network_settings = NetworkConfig(
name=self.guid + '-net', subnet_settings=[sub_setting])
self.network = n... | [
"def test_create_network_invalid_end_ip(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,\n end='bar')\n self.network_settings = NetworkConfig(\n name=self.guid + '-net', subnet_settings=[sub_setting])\n\n self.net... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() that contains one IPv6 subnet with an invalid end IP to ensure Neutron assigns it the largest IP possible | def test_create_network_invalid_end_ip(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,
end='bar')
self.network_settings = NetworkConfig(
name=self.guid + '-net', subnet_settings=[sub_setting])
self.network = neutr... | [
"def test_create_network_invalid_start_ip(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,\n start='foo')\n self.network_settings = NetworkConfig(\n name=self.guid + '-net', subnet_settings=[sub_setting])\n\n self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() for a BadRequest when the subnet CIDR is invalid | def test_create_network_with_bad_cidr(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1:1:/48', ip_version=6)
self.network_settings = NetworkConfig(
name=self.guid + '-net', subnet_settings=[sub_setting])
with self.assertRaises(BadRequest):
... | [
"def test_create_subnet_empty_cidr(self):\n self.net_config.network_settings.subnet_settings[0].cidr = ''\n with self.assertRaises(Exception):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)",
"def test_create_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() for a BadRequest when the subnet gateway IP is invalid | def test_create_network_invalid_gateway_ip(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,
gateway_ip='192.168.0.1')
self.network_settings = NetworkConfig(
name=self.guid + '-net', subnet_settings=[sub_setting])
w... | [
"def test_create_network_with_bad_cidr(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1:1:/48', ip_version=6)\n self.network_settings = NetworkConfig(\n name=self.guid + '-net', subnet_settings=[sub_setting])\n\n with self.assertRaises(BadReques... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_network() for a BadRequest when the DNS IP is invalid | def test_create_network_with_bad_dns(self):
sub_setting = SubnetConfig(
name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,
dns_nameservers=['foo'])
self.network_settings = NetworkConfig(
name=self.guid + '-net', subnet_settings=[sub_setting])
with sel... | [
"def test_create_network_invalid_gateway_ip(self):\n sub_setting = SubnetConfig(\n name=self.guid + '-subnet', cidr='1:1::/48', ip_version=6,\n gateway_ip='192.168.0.1')\n self.network_settings = NetworkConfig(\n name=self.guid + '-net', subnet_settings=[sub_setting])\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_router() function with a pubic interface | def test_create_router_with_public_interface(self):
subnet_setting = self.net_config.network_settings.subnet_settings[0]
self.net_config = openstack_tests.OSNetworkConfig(
project_name=self.os_creds.project_name,
net_name=self.net_config.network_settings.name,
subnet_... | [
"def test_add_interface_router(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTrue(validate_netw... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.add_interface_router() function | def test_add_interface_router(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_network(
... | [
"def test_add_interface_router_null_router(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTrue(v... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.add_interface_router() function for an Exception when the router value is None | def test_add_interface_router_null_router(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_ne... | [
"def test_add_interface_router_null_subnet(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTrue(v... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.add_interface_router() function for an Exception when the subnet value is None | def test_add_interface_router_null_subnet(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_ne... | [
"def test_add_interface_router_missing_subnet(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTru... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.add_interface_router() function for an Exception when the subnet object has been deleted | def test_add_interface_router_missing_subnet(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate... | [
"def test_add_interface_router_null_subnet(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTrue(v... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_port() function | def test_create_port(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_network(
se... | [
"def test_create_port(self):\n port = create_ofport({'device': 'a'})\n port_dict = {'some-port-attributes-go-here': 42,\n 'firewall_group': 1}\n self.map.create_port(port, port_dict)\n self._check_port('a', 1)\n self._check_fwg(1, ['a'])",
"def test_create_po... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_port() when the port name value is None | def test_create_port_null_name(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_network(
... | [
"def test_create_port_null_network_object(self):\n with self.assertRaises(Exception):\n self.port = neutron_utils.create_port(\n self.neutron, self.os_creds,\n PortConfig(\n name=self.port_name,\n network_name=self.net_config.netw... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_port() function for an Exception when the network object is None | def test_create_port_null_network_object(self):
with self.assertRaises(Exception):
self.port = neutron_utils.create_port(
self.neutron, self.os_creds,
PortConfig(
name=self.port_name,
network_name=self.net_config.network_setting... | [
"def test_create_port_null_ip(self):\n self.network = neutron_utils.create_network(\n self.neutron, self.os_creds, self.net_config.network_settings)\n self.assertEqual(self.net_config.network_settings.name,\n self.network.name)\n self.assertTrue(validate_netwo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_port() function for an Exception when the IP value is None | def test_create_port_null_ip(self):
self.network = neutron_utils.create_network(
self.neutron, self.os_creds, self.net_config.network_settings)
self.assertEqual(self.net_config.network_settings.name,
self.network.name)
self.assertTrue(validate_network(
... | [
"def test_create_port_null_network_object(self):\n with self.assertRaises(Exception):\n self.port = neutron_utils.create_port(\n self.neutron, self.os_creds,\n PortConfig(\n name=self.port_name,\n network_name=self.net_config.netw... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the SecurityGroupConfig constructor and neutron_utils.create_security_group() function to ensure that attempting to create a security group without a name will raise an exception | def test_create_sec_grp_no_name(self):
with self.assertRaises(Exception):
sec_grp_settings = SecurityGroupConfig()
self.security_groups.append(
neutron_utils.create_security_group(
self.neutron, self.keystone, sec_grp_settings)) | [
"def test_create_sec_grp_no_rules(self):\n sec_grp_settings = SecurityGroupConfig(\n name=self.sec_grp_name, description='hello group')\n self.security_groups.append(\n neutron_utils.create_security_group(\n self.neutron, self.keystone, sec_grp_settings))\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_security_group() function | def test_create_sec_grp_no_rules(self):
sec_grp_settings = SecurityGroupConfig(
name=self.sec_grp_name, description='hello group')
self.security_groups.append(
neutron_utils.create_security_group(
self.neutron, self.keystone, sec_grp_settings))
self.asser... | [
"def test_create_sec_grp_no_name(self):\n with self.assertRaises(Exception):\n sec_grp_settings = SecurityGroupConfig()\n self.security_groups.append(\n neutron_utils.create_security_group(\n self.neutron, self.keystone, sec_grp_settings))",
"def crea... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the neutron_utils.create_security_group() and list_security_groups function | def test_create_list_sec_grp_no_rules(self):
sec_grp_settings = SecurityGroupConfig(
name=self.sec_grp_name + "-1", description='hello group')
self.security_groups.append(neutron_utils.create_security_group(
self.neutron, self.keystone, sec_grp_settings))
sec_grp_setting... | [
"def test_050_create_security_groups(self):\n sg = self.vpc_client.create_security_group(\n data_utils.rand_name(\"WebServerSG-\"),\n data_utils.rand_name(\"description \"),\n self.ctx.vpc.id)\n self.assertIsNotNone(sg)\n self.assertTrue(sg.id)\n self.add... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests the creation of a floating IP | def test_floating_ips(self):
initial_fips = neutron_utils.get_floating_ips(self.neutron)
self.floating_ip = neutron_utils.create_floating_ip(
self.neutron, self.keystone, self.ext_net_name)
all_fips = neutron_utils.get_floating_ips(self.neutron)
self.assertEqual(len(initial_... | [
"def ex_create_floating_ip(self):\r\n resp = self.connection.request('/os-floating-ips',\r\n method='POST',\r\n data={})\r\n data = resp.object['floating_ip']\r\n id = data['id']\r\n ip_address = data['ip']\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if a network for a given name DOES NOT exist if the exists parameter is false conversely true. Returns false if a network for a given name DOES exist if the exists parameter is true conversely false. | def validate_network(neutron, keystone, name, exists, project_name, mtu=None):
network = neutron_utils.get_network(
neutron, keystone, network_name=name, project_name=project_name)
if exists and network:
return True
if not exists and not network:
return True
if mtu:
retur... | [
"async def name_exists(self, name: str) -> bool:",
"def CheckDuplicateNetwork(self, cidr, name):\n subn = self.neutron.list_subnets()\n for i in range(len(subn['subnets'])):\n if cidr == subn['subnets'][i]['cidr']:\n log.warning('Duplicate subnet found: %s' % cidr)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if a subnet for a given name DOES NOT exist if the exists parameter is false conversely true. Returns false if a subnet for a given name DOES exist if the exists parameter is true conversely false. | def validate_subnet(neutron, network, name, cidr, exists):
subnet = neutron_utils.get_subnet(
neutron, network, subnet_name=name)
if exists and subnet and subnet.name == name:
return subnet.cidr == cidr
if not exists and not subnet:
return True
return False | [
"def subnet_exists(\n subnet_id=None,\n name=None,\n subnet_name=None,\n cidr=None,\n tags=None,\n zones=None,\n region=None,\n key=None,\n keyid=None,\n profile=None,\n):\n if name:\n log.warning(\n \"boto_vpc.subnet_exists: name parameter is deprecated \"\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if a router for a given name DOES NOT exist if the exists parameter is false conversely true. Returns false if a router for a given name DOES exist if the exists parameter is true conversely false. | def validate_router(neutron, keystone, name, project_name, exists):
router = neutron_utils.get_router(
neutron, keystone, router_name=name, project_name=project_name)
if exists and router:
return True
return False | [
"async def name_exists(self, name: str) -> bool:",
"def exists(self, identifier):\n return False",
"def is_named_route(self, name, params={}):\n if self._get_named_route(name, params) == self.path:\n return True\n\n return False",
"def exists_folder(route):\n if os.path.exis... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if the router ID & subnet ID have been properly included into the interface router object | def validate_interface_router(interface_router, router, subnet):
subnet_id = interface_router.subnet_id
router_id = interface_router.port_id
return subnet.id == subnet_id and router.id == router_id | [
"def validate_router(neutron, keystone, name, project_name, exists):\n router = neutron_utils.get_router(\n neutron, keystone, router_name=name, project_name=project_name)\n if exists and router:\n return True\n return False",
"def _validate_router_interface_request(self, interface_info):\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This test checks that a call to loadArraysIfNecessary works (does nothing) on field data structure whatever its level 1TS, MTS, Fields. | def testLoadIfNecessaryOnFromScratchFields0(self):
fname="Pyfile77.med"
coords=DataArrayDouble([(0,0,0),(2,1,0),(1,0,0),(1,1,0),(2,0,0),(0,1,0)])
m=MEDCouplingUMesh("mesh",2) ; m.setCoords(coords)
m.allocateCells()
m.insertNextCell(NORM_QUAD4,[0,5,3,2])
m.insertNextCell(N... | [
"def test_field_deletion(self):\n self.assertIn(\"array\", self.mapper.fields)",
"def test_nested(dataset):\n failure = \"flat_legacy\" in dataset or \"directory_default\" in dataset or \"fs_default\" in dataset\n verify(Array(store=NestedDirectoryStore(dataset)), failure)",
"def test_fsstore(datas... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This test is very important, because the same mechanism is used by the MEDReader to generate a field on all the mesh without any processing and memory. | def testField1TSSetFieldNoProfileSBTPerGeoTypes(self):
fname="Pyfile78.med"
coords=DataArrayDouble([-0.3,-0.3,0., 0.2,-0.3,0., 0.7,-0.3,0., -0.3,0.2,0., 0.2,0.2,0., 0.7,0.2,0., -0.3,0.7,0., 0.2,0.7,0., 0.7,0.7,0. ],9,3)
targetConn=[0,3,4,1, 1,4,2, 4,5,2, 6,7,4,3, 7,8,5,4];
m0=MEDCoupling... | [
"def testInstantiation(self):\n\t\tm = Mesh.Mesh(self.mesh_length, self.mesh_area, self.num_zones);\n\t\tfzd = Field.FieldZoneDouble(m);\n\t\tfzn = Field.FieldNodeDouble(m);\n\t\tfzm = Field.FieldZoneMat(m);\n\t\tself.assertEqual(m.numZones(), self.num_zones) \n\t\tself.assertEqual(m.length(), self.mesh_length) \n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This test is a small but important one for MEDReader in sauv mode. When .sauv file is loaded the convertion is performed in memory and a preparation is done then. This preparation makes access to internal MEDCouplingMesh pointers whose name must be updated. | def testMEDFileUMeshSetName(self):
fname="Pyfile79.med"
targetConn=[0,3,4,1, 1,4,2, 4,5,2, 6,7,4,3, 7,8,5,4];
mm=MEDFileUMesh()
m0=MEDCouplingUMesh() ; m0.setMeshDimension(2) # important no name here.
coords=DataArrayDouble([-0.3,-0.3,0., 0.2,-0.3,0., 0.7,-0.3,0., -0.3,0.2,0., 0.... | [
"def test_no_spw():\n uvobj = UVData()\n testfile_no_spw = os.path.join(DATA_PATH, \"zen.2456865.60537.xy.uvcRREAAM.ms\")\n uvobj.read(testfile_no_spw, use_future_array_shapes=True)\n del uvobj",
"def test_read_svs_cmu_1():\n fname = private_file('AperioSVS/CMU-1.svs')\n with TiffFile(fname) as ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Formats the given amount using the given resource. | def format_amount(amount, resource):
return resource.format_amount(amount) | [
"def format_amount(amount):\n return '${:.2f}'.format(round(amount, 2))",
"def amount_formatted(self) -> str:\n return self.unit.amount_formatted(self.amount())",
"def _map_non_formatted_money_to_version_with_currency(cost, resource, token):\n return '$%.3f' % cost",
"def amount_formatted(self, n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Allows the user to choose a cipher and decrypts a message This message is then shown to the user | def decrypt():
cipher = get_cipher()
message = cipher.get_user_message()
decrypted_message = cipher.decryption(message)
print(f'Your decrypted message is: {decrypted_message}')
input('Enter anything to continue') | [
"def encrypt():\n cipher = get_cipher()\n\n message = cipher.get_user_message()\n encrypted_message = cipher.encryption(message)\n print(f'Your encrypted message is: {encrypted_message}')\n\n input('Enter anything to continue')",
"def decrypt_txt():\n\n key = ent_choose_key.get() # Get key from... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Allows the user to choose a cipher and encrypts a message This message is then shown to the user | def encrypt():
cipher = get_cipher()
message = cipher.get_user_message()
encrypted_message = cipher.encryption(message)
print(f'Your encrypted message is: {encrypted_message}')
input('Enter anything to continue') | [
"def decrypt():\n cipher = get_cipher()\n\n message = cipher.get_user_message()\n decrypted_message = cipher.decryption(message)\n\n print(f'Your decrypted message is: {decrypted_message}')\n\n input('Enter anything to continue')",
"def encrypt_txt():\n\n key = ent_choose_key.get() # Get key fr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Determines if the user wants to encrypt, or decrypt a cipher They can also choose to exit the program | def encrypt_decrypt_or_exit():
while True:
request = input(
"Would you like to use a cipher to encrypt or decrypt a message?\n"
"Enter [E]ncrypt or [D]ecrypt\n\n"
"Or type 'exit' to end the program\n"
).lower()
# Determine if the user typed in a proper re... | [
"def decrypt():\n cipher = get_cipher()\n\n message = cipher.get_user_message()\n decrypted_message = cipher.decryption(message)\n\n print(f'Your decrypted message is: {decrypted_message}')\n\n input('Enter anything to continue')",
"def encrypt():\n cipher = get_cipher()\n\n message = cipher.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Asks the user what cipher they want to use | def get_cipher():
all_ciphers_list = [cipher for cipher in CIPHERS]
all_ciphers_string = ', '.join(all_ciphers_list)
while True:
cipher = input(
'Please choose a cipher from the following:\n'
f' {all_ciphers_string}.\n'
).title()
if cipher in CIPHERS:
... | [
"def get_cipher_mode():\n while True:\n cipher_mode = str(Input[1])\n if cipher_mode in \"encrypt e decrypt d\".split():\n return cipher_mode\n else:\n print(\"wrong input\")",
"def encryption_method():\n # Requests, returns, and error checks for the method of en/d... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Determines the time of the day, and creates a welcome message based on that | def get_time_welcome_message():
current_time = datetime.datetime.now()
hour = current_time.hour
if 6 < hour < 12:
return 'May the sun rise and show you the way.'
elif 12 <= hour < 15:
return 'Beware of the bright sun, the enemy can your your path!'
elif 15 <= hour < 18:
retu... | [
"def greeting():\n if datetime.now().hour >= 1 and datetime.now().hour < 6:\n print(\"\\nYou're up late.\")\n elif datetime.now().hour >= 6 and datetime.now().hour < 12:\n print(\"\\nGood morning!\")\n elif datetime.now().hour >= 12 and datetime.now().hour < 17:\n print(\"\\nGood after... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
The main program which answers the user's cipher requests until they exit the program | def main_program_loop():
while True:
clear()
user_request = encrypt_decrypt_or_exit()
# calls the encryption/decryption functions or ends the program
if user_request == 'exit':
print('May your adventures go well.')
sys.exit()
elif user_request == 'en... | [
"def encrypt_decrypt_or_exit():\n while True:\n request = input(\n \"Would you like to use a cipher to encrypt or decrypt a message?\\n\"\n \"Enter [E]ncrypt or [D]ecrypt\\n\\n\"\n \"Or type 'exit' to end the program\\n\"\n ).lower()\n\n # Determine if the us... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return copy of current statement's substmts and i_children | def _copy_substmts_ichildren(stmt):
chs = set(stmt.i_children) if hasattr(stmt, 'i_children') else set()
non_chs = set()
for s in stmt.substmts:
if s not in chs:
non_chs.add(s)
chs = list(chs)
non_chs = list(non_chs)
return chs + non_chs, chs | [
"def _set_d_substmts_ichildren(stmt):\n if not hasattr(stmt, 'd_children') and not hasattr(stmt, 'd_substmts'):\n d_substmts, d_children = _copy_substmts_ichildren(stmt)\n stmt.d_children = d_children\n stmt.d_substmts = d_substmts",
"def rewrite_compound_select(self, stmt: CompoundSelect)... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set d_substmts and d_children for further recovery | def _set_d_substmts_ichildren(stmt):
if not hasattr(stmt, 'd_children') and not hasattr(stmt, 'd_substmts'):
d_substmts, d_children = _copy_substmts_ichildren(stmt)
stmt.d_children = d_children
stmt.d_substmts = d_substmts | [
"def _remove_d_info(ctx, stmt):\n t = stmt.parent.i_target_node\n if t is None:\n return\n if stmt.arg == 'not-supported':\n t = t.parent\n if hasattr(t, 'd_children') and hasattr(t, 'd_substmts'):\n if hasattr(t, 'i_children'):\n t.i_children = t.d_children\n t.su... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Use the copied d_children and d_substmts to revcover i_childre and substmts stmt.keyword == 'deviate' | def _remove_d_info(ctx, stmt):
t = stmt.parent.i_target_node
if t is None:
return
if stmt.arg == 'not-supported':
t = t.parent
if hasattr(t, 'd_children') and hasattr(t, 'd_substmts'):
if hasattr(t, 'i_children'):
t.i_children = t.d_children
t.substmts = t.d_s... | [
"def _set_d_substmts_ichildren(stmt):\n if not hasattr(stmt, 'd_children') and not hasattr(stmt, 'd_substmts'):\n d_substmts, d_children = _copy_substmts_ichildren(stmt)\n stmt.d_children = d_children\n stmt.d_substmts = d_substmts",
"def _copy_substmts_ichildren(stmt):\n chs = set(stmt... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add deviation information to target statement | def _add_deviation(target, dev_type, dev_module, dev_stmt):
if not hasattr(target, 'i_deviation'):
target.i_deviation = {}
if dev_type not in target.i_deviation:
target.i_deviation[dev_type] = []
target.i_deviation[dev_type].append((dev_module, dev_stmt)) | [
"def _add_deviation_r(target, dev_type, dev_module, dev_stmt):\n _add_deviation(target, dev_type, dev_module, dev_stmt)\n sub = target.substmts\n if hasattr(target, 'i_children'):\n sub = sub + target.i_children\n for d in sub:\n _add_deviation(d, dev_type, dev_module, dev_stmt)",
"def a... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add deviation information to target node recursively | def _add_deviation_r(target, dev_type, dev_module, dev_stmt):
_add_deviation(target, dev_type, dev_module, dev_stmt)
sub = target.substmts
if hasattr(target, 'i_children'):
sub = sub + target.i_children
for d in sub:
_add_deviation(d, dev_type, dev_module, dev_stmt) | [
"def _remove_d_info(ctx, stmt):\n t = stmt.parent.i_target_node\n if t is None:\n return\n if stmt.arg == 'not-supported':\n t = t.parent\n if hasattr(t, 'd_children') and hasattr(t, 'd_substmts'):\n if hasattr(t, 'i_children'):\n t.i_children = t.d_children\n t.su... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Use pyang to parse the files and get a list of modules. | def _parse_and_return_modules(resolved_model_dir):
repos = pyang.FileRepository(resolved_model_dir, False)
ctx = pyang.Context(repos)
statements.add_validation_fun(
'reference_3', ['deviation'],
_add_i_deviation)
statements.add_validation_fun(
'reference_3', ['deviation'],
... | [
"def files_parser(self):\n # create the list of module names\n self.module_names = set([\n os.path.splitext(f)[0] for f in self.files if self._ismodule(f)\n ])\n\n self.modules = [\n Module(\n name=module,\n path=os.path.join(self.root,... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Generate ydkpy based in the output_directory using the supplied profile_file | def generate(profile_file, output_directory, nodoc, ydk_root, groupings_as_class=False):
resolved_model_dir = None
if profile_file is None:
logger.error('profile_file is None.')
raise YdkGenException('profile_file cannot be None.')
if output_directory is None:
logger.error('output... | [
"def generate_profile_file(name, hash, base_dir):\n with open(name) as fid:\n data = json.loads(fid.read().decode(\"utf-8\", \"ignore\"))\n with open(os.path.join(base_dir, hash + \"_profile.txt\"), \"w+\") as out:\n # For each query\n for key in data:\n for iteration in data[key]:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create Kafka message consumer | def create_consumer(host, port, topic):
consumer = KafkaConsumer(
topic,
bootstrap_servers=f"{host}:{port}",
client_id="system-metrics1",
security_protocol="SSL",
ssl_cafile="ca.pem",
ssl_certfile="service.cert",
ssl_keyfile="service.key",
value_deseri... | [
"def get_consumer():\n return KafkaConsumer(\n 'movielog1',\n bootstrap_servers=[\"fall2020-comp598.cs.mcgill.ca:9092\"],\n enable_auto_commit=False,\n auto_offset_reset=\"earliest\",\n value_deserializer=lambda x: x.decode('utf-8'))",
"def create_consumer(\n self,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Stores the data collection mx_collection, and the beamline setup if provided. | def _store_data_collection(self, mx_collection, beamline_setup = None):
if self.__disabled:
return (0,0,0)
if self.__collection:
data_collection = ISPyBValueFactory().\
from_data_collect_parameters(mx_collection)
group_id = self.store_data_co... | [
"def update_data_collection(self, mx_collection, wait=False): \n\tprint \"*************************\"\n\tprint \"mx collection DATA\"\n\tprint mx_collection\n \tlogging.getLogger(\"ispyb_client\").debug(\"asdlfjksdh f\")\n if self.__disabled:\n return\n\tmx_collection['collection_id'] = 8818\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Stores the beamline setup dict . | def store_beamline_setup(self, session_id, beamline_setup):
blSetupId = None
if self.__collection:
session = {}
try:
session = self.get_session(session_id)
except:
logging.getLogger("ispyb_client").exception(\
... | [
"def getBeam() :\n beam = {'nCore_y':1,'nCore_z':1,\n 'mass': 938.27231e6, # eV/c^2\n 'energy': 150.0e6, # eV\n 'n_particles': 1,\n 'distribution id':3, # water bag distribution\n 'error study flag':0,\n 'restart flag':0,\n 'standard ou... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Updates the datacollction mx_collection, this requires that the collectionId attribute is set and exists in the database. | def update_data_collection(self, mx_collection, wait=False):
print "*************************"
print "mx collection DATA"
print mx_collection
logging.getLogger("ispyb_client").debug("asdlfjksdh f")
if self.__disabled:
return
mx_collection['collection_id'] = 8818
if self.__collection:... | [
"def _store_data_collection(self, mx_collection, beamline_setup = None):\n if self.__disabled:\n return (0,0,0)\n \n if self.__collection:\n data_collection = ISPyBValueFactory().\\\n from_data_collect_parameters(mx_collection)\n\n group_id = self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the sample with the matching "search criteria" and/or within the list sample_ref_list. The sample_ref object is defined in the head of the file. | def __find_sample(self, sample_ref_list, code = None, location = None):
for sample_ref in sample_ref_list:
if code and location:
if sample_ref.code == code and \
sample_ref.container_reference == location[0] and \
sample_re... | [
"def get_sample_matches(self, sample, top=False):\n if top:\n all_matches = self._top_step_matches\n else:\n all_matches = self.step_matches\n\n try:\n sample_matches = all_matches.loc[sample]\n except KeyError:\n sample_matches = None\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Fetch the BLSample entry with the id bl_sample_id | def get_bl_sample(self, bl_sample_id):
if self.__tools_ws:
try:
result = self.__tools_ws.service.findBLSample(bl_sample_id)
except WebFault, e:
logging.getLogger("ispyb_client").exception(e.message)
except URLError:
logging.g... | [
"def get_sample(self, sample_id):\n result = self.get(\n cc_urls['sample_details'].format(\n sample_id=sample_id\n )\n )\n return result['sample']",
"def get_sample(self, _id):\n\n sample = self.collection.find_one({'_id': ObjectId(_id)})\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Stores a xfe spectrum. | def store_xfe_spectrum(self, xfespectrum_dict):
status = {'xfeFluorescenceSpectrumId': -1}
if self.__collection:
try:
xfespectrum_dict['startTime'] = datetime.\
strptime(xfespectrum_dict["startTime"],"%Y-%m-%d %H:%M:%S")
xfespectrum_dict... | [
"def __saveSpectrum(self):\n my_name = '__saveSpectrum'\n\n # check that transient members are present\n if not hasattr(self, \"_Spectrum__spectrum\") and self.__extract:\n raise SpectrumNameError(my_name, \"__spectrum is missing.\")\n if not hasattr(self, \"_Spectrum__wavelen... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns True if the proposal is considered to be a inhouse user. | def isInhouseUser(self, proposal_code, proposal_number):
for proposal in self['inhouse']:
if proposal_code == proposal.code:
if str(proposal_number) == str(proposal.number):
return True
return False | [
"def is_principal_investigator(self, proposal_code):\n\n sql = \"\"\"\nSELECT COUNT(*) AS User_Count\n FROM ProposalContact AS pco\n JOIN Investigator AS i ON pco.Leader_Id=i.Investigator_Id\n JOIN ProposalCode AS pc ON pco.ProposalCode_Id = pc.ProposalCode_Id\n WHERE Proposal_Code=%(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Stores or updates a DataCollectionGroup object. The entry is updated of the group_id in the mx_collection dictionary is set to an exisitng DataCollectionGroup id. | def store_data_collection_group(self, mx_collection):
if self.__collection:
group = ISPyBValueFactory().dcg_from_dc_params(mx_collection)
group_id = self.__collection.service.\
storeOrUpdateDataCollectionGroup(group)
return group_id | [
"def store_group(group_data):\n group_id = group_data.get('id')\n if not Group.objects.filter(id=group_id).exists():\n group = Group()\n group.id = group_data.get('id')\n group.name = group_data.get('name')\n group.description = group_data.get('description')\n group.updated_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates a beamLineSetup3VO from the bl_config dictionary. | def from_bl_config(bl_config):
ws_client = None
beamline_setup = None
try:
ws_client = Client(_WS_COLLECTION_URL,
cache = None)
beamline_setup = ws_client.factory.create('ns0:beamLineSetup3VO')
except:
raise
... | [
"def from_config(cls, config: Dict[str, Any]) -> \"ResNeXt3D\":\n ret_config = ResNeXt3D._parse_config(config)\n return cls(**ret_config)",
"def __init__(self, config_model, ignore_thre=0.7):\n super(YOLOv3, self).__init__()\n\n if config_model['TYPE'] == 'YOLOv3':\n self.mo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates a dataCollectionGroupWS3VO object from a mx_collect_dict. | def dcg_from_dc_params(mx_collect_dict):
group = None
try:
ws_client = Client(_WS_COLLECTION_URL,
cache = None)
group = \
ws_client.factory.create('ns0:dataCollectionGroupWS3VO')
except:
raise
else:
... | [
"def from_data_collect_parameters(mx_collect_dict):\n if len(mx_collect_dict['oscillation_sequence']) != 1:\n raise ISPyBArgumentError(\"ISPyBServer: number of oscillations\" + \\\n \" must be 1 (until further notice...)\")\n ws_client = None\n dat... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Ceates a dataCollectionWS3VO from mx_collect_dict. | def from_data_collect_parameters(mx_collect_dict):
if len(mx_collect_dict['oscillation_sequence']) != 1:
raise ISPyBArgumentError("ISPyBServer: number of oscillations" + \
" must be 1 (until further notice...)")
ws_client = None
data_collection = ... | [
"def update_data_collection(self, mx_collection, wait=False): \n\tprint \"*************************\"\n\tprint \"mx collection DATA\"\n\tprint mx_collection\n \tlogging.getLogger(\"ispyb_client\").debug(\"asdlfjksdh f\")\n if self.__disabled:\n return\n\tmx_collection['collection_id'] = 8818\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Logs ``msg`` to the current log file (not console) The ``msg`` will logged only if the level is bigger than the global level ``${DEBUG}`` which could be defined at runtime. If ``${DEBUG}`` is not defined, it will be considered as the default ``level`` as 1. | def log(msg,level=1):
_level = None
try:
_level = BuiltIn().get_variable_value('${DEBUG}')
except:
pass
if _level is None: _level=1
if int(_level) >= int(level):
BuiltIn().log(msg) | [
"def log(self, msg=\"\", level=1):\n\n if self.log_level >= level:\n print(\"[%s] %s\" % (time.strftime(\"%I:%M.%S\"), msg))",
"def log(msg, level='INFO'):\n if level not in ['DEBUG', 'INFO', 'CRITICAL', 'WARNING']:\n level = 'INFO'\n loglevel = getattr(QgsMessageLog, level)\n Qg... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the absolute path of RENAT folder | def get_renat_path():
return _folder | [
"def extract_reconall_base_dir(file):\n # print(\"Extract reconall base dir : %s\" % file[:-17])\n out_path = str(file[:-17])\n return out_path",
"def settings_folder():\n if platform.system() == 'Windows':\n config_folder = os.path.join('~', 'AppData', 'Local', 'NatCap')\n else:\n co... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the name of the running item | def get_item_name():
return os.path.basename(os.getcwd()) | [
"def get_name_item(self):\n return self.name_item",
"def _get_name(self) -> \"std::string\" :\n return _core.ListItem__get_name(self)",
"def item_name(self) -> Optional[str]:\n return self.data.get(self._DATA_KEY_ITEM_NAME)",
"def name(self):\n if self.ready():\n return ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns absolute path of RENAT config folder path | def get_config_path():
return _folder + "/config" | [
"def config_path(self):\n return self.get_config_path(self.base_dir)",
"def get_config_filepath():\n tmp = ABSOLUTE_HERE.split(\"/\")\n\n if SCRIPTS_PATH in tmp:\n tmp.remove(SCRIPTS_PATH)\n\n tmp.extend([\"config\", \"config-template.yaml\"])\n\n return \"/\".join(tmp)",
"def settings... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns absolute path of current item config folder | def get_item_config_path():
return os.getcwd() + '/config/' | [
"def config_path(self):\n return self.get_config_path(self.base_dir)",
"def get_config_path():\n return _folder + \"/config\"",
"def get_config_path():\n return get_project_home() + '/config.json'",
"def config_dir(self):\n return self.client.fldigi.config_dir()",
"def _get_config_file_p... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns absolute path of the current result folder | def get_result_path():
return os.getcwd() + '/' + _result_folder | [
"def get_result_folder():\n return _result_folder",
"def get_results_path(self):\n\n return constants[\"RESULTS_BASE_PATH\"] / self.get_module_path()",
"def get_output_dir(self) -> Path:\n return self.output_dir",
"def base_dir(self):\n return self.get_base_dir(self.out_dir, self.exper... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns current result folder name. Default is ``result`` in current test case. | def get_result_folder():
return _result_folder | [
"def get_result_path():\n return os.getcwd() + '/' + _result_folder",
"def get_default_result_file_name(self):\n backtestResultsFolder = 'Backtest Results'\n symbol = 'Imported' if not self.symbol else self.symbol\n dateString = datetime.now().strftime(\"%Y-%m-%d_%H-%M\")\n resultFi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the result folder to ``folder`` and return the old result folder. The result folder contains all output files from the test likes tester ouput, config file ... ``folder`` is a folder name that under current test case folder The system will create a new folder if it does not exist and set its mode to `0775` | def set_result_folder(folder):
global _result_folder
old_folder = _result_folder
_result_folder = folder
folder_path = os.getcwd() + '/' + folder
try:
if not os.path.exists(folder_path):
os.makedirs(folder_path)
os.chmod(folder_path,int('0775',8))
except Exception ... | [
"def get_result_folder():\n return _result_folder",
"def get_result_path():\n return os.getcwd() + '/' + _result_folder",
"def _initialize_results(self):\n self.result_dir = os.path.join(self.result_dir, 'SPM')\n if os.path.exists(self.result_dir):\n if not self.overwrite:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a list of nodes which have attribute ``attr_name`` with value ``value`` | def node_with_attr(attr_name,value):
result = [ node for node in NODE if attr_name in NODE[node] and NODE[node][attr_name] == value ]
BuiltIn().log("Found %d nodes with condition `%s`=`%s`" % (len(result),attr_name,value))
return result | [
"def searchAttribute(self,keys_list=None,attribute=\"objType\",value=\".obj.pub\"):\n\t\tif not keys_list:\n\t\t\tkeys_list = self.getEntryList()\n\t\t\t\n\t\treturn [k for k in keys_list if k in self.getEntryList() and hasattr(self.entries[k],attribute) and getattr(self.entries[k],attribute) == value ]",
"def pa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns list of ``node`` or ``webapp`` from ``local.yaml`` that has ALL tags defined by ``tag_list`` Tag was defined like this in local.yaml | def node_with_tag(*tag_list):
result = []
if sys.version_info[0] > 2:
s0 = set(tag_list)
if 'node' in LOCAL and LOCAL['node']:
for item in LOCAL['node']:
if 'tag' in LOCAL['node'][item]:
if LOCAL['node'][item]['tag']:
s1 ... | [
"def node_without_tag(*tag_list):\n\n result = []\n if sys.version_info[0] > 2:\n s0 = set(tag_list)\n if not LOCAL['node']: return result\n for node in LOCAL['node']:\n if 'tag' in LOCAL['node'][node]:\n if LOCAL['node'][node]['tag']:\n s1 = ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns list of ``node`` from ``local.yaml`` that does not has ANY tags defined by ``tag_list`` Tag was defined like this in local.yaml | def node_without_tag(*tag_list):
result = []
if sys.version_info[0] > 2:
s0 = set(tag_list)
if not LOCAL['node']: return result
for node in LOCAL['node']:
if 'tag' in LOCAL['node'][node]:
if LOCAL['node'][node]['tag']:
s1 = set(LOCAL['nod... | [
"def node_with_tag(*tag_list):\n\n result = []\n\n if sys.version_info[0] > 2:\n s0 = set(tag_list)\n if 'node' in LOCAL and LOCAL['node']:\n for item in LOCAL['node']:\n if 'tag' in LOCAL['node'][item]:\n if LOCAL['node'][item]['tag']:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the mib file name for this ``node`` mib file is define by ``mib`` keyword under the ``node`` in ``local.yaml`` | ... | def mib_for_node(node):
mib_file = None
if 'mib' in LOCAL['node'][node]:
mib_file = LOCAL['node'][node]['mib']
if mib_file is None:
device = LOCAL['node'][node]['device']
type = GLOBAL['device'][device]['type']
mib_file = GLOBAL['snmp-template'][type]['mib']
... | [
"def get_path(self, name):\n return os.path.join(config.path.mib_path, \"%s.mib\" % name)",
"def get_attr_action_name(self, node, attr_name):\n return 'set_%s%s' % (Himesis.to_non_RAM_attribute(attr_name), self.vs[node][Himesis.Constants.MT_LABEL])",
"def get_attr_constraint_name(self, node, attr_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Repeatly executes RF ``keyword`` for nodes that has tag ``tags`` | def loop_for_node_tag(var,tags,*keywords):
nodes = node_with_tag(*tags.split(':'))
for node in nodes:
BuiltIn().set_test_variable(var,node)
BuiltIn().run_keywords(*keywords) | [
"def search_by_tags(tags: list):\n reader = initialize_reader()\n key_words = [{\"movie\": row[10], \"key_words\": row[16]} for row in reader]\n words = []\n for key_word in key_words:\n for tag in tags:\n key_words_iterable = key_word.get(\"key_words\").split(\"|\")\n if ta... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a sequence from string format | def str2seq(str_index,size):
if ':' in str_index:
# tmp = map(lambda x: 0 if x=='' else int(x),str_index.split(':'))
tmp = [ int(x) if x!='' else 0 for x in str_index.split(':') ]
if len(tmp) > 3:
return None
else:
result = range(*list(tmp))
if len... | [
"def StrToSeq(string):\n if isinstance(string, np.ndarray):\n return string\n arr = np.empty(len(string)).astype(int)\n for i in range(len(string)):\n if ( string[i] == 'A' or string[i] == 'a' ):\n arr[i] = 0\n elif ( string[i] == 'T' or string[i] == 't' ):\n arr[i] = 1\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Select part of the CSV file and write it to other file ``row`` and ``col`` are used to specify necessary rows and columns. They are using the same format with slice for Python list. | def csv_select(src_file,row=u':',col=u':',dst_file=None,flatten=False,header=None):
src_pd = pandas.read_csv(src_file,header=header)
s = src_pd.shape
data = src_pd.iloc[str2seq(row,s[0]),str2seq(col,s[1])]
if dst_file:
data.to_csv(dst_file,index=None,header=header)
BuiltIn().log("Wrote t... | [
"def updateIntoCsv(self,filename,where):\n\t\tpass",
"def saveSubcontent(subcontentRow,path):\n exportRowCsv(path,subcontentRow)",
"def save_as(self, arg_new_file, arg_delimiter):\n with open(self.file, 'r') as fr_handler:\n with open(arg_new_file, 'w', newline='') as fw_handler:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a column of value from csv to list | def csv_to_list(filepath,col=u"0",header=None):
df = pandas.read_csv(filepath,header=header)
result = df.iloc[:,int(col)].values.tolist()
BuiltIn().log("Return %d values from `%s`" % (len(result),filepath))
return result | [
"def convert_CSV_to_list(self, filepath):\n outdata = []\n #open file path and run csv reader, add to list row by row\n with open(filepath, 'r') as fin:\n reader = csv.reader(fin)\n #outdata = list[reader]\n for row in reader:\n outdata.append(row... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Concatinates CSV files vertically If the CSV files has header, set ``has_header`` to ``${TRUE}`` | def csv_concat(src_pattern, dst_name,input_header=None,result_header=True):
file_list = sorted(glob.glob(src_pattern))
num = len(file_list)
if num < 1:
BuiltIn().log("Could not find any file to concatinate")
return False
file = file_list.pop(0)
pd = pandas.read_csv(file,header=inp... | [
"def join_csv():\n # Initialize csv container\n csv = []\n # Get all csv\n for item in os.listdir():\n if item.endswith('.csv'):\n csv.append(item)\n # Comprehension to come\n frame = pd.concat((pd.read_csv(item) for item in csv))\n # We skipped the index of csv after the firs... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Merges all CSV files ``horizontally`` by ``key`` key from ``src_pattern`` ``input_header`` defines whether the input files has header row or not. If ``input_header`` is ``${NULL}``, the keyword assume that input files have no header and automatically define columns name. When ``input_header`` is not null (default is ze... | def csv_merge(src_pattern,dst_name,input_header=None,key='0',select_column=':',result_header=True):
file_list = sorted(glob.glob(src_pattern))
num = len(file_list)
if not select_column == ':':
columns = '%s,%s' % (key,select_column)
else:
columns = select_column
if num < 1:
... | [
"def csv_concat(src_pattern, dst_name,input_header=None,result_header=True):\n\n file_list = sorted(glob.glob(src_pattern))\n num = len(file_list)\n if num < 1:\n BuiltIn().log(\"Could not find any file to concatinate\")\n return False\n file = file_list.pop(0)\n pd = pandas.read_csv(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Merges all the text files defined by ``path_name`` to ``file_name`` | def merge_files(path_name,file_name):
file_list = glob.glob(path_name)
with open(file_name,'w') as fout:
fin = fileinput.input(file_list)
for line in fin:
fout.write(line)
fin.close()
BuiltIn().log("Merges %d files to %s" % (len(file_list),file_name)) | [
"def merge_per_folder(folder_path, output_filename):\n # make sure there's a slash to the folder path \n folder_path += \"\" if folder_path[-1] == \"/\" else \"/\"\n # get all text files\n txt_files = glob.glob(folder_path + \"*.txt\")\n # get first lines; map to each text file (sorted)\n output_s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes file mod, likes Unix chmod ``mod`` is a string specifying the privilege mode ``relative`` is ``False`` or ``True`` | def change_mod(name,mod,relative=True):
if relative:
path = os.getcwd() + "/" + name
else:
path = name
os.chmod(path,int(mod,8))
BuiltIn().log("Changed `%s` to mode %s" % (path,mod)) | [
"def set_chmod(self, file, value):\n pass",
"def chmod(self, path, mode):\n q.logger.log(\"chmod: %s (mode %s)\" % (path, oct(mode)))",
"def setperms(self,filename,chmod,chgrp,chown):\n \n #chmod\n if chmod:\n perm=int(chmod,8)\n try:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a list of all test device that is used in this test | def get_test_device():
devices = []
for node_name,node in LOCAL["node"].iteritems():
device = node["device"]
if device not in devices: devices.append(device)
return devices | [
"def list_devices(self):\n pass",
"def getDevices():\n\n # Create a list\n suitable_devices = []\n\n # Iterate over each device\n for i in range(p.get_device_count()):\n # Extract device metadata (note: there is more available)\n d = {}\n d['name'] = p.get_device_info_by_in... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Displays the message ``msg`` and pauses the test execution and wait for user input In case of ``error_on_timeout`` is True(default), the keyword will raise an error when timeout occurs. Otherwise, it will continue the test. Inf succeed, the keyword returns the input from user. | def pause(msg="",time_out='3h',error_on_timeout=True,default_input=''):
BuiltIn().log("Pause and wait `%s` for user input" % time_out)
BuiltIn().log_to_console(msg)
input = None
wait = DateTime.convert_time(time_out)
renat_batch = BuiltIn().get_variable_value('${RENAT_BATCH}')
if renat_batch i... | [
"def DisplayFail(self, message = 'FAIL', timeout=0):\n self.DisplayMessage(message, fgcolor=colorama.Fore.RED)\n time.sleep(timeout)",
"def DisplayError(self, message = 'ERROR', timeout=0):\n self.DisplayMessage(message, fgcolor=colorama.Fore.RED)",
"def user32_MessageBoxTimeout(jitter, get_str, set_st... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Shows difference between files Returns the diff result (multi lines) ``path1``, ``path2`` are absolute paths. | def diff_file(path1,path2,method=u'uniq',newline=True):
result = ""
with codecs.open(path1,'r','utf-8') as f: f1 = f.readlines()
with codecs.open(path2,'r','utf-8') as f: f2 = f.readlines()
if newline and len(f1) > 0 and len(f2) > 0:
f1[-1] = f1[-1]+'\n'
f2[-1] = f2[-1]+'\n'
diff =... | [
"def diff(path1, name1, path2, name2):\n if hide_known and name1.lstrip('models_1_0/') in known_diff:\n global skipped\n skipped += 1\n return\n\n # 5.4.2 files should not be the same in both versions\n if '5.4.2' in name2:\n w(s(red, 'x 5.4.2 not updated to 5.4.3: ' + name2))\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Ping a ``node`` until it gets response. Then wait for more ``wait_str`` Default ``extra`` option is ``c 3`` | def ping_until_ok(node,wait_str='5s',extra='-c 3'):
device = LOCAL['node'][node]['device']
ip = GLOBAL['device'][device]['ip']
result = os.system("ping %s %s" % (extra,ip))
wait = DateTime.convert_time(wait_str)
time.sleep(wait)
BuiltIn().log("Pinged to host `%s(%s)` with result = %d" ... | [
"def wait_for_ping(node):\n\n tries = DEFAULT_TRIES\n while tries > 0:\n utils.puts(\n 'Waiting for ping to node {}'.format(\n node,\n )\n )\n with settings(warn_only=True):\n res = local('ping6 -c3 {}'.format(node))\n if res.succeede... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Count the number of lines contains the ``keyword`` | def count_keyword_line(keyword,*pattern_list):
counter = 0
for pattern in pattern_list:
file_list = glob.glob(pattern)
for file in file_list:
with open(file,"r") as f:
BuiltIn().log("Check keyword in file `%s`" % file)
for i,line in enumerate(f.readlin... | [
"def count_keyword(keyword,*pattern_list):\n\n counter = 0\n for pattern in pattern_list:\n file_list = glob.glob(pattern)\n for file in file_list:\n with open(file,\"r\") as f:\n BuiltIn().log(\"Check keyword in file `%s`\" % file)\n for word in [word fo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks whether the number of line containing the keyword be less than a number | def keyword_line_should_not_be_bigger_than(num,keyword,*pattern_list):
counter = count_keyword_line(keyword, *pattern_list)
if counter <= int(num):
BuiltIn().log("Found %d lines `%s`" % (counter,keyword))
return True
else:
raise Exception("Found %d lines that matched `%s`, bigger th... | [
"def keyword_should_not_be_bigger_than(num,keyword,*pattern_list):\n counter = count_keyword(keyword, *pattern_list)\n\n if counter <= int(num):\n BuiltIn().log(\"Found %d of keyword `%s`\" % (counter,keyword))\n return True\n else:\n raise Exception(\"Number of `%s` is %d, bigger than... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Count the number of ``regex`` found in ``pattern_list`` | def count_match_regexp(regexp,*pattern_list):
counter = 0
for pattern in pattern_list:
file_list = glob.glob(pattern)
for file in file_list:
with open(file,"r") as f:
BuiltIn().log("Find pattern `%s` in file `%s`" % (regexp,file))
for i,line in enumera... | [
"def _count_matches(regex, s):\n return len(regex.findall(s))",
"def pattern_count(pattern, text):\n count = 0\n for i in range(len(text) - len(pattern) + 1):\n if text[i:i + len(pattern)] == pattern:\n count = count + 1\n return count",
"def pattern_count(text,pattern):\n c... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Count the keyword in files. Keyword is not casesensitive | def count_keyword(keyword,*pattern_list):
counter = 0
for pattern in pattern_list:
file_list = glob.glob(pattern)
for file in file_list:
with open(file,"r") as f:
BuiltIn().log("Check keyword in file `%s`" % file)
for word in [word for line in f for w... | [
"def count_keyword_line(keyword,*pattern_list):\n counter = 0\n for pattern in pattern_list:\n file_list = glob.glob(pattern)\n for file in file_list:\n with open(file,\"r\") as f:\n BuiltIn().log(\"Check keyword in file `%s`\" % file)\n for i,line in enu... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks whether the number of keyword be less than a number | def keyword_should_not_be_bigger_than(num,keyword,*pattern_list):
counter = count_keyword(keyword, *pattern_list)
if counter <= int(num):
BuiltIn().log("Found %d of keyword `%s`" % (counter,keyword))
return True
else:
raise Exception("Number of `%s` is %d, bigger than %s" % (keyword... | [
"def keyword_line_should_not_be_bigger_than(num,keyword,*pattern_list):\n counter = count_keyword_line(keyword, *pattern_list)\n\n if counter <= int(num):\n BuiltIn().log(\"Found %d lines `%s`\" % (counter,keyword))\n return True\n else:\n raise Exception(\"Found %d lines that matched ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks whether the number of lines that contains ``error`` be less than a number | def error_line_should_not_be_bigger_than(num,*pattern_list):
return keyword_line_should_not_be_bigger_than(num,'error',*pattern_list) | [
"def high_low_flows(self, error = 0.1):\n \n if self.get_low_flow_error() > error:\n return True\n\n return False",
"def low_low_flows(self, error = 0.1):\n \n if self.get_low_flow_error() < -error:\n return True\n\n return False",
"def low_high_fl... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Folds a string by adding NonWidthSpace char (0x200b) at 6th char | def fold_str(str):
s1 = str[:6]
s2 = str[6:]
if s2 != '' :
result = s1 + u'\u200b' + s2
else:
result = s1
return result | [
"def sfill(s, width):\n for c in range(width-len(s)):\n s = s + ' '\n return s",
"def mask_string(word):\n l = len(word)\n return multiply_str(\"#\", l-4) + word[-4:]",
"def get_padding(line):\n out = \"\"\n for c in line:\n if c == \" \":\n out += \" \"\n else:... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Pauses the execution and wait for the pattern is matched if the file `log_file_name` located in the current result folder. By default the `log_file_name` is `./result/syslogtrap.log` which is created by `Follow Syslog and Trap` keyword. The keyword should be in tests between `Follow Syslog and Trap Start` and `Follow S... | def follow_syslog_and_trap(pattern,logname=u"syslog-trap.log",delay=u'2s'):
if BuiltIn().get_variable_value('${RENAT_BATCH}') is not None:
BuiltIn().log("Pausing is ignored in batch mode")
return
match_pattern = re.compile(pattern)
filepath = "%s/%s_%s" % ( BuiltIn().get_variable_value("${... | [
"def watch_log_for(expected_string, logfile=None, expected_count=1, timeout=DEFAULT_STANDARD_TIMEOUT):\n if logfile is None:\n try:\n logfile = find_latest_log()\n except OSError as e:\n log.error('Error fetching latest log file - %s', e)\n return False\n\n log.i... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set multiple varibles to be `suite variable` at the same time Suite variables (or item variable) could be access anywhere in all the item scenario. | def set_multi_item_variable(*vars):
for var in vars:
BuiltIn().set_suite_variable(var)
BuiltIn().log('Set %d variables to suite(item) scope' % len(vars)) | [
"def set_multiple_test_variables(**variables):\n for key, value in variables.items():\n BuiltIn().set_test_variable(f'${{{key}}}', value)",
"def test_variables_on_site_four(self):\n site = Site(4, self.clock.time, self.logger)\n\n self.confirm_even_variables_are_present(site)\n\n se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a random name by a `base` and a random number between [a,b] | def random_name(base,a='0',b='99'):
number = random.randint(int(a),int(b))
result = base % number
BuiltIn().log("Created a random name as `%s`" % result)
return result | [
"def create_name(add_number=99, hashable=None):\n if hashable:\n random.seed(hashable)\n name = [random.choice(wlist) for wlist in words]\n if add_number:\n name.append(str(random.randint(1, add_number)))\n return '-'.join(name)",
"def generate_unique_shot_name(base_name, shot_name_incre... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Converts html file to pdf file | def convert_html_to_pdf(html_file,pdf_file):
options = {
'page-size': 'A4',
'margin-top': '0.1in',
'margin-right': '0.1in',
'margin-bottom': '0.1in',
'margin-left': '0.1in',
'encoding': "UTF-8",
'no-outline': None
}
pdfkit.from_file(html_file,pdf_file,... | [
"def test_html_to_pdf_file():\n\n test_content = \"\"\"\n <!DOCTYPE html>\n <html>\n <head>\n <title>A demo html page</title>\n </head>\n <body>\n <p>Hello world!</p>\n </body>\n </html>\n \"\"\"\n\n # GIVEN an HTML report to be converted to PD... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Cleans up the result folder Deletes all files in current active folder that does not match the ``ignore`` expression and are older than the time the test has started. | def cleanup_result(ignore=u'^(log.html|output.xml|report.html)$'):
BuiltIn().log("Delete files in result folder `%s`" % _result_folder)
candidates=[]
for root, dirs, files in os.walk(_result_folder):
for basename in files:
if not re.search(ignore,basename) and not '/.svn' in root:
... | [
"def cleanup_results(self):\n if os.path.exists(os.path.join(self.project_root, 'results')):\n shutil.rmtree(os.path.join(self.project_root, 'results'))\n os.makedirs(self.result_folder)",
"def clear_output_folder(self):\n fh.delete_directory_tree(self.analysis_root)",
"def __cle... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Use Slack Post instead | def slack(msg,channel='#automation_dev',user='renat',host=GLOBAL['default']['slack-proxy']):
BuilIn().log("WRN: This keyword is deprecated. Use `Slack Post` instead", console=True)
return slack_post | [
"def post_hi():\n\tslack_utility.send_message(channel='#general', msg='hi')",
"def slack_post(dialogue, url):\n slack_data = {'text': \"\" + dialogue}\n response = requests.post(\n url, data=json.dumps(slack_data),\n headers={'Content-Type': 'application/json'}\n )",
"def send_message_to_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Upload a file to a Slack channel | def slack_upload(file_path,channels='automation_dev',title='',msg='Powered by RENAT',host=None):
BuiltIn().log("Upload a file to Slack channel")
renat_batch = BuiltIn().get_variable_value('${RENAT_BATCH}')
if renat_batch is None:
host = host or GLOBAL['default']['slack-proxy']
cmd = GLOBAL['... | [
"def post_snippet_to_kugawana_slack(self, slack_channel, post_message, initial_comment=\"\", post_title=\"\"):\n self.slack.files.upload(channels=slack_channel,\n initial_comment=initial_comment,\n title=post_title,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Load plugin in renat/plugin folder | def load_plugin():
for item in glob.glob(get_renat_path() + '/plugin/*.robot'):
plugin_name = os.path.basename(item)
BuiltIn().import_resource('./plugin/' + plugin_name)
BuiltIn().log("Loaded plugin `%s`" % plugin_name) | [
"def load_plugin(plugin_name, direc):\n filename = 'run.py'\n\n plugin_dir = os.path.join(direc, plugin_name)\n\n if not os.path.isdir(plugin_dir):\n mes = 'Folder for plugin ({}) could not been found!'\n mes += 'Make sure it is in the plugin dir!'\n mes = mes.f... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
skip the test case if global_variable RUN_ME is not defined | def explicit_run():
var = BuiltIn().get_variable_value('${FORCE}')
if var != '':
BuiltIn().pass_execution('Bypassed this step') | [
"def setUp(self): # suppress(N802)\n super(TestCaseRequiring, self).setUp()\n if platform.system() != system:\n self.skipTest(\"\"\"not running on system - {0}\"\"\".format(system))",
"def test_target_variable_command_no_fail(self):\n d = {'C_SOURCES': 'globals.c', 'EX... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns value of a key for renat configuration with this other LOCAL[base][key] > GLOBAL[base][key] > None Users could override the global setting by test's local setting | def get_config_value(key,base=u'default',default=None):
if base in LOCAL and key in LOCAL[base]:
return LOCAL[base][key]
if base in GLOBAL and key in GLOBAL[base]:
return GLOBAL[base][key]
else:
return default
return None | [
"def get_setting(cls, settings, key):\n part1, _, part2 = key.partition('/')\n if part2:\n value = settings[part1][part2]\n else:\n value = settings[part1]\n return value",
"def get(key):\n if key not in values:\n if key == 'noto_tools':\n return DEFAULT_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Logs a content of ``csv_file`` into default log.html `index`, `border` are table attributes | def log_csv(csv_file,index=False,border=0):
df = pandas.read_csv(csv_file)
BuiltIn().log(df.to_html(index=index,border=border),html=True) | [
"def updateLogTableWidget(self, csvFilePath):\n try:\n self.logTableHeader = ['Action', 'Description', 'Variable', 'Settings', 'Value', 'Wait', 'Actual Value', 'Step Verdict', 'Step Message']\n header = self.logTableHeader\n\n # stepCol = header.index('Step')\n act... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Waits for `wait_time` and displays the progress bar. `wait_time` uses RF `DateTime` format. | def wait(wait_time,size=u'10'):
wait_sec = DateTime.convert_time(wait_time)
bar_size = int(size)
step = float(wait_sec/bar_size)
time1 = datetime.datetime.now()
time2 = time1 + datetime.timedelta(seconds=wait_sec)
epoch1 = int(time1.strftime('%s'))
epoch2 = int(time2.strftime('%s'))
disp... | [
"def formatWait(self):\n return format_time(self.waitUntil - time())",
"def spinner(seconds):\n symbols = itertools.cycle('-\\|/')\n tend = time.time() + seconds\n while time.time() < tend:\n # '\\r' is carriage return: return cursor to the start of the line.\n sys.stdout.write('\\rP... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Starts a virtual display | def start_display():
global DISPLAY
display_info = get_config_value('display')
logging.getLogger("easyprocess").setLevel(logging.INFO)
w = int(display_info['width']) + 100
h = int(display_info['height']) + 100
DISPLAY = Display(visible=0, size=(w,h))
DISPLAY.start()
time.sleep(2)
Bui... | [
"def __init__(self, visible=0, size=(1920, 1080)):\n self.display = Display(visible=visible, size=size)\n log.info(\"Virtual display set up, visible: {}, size: {}\".\n format(False if not visible else True, size))\n self.display.start()\n time.sleep(1)",
"def displ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Closes the opened display | def close_display():
global DISPLAY
# tmpfile = '/tmp/xvfb.%s' % DISPLAY.new_display_var.replace(':','')
DISPLAY.stop()
# DISPLAY.sendstop()
BuiltIn().log('Closed the virtual display') | [
"def closeSession(self):\n self.hide()",
"def close_window(self):\r\n Window.close()",
"def _close_wid(self, wid):\n pass",
"def close_board(self):\n self.ctrl.close()\n return",
"def close(self, state: 'SoState') -> \"void\":\n return _coin.SoGLDisplayList_close(se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |