query (string) | document (string) | negatives (list) | metadata (dict)
|---|---|---|---|
Parsing a server descriptor when the bridge's networkstatus document didn't have a digest of the server descriptor should raise a MissingServerDescriptorDigest. | def test_Bridge_checkServerDescriptor(self):
# Create a networkstatus descriptor without a server descriptor digest:
filename = self._networkstatusFile + "-missing-digest"
fh = open(filename, 'w')
invalid = BRIDGE_NETWORKSTATUS.replace("c4EVu2rO/iD/DJYBX/Ll38DGQWI", "foo")
fh.see... | [
"def test_Bridge_checkServerDescriptor_digest_missing(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = None\n self.assertRaises(bridges.MissingServerDescriptorDigest,\n self.bridge._checkServerDescriptor,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parsing a server descriptor whose digest doesn't match the one given in the bridge's networkstatus document should raise a ServerDescriptorDigestMismatch. | def test_Bridge_checkServerDescriptor_digest_mismatch_ns(self):
        # Create a networkstatus descriptor with a mismatched server descriptor digest:
filename = self._networkstatusFile + "-mismatched-digest"
fh = open(filename, 'w')
invalid = BRIDGE_NETWORKSTATUS.replace("c4EVu2rO/iD/DJYBX/Ll38DGQWI",... | [
"def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = 'deadbeef'\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge._checkServerDescriptor,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parsing a server descriptor whose digest doesn't match the bridge.descriptorDigest taken from the corresponding networkstatus descriptor should raise a ServerDescriptorDigestMismatch exception. | def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.descriptorDigest = 'deadbeef'
self.assertRaises(bridges.ServerDescriptorDigestMismatch,
self.bridge._checkServerDescriptor,
... | [
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parsing a server descriptor when the corresponding networkstatus descriptor didn't provide a bridge.descriptorDigest should raise a MissingServerDescriptorDigest exception. | def test_Bridge_checkServerDescriptor_digest_missing(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.descriptorDigest = None
self.assertRaises(bridges.MissingServerDescriptorDigest,
self.bridge._checkServerDescriptor,
... | [
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine with a valid request should return a bridge line. | def test_Bridge_getBridgeLine_request_valid(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(True)
... | [
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine with an invalid request should return None. | def test_Bridge_getBridgeLine_request_invalid(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(Fals... | [
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.network... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() on a Bridge without any vanilla addresses should return None. | def test_Bridge_getBridgeLine_no_vanilla_addresses(self):
request = BridgeRequestBase()
request.isValid(True)
self.assertIsNone(self.bridge.getBridgeLine(request)) | [
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a valid request for bridges not blocked in Iran should return a bridge line. | def test_Bridge_getBridgeLine_request_without_block_in_IR(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.... | [
"def test_Bridge_getBridgeLine_blocked_and_request_without_block(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn('ir')... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a valid request for bridges not blocked in Iran, when the bridge is completely blocked in Iran, shouldn't return a bridge line. | def test_Bridge_getBridgeLine_blocked_and_request_without_block(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.bridge.setBlockedIn('ir')
... | [
"def test_Bridge_getBridgeLine_request_without_block_in_IR(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a valid request for obfs3 bridges not blocked in Iran, when the obfs3 line is blocked in Iran, shouldn't return a bridge line. | def test_Bridge_getBridgeLine_blocked_pt_and_request_without_block_pt(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.bridge.setBlockedIn('ir', met... | [
"def test_Bridge_getBridgeLine_blocked_obfs3_and_request_without_block_obfs4(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a valid request for obfs4 bridges not blocked in Iran, when the obfs3 line is blocked in Iran, should return a bridge line. | def test_Bridge_getBridgeLine_blocked_obfs3_and_request_without_block_obfs4(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.bridge.setBlockedIn('ir... | [
"def test_Bridge_getBridgeLine_blocked_pt_and_request_without_block_pt(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a valid request for IPv6 bridges should return a bridge line. | def test_Bridge_getBridgeLine_IPv6(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(True)
r... | [
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n reque... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine(includeFingerprint=False) with a valid request for IPv6 bridges should return a bridge line without the fingerprint. | def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(... | [
"def test_Bridge_getBridgeLine_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a request for IPv6 obfs3 bridges (when the Bridge doesn't have any) should raise a PluggableTransportUnavailable exception. | def test_Bridge_getBridgeLine_obfs3_IPv6(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(True)
... | [
"def test_Bridge_getBridgeLine_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with a request for an unknown PT should raise a PluggableTransportUnavailable exception. | def test_Bridge_getBridgeLine_googlygooglybegone(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(T... | [
"def test_Bridge_getBridgeLine_blocked_pt_and_request_without_block_pt(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with bridgePrefix=True should prefix the returned bridge line with 'Bridge '. | def test_Bridge_getBridgeLine_bridge_prefix(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isValid(True)
... | [
"def test_PluggableTransport_getTransportLine_bridge_prefix(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'passw... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getBridgeLine() with includeFingerprint=False should return a bridge line without a fingerprint. | def test_Bridge_getBridgeLine_no_include_fingerprint(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
request = BridgeRequestBase()
request.isVal... | [
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n reque... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getNetworkstatusLastPublished() should tell us the last published time of the Bridge's networkstatus document. | def test_Bridge_getNetworkstatusLastPublished(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
published = self.bridge.getNetworkstatusLastPublished()
self.assertIsNotNone(published)
self.assertIsInstance(published, datetime.datetime)
self.assertEqual(str(published... | [
"def test_Bridge_getDescriptorLastPublished(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n\n published = self.bridge.getDescriptorLastPublished()\n self.assertIsNotNone(published)\n self.assertIsIn... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling getDescriptorLastPublished() should tell us the last published time of the Bridge's serverdescriptor. | def test_Bridge_getDescriptorLastPublished(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
published = self.bridge.getDescriptorLastPublished()
self.assertIsNotNone(published)
self.assertIsInstance(publ... | [
"def test_Bridge_getNetworkstatusLastPublished(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n published = self.bridge.getNetworkstatusLastPublished()\n self.assertIsNotNone(published)\n self.assertIsInstance(published, datetime.datetime)\n self.assertEqual(st... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling isBlockedIn('IS') should return False when the bridge isn't blocked in Iceland. | def test_Bridge_isBlockedIn_IS(self):
self.assertFalse(self.bridge.isBlockedIn('IS')) | [
"def test_Bridge_setBlockedIn_IR_address(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn('IR', address='179.178.155.14... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling setBlockedIn('CN', 'obfs2') should mark all obfs2 transports of the bridge as being blocked in CN. | def test_Bridge_setBlockedIn_CN_obfs2(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.bridge.setBlockedIn('CN', methodname='obfs2')
self.as... | [
"def test_Bridge_setBlockedIn_GB_address_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n # Should block the obfs4 bridge:\n self.br... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling setBlockedIn('IR', address) should mark all matching addresses of the bridge as being blocked in IR. | def test_Bridge_setBlockedIn_IR_address(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.bridge.setBlockedIn('IR', address='179.178.155.140')
... | [
"def test_Bridge_setBlockedIn_GB_address_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n # Should block the obfs4 bridge:\n self.br... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calling setBlockedIn('GB', address, port) should mark all matching | def test_Bridge_setBlockedIn_GB_address_port(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
# Should block the obfs4 bridge:
self.bridge.setBlo... | [
"def test_Bridge_setBlockedIn_IR_address(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn('IR', address='179.178.155.14... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Send an ARP reply. reply_to is a PacketIn event corresponding to an ARP request; mac is the MAC address to reply with; src_mac is the MAC address that the reply comes from (the L2 address). | def send_arp_reply (reply_to, mac, src_mac = None):
if mac is False:
mac = reply_to.connection.eth_addr
elif mac is True:
mac = reply_to.connection.ports[reply_to.port].hw_addr
mac = EthAddr(mac)
if src_mac is None:
src_mac = mac
elif src_mac is False:
src_mac = reply_to.connection.eth_addr
... | [
"def send_arp_reply(reply_to, mac, src_mac=None):\n if mac is False:\n mac = reply_to.connection.eth_addr\n elif mac is True:\n mac = reply_to.connection.ports[reply_to.port].hw_addr\n mac = EthAddr(mac)\n\n if src_mac is None:\n src_mac = mac\n elif src_mac is False:\n sr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Send an ARP reply. reply_to is an ARP request PacketIn event; mac is the MAC address to reply with, True for the port MAC or False for the "DPID MAC"; src_mac can be a MAC, True/False as above, None to use "mac", or if unspecified, defaults to self.default_reply_src_mac. | def send_arp_reply (self, reply_to, mac, src_mac = _default_mac):
if src_mac is _default_mac:
src_mac = self.default_reply_src_mac
return send_arp_reply(reply_to, mac, src_mac) | [
"def send_arp_reply(reply_to, mac, src_mac=None):\n if mac is False:\n mac = reply_to.connection.eth_addr\n elif mac is True:\n mac = reply_to.connection.ports[reply_to.port].hw_addr\n mac = EthAddr(mac)\n\n if src_mac is None:\n src_mac = mac\n elif src_mac is False:\n sr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the stock input and output accounts according to the difference between the old and new price. | def onchange_price(self, cr, uid, ids, new_price, context=None):
if context is None:
context = {}
product_obj = self.pool.get('product.product').browse(cr, uid, context.get('active_id', False), context=context)
price = product_obj.standard_price
diff = price - new_price
... | [
"def _update_buy_amount_from_new_sell_amount(\n buy_amount_old, sell_amount_new, sell_amount_old\n ):\n buy_amount_new = buy_amount_old * sell_amount_new / sell_amount_old\n return buy_amount_new.to_integral_value(rounding=ROUND_UP)",
"def change_price(self, cr, uid, ids, context=None):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes the Standard Price of Product. And creates an account move accordingly. | def change_price(self, cr, uid, ids, context=None):
if context is None:
context = {}
rec_id = context and context.get('active_id', False)
assert rec_id, _('Active ID is not set in Context.')
prod_obj = self.pool.get('product.product')
res = self.browse(cr, uid, ids, c... | [
"def change_sale_price(self):\n sale = self.find_brokered_sale_by_id(self.lhs)\n if sale.owner != self.caller.player_ob.Dominion:\n raise self.BrokerError(\"You can only change the price of your own sales.\")\n price = self.get_amount(self.rhs, \"price\")\n if price == sale.pr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Draw a Koch curve. | def draw_koch_curve(t, order, x):
if order == 0:
t.forward(x)
else:
for angle in [60, -120, 60, 0]:
draw_koch_curve(t, order-1, x/3)
t.left(angle) | [
"def draw_snowflake(t, order, x):\n for i in range(3):\n draw_koch_curve(t, order, x)\n t.rt(120)",
"def drawK():\r\n\r\n turtle.down()\r\n turtle.left(90)\r\n turtle.forward(40)\r\n turtle.left(180)\r\n turtle.forward(20)\r\n turtle.left(45)\r\n turtle.forward(27)\r\n tur... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Draw a snowflake using three Koch curves | def draw_snowflake(t, order, x):
for i in range(3):
draw_koch_curve(t, order, x)
t.rt(120) | [
"def main():\n t.setup(800, 800)\n t.pu()\n t.goto(-300, 150)\n t.pd()\n t.pensize(2)\n level = 3\n for i in range(3):\n koch(600, level)\n t.right(120)\n t.hideturtle()\n t.done()",
"def main(start):\r\n #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\r\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Prompt user to input the length for the Koch curve | def user_input():
print("Welcome to drawing a Koch curve.\n")
order = int(input("Please enter the order of magnitude for the Koch curve: "))
x = int(input("Please enter a length x: "))
# Instantiate the Turtle
bob = turtle.Turtle()
bob.hideturtle()
draw_snowflake(bob, order, x) | [
"def KochCurveLength(L, iteration):\n print(f\"Steps Taken: {iteration}\")\n num_segs, seg_len = input_segs(iteration)\n print(f\"Number of Segments: {num_segs}\")\n print(f\"Length of Segments: {seg_len}\")\n\n L = L * (num_segs/seg_len)\n\n return print(f\"Curve Length: {Fraction(L).limit_denomi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Wait for the bucket to be available. | def _wait_for_bucket(self, retries: int = 0, delay: int = 0) -> None:
try:
waiter = self.client.get_waiter('bucket_exists')
waiter.wait(
Bucket=self._bucket,
WaiterConfig={
'Delay': delay,
'MaxAttempts': retries
... | [
"def __ensure_bucket_availability(self):\n storage_client = storage.Client()\n if storage_client.lookup_bucket(self.__bucket_name) is None:\n # Create the new bucket\n storage_client.create_bucket(self.__bucket_name)",
"def wait(self):\n self._lock.acquire()",
"def wait(se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the current store session for this application. | def current_session(cls) -> 'Store':
g = get_application_global()
if g is None:
return cls.get_session()
if 'store' not in g:
g.store = cls.get_session()
store: Store = g.store
return store | [
"def get_session():\n return DatabaseService.connector.get_session()",
"def __get_session__(self):\n session = boto3.session.Session()\n return session",
"def _get_session(self):\n session = Session.object_session(self)\n if not session:\n session = sessionmaker(bin... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Download and unzip data set folder from url | def download_and_unzip(url, dataset_name, data_folder):
print("Downloading", dataset_name, "data set...")
data_zip = wget.download(url, out=data_folder)
print("\tunzipping...")
zip_ = zipfile.ZipFile(data_zip, "r")
zip_.extractall(data_folder)
zip_.close()
print("\tdone") | [
"def download_and_uncompress_tarball(tarball_url, dataset_dir):",
"def download_and_unzip_data(\n url = \"https://storage.googleapis.com/simpeg/bookpurnong/bookpurnong_inversion.tar.gz\"\n):\n # download the data\n downloads = Utils.download(url)\n\n # directory where the downloaded files are\n dir... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Converts categorical features to dummy variables in the data frame | def convertColumnsToDummy(df):
#Users categorical information to dummy!
res = pd.get_dummies(df['gender'])
df = df.join(res)
res = pd.get_dummies(df['income'])
df = df.join(res)
res = pd.get_dummies(df['marital'])
df = df.join(res)
res = pd.get_dummies(df['education'])
df = df.join(res)
#Images categorical... | [
"def dummization(self):\n #TODO: use sklearn ColumnTransformer instead\n\n return pd.get_dummies(\n self.simple_imputer(),\n prefix_sep='_',\n prefix=self.categorical_cols,\n columns=self.categorical_cols,\n drop_first=False\n )",
"def tr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks the importances of features, using the best configurations of the classifiers previously tested | def test_features_importances(classifiers_names, predictors_agrad, answer_agrad, predictors_seg, answer_seg, group=""):
classifiers = load_classifiers_wodraw(group)#load_classifiers_rnr(group)#load_classifiers_3classes(group)
classifiers_agrad = [classifiers[0][0]]
classifiers_seg = [classifiers[1][0]]
for pair i... | [
"def test_calculated_feature_importances(self):\n # Setting up lorax\n lrx = TheLorax(global_clf, data, id_col='entity_id')\n lrx_out = lrx.explain_example(idx=1, pred_class=1, graph=False)\n\n feature1_contrib = lrx_out.contribution.loc['feature1']\n feature5_contrib = lrx_out.co... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
All room details are stored inside typeclasses.Attributes db_value as a dictionary. For each attribute that is a valid dictionary, we'll need to create a RoomDetail object that points to the objectdb_set[0] object that represents the room, and do a create_or_update on that RoomDetail object with the dictionary values. ... | def convert_room_details(apps, schema_editor):
Attribute = apps.get_model("typeclasses", "Attribute")
RoomDetail = apps.get_model("room_extensions", "RoomDetail")
qs = Attribute.objects.filter(db_key="details")
num = 0
total = len(qs)
fails = 0
bad_dict_count = 0
success = 0
if total... | [
"def __data_to_attributes(self, data):\n\n if not self.__is_row:\n raise Exception(\"A non data bound class cannot be bound to data without being converted\")\n\n for k, v in self.fields.items():\n if v.default_parameters['null']:\n if k not in data:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a test binary image. | def create_test_img_bin(sz_img):
img = np.zeros(sz_img)
img[3:7, 2:8] = 1
return img | [
"def make_binary_image(im):",
"def test_create(self):\n \n image = StringIO.StringIO(base64.b64decode(image_base64))\n \n img = self.pdfu.images.create(image, 'path/to/test_image.jpg')\n\n self.assertTrue(img)\n self.assertEqual(img.get('src'), 'path/to/test_image... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Load data saved in a NumPy .npz file. | def load_npz(npz_file):
data = np.load(npz_file, encoding="latin1")
return data['arr_0'] | [
"def loadnpz(npzfile):\n return np.load(npzfile, allow_pickle=True)",
"def load_data_from_npz():\n \n file_data = np.load(\"data.npz\")\n latitudes = file_data[\"lats\"][:] \n longitudes = file_data[\"lons\"][:]\n times = file_data[\"time\"][:] \n air_temperatu... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Test data object for the PlantCV transform submodule. | def transform_test_data():
return TransformTestData() | [
"def getTestingData(self):",
"def learn_test_data():\n return LearnTestData()",
"def load_test_dataset(self):\n\n self.xyzrph = test_xyzrph\n self.load_from_existing_xyzrph()",
"def test(self):\n\n reconstructed_data = torch.zeros(size=(1, self.dataset_dims))\n\n for i, data in ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Make a new client instance. The client needs to specify the endpoint for wmgr (host and port number) and optionally provide authentication parameters: auth type and either secretFile or a (user, passwd) pair. If auth is 'none' then authentication is not used and all other parameters are ignored. If none of the secretFile or user/pa... | def __init__(self, host, port, secretFile=None, user=None, passwd=None, auth="digest"):
if secretFile and (user or passwd):
raise ValueError('WmgrClient: cannot specify secretFile and user or passwd')
if auth not in ('none', 'basic', 'digest'):
raise ValueError('WmgrClient: auth... | [
"def create_auth_client(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.auth_token)\n return client",
"def init_client(self):\n self._transport = RequestsHTTPTransport(url=self._url,\n use_json=True,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the list of database names. | def databases(self):
_log.debug('get database list')
result = self._requestJSON('dbs', '')
return self._getKey(result, 'name') | [
"def list_database(db=None):\n if db is None:\n return CONNECTION.get_connection().database_names()\n return CONNECTION.get_connection()[db].collection_names()",
"def get_glue_database_names(self):\n try:\n self.response = self.glue_client.get_databases()\n database_names... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a new table. The table schema ("CREATE TABLE ...") may be specified in the schema argument; if schema is None then the table schema will be loaded from CSS. If chunkColumns is True then delete the columns "_chunkId" and "_subChunkId" from the table (if they exist) and add the columns "chunkId" and "subChunkId" (if they don't exist). | def createTable(self, dbName, tableName, schema=None, chunkColumns=False):
_log.debug('create table: %s.%s', dbName, tableName)
data = dict(table=tableName, chunkColumns=str(int(chunkColumns)))
if schema:
data['schema'] = schema
else:
data['schemaSource'] = 'CSS'... | [
"def createTable(name, directory, schema, rz_id=None, column_wise=True):\n\n # Make sure the table is being put in an HBOOK file.\n if not isinstance(directory, Directory):\n raise TypeError, \"directory is not in an HBOOK file\"\n # Make sure the file is writable.\n if not directory.writable:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete an existing table. If dropChunks is True then delete all chunk tables as well. If mustExist is True and the table does not exist then an exception is raised. | def dropTable(self, dbName, tableName, dropChunks=True, mustExist=True):
_log.debug('drop table: %s.%s', dbName, tableName)
params = dict(dropChunks=str(int(dropChunks)))
try:
self._requestJSON('dbs', dbName + '/tables/' + tableName, method='DELETE', params=params)
except Ser... | [
"def delete_if_exists(self) -> None:\n\t\tif self.exists:\n\t\t\tself._database_api.execute_query('DROP TABLE ' + self.table_name + ';', True)\n\t\t\tself._exists = False",
"def _delete(self):\n self.db_engine.execute(\"drop table if exists {}\".format(self.distance_table))",
"def delete(self):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the list of chunks in the given table. | def chunks(self, dbName, tableName):
_log.debug('get chunks, table: %s.%s', dbName, tableName)
resource = dbName + '/tables/' + tableName + '/chunks'
result = self._requestJSON('dbs', resource)
return self._getKey(result, 'chunkId') | [
"def get_chunks(self, n_chunks: int | None = None) -> Iterable[Column]:",
"def get_chunks_list(self):\n return self._chunkFiles",
"def get_partitions(self, table, db=\"default\"):\n partitions = []\n try:\n return self.get(\"ddl/database/%s/table/%s/partition\" % (db, table))['partitions... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a new chunk; this should work with both tables and views. If overlap is True then create an overlap table in addition to the chunk table. | def createChunk(self, dbName, tableName, chunkId, overlap):
_log.debug('create table: %s.%s', dbName, tableName)
overlapFlag = 'yes' if overlap else 'no'
data = dict(chunkId=chunkId, overlapFlag=overlapFlag)
resource = dbName + '/tables/' + tableName + '/chunks'
self._requestJSO... | [
"def init_overlap_tables(metadata):\n init_overlap_table(metadata, \"chunk_overlaps\", chunked=True)\n init_overlap_table(metadata, \"max_overlaps\", chunked=False)",
"def init_chunks(metadata):\n return Table(\"chunks\", metadata,\n Column(\"id\", Integer, primary_key=True),\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return index data (array of [objectId, chunkId, subChunkId] arrays). This only works on partitioned tables and is only supposed to be used with the director table. If chunkId is None then index data for all chunks is returned, otherwise only for the specified chunkId (must be an integer number). Optional parameter columns can b... | def getIndex(self, dbName, tableName, chunkId=None, columns=None):
if columns is None:
columns = "objectId,chunkId,subChunkId"
else:
columns = ','.join(columns)
if chunkId is None:
resource = dbName + '/tables/' + tableName + '/index'
else:
... | [
"def _chunk_index_dataframe(data_path: str, amount_chunks: int, asset_id_col: str) -> Generator[\n Tuple[int, int], None, None]:\n\n # reading in the index to make ranges for reading in the dataframe\n asset_index = pd.read_csv(data_path, usecols=[asset_id_col], dtype={asset_id_col: str})\n asset_index[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Reset the chunk cache (a.k.a. empty chunks list) for the specified database name. | def resetChunksCache(self, dbName):
_log.debug('reset chunk cache: %s', dbName)
# resource URL
resource = dbName + '/chunks/cache'
result = self._requestJSON('dbs', resource, method='PUT') | [
"def reset():\n global GROUPS, NODES, PIPES, JOBS, _ID\n GROUPS = {}\n NODES = {}\n PIPES = {}\n JOBS = {}\n _ID = count(1)\n logger.info(\"Cleared cache and reset counter.\")",
"def reset_mysql_db(self, db_name=None):\n self._mysql_db = self._get_mysql_connection(db_name=db_name)",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the list of service names. | def services(self):
_log.debug('get service list')
result = self._requestJSON('services', '')
return self._getKey(result, 'name') | [
"def service_names(self):\n return self.services.keys()",
"def get_services_names(self):\n return self._speakers[0].get_services_names()",
"def getServiceNames(self):\n self.send_getServiceNames()\n return self.recv_getServiceNames()",
"def CustomServiceNames(self) -> ServiceNameCollection... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the service state. This method returns a string describing the current service state; currently defined states are "active" and "stopped". | def serviceState(self, service):
_log.debug('get service state: %s', service)
result = self._requestJSON('services', service)
return self._getKey(result, 'state') | [
"def status(self):\n if self.state == service_states.SHUTTING_DOWN or \\\n self.state == service_states.SHUT_DOWN or \\\n self.state == service_states.UNSTARTED or \\\n self.state == service_states.WAITING_FOR_USER_ACTION:\n pass\n elif self._check_daemon('slur... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the list of database names known to xrootd. | def xrootdDbs(self):
_log.debug('get xrd db list')
result = self._requestJSON('xrootd', 'dbs')
return self._getKey(result, 'name') | [
"def list_database(db=None):\n if db is None:\n return CONNECTION.get_connection().database_names()\n return CONNECTION.get_connection()[db].collection_names()",
"def databases(self):\n _log.debug('get database list')\n result = self._requestJSON('dbs', '')\n return self._getKey(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the list of chunks for a given database that are known to xrootd. | def xrootdChunks(self, dbName):
_log.debug('list chunks in xrootd: %s', dbName)
result = self._requestJSON('xrootd', 'dbs/' + dbName, method='GET')
return self._getKey(result, 'chunkId') | [
"def chunks(self, dbName, tableName):\n _log.debug('get chunks, table: %s.%s', dbName, tableName)\n resource = dbName + '/tables/' + tableName + '/chunks'\n result = self._requestJSON('dbs', resource)\n return self._getKey(result, 'chunkId')",
"def xrootdDbs(self):\n _log.debug(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the value of the given key in the result; throws an exception if the key is not found. The result can be a dict or a list of dicts; for a list, the returned value is the list of values. | def _getKey(result, key):
try:
if isinstance(result, list):
return [obj[key] for obj in result]
else:
return result[key]
except KeyError:
raise ServerResponseError('Missing "%s" key' % key, result) | [
"def get_safe_result(result,key,default=None):\r\n\t\tif result is None:\r\n\t\t\treturn default\r\n\t\telif result.has_key(key):\r\n\t\t\treturn result[key]\r\n\t\telse:\r\n\t\t\treturn default",
"def __getitem__(self, key):\n query = select([self.store.c.value]).where(self.store.c.key == key)\n re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return expanded environment dictionary. | def expanded_env_dict():
return generate_expanded_env_dict() | [
"def _environment_variables() -> Dict[str, str]:\n return {key: value for key, value in os.environ.items() if _is_encodable(value)}",
"def user_env_as_dict(self):\r\n return {RESOURCE_REGISTRY: self.registry.as_dict(),\r\n PARAMETERS: self.params}",
"def environment(self):\r\n en... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return dictionary containing all default global options. | def default_global_options():
return copy.deepcopy(ASTRALITY_DEFAULT_GLOBAL_SETTINGS) | [
"def get_default_options(self):\n return {}",
"def show_default_options(self):\n return copy.deepcopy(dict(must=VW_MUST_OPTIONS, defaults=VW_DEFAULT_OPTIONS))",
"def _RegisteredEnvironmentOptions(self):\n return {}",
"def get_default_state(self) -> dict:\n return {\n k: ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return path to test config directory. | def test_config_directory():
return Path(__file__).parent / 'test_config' | [
"def get_test_configuration_path() -> Path:\n return get_project_root() / '.test_configuration'",
"def get_tests_dir_path(): \n fmod_path = ctbto.tests.__path__\n \n test_dir = \"%s/conf_tests\" % fmod_path[0]\n \n return test_dir",
"def get_config_path():\n return _folder + \"/c... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return path to directory containing several context files. | def context_directory(test_config_directory):
return test_config_directory / 'context' | [
"def context_path(): # pragma: no cover",
"def get_template_dir(self) -> str:",
"def get_data_files_path():\n return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))",
"def _find_java_web_context(self):\n globPath = os.path.join(self.workingDir, '**')\n results = glob.glob(globPath, recur... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
inserting a campaign element into the queue | def insert(self, campaign):
self.queue.append(campaign) | [
"def add(self, element):\n\n self.queue.insert(0, element)",
"def push(queue, item):\n queue.append(item)",
"def enqueue(self, element):\n\t\tassert self.capacity < self.max_capacity, \"Capacity exceeded for queue\"\n\t\tself.queue[self.back] = element\n\t\tself.back = (self.back + 1) % self.max_capac... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Loads the datasets, converts their images to the desired size and format, assembles them in one big dataset and saves it in a pickled file before returning it. | def load_and_pickle_datasets(augment=False):
subdirs = ['vehicles/GTI_Far',
'vehicles/GTI_Left',
'vehicles/GTI_MiddleClose',
'vehicles/GTI_Right',
'/object-dataset-select',
'non-vehicles/Extras',
'non-vehicles/GTI',
... | [
"def _load_data(self, filename):\n\n # Load the pickled data-file.\n data = self._unpickle(filename)\n\n # Get the raw images.\n raw_images = data[b'data']\n\n # Get the class-numbers for each image. Convert to numpy-array.\n cls = np.array(data[b'labels'])\n #pdb.se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Instantiates, trains and validates an SVM classifier on the given datasets, after optionally scaling them. The trained classifier and data scaler are saved in a pickled file. The method also prints validation statistics. | def fit_and_pickle_classifier(train_x, train_y, valid_x, valid_y, scale=False):
start = time()
train_feat_x = [compute_image_features(image) for image in train_x]
valid_feat_x = [compute_image_features(image) for image in valid_x]
if scale:
scaler = StandardScaler()
scaler.fit(train_feat... | [
"def train_svm_classifier(dataset, labels):\n svm_classifier = SVC()\n return svm_classifier.fit(dataset, labels)",
"def train_svm():\n df = load_dataframe()\n X_train, X_test, y_train, y_test = get_train_test_split(df)\n\n classifier = svm.SVC()\n classifier.fit(X_train, y_train)\n y_pred = ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Debugging function that displays the given image with overlaid detection windows, one size of detection windows at a time. | def display_image_with_windows(image):
windows = Perspective_grid(image.shape[1], image.shape[0])
plt.subplots()
for enlargement in range(2, 4):
image_copy = np.copy(image)
color = [0, 255, 0]
for window in windows:
if window[2] - window[0] + 1 == 64 * enlargement:
... | [
"def generate_debug_image(image, detections):\n result = image.copy()\n\n for detection in detections:\n draw_detection_in_image(result, detection)\n\n return result",
"def show_images_in_windows(imgs, win_names, win_size):\r\n x = y = 0\r\n for i, img in enumerate(imgs):\r\n w_compre... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find cars' bounding boxes in the given camera frame. | def find_bounding_boxes(frame, classifier, scaler):
windows = Perspective_grid(frame.shape[1], frame.shape[0])
total_windows, positive_windows = 0, 0
bounding_boxes = [] # Windows where cars are detected will be appended here
# Iterate over detection windows
for window in windows:
total_win... | [
"def get_bounding_box(vehicle, camera):\n\n bb_cords = BBoxUtil._create_bb_points(vehicle)\n cords_x_y_z = BBoxUtil._vehicle_to_sensor(bb_cords, vehicle, camera)[:3, :]\n cords_y_minus_z_x = np.concatenate([cords_x_y_z[1, :], -cords_x_y_z[2, :], cords_x_y_z[0, :]])\n bbox = np.transpose(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Detects cars in all the test images coming with the project, and saves the resulting images, using the given classifier and scaler. | def process_test_images(classifier, scaler):
fnames = [name for name in glob.glob('test_images/*.jpg')] + [name for name in glob.glob('test_images/*.png')]
for fname in fnames:
frame = cv2.imread(fname)
start = time()
bounding_boxes, total_windows = find_bounding_boxes(frame, classifier,... | [
"def process_test():\n\n test_entry = unpickle(test_file)\n test_dataset = test_entry[b'data']\n test_targets = test_entry[b'fine_labels']\n test_dataset = np.vstack(test_dataset).reshape(-1, 3, 32, 32)\n test_dataset = test_dataset.transpose((0, 2, 3, 1)) \n\n root_path = data_dir + '/cifar100/te... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Updates a heatmap with the given bounding boxes, and provides a thresholded copy of it. The heatmap passed as parameter is modified and also returned. For every pixel that is in a bounding box, the corresponding pixel in the heatmap is incremented by 100. If the pixel is in multiple bounding boxes, the corresponding he... | def update_heat_map(heat_map, bounding_boxes):
threshold = 56
new_heat = np.zeros_like(heat_map)
for bbox in bounding_boxes:
x0, y0, x1, y1 = bbox
new_heat[y0:y1 + 1, x0:x1 + 1] += 100
heat_map = (14 * heat_map + new_heat) / 15
thresholded = np.rint(heat_map).astype(np.uint)
thre... | [
"def add_heat(heatmap, bounding_boxes_list):\n # Iterate through list of bounding boxes\n for box in bounding_boxes_list:\n # Add += 1 for all pixels inside each bbox\n # Assuming each \"box\" takes the form ((x1, y1), (x2, y2))\n heatmap[box[0][1]:box[1][1], box[0][0]:box[1][0]] += 1\n\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Change a user's plan details. Does not touch Stripe fields. | def change_plan(email: str, plan: str) -> int:
mdb = MongoClient(environ["MONGO_URI"])
plan_data = mdb.account.plan.find_one({"key": plan}, {"_id": 0})
if not plan_data:
print(f"No plan found for {plan}")
return 1
resp = mdb.account.user.update_one({"email": email}, {"$set": {"plan": pla... | [
"def change_subscription(plan: Plan) -> bool:\n if not current_user.stripe:\n return False\n sub_id = current_user.stripe.subscription_id\n if not sub_id or current_user.plan == plan:\n return False\n sub = stripe.Subscription.retrieve(sub_id)\n sub.modify(\n sub_id,\n can... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
versions up the given file based on other files in the same directory. The given filepath should not have a version at the end. e.g. given "/tmp/file.txt" this function will return "/tmp/file0000.txt" unless there is already a file0000.txt in /tmp, in which case it will return "/tmp/file0001.txt". | def version_file(filepath):
zero_padding = 4
dirpath, filename = os.path.split(filepath)
base, ext = os.path.splitext(filename)
searchpath = os.path.join(dirpath, "*")
files = glob.glob(searchpath)
versions = []
for file in files:
filename_to_match = os.path.basename(file)
if re.match(base+"[0-9]{%d}"%zero... | [
"def manage_old_version_file(file_path):\n # Set old version file path\n file_path = Path(file_path)\n old_version_file = modify_filename_in_path(file_path,\n added='old_',\n prefix=True)\n\n # If old version exi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
versions up the given directory based on other directories in the same directory. The given dirpath should not have a version at the end. e.g. given "/tmp/v" this function will return "/tmp/v000" unless there is already a v000 dir in /tmp, in which case it will return "/tmp/v001". zero_padding specifies how many digits... | def version_dir(dirpath, zero_padding=3):
raise NotImplementedError() # TODO | [
"def get_versioned_dir(repodir, version):\n return os.path.join(repodir, version)",
"def updir(path, num=1):\n for _ in range(num):\n path = os.path.dirname(path)\n return path",
"def get_version_dir(*, sha: str) -> pathlib.Path:\n\n return settings.VERSIONS_DIRECTORY / sha[: settings.VERSION... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
returns a string of the same length as the given name with all the non-alphanumeric characters replaced by underscores. name: string to make alphanumeric | def alphanumeric(name):
seq = []
for char in name:
if not char.isalnum():
seq.append('_')
else:
seq.append(char)
return ''.join(seq) | [
"def make_ident(name):\n ident = ''\n for c in name:\n if idaapi.is_ident_char(ord(c)):\n ident += c\n else:\n ident += '_'\n return ident",
"def mangle(self):\n wrk = self._title\n wrk = wrk.strip().lower()\n last_ch = None\n tmp = \"\"\n for ch in wrk:... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculate the interval between w_time and s_time. | def _get_duration(self, w_time, s_time):
w_time_list = list(map(int, w_time.split(':')))
s_time_list = list(map(int, s_time.split(':')))
if w_time_list[1] < s_time_list[1]: # wake minute < sleep minute
w_time_list[1] += 60
w_time_list[0] -= 1
if w_time_list[0] < ... | [
"def interval(self):\n return str(self.time_interval) + self.time_unit",
"def scen_t_secs(self):\n hydro_datetimes=self.t_secs*self.scenario.scu + self.time0 \n start_i, stop_i=np.searchsorted(hydro_datetimes,\n [self.scenario.start_time,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Coerce a number of minutes q to the nearest quarter hour. | def _get_closest_quarter(q: int):
if q < 8:
closest_quarter = 0
elif 8 <= q < 23:
closest_quarter = 15
elif 23 <= q < 37:
closest_quarter = 30
elif q < 60:
closest_quarter = 45
else:
raise ValueError(f'q must be < 60 in ... | [
"def _convert_time(self, quarter_length):\n # TODO(annahuang): Take tempo change into account.\n # Time is in quarter-note counts from the beginning of the score.\n return quarter_length * 60.0 / _DEFAULT_QPM",
"def steps_per_quarter_to_steps_per_second(steps_per_quarter=4, qpm=120):\n return steps_pe... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Write sleep states onto row_out from current position to start of curr_triple. | def _insert_leading_sleep_states(self, curr_triple, row_out):
curr_posn = self.QS_IN_DAY - self.spaces_left
if curr_posn < curr_triple.start:
triple_to_insert = self.Triple(curr_posn,
curr_triple.start - curr_posn,
... | [
"def _write_row(self, row):\n return",
"def write_rows(self, rows):\n for row in rows:\n self.write_row(row)",
"def led(self,pos,flash,quick):\n self.ledAllOff()\n #Get row\n aRow = pos[1]\n #Get column\n aCol = pos[0]\n #Set col to LOW\n if aCol == 'a':\n theL... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Obtain, from a time string, its starting position in a line of output. | def _get_start_posn(self, time_str):
if time_str:
m = re.search(self.re_hr_min_time, time_str)
assert bool(m)
return (int(m.group(1)) * 4 + # 4 output chars per hour
int(m.group(2)) // 15) % self.QS_IN_DAY
return 0 | [
"def _look_for_timestamp_and_index_of_last_timestamp_char(self, a_line):\n\n last_char_timestamp_index = a_line.find(\"]\")\n string_timestamp = a_line[a_line.find(\"[\") + 1:last_char_timestamp_index]\n\n return datetime.datetime.strptime(string_timestamp, _DATE_FORMAT), \\\n las... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sync exploring ml tool to s3 | def sync_s3():
subprocess.run(["aws", "s3", "sync", "./", "s3://{0}".format(BLOG_BUCKET_NAME)]) | [
"def syncToS3(self):\n\t\t#return_code = subprocess.call(\"echo Hello World\", shell=True)\n\t\treturn_code = subprocess.call(\"aws s3 ls s3://prometheus-bucket-raspberry-pi\",shell=True)\n\t\treturn_code1 = subprocess.call(\"aws s3 sync /mnt/usb s3://prometheus-bucket-raspberry-pi\",shell=True)\n\t\treturn_code2 =... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
reads the actions from all yml files given in `filenames` and returns the objects specified in it | def get_action_objects(filenames):
all_actions = {'repeat every': RepeatEvery, 'fix wifi': WifiFixer, 'switch tabs': TabSwitcher}
if type(filenames) == str:
actions = get_actions(filenames, all_actions)
else:
actions = []
for filename in filenames:
actions += get_actions(... | [
"def read(self, filenames):\r\n if isinstance(filenames, str):\r\n filenames = [filenames]\r\n read_ok = []\r\n for filename in filenames:\r\n try:\r\n with open(filename) as fp:\r\n self._read(fp, filename)\r\n except IOError:\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
given a time unit, returns the current time in the specified unit | def get_current_time(unit):
now = datetime.now()
time = {"days": now.day, "hours": now.hour, "minutes": now.minute, "seconds": now.second}
return time[unit] | [
"def current_time_ns():\n return int(time.time() * (10 ** 9))",
"def localTime():\n\treturn convertTime(time.time())",
"def time_start(self, section):\r\n if (section == 0):\r\n return self.t0\r\n else:\r\n time_start_index = range(-self.number_of_section - 1, 0)\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
function to mutate the current object's image with a new image. | def updateImage(self):
self.image = self.getImage(self.location, self.name, self.imageType) | [
"def edit_image(self):\n self.update()",
"def image(self, obj):",
"def update_image(self):\n self.image = self.capture_image()\n self.update_background()",
"def change_img(obj: pygame.sprite.Sprite, img):\r\n obj.image = img\r\n obj.image.set_colorkey(service.colors[\"BLACK\"])",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
function that updates the LED display to the number provided | def updateLED(self, num):
        self.displayOff() # call to function to turn off any currently displayed number
        if num == 1:
            self.oneOn() # call to function for display of number 1
        elif num == 2:
            self.twoOn() # call to function for display of number 2
elif num == 3:
... | [
"def flashLed(count):\r\n print(\"My LED\")\r\n for i in range(count):\r\n GPIO.output(21, True)\r\n time.sleep(0.25)\r\n GPIO.output(21, False)\r\n time.sleep(0.25)",
"def update():\n\n global ind\n global DNA\n\n #leds = [[\"blue\"] * 8 for _ in xrange(10)];\n #leds... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
function that captures and stores an image using the camera and the LED flash | def captureImage(self, location, name, type):
self.camLightOn() #turn flash on
time.sleep(.25)
self.cam.capture(location+name+type) # call to camera image capture function
time.sleep(.25)
self.camLightOff() # flash off | [
"def capture(self):\n self.camera = self.ids['camera']\n timestr = time.strftime(\"%Y%m%d_%H%M%S\")\n self.camera.export_to_png(\"IMG_{}.png\".format(timestr))\n print(\"Captured\")",
"def take_picture(self):\n imgpath = \"\"\n # print(\"Take pic from device %d\" % (self.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Repeat a melody n times | def repeat_melody(self, melody, n, offset=0):
# parent_melody = Melody([melody] * n)
parent_melody = Melody([melody.clone().step(offset*j) for j in range(n)], key=self.key)
self.play_melody(parent_melody) | [
"def at_repeat(self):\r\n pass",
"def repeat(self, repeat, number, domain, *args, **kwargs):\n return PlotTimings(\n self.timer.repeat(domain, repeat, number, *args, **kwargs),\n {\"functions\": self.timer.functions, \"domain\": domain},\n )",
"def repeat(c, cmd, repea... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Job to start spiders. Returns a Deferred, which will fire after the crawl has completed. | def crawl_job():
settings = get_project_settings()
runner = CrawlerRunner(settings)
return runner.crawl(GamesSpider) | [
"def schedule_crawler(self) :\n\t\tself.create_new_workspace()\n\t\t#self.add_query_keywords()\n\n\t\treq = urllib2.Request(self.url, json.dumps(self.search_terms), {\"Content-type\" : \"application/json\"})\n\n\t\ttry:\n\t\t\tresponse = urllib2.urlopen(req)\n\t\texcept IOError, e:\n\t\t print \"It looks like so... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
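
crawl_job returns a Twisted Deferred, so a caller has to run the reactor for the crawl to actually execute. The usual pattern, sketched here assuming GamesSpider is importable from the surrounding Scrapy project:

```python
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor

def crawl_job():
    settings = get_project_settings()
    runner = CrawlerRunner(settings)
    return runner.crawl(GamesSpider)  # GamesSpider: the project's spider class (assumed)

d = crawl_job()
d.addBoth(lambda _: reactor.stop())  # stop the reactor on success or failure
reactor.run()                        # blocks until the crawl completes
```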
Loads bars for a given instrument from a CSV formatted file. The instrument gets registered in the bar feed. | def addBarsFromCSV(self, instrument, path, timezone=None):
if timezone is None:
timezone = self.__timezone
rowParser = RowParser(self.getDailyBarTime(), self.getFrequency(), timezone, self.__sanitizeBars)
super().addBarsFromCSV(instrument, path, rowParser) | [
"def addBarsFromCSV(self, instrument, path, timezone = None):\n\n csvfeed.YahooFeed.addBarsFromCSV(self, instrument, path, timezone)",
"def addBarsFromFile(self, instrument, date, path, frequency, timezone=None):\n\n if timezone is not None:\n raise Exception('timezone is not supported, s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Builds wait in line for more slaves. This method executes in the background on another thread and watches for idle slaves, then gives them out to the waiting builds. | def _slave_allocation_loop(self):
while True:
# This is a blocking call that will block until there is a prepared build.
build_scheduler = self._scheduler_pool.next_prepared_build_scheduler()
while build_scheduler.needs_more_slaves():
claimed_slave = self._id... | [
"def wait(self, num_slaves = 0):\n\n\t\t# wait for one to finish\n\t\twhile len(self.slaves) > num_slaves:\n\n\t\t\ttime.sleep(.1)\n\n\t\t\tfor pid in self.slaves.keys():\n\n\t\t\t\tself.slaves[pid].update()\n\t\t\t\tif self.incremental_output:\n\t\t\t\t\tself.slaves[pid].print_new_output()\n\t\t\t\t\tsys.stdout.fl... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
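
The allocation loop above is a blocking producer/consumer handoff: wait for a prepared build, then repeatedly wait for idle slaves until the build is satisfied. A stripped-down sketch of that shape using queue.Queue, with plain dicts as hypothetical stand-ins for the build and slave objects:

```python
import queue

idle_slaves = queue.Queue()
prepared_builds = queue.Queue()

def slave_allocation_loop():
    while True:
        build = prepared_builds.get()      # blocks until a build is prepared
        while build["needs_slaves"] > 0:
            slave = idle_slaves.get()      # blocks until a slave becomes idle
            build.setdefault("slaves", []).append(slave)
            build["needs_slaves"] -= 1
```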
Add a slave to the idle queue. | def add_idle_slave(self, slave):
try:
slave.mark_as_idle()
self._idle_slaves.put(slave)
except SlaveMarkedForShutdownError:
pass | [
"def add(self, slave):\n\n\t\tself.slaves[slave.pid] = slave",
"def add_to_master(self):\n ordered_queue = self.queueorderer.get_queue()\n if (len(ordered_queue) == 0):\n pass\n else:\n for line in ordered_queue:\n self.master.write(line)",
"def multiroo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set up the Tuya switch. | def setup_platform(hass, config, add_devices, discovery_info=None):
from . import pytuya
devices = config.get(CONF_SWITCHES)
switches = []
pytuyadevice = pytuya.OutletDevice(config.get(CONF_DEVICE_ID), config.get(CONF_HOST), config.get(CONF_LOCAL_KEY))
pytuyadevice.set_version(float(config.... | [
"def init_tetanus_lib(self) -> None:\n\n # Can't set instance attributes in fixture with scope='class', only class attributes.\n cls = type(self)\n cls._tetanus_lib = Tetanus() # pylint: disable=protected-access\n cls._echo_port = 1337 # pylint: disable=protected-access",
"def initCo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Turn Tuya switch on. | def turn_on(self, **kwargs):
self._device.set_status(True, self._switch_id) | [
"def turn_on(self, **kwargs):\n self.data.switch_on()",
"def turbo_on(self):\n command = 'SET:DEV:TURB' + self._turbo_channel + ':PUMP:SIG:STATE:ON\\r\\n'\n response = self.query_and_receive(command)\n\n if not response:\n raise RuntimeError(\"Enabling of turbo pump unsucces... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Called when the other bot returns false for recieve_move. This is sent to the bot that made the move, telling it to undo the last move it made. If the last move had several jumps, all of them are undone, so the board is in the same state it was in before the move was made. | def undo_last_move(self):
raise NotImplementedError() | [
"def redo_last_move(self):\n # If there's no undone moves, simply return without doing\n # anything. Otherwise, get the last undone move.\n try:\n last_move = self.undone_moves.pop()\n except IndexError:\n return\n if last_move.is_drop:\n # Drop th... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
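
undo_last_move is abstract here; one conventional way to satisfy it is to record each applied move sequence on a stack and pop the whole sequence back off, so multi-jump moves are undone atomically. A generic sketch, not the interface's mandated design:

```python
class BoardBot:
    def __init__(self):
        self.board = {}    # square -> piece
        self.history = []  # each entry is one move: a list of (src, dst) jumps

    def apply_move(self, jumps):
        for src, dst in jumps:
            self.board[dst] = self.board.pop(src)
        self.history.append(jumps)

    def undo_last_move(self):
        # Reverse every jump of the last move, last jump first, so the
        # board returns to exactly its pre-move state.
        for src, dst in reversed(self.history.pop()):
            self.board[src] = self.board.pop(dst)
```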
Iterates through query args and transforms any one-element lists into single items. | def _flatten_query_args(args):
def _make_flat(item):
if not item:
return None
if not isinstance(item, list):
return item
# item -> list
if len(item) == 1:
return item[0] if item[0] else None # Empty string -> None
... | [
"def process_some(self, items):\n process_one = self.process_one\n ret = process_one(self.null)\n for item in items:\n ret.extend(process_one(item))\n return ret",
"def _normalizeargs(sequence, output = None):\n if output is None:\n output = []\n\n cls = sequenc... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
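
The _flatten_query_args document is cut off before the part that applies _make_flat across the args. A complete sketch, assuming the args arrive as a dict of parameter name to value:

```python
def flatten_query_args(args):
    def make_flat(item):
        if not item:
            return None
        if not isinstance(item, list):
            return item
        if len(item) == 1:
            return item[0] if item[0] else None  # empty string -> None
        return item  # multi-element lists pass through unchanged

    return {key: make_flat(value) for key, value in args.items()}

print(flatten_query_args({"q": ["tor"], "page": ["2"], "tags": ["a", "b"]}))
# {'q': 'tor', 'page': '2', 'tags': ['a', 'b']}
```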
<root> <pkgtype> <pkgspec> <key>+ Returns metadata values for the specified package. | def metadata(argv):
if (len(argv) < 4):
print >> sys.stderr, "ERROR: insufficient parameters!"
sys.exit(2)
root, pkgtype, pkgspec = argv[0:3]
metakeys = argv[3:]
type_map = {
"ebuild":"porttree",
"binary":"bintree",
"installed":"vartree"}
if pkgtype not in type_map:
print >> sys.stderr, "Unrecognized ... | [
"def _fetch(self, package_name=str):\n package_metadata = self._from_npm_registry(package_name)\n\n # If key words are not found in repository, get it from github.\n if package_metadata and len(package_metadata.get(\"keywords\", [])) == 0 and \\\n len(package_metadata.get(\"repository... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
<root> <filename>+ Given a list of files, print the packages that own the files and which files belong to each package. Files owned by a package are listed on the lines below it, indented by a single tab character (\\t). All file paths must start with <root>. Returns 1 if no owners could be found, and 0 otherwise. | def owners(argv):
if len(argv) < 2:
sys.stderr.write("ERROR: insufficient parameters!\n")
sys.stderr.flush()
return 2
from portage import catsplit, dblink
settings = portage.settings
root = settings["ROOT"]
vardb = portage.db[root]["vartree"].dbapi
cwd = None
try:
cwd = os.getcwd()
except OSError:
p... | [
"def display_result(file_paths):\n for paths in file_paths:\n print(paths, stat.filemode(os.stat(paths).st_mode))\n print(f'Found {len(file_paths)} file(s).')",
"def check_owners(self, dir_name, owned_directories, error_messages):\n found = False\n for owned in owned_directories:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns all best_visible packages (without .ebuild). | def all_best_visible(argv):
if (len(argv) < 1):
print "ERROR: insufficient parameters!"
#print portage.db[argv[0]]["porttree"].dbapi.cp_all()
for pkg in portage.db[argv[0]]["porttree"].dbapi.cp_all():
mybest=portage.best(portage.db[argv[0]]["porttree"].dbapi.match(pkg))
if mybest:
print mybest | [
"def get_installedpackages():\n\n # TODO finish me\n\n return []",
"def get_local_packages():\n sys.stdout = mystdout = StringIO()\n pip.main(['freeze', '-l'])\n sys.stdout = sys.__stdout__\n \n pkgs = mystdout.getvalue().split('\\n')\n return [p.split('==') for p in pkgs]",
"def all_packa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the mirrors set to use in the portage configuration. | def gentoo_mirrors(argv):
print portage.settings["GENTOO_MIRRORS"] | [
"def get_mirrors(self):\r\n return [mirror if isinstance(mirror, type) else mirror\r\n for mirror in self._mirrors]",
"def ListMirrorPlanes(self):\n lde = self.TheSystem.LDE\n nSurf = lde.NumberOfSurfaces\n surfList = []\n for n in range(0,nSurf):\n sur... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the PORTDIR path. | def portdir(argv):
print portage.settings["PORTDIR"] | [
"def configDir(self):\n p = os.path.dirname(self.cctDir())\n return p",
"def make_host_port_path(uds_path, port):\n return \"{}_{}\".format(uds_path, port)",
"def get_port(self) -> str:\n return self.__serial.port",
"def get_file_name(self, port):\n \n port_file_name = \"%s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the CONFIG_PROTECT_MASK paths. | def config_protect_mask(argv):
print portage.settings["CONFIG_PROTECT_MASK"] | [
"def ignored_paths_patterns(self) -> ConfigNodePropertyArray:\n return self._ignored_paths_patterns",
"def _get_preserved_paths(self, root_dir=None):\n root_dir = root_dir if root_dir else self.processed_dataset_dir\n preserved_paths = _glob_multiple(_list_of_strings(self.config.preserve_path... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the PORTDIR_OVERLAY path. | def portdir_overlay(argv):
print portage.settings["PORTDIR_OVERLAY"] | [
"def portal_path(): # pragma: no cover",
"def get_overlay_template_dir():\n return DEFAULT_OVERLAY_TEMPLATE_DIR",
"def portdir(argv):\n\tprint portage.settings[\"PORTDIR\"]",
"def get_dashboard_path(self):\n db_file = pkg_resources.resource_filename(\n 'emuvim.dashboard', \"index.html\")... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the DISTDIR path. | def distdir(argv):
print portage.settings["DISTDIR"] | [
"def getBuildDir(self):\n default = 'build'\n pathstr = self.getCustom('Build', 'builddir', default)\n pathstr = self._getAbsPath(pathstr)\n\n return pathstr",
"def get_build_dir_path(rel_path=''):\n build_root = os.environ['BUILD_ROOT']\n return os.path.join(build_root, rel_path... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Generates x users | def generate_users(self, x):
for i in range(x):
user = id_generator()
self.create_user(user) | [
"def create_n_users(size):\n users = []\n for i in range(size):\n users.append({\n \"first_name\": \"First%d\" % i,\n \"last_name\": \"First%d\" % i,\n \"credit_card\": i,\n \"email\": \"%dgmai.com\" % i,\n \"username\": \"username%d\" % i,\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
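
generate_users leans on an id_generator helper the row never defines. A common implementation, assumed here, draws fixed-length random alphanumeric strings:

```python
import random
import string

def id_generator(size=8, chars=string.ascii_letters + string.digits):
    # Random fixed-length alphanumeric identifier.
    return ''.join(random.choice(chars) for _ in range(size))

print([id_generator() for _ in range(3)])  # e.g. ['k3FpQ9aZ', 'Wm2xTe0B', ...]
```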
return the subject name from a test, function, or component file | def _subject_name(path):
subject = os.path.basename(path)
subject = subject.replace('-test', '')
subject = subject.replace('-spec', '')
subject = subject.replace('-unit', '')
subject = subject.replace('.test', '')
subject = subject.replace('.spec', '')
subject = subject.replace('.unit', '')
subject = su... | [
"def test_get_subject(self):\n pass",
"def get_subject_common_name(self):\n return self.subject_info.get_common_name()",
"def getSubject(self):\n subject_st = urllib.unquote(self.path[1:].split('?', 1)[0]).decode('utf8')\n return article.Subject.fromString(subject_st)",
"def show_s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
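
_subject_name above strips the test-suffix markers one replace at a time and is truncated besides; a single regex covers the same -test/-spec/-unit and .test/.spec/.unit variants in one pass:

```python
import os
import re

def subject_name(path):
    subject = os.path.basename(path)
    # Drop any '-' or '.' followed by test/spec/unit,
    # e.g. 'button-spec.jsx' -> 'button.jsx'
    return re.sub(r'[-.](?:test|spec|unit)', '', subject)

print(subject_name("src/__tests__/button-spec.jsx"))  # button.jsx
```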
return the subject's relative path from the current file. There is no magic; for now it only replaces __tests__ with ../ | def subject_relative_path(path):
directory = path
subject = component_name(path)
filename = os.path.basename(path)
directory = os.path.dirname(path)
parent = os.path.basename(directory)
if re.match(r"index(?:[-._](?:spec|unit|test|acceptance))?\.jsx?$", filename):
if re.match(r"__tests?__/?", parent):... | [
"def tests_dir():\n return Path(os.path.realpath(__file__)).parent",
"def __get_testfile_path(self, path):\n path = os.path.relpath(\n path, os.path.join(self.__data_path, os.pardir))\n return path",
"def getAbsPath() -> str:\n thisFile:str = os.path.realpath(__file__)\n absPat... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return all initialization methods for the comparison algorithm. Initialization methods must start with 'initialize_' and take no parameters. | def get_initialization_functions(self):
initialization_methods = [
(
method,
getattr(self, method),
) for method in dir(self) if method.startswith('initialize_')
]
return {
key: value for (key, value) in initialization_methods
... | [
"def get_init_ops(self):\n return self.get_train_init_op(), self.get_test_init_op()",
"def _slot_initializers(self) -> List[init_ops_v2.Initializer]:\n raise NotImplementedError",
"def init_make_method_static(self):\n refactoring_main = make_method_static_2.main\n params = {\"udb_path\":... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
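
get_initialization_functions discovers methods purely by the initialize_ name prefix, which means a caller can run them all without knowing them individually. A small self-contained sketch of that contract:

```python
class Comparator:
    def initialize_resources(self):
        print("resources ready")

    def initialize_cache(self):
        print("cache ready")

    def get_initialization_functions(self):
        return {name: getattr(self, name) for name in dir(self)
                if name.startswith('initialize_')}

comparator = Comparator()
for name, func in comparator.get_initialization_functions().items():
    func()  # each initializer takes no parameters, per the docstring's contract
```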
Download the NLTK vader lexicon for sentiment analysis that is required for this algorithm to run. | def initialize_nltk_vader_lexicon(self):
from .utils import nltk_download_corpus
nltk_download_corpus('sentiment/vader_lexicon') | [
"def __init__(self):\n nltk.download(\"vader_lexicon\", quiet=True)\n self.vader = SentimentIntensityAnalyzer()",
"def _open_lexicon(self):\n # Positive Lexicon\n with open(opinion_lexicon_path + '/positive-words.txt', encoding='iso-8859-1') as f:\n positive_words = np.loadt... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Download the NLTK wordnet corpus that is required for this algorithm to run, but only if it has not already been downloaded. | def initialize_nltk_wordnet(self):
from .utils import nltk_download_corpus
nltk_download_corpus('corpora/wordnet') | [
"def _maybe_download_corpora(tmp_dir):\n cnn_filename = \"cnn_stories.tgz\"\n dailymail_filename = \"dailymail_stories.tgz\"\n cnn_finalpath = os.path.join(tmp_dir, \"cnn/stories/\")\n dailymail_finalpath = os.path.join(tmp_dir, \"dailymail/stories/\")\n if not tf.gfile.Exists(cnn_finalpath):\n cnn_file = g... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
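
Both NLTK rows above delegate to an nltk_download_corpus helper that is not shown. A plausible implementation, sketched here, probes nltk.data first so the download only happens when the resource is genuinely missing:

```python
import nltk
import nltk.data

def nltk_download_corpus(resource_path):
    # nltk.data.find() raises LookupError when the resource is absent.
    try:
        nltk.data.find(resource_path)
    except LookupError:
        # download() takes the bare resource name, e.g. 'wordnet' or
        # 'vader_lexicon', rather than the data path.
        nltk.download(resource_path.split('/')[-1])

nltk_download_corpus('corpora/wordnet')
nltk_download_corpus('sentiment/vader_lexicon')
```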