query stringlengths 9 9.05k | document stringlengths 10 222k | negatives listlengths 19 20 | metadata dict |
|---|---|---|---|
Getter method for mapVersion, mapped from YANG variable /input/mapping_record/mapVersion (int16) | def _get_mapVersion(self):
return self.__mapVersion | [
"def _set_mapVersion(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.int16, is_leaf=True, yang_name=\"mapVersion\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"mapVersion must be of... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for mapVersion, mapped from YANG variable /input/mapping_record/mapVersion (int16) | def _set_mapVersion(self, v, load=False):
try:
t = YANGDynClass(v,base=np.int16, is_leaf=True, yang_name="mapVersion", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""mapVersion must be of a type compat... | [
"def version(self) -> str:\n try:\n return self._data[MapData].version\n except KeyError:\n version = get_latest_version(region=self.region, endpoint=\"map\")\n self(version=version)\n return self._data[MapData].version",
"def _get_mapVersion(self):\n r... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for action, mapped from YANG variable /input/mapping_record/action (enumeration) | def _set_action(self, v, load=False):
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'Drop': {}, u'NativelyForward': {}, u'SendMapRequest': {}, u'NoAction': {}},), is_lea... | [
"def set_action(self, action):\n self.action = action",
"def action(self, action: str):\n\n self._action = action",
"def action(self, action):\n allowed_values = [\"DELETE\", \"NONE\"]\n if action not in allowed_values:\n raise ValueError(\n \"Invalid value ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Getter method for authoritative, mapped from YANG variable /input/mapping_record/authoritative (boolean) | def _get_authoritative(self):
return self.__authoritative | [
"def _set_authoritative(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"authoritative\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"authoritative m... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for authoritative, mapped from YANG variable /input/mapping_record/authoritative (boolean) | def _set_authoritative(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="authoritative", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""authoritative must be of a ty... | [
"def _get_authoritative(self):\n return self.__authoritative",
"def authoritative_copy(self, authoritative_copy):\n\n self._authoritative_copy = authoritative_copy",
"def supports_assessment_basic_authoring(self):\n return # boolean",
"def is_canonical(self):\n return False",
"def au... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Getter method for LocatorRecord, mapped from YANG variable /input/LocatorRecord (list) | def _get_LocatorRecord(self):
return self.__LocatorRecord | [
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_nam... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for LocatorRecord, mapped from YANG variable /input/LocatorRecord (list) | def _set_LocatorRecord(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGListType("locator_id",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name="LocatorRecord", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name="LocatorR... | [
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def airline_record_locator(self, airline_record_locator):\n if airline_record_locator ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for mapping_record, mapped from YANG variable /input/mapping_record (container) | def _set_mapping_record(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name="mapping-record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, Value... | [
"def field_mapping(self, field_mapping):\n self._field_mapping = field_mapping",
"def add_mapping(self, tag, parent):\n self._dict.update({tag: parent})",
"def mapLogRecord(self, record):\n newrec = record.__dict__\n for p in self.params:\n newrec[p] = self.params[p]\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Setter method for input, mapped from YANG variable /input (container) | def _set_input(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_input_pyangbind_example__input, is_container='container', yang_name="input", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""input mu... | [
"def _set_input(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_input_openconfig_qos__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, regis... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create a new object based on this genotype | def fromgenotype(self):
pass | [
"def new_object(self):\r\n\t\tpass",
"def new_Protein():\n return Protein()",
"def __init__(self, *args):\n this = _libsbml.new_SpeciesType(*args)\n try: self.this.append(this)\n except: self.this = this",
"def __init__(self,\r\n kwargs):\r\n self.data = list()\r... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the ParaMeshBodies object from a component. | def getFromComponent(self, component):
return ParaMeshBodies() | [
"def item(self, index):\n return ParaMeshBody()",
"def exportMesh(self, fusionComponent: adsk.fusion.Component) -> Optional[Mesh]:\n bRepBodies = fusionComponent.bRepBodies\n if len(bRepBodies) == 0:\n return\n\n mesh = Mesh()\n mesh.name = fusionComponent.name\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates a new mesh body by importing an .stl or .obj file. Because of a current limitation, if you want to create a mesh body in a parametric model, you must first call the edit method of the base or form feature, use this method to create the mesh body, and then call the finishEdit method of the base or form feature. ... | def add(self, fullFilename, units, baseOrFormFeature):
return ParaMeshBodyList() | [
"def createMesh(context):\n global main, sp, smesh, salome, subprocess, QtGui, pdb, spawn, tool\n # get active module and check if SMESH\n active_module = context.sg.getActiveComponent()\n if active_module != \"SMESH\":\n QtGui.QMessageBox.information(None, str(active_module),\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Provides access to a mesh body within the collection. | def item(self, index):
return ParaMeshBody() | [
"def get_mesh(self):\n return self.mesh",
"def mesh(self):\n return self._mesh",
"def getFromComponent(self, component):\n return ParaMeshBodies()",
"def createCubeBoundary(self):\n mesh = Mesh()\n self._createCppHandle()\n mesh.cppHandle = self.cppHandle.createCubeBoundary(self.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the parent Component. | def parentComponent(self):
return fusion.Component() | [
"def get_parent(self) :\n return self.parent",
"def get_parent(self):\n if self.parent:\n return self.parent()\n else:\n return None",
"def parent_block(self):\n parent = self.parent\n while (parent is not None) and \\\n (not parent._is_compo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the assembly occurrence (i.e. the occurrence) of this object in an assembly. This is only valid in the case where this is acting as a proxy in an assembly. Returns null in the case where the object is not in the context of an assembly but is already the native object. | def assemblyContext(self):
return fusion.Occurrence() | [
"def get_co_code_addr(obj):\n # Get all addresses referenced in memory\n all_addrs = set(get_referenced_addresses(obj))\n # Get addresses of all properties that are exposed at the Python layer\n public_addrs = set(get_exposed_addresses(obj))\n # co_code is the attribute that's referenced in memory, b... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Provides access to a mesh body within the collection. | def item(self, index):
return ParaMeshBody() | [
"def get_mesh(self):\n return self.mesh",
"def mesh(self):\n return self._mesh",
"def getFromComponent(self, component):\n return ParaMeshBodies()",
"def createCubeBoundary(self):\n mesh = Mesh()\n self._createCppHandle()\n mesh.cppHandle = self.cppHandle.createCubeBoundary(self.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create object session from the app key, secret and type | def create_session(self):
try:
self.session = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
except Exception, e:
logger.error('Exception at create_session')
logger.debug('*' + sys.exc_info()[0]) | [
"def make_session(self, data):\n\n start_time = data['start_time']\n username = data['user']\n sessnum = data['session_number']\n sess_data = data['data']\n new_session = Session(sessnum, start_time, username, self.classify_on_key, sess_data)\n return new_session",
"def c... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Obtains an authorization url; After authorization, creates an access token and builds an instance of the Dropbox client. Creates the metadata cache. | def create_access_token(self):
# Wraper for also caching invalid results
#def getMetadataRofs(path):
# try:
# return self.client.metadata(path)
# except Exception, e:
# log.write('Exception at getMetadataRofs... | [
"def build_client(config, auth_token = None):\n if auth_token:\n pass\n\n elif not auth_token and config.get(\"auth_token\"):\n auth_token = config.get(\"auth_token\")\n\n elif not auth_token and not config.get(\"auth_token\"):\n auth_token, config = start_auth_flow(config)\n\n __lo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Downloads the file given by path and writes using the file descriptor out | def downloadFile(self, path, out):
try:
logger.info("downloadFile('%s', ...)" % (path))
# Downloads from dropbox
# Manually :( update the metadata cache
f, metadata = self.client.get_file_and_metadata(path)
f = f.read()
logger.info('* file downloaded')
self.cache_metadata.setNewValue(path, metad... | [
"def download(self, path, file):\n\n resp = self._sendRequest(\"GET\", path)\n if resp.status_code == 200:\n with open(file, \"wb\") as f:\n f.write(resp.content)\n else:\n raise YaDiskException(resp.status_code, resp.content)",
"def download(url, file_nam... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Iterate over groups of `df`, and, if provided, matching labels. | def _iter_groups(self, df, y=None):
groups = df.groupby(self.groupby).indices
for key, sub_idx in groups.items():
sub_df = df.iloc[sub_idx]
if y is not None:
# y is either a numpy array or a pd.Series so index accordingly
sub_y = y.iloc[sub_idx] if... | [
"def iter_groups(ds, by, dim=\"sample\"):\n\n table = get_annotation_table(ds, dim=dim)\n for group, group_df in table.groupby(by):\n group_ds = ds.loc[{f\"{dim}_id\": list(group_df.index.values)}]\n yield group, group_ds",
"def get_labels(df, label):\n return df[label]",
"def _iter_objs_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
json encode the message and prepend the topic | def themify(topic,msg):
return topic + ' ' + json.dumps(msg) | [
"def mogrify(topic, msg):\n return topic + ' ' + json.dumps(msg)",
"def json_dumps(msg):\r\n return json.dumps(msg)",
"def encode_message_properties_for_topic(message_to_send: Message) -> str:\n topic = \"\"\n\n system_properties: List[Tuple[str, str]] = []\n\n if message_to_send.output_name:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Test that we can decode the energy sums. These can be tricky b/c the baseline is encoded in IEEE 754 format. | def test_decode_energy_sums(self):
self.assertEqual(td.esums(decoded=True),
decoder.decode_energy_sums(BytesIO(td.esums(True)))) | [
"def test_energy(self):\n a = EnergyArray(1.1, \"eV\")\n b = a.to(\"Ha\")\n self.assertAlmostEqual(float(b), 0.0404242579378)\n c = EnergyArray(3.14, \"J\")\n self.assertAlmostEqual(float(c.to(\"eV\")), 1.959833865527343e+19, 5)\n # self.assertRaises(ValueError, Energy, 1, ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests that we can decode external timestamps appropriately. | def test_decode_external_timestamp(self):
self.assertEqual(td.external_timestamp(), decoder.decode_external_timestamp(
BytesIO(td.external_timestamp(True)), self.mask)) | [
"def test_get_time_stamps(self):\n pass",
"def test_to_timestamp(self):\n self.assertEqual(\n to_timestamp(DATE_STR1),\n datetime.fromisoformat(DATE_STR1[:-1]).timestamp())\n self.assertEqual(\n to_timestamp(DATE_STR2),\n datetime.fromisoformat(DATE... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests that we can decode the QDC header into an array. | def test_decode_qdc(self):
self.assertEqual(td.qdc(), decoder.decode_qdc(BytesIO(td.qdc(True)))) | [
"def test_hdlc_decode(self):\n pass",
"def array_from_header(header):\n\n pass",
"def test_decode(self):\n pass # TODO(tlarsen)",
"def parse_header(input_array):\n codec = struct.unpack(\">i\", input_array[0:4])[0]\n length = struct.unpack(\">i\", input_array[4:8])[0]\n param = stru... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests that we can decode a trace from the data stream. | def test_decode_trace(self):
self.assertEqual(td.trace(), decoder.decode_trace(BytesIO(td.trace(True)))) | [
"def test_decode(self):\n pass # TODO(tlarsen)",
"def test_decode_external_timestamp(self):\n self.assertEqual(td.external_timestamp(), decoder.decode_external_timestamp(\n BytesIO(td.external_timestamp(True)), self.mask))",
"def test_decode_sample_data_ii(self):\n # setup\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests that we raise a Value Error when decoding a bad header length. | def test_process_optional_header_data_bad_header_length(self):
with self.assertRaises(ValueError):
decoder.process_optional_header_data(BytesIO(td.external_timestamp(True)), 3, self.mask) | [
"def test_error_message_header_bad_request_codes(self):\n error_type = 1\n error_type_value = Error.ErrorType.OFPET_BAD_REQUEST\n\n error_code = 0\n\n iter_given_code = Error.ErrorType.get_class(error_type_value).__iter__()\n length = Error.ErrorType.get_class(error_type_value).__... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Resolves a list of requirements for the same package. Given a list of package details in the form of `packaging.requirements.Requirement` objects, combine the specifier, extras, url and marker information to create a new requirement object. | def resolve_requirement_versions(package_versions):
resolved = Requirement(str(package_versions[0]))
for package_version in package_versions[1:]:
resolved.specifier = resolved.specifier & package_version.specifier
resolved.extras = resolved.extras.union(package_version.extras)
resolved.... | [
"def resolve(self, requirements, env=None, installer=None,\n replace_conflicting=False, extras=None):\n\n # set up the stack\n requirements = list(requirements)[::-1]\n # set of processed requirements\n processed = {}\n # key -> dist\n best = {}\n to_a... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
List installed and missing dependencies. Given a package and, optionally, a tuple of extras, identify any packages which should be installed to match the requirements and return any which are missing. | def find_dependencies(package="sunpy", extras=None):
requirements = get_requirements(package)
installed_requirements = {}
missing_requirements = defaultdict(list)
extras = extras or ["required"]
for group in requirements:
if group not in extras:
continue
for package, pack... | [
"def missing_dependencies_by_extra(package=\"sunpy\", exclude_extras=None):\n exclude_extras = exclude_extras or []\n requirements = get_requirements(package)\n missing_dependencies = {}\n for group in requirements.keys():\n if group in exclude_extras:\n continue\n missing_depen... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get all the specified extras for a package and report any missing dependencies. This function will also return a "required" item in the dict which is the dependencies associated with no extras. | def missing_dependencies_by_extra(package="sunpy", exclude_extras=None):
exclude_extras = exclude_extras or []
requirements = get_requirements(package)
missing_dependencies = {}
for group in requirements.keys():
if group in exclude_extras:
continue
missing_dependencies[group]... | [
"def find_dependencies(package=\"sunpy\", extras=None):\n requirements = get_requirements(package)\n installed_requirements = {}\n missing_requirements = defaultdict(list)\n extras = extras or [\"required\"]\n for group in requirements:\n if group not in extras:\n continue\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Load zoning districts from the given shapefile. | def load_zoning_districts(source=ZONING_DATA_FILE):
layer = DataSource(source)[0]
for feature in layer:
try:
_save_base_district(feature)
except:
print ('Could not save base district for feature with OBJECTID=%s.'
' Skipping.') % feature['OBJECTID']
... | [
"def load_graph(self, graph: nx.Graph) -> None:\n\n self.pos = dict()\n self.polygons = dict()\n districts = len(self.data[\"features\"])\n\n for i in range(districts):\n\n # Get shape information from file\n nodes = self.data[\"features\"][i][\"geometry\"][\"coor... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Extract the hidden message fro the given image. Authenticate the hidden message by validating the hmac hash sliced from the hidden message. | def get_msg(img):
i = Image.open('%s.ste' % img)
secret = stg.extract_msg(i)
mac = secret.split('--:--')[0]
print 'HMAC hex is: \n%s\n' % mac.encode('hex')
data = secret.split('--:--')[1]
print 'The hidden message is: \n%s\n' % data
check_hmac(mac)
i.show() | [
"def hide(self, img, message):\r\n encoded = img.copy()\r\n width, height = img.size\r\n index = 0\r\n\r\n message = message + '~~~'\r\n message_bits = \"\".join(tools.a2bits_list(message))\r\n\r\n npixels = width * height\r\n if len(message_bits) > npixels * 3:\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Check if the given hmac ist valid by creating a new hmac with the supplied password and the data. | def check_hmac(mac, data):
h_mac = hmac.new(args['m'], bytes(data), digestmod=hashlib.sha256).digest()
print 'HMAC validation: \n%s\n' % str(h_mac == mac) | [
"def check_hmac(msg: bytes, mac: bytes, hmac_key: bytes) -> bool:\n\n valid = False\n h = HMAC.new(hmac_key, digestmod=SHA256)\n h.update(msg)\n try:\n h.hexverify(mac)\n valid = True\n except ValueError:\n return valid\n\n return valid",
"def verify_hmac(self, payload):\r\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
If s is the numer of sides in a polygon, then the formula for the nth | def polygonal_number(s, n):
return (n*n*(s-2)-n*(s-4))/2 | [
"def area_polygon(n, s):\n area = ((float(1)/float(4)) * n * s ** 2) / (math.tan(math.pi / n))\n return area",
"def polysum(n, s):\n area = 0\n \n #avoiding division by zero\n if n != 0: \n area = (0.25 * n * (s**2)) / math.tan(math.pi / n)\n perimeter = n * s\n \n return ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a list of figurate numbers for a given s, between 1000 and 9999, with the additional property that the third digit of each number will not be 0. | def figurate_list(s):
f = polygon_gen(s)
ans = []
c = next(f)
while c < 999: c = next(f)
while c < 10000:
c = str(c)
if c[2] != '0': ans.append(FigurateNode(c[:2], c[-2:], s))
c = next(f)
return ans | [
"def scaleToInt(listOfNums): \n \n newList = []\n maxVal = max(listOfNums)\n minVal = min(listOfNums)\n for num in listOfNums:\n scaledNum = round(15 * (num - minVal) / (maxVal - minVal))\n newList.append(scaledNum)\n\n### complete this part for 2c \n return newList",
"def spi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Finds all complete figurate cycles for given svalues. | def figurate_cycles(*s_vals):
assert len(s_vals) > 1 #incomplete sanity check
# Since a DFS has to start SOMEWHERE and we're looking for cycles, we
# arbitrarily take the first list of figurates and use them as the
# roots of our search.
roots = figurate_list(s_vals[0])
# Make a big list of all ... | [
"def find_all_cycles(s,graph):\n\n grph = u.edge_to_list_dict(graph)\n node_cnt = len(grph)\n k = z.Int(\"k\")\n syms = [z.Int('node%s'%i) for i in range(node_cnt)]\n\n # s.add(syms[0] == 0) # start node is a 0\n s.add(k < node_cnt)\n s.add(k > 1)\n\n o = z.Optimize()\n\n #... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks that when we have no more candidates, that our path 'endpoints' are cyclical. | def have_cycle(candidates, path):
return (not candidates and path[0].prefix == path[-1].suffix) | [
"def check_paths(self):\n for path in self.paths:\n # check that arc starts at s\n arc = path[0]\n arc_start = self.arc_info[arc][\"start\"]\n assert(arc_start == self.source()), \"Path does not start at s\"\n # check that internal arcs are valid\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks that we have at least one candidate whose prefix is cyclical with the new element's suffix. | def have_dead_end(candidates, new_elem):
return new_elem.suffix not in map(lambda x: x.prefix, candidates) | [
"def have_cycle(candidates, path):\n return (not candidates and path[0].prefix == path[-1].suffix)",
"def _causes_name_clash(candidate, path_list, allowed_occurences=1):\n duplicate_counter = -allowed_occurences\n for path in path_list:\n parts = tuple(reversed(path.parts))\n if len(par... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a new list where all sgonal candidates have been removed. | def remove_sgons(s_value, candidates):
return list(filter(lambda x: x.s != s_value,
candidates)) | [
"def Clear_candidates(self):\n\t\tself.all_candidates = []",
"def remove_all(self):\n\n # Cave: loop while modifying loop item source. Create a new list.\n #\n for name in list(self.subplanes_list):\n\n self.remove(name)\n\n return",
"def get_good_candidates(chrom,candidat... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Prunes the obtained tree according to the minimal gain (entropy or Gini). | def prune(tree, minGain, evaluationFunction=entropy, notify=False):
# recursive call for each branch
if tree.trueBranch.results == None: prune(tree.trueBranch, minGain, evaluationFunction, notify)
if tree.falseBranch.results == None: prune(tree.falseBranch, minGain, evaluationFunction, notify)
# merge ... | [
"def prune(self, threshold = 0.02):\n\n\n limits, quantiles = self.eval_limit(\"prune_whole\")\n total_limit = limits[2]\n\n n_pruned_away = 0\n\n for l in self.get_leaves():\n \n # Store the discriminator axis\n original_discriminator = l.dis... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Loads a CSV file and converts all floats and ints into basic datatypes. | def loadCSV(file):
def convertTypes(s):
s = s.strip()
try:
return float(s) if '.' in s else int(s)
except ValueError:
return s
reader = csv.reader(open(file, 'rt'))
return [[convertTypes(item) for item in row] for row in reader] | [
"def loadCSV(input_file):",
"def read_csv_file(csv_file):\n return cudf.read_csv(csv_file, delimiter=' ',\n dtype=['int32', 'int32', 'float32'], header=None)",
"def load_from_file_csv(cls):\n file_name = cls.__name__ + \".cvs\"\n\n if cls.__name__ == \"Rectangle\":\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Ban an ip from all DDNet servers in given region. Minutes need to be greater than 0. Region needs to be the 3 char server code. | async def global_ban_region(self, ctx: commands.Context, region: str, ip: str, name: str, minutes: int, *, reason: clean_content):
await self._global_ban(ctx, ip, name, minutes, reason, region) | [
"def test_exclude_ip_ban(self):\n pass",
"def ban_host(self, host, hard=False, duration=None):\n # TODO: Timed bans?\n logger.verbose(\"Banning IP {0}\".format(host))\n self.ip_bans.add(host, hard)",
"async def ip_blacklist_filter(request: Request):\n if request.app.ip_blacklist.i... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Unban an ip from all DDNet servers. If you pass a name, all currently globally banned ips associated with that name will be unbanned. | async def global_unban(self, ctx: commands.Context, *, name: str):
if re.match(r'^[\d\.-]*$', name) is None:
query = 'SELECT ip FROM ddnet_bans WHERE name = $1;'
ips = [r['ip'] for r in await self.bot.pool.fetch(query, name)]
if not ips:
return await ctx.send(... | [
"async def unban(self, ctx, name: str):\n try:\n bans = await self.bot.get_bans(ctx.message.server)\n user = discord.utils.get(bans, name=name)\n if user is not None:\n await self.bot.unban(ctx.message.server, user)\n except discord.Forbidden:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the definition of this managed folder. The definition contains name, description checklists, tags, connection and path parameters, metrics and checks setup. | def get_definition(self):
return self.client._perform_json(
"GET", "/projects/%s/managedfolders/%s" % (self.project_key, self.odb_id)) | [
"def getDefinition(self):\n\n return self.__definition;",
"def definition(self) -> 'outputs.ReportDefinitionResponse':\n return pulumi.get(self, \"definition\")",
"def get_definition(self):\n return self.client._perform_json(\n \"GET\", \"/admin/groups/%s\" % self.name)",
"def ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set the definition of this managed folder. | def set_definition(self, definition):
return self.client._perform_json(
"PUT", "/projects/%s/managedfolders/%s" % (self.project_key, self.odb_id),
body=definition) | [
"def definition(self, definition):\n\n self._definition = definition",
"def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/groups/%s\" % self.name,\n body = definition)",
"def _set_definition(self, definition: Dict[str, Any]):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get a file from the managed folder | def get_file(self, path):
return self.client._perform_raw(
"GET", "/projects/%s/managedfolders/%s/contents/%s" % (self.project_key, self.odb_id, utils.quote(path))) | [
"def get_file(self, path):\n file = self.get('data_request?id=file¶meters=%s' % path)\n return file",
"def get_file(bundle, dst):\n bundle = path.join(cs, bundle, 'archive', dst)\n req = requests.get(bundle)\n if not req.ok:\n raise Exception(\"Could not query file in charmstore:... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete a file from the managed folder | def delete_file(self, path):
return self.client._perform_empty(
"DELETE", "/projects/%s/managedfolders/%s/contents/%s" % (self.project_key, self.odb_id, utils.quote(path))) | [
"def delete_file(file_path):\n pass",
"def delete_file(file_path):\n\n raise RuntimeError('delete_file function not implemented in Artella Abstract API!')",
"def delete_file(self):\n # DELETE /files/{user_id}/{path}\n pass",
"def delete(self, filename, **kw):\n\n file_path = os.path... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Upload the content of a folder to a managed folder. | def upload_folder(self, path, folder):
for root, _, files in os.walk(folder):
for file in files:
filename = os.path.join(root, file)
with open(filename, "rb") as f:
rel_posix_path = "/".join(os.path.relpath(filename, folder).split(os.sep))
... | [
"def upload(self, folder, recursive=True, test=False):\n return self._gphotocli_image_tasks.upload(folder, recursive, test)",
"def UploadFolderToGD(token_path, source_path, gd_folder): \n google_drive = ConnectGoogleDrive(token_path)\n file_cmd = spike.FileCMD()\n file_list = file_cmd.ListFiles(so... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the last values of the metrics on this managed folder. | def get_last_metric_values(self):
return ComputedMetrics(self.client._perform_json(
"GET", "/projects/%s/managedfolders/%s/metrics/last" % (self.project_key, self.odb_id))) | [
"def channels_last_values(self):\n return self._channels_last_values",
"def last_metric_count(self):\n return self._last_metric_count",
"def getLastSensorData(self):\n return self._latestSensorData",
"def get_last_logs(self):\n return [log[-1] for log in self.log.values()]",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the history of the values of a metric on this managed folder. | def get_metric_history(self, metric):
return self.client._perform_json(
"GET", "/projects/%s/managedfolders/%s/metrics/history" % (self.project_key, self.odb_id),
params={'metricLookup' : metric if isinstance(metric, str) or isinstance(metric, unicode) else json.dumps(metric)}) | [
"def GetHistoryResults(self):\r\n\r\n return self._histories.values()",
"def get_history(self):\n return self.history",
"def get_history(self):\n return self.ringmaster.historyfile.getvalue()",
"def get_metric_history(self, project_id, run_id, key):\n endpoint = \"/project/{}/run/{... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the flow zone of this managed folder. | def get_zone(self):
return self.project.get_flow().get_zone_of_object(self) | [
"def access_zone(self):\n return self._access_zone",
"def zone(self) -> Zone:\n return self._send_and_receive({})['zone']",
"def gcp_zone(self) -> str:\n return pulumi.get(self, \"gcp_zone\")",
"def edge_zone(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"edge_zo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Move this object to a flow zone. | def move_to_zone(self, zone):
if isinstance(zone, basestring):
zone = self.project.get_flow().get_zone(zone)
zone.add_item(self) | [
"def move_to_zone(self, zone):\n if isinstance(zone, basestring):\n zone = self.project.get_flow().get_zone(zone)\n zone.add_item(self)",
"def move_transfer(self):\n self.move('transfer')",
"def move_stage_to_z(self, z, safe_mode=True):\n raise NotImplementedError",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Share this object to a flow zone. | def share_to_zone(self, zone):
if isinstance(zone, basestring):
zone = self.project.get_flow().get_zone(zone)
zone.add_shared(self) | [
"def move_to_zone(self, zone):\n if isinstance(zone, basestring):\n zone = self.project.get_flow().get_zone(zone)\n zone.add_item(self)",
"def flow(self, flow):\n\n self._flow = flow",
"def move_to_zone(self, zone):\n if isinstance(zone, basestring):\n zone = sel... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Unshare this object from a flow zone. | def unshare_from_zone(self, zone):
if isinstance(zone, basestring):
zone = self.project.get_flow().get_zone(zone)
zone.remove_shared(self) | [
"def unlink_from_zone(self):\n\n if self.Zone is None:\n return\n\n # Face is linked.\n # Unlink it.\n self.Zone.Faces.remove(self)\n self.Zone = None",
"def unshare(self):\n if self._kind == _ABO_KIND_GROUP:\n yield super(AddressBookObjectSharingMix... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get a handle to manage discussions on the managed folder. | def get_object_discussions(self):
return DSSObjectDiscussions(self.client, self.project_key, "MANAGED_FOLDER", self.odb_id) | [
"def forum_form_discussion(self):\n self.get('discussion/forum', name=\"forums:forum_form_discussion\")",
"def get_object_discussions(self):\n return DSSObjectDiscussions(self.client, self.project_key, \"SCENARIO\", self.id)",
"def get_object_discussions(self):\n return DSSObjectDiscussions... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Save the changes to the settings on the managed folder. | def save(self):
self.folder.client._perform_empty(
"PUT", "/projects/%s/managedfolders/%s" % (self.folder.project_key, self.folder.odb_id),
body=self.settings) | [
"def save_settings(self):\n self._click_button('save_settings')",
"def save_settings(self):\n logging.info(\"Saving settings.\")\n write_settings(self.settings)",
"def saveSettings(self):\n self.userFiles.applyData()\n self.userPersonal.applyData()",
"def saveSettings(self):... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add a discrete partitioning dimension. | def add_discrete_partitioning_dimension(self, dim_name):
self.settings["partitioning"]["dimensions"].append({"name": dim_name, "type": "value"}) | [
"def _add_dimensions(self, item, dims, constant_keys):\n if isinstance(item, Layout):\n item.fixed = False\n\n dim_vals = [(dim, val) for dim, val in dims[::-1]\n if dim not in self.drop]\n if isinstance(item, self.merge_type):\n new_item = item.clone(cd... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add a time partitioning dimension. | def add_time_partitioning_dimension(self, dim_name, period="DAY"):
self.settings["partitioning"]["dimensions"].append({"name": dim_name, "type": "time", "params":{"period": period}}) | [
"def add_discrete_partitioning_dimension(self, dim_name):\n self.settings[\"partitioning\"][\"dimensions\"].append({\"name\": dim_name, \"type\": \"value\"})",
"def add_timedim(data, date=\"1970-01-01\"):\n if isinstance(data, xr.DataArray):\n if \"time\" in data.dims:\n raise ValueErr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set the partitioning pattern of the folder. The pattern indicates which paths inside the folder belong to | def set_partitioning_file_pattern(self, pattern):
self.settings["partitioning"]["filePathPattern"] = pattern | [
"def set_folder_filter_pattern(self):\n pattern = self.ui.lineEditFolderFilter.text()\n self.folder_include_pattern = str(pattern)\n self.populate_file_list()",
"def setSplitPattern(self, value):\n return self._set(splitPattern=value)",
"def pattern(self, pattern):\n\n self._p... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the predicted cost for each of the actions given the provided context. | def get_costs_per_action(self, context: np.ndarray) -> Dict[Action, Cost]:
costs_per_action = {}
for action in self._get_actions():
if self.categorize_actions:
action_one_hot = self._get_actions_one_hot(action)
x = np.append(action_one_hot, context)
... | [
"def _compute_targets(self, states, actions, values, is_dones, rewards, current_state):\n \n next_advantage = 0\n advantages = torch.Tensor(states.size(0), states.size(1), 1)\n if self.cuda:\n advantages = advantages.cuda()\n \n _, next_value = self.ppo(Variable(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests na_element gets appended as child. | def test_setter_na_element(self):
root = netapp_api.NaElement('root')
root['e1'] = netapp_api.NaElement('nested')
self.assertEqual(len(root.get_children()), 1)
e1 = root.get_child_by_name('e1')
self.assertIsInstance(e1, netapp_api.NaElement)
self.assertIsInstance(e1.get_c... | [
"def test_setter_na_element(self):\n root = netapp_api.NaElement('root')\n root['e1'] = netapp_api.NaElement('nested')\n self.assertEqual(1, len(root.get_children()))\n e1 = root.get_child_by_name('e1')\n self.assertIsInstance(e1, netapp_api.NaElement)\n self.assertIsInstan... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests dict is appended as child to root. | def test_setter_child_dict(self):
root = netapp_api.NaElement('root')
root['d'] = {'e1': 'v1', 'e2': 'v2'}
e1 = root.get_child_by_name('d')
self.assertIsInstance(e1, netapp_api.NaElement)
sub_ch = e1.get_children()
self.assertEqual(len(sub_ch), 2)
for c in sub_ch:... | [
"def test_setter_child_dict(self):\n root = netapp_api.NaElement('root')\n root['d'] = {'e1': 'v1', 'e2': 'v2'}\n e1 = root.get_child_by_name('d')\n self.assertIsInstance(e1, netapp_api.NaElement)\n sub_ch = e1.get_children()\n self.assertEqual(2, len(sub_ch))\n for ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tests list/tuple are appended as child to root. | def test_setter_child_list_tuple(self):
root = netapp_api.NaElement('root')
root['l'] = ['l1', 'l2']
root['t'] = ('t1', 't2')
l = root.get_child_by_name('l')
self.assertIsInstance(l, netapp_api.NaElement)
t = root.get_child_by_name('t')
self.assertIsInstance(t, ne... | [
"def test_setter_child_list_tuple(self):\n root = netapp_api.NaElement('root')\n root['l'] = ['l1', 'l2']\n root['t'] = ('t1', 't2')\n l_element = root.get_child_by_name('l')\n self.assertIsInstance(l_element, netapp_api.NaElement)\n t = root.get_child_by_name('t')\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get sentiment analysis immediately on document save | def get_sentiment_analysis(sender, instance, **kwargs):
text_analysis = TextAnalysis(instance.text)
# Prevent sentiment_analysis API call every time the document is saved
if instance.sentiment_analysis is None:
instance.get_sentiment_analysis() | [
"def text_analytics(self):\n\n headers = {\n # Request headers\n 'Content-Type': 'application/json',\n 'Ocp-Apim-Subscription-Key': self.keys['text_analytics'],\n }\n \n sentiment_url = 'https://westus.api.cognitive.microsoft.com/text/analytics/v2.0/senti... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This function is used in the property self.embeddings. | def set_embeddings(self): | [
"def get_embedding_output(self):\n return self.embedding_output",
"def get_embeddings(self, data):\n raise NotImplementedError()",
"def _add_embeddings_internal(self, sentences: List[Sentence]) -> List[Sentence]:\n pass",
"def _embed(self):\n with tf.variable_scope('word_embedding'):\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
For each string, output 1 if the DFA accepts it, 0 otherwise. The input is guaranteed to be a DFA. | def task_4(parser):
dfa = parser.parse_fa()
test_strings = parser.parse_test_strings()
# calculate and print acceptance for each string
for string in test_strings:
if follow_dfa(dfa["graph"][dfa["start"]], string):
print("1")
else:
print("0")
print("end") | [
"def matchDFA(automata: DFA, s: str) -> bool:",
"def accepts_word(dfa, word):\n\n status = dfa['initial'][0]\n for char in word:\n dfa.setdefault(char, [])\n if len(dfa[char]) > 0:\n for edge in dfa[char]:\n #print(edge[0], edge[1])\n #print(type(status... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
get access token from wxapi this is the second step to login with wechat after the client get the code | def get_access_token(self, code):
url = get_config("login.wechat.access_token_url") % code
r = self._access_wxapi_or_raise(url)
return (r["access_token"], r["openid"]) | [
"def get_token(self, code):\n\n # live need post a form to get token\n headers = {'Content-type': 'application/x-www-form-urlencoded'}\n data = {\n 'client_id': get_config('login.live.client_id'),\n 'client_secret': get_config('login.live.client_secret'),\n 'red... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
get user info from wxapi this is the final step to login with wechat | def get_user_info(self, access_token, openid):
url = get_config("login.wechat.user_info_url") % (access_token, openid)
return self._access_wxapi_or_raise(url) | [
"def login():\n resp = {'code':200, 'msg':'操作成功~', 'data':{}}\n req = request.values\n code = req['code'] if 'code' in req else ''\n encryptedData = req['encryptedData'] if 'encryptedData' in req else ''\n iv = req['iv'] if 'iv' in req else ''\n if not code or len(code) < 1:\n resp['code'] ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get qq access token | def get_token(self, code, redirect_uri):
token_resp = get_remote(get_config("login.qq.access_token_url") % (redirect_uri, code))
if token_resp.find('callback') == 0:
error = json.loads(token_resp[10:-4])
raise Exception(error)
query = qs_dict(token_resp)
return q... | [
"def get_token(self, code):\n\n token_resp = get_remote(get_config(\"login.qq.access_token_url\") + code)\n if token_resp.find('callback') == 0:\n error = json.loads(token_resp[10:-4])\n raise Exception(error)\n\n query = qs_dict(token_resp)\n return query[\"access_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get live access token | def get_token(self, code):
# live need post a form to get token
headers = {'Content-type': 'application/x-www-form-urlencoded'}
data = {
'client_id': get_config('login.live.client_id'),
'client_secret': get_config('login.live.client_secret'),
'redirect_uri': ... | [
"def _get_token(self):\n if self._access_token is None or self._is_expired():\n self._refresh_token()\n return self._access_token",
"def get_access_token():\n if not ACCESS_TOKEN or timezone.now() > ACCESS_TOKEN_EXPIRES_IN:\n _refresh_access_token()\n return ACCESS_TOKEN",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
generate 2 random numbers to add get input as addition answer check if correct, if right countdown to get 3 in a row right to end program if wrong lets keep adding and restart the 3 in a row count down | def main():
min_random = 10 #keeping constant for the min random number range
max_random = 99 #keeping constant for the max random number range
count = 0 #creating a counter variable to keep track of user's answers in a row
while count != 3: #this loop will keep goin until user get 3 answers correct i... | [
"def addition_of_two_random_numbers():\r\n user_wants_to_play = True\r\n while user_wants_to_play == True:\r\n print(input(\"Press enter to generate a problem!\"))\r\n\r\n # Generate two random numbers, format the addition problem, and print to screen.\r\n randomly_generated_number_one = ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Loads acquisition data Returns PD DataFrame | def pd_load_acquisition_csv(acquisition_path, **kwargs):
columns = [
'loan_id', 'orig_channel', 'seller_name', 'orig_interest_rate', 'orig_upb', 'orig_loan_term',
'orig_date', 'first_pay_date', 'orig_ltv', 'orig_cltv', 'num_borrowers', 'dti', 'borrower_credit_score',
'first_home_buyer', 'lo... | [
"def _fetch_dataframe(self):\n\n df = pd.DataFrame([self._reshape(component) for component in self._get_trial_components()])\n return df",
"def pd_load_acquisition_csv(acquisition_path, **kwargs):\n\n cols = [\n 'loan_id', 'orig_channel', 'seller_name', 'orig_interest_rate', 'orig_upb', 'o... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
quote the elements of a dotted name | def quote_dotted(
name: Union["quoted_name", str], quote: functools.partial
) -> Union["quoted_name", str]:
if isinstance(name, quoted_name):
return quote(name)
result = ".".join([quote(x) for x in name.split(".")])
return result | [
"def quote_dotted(name, quote):\n\n result = '.'.join([quote(x) for x in name.split('.')])\n return result",
"def dotted_name(s):\n forbidden = forbidden_chars.intersection(s)\n if forbidden:\n raise ValueError('%(s)s contains forbidden characters'\n ' (%(forbidden)s)'\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convert text to float or 0.0 if invalid. | def convert_to_number(text):
try:
value = float(text)
return value
except ValueError:
return 0.0 | [
"def convert_input_to_float(self, text):\n try:\n return float(text)\n except ValueError:\n return 0",
"def convert_to_number(text):\n try:\n return float(text)\n except ValueError:\n return 0.0",
"def ffloat(string):\n try:\n ret... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
persist tweet data into cassandra | def persist_data(tweet_data, cassandra_session):
try:
logger.debug('Start to persist data to cassandra %s \n', tweet_data)
parsed = json.loads(tweet_data)
unit_id = str(parsed.get('_unit_id'))
gender = parsed.get('gender')
tweet_text = str(parsed.get('text'))
hashtags... | [
"def persist_db(database, tweets):\n log.debug(\"{} tweets to db\".format(len(tweets)))\n\n for tweet in tweets:\n tweet['_id'] = tweet['id_str']\n database.update(tweets)",
"def store_tweets(table, tweets):\n conn, cursor = _get_sqlite_connection()\n # Create a list of values as id, ts, twe... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create an Nbin discrete colormap from a specified input map | def discrete_cmap(N, base_cmap=None):
base = plt.get_cmap(base_cmap)
color_list = base(np.linspace(0, 1, N))
cmap_name = base.name + str(N)
return base.from_list(cmap_name, color_list, N) | [
"def discrete_cmap(N, base_cmap=None):\n # see https://gist.github.com/jakevdp/91077b0cae40f8f8244a\n base = plt.cm.get_cmap(base_cmap)\n color_list = base(np.linspace(0, 1, N))\n cmap_name = base.name + str(N)\n return base.from_list(cmap_name, color_list, N)",
"def discrete_cmap(N, base_cmap='pri... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Plot the Fisher/mutual information after the linear layer in a network of unstructured weights, averaged over many repetitions, as a function of network size. | def unstruct_weight_plot_mu(
Ns, mus, sigma, repetitions, plot, design='lognormal',
sigmaP=1., sigmaS=1., sigmaC=1., fax=None
):
# create plot
if fax is None:
fig, ax = plt.subplots(1, 1, figsize=(8, 8))
else:
fig, ax = fax
# create data arrays
data = np.zeros((Ns.size, mus.... | [
"def nodes_strength(i_s, f_s, dst, show=True):\n \n import matplotlib.pyplot as plt\n import numpy as np\n \n weights_name = ['input', 'layer1_conv1', 'layer2_conv2', 'layer3_dense1', 'output_dense2']\n \n init_s = {}\n fin_s = {}\n init_s = {'l0': i_s.item().get('o-l0')}\n fin_s = ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Plots the asymptotic coefficients for the. | def plot_asymptotic_coefficients(filename, fax=None):
# create plot
labels = [1, 2, 3]
# create plot
if fax is None:
fig, ax = plt.subplots(1, 1, figsize=(8, 8))
else:
fig, ax = fax
coef_file = h5py.File(filename, 'r')
sigmaP_vals = list(coef_file)
ks = np.arange(1, 26)... | [
"def plot_fitting_coefficients(self):\n from matplotlib import pyplot as plt\n coeff = self.linear_fit[\"coeff\"]\n order = self.linear_fit[\"order\"]\n\n data = {}\n annotations = {}\n for c, o in zip(coeff, order):\n if len(o) == 0:\n continue\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets y attribute with exceptions | def y(self, value):
self.data_validator("y", value)
self.__y = value | [
"def sety(self, value):\n self.y = value",
"def setY(self, y):\n self.y = y\n pass",
"def y(self, value):\n self.validate_input(y=value)\n self.__y = value",
"def set_y(self, new_y):\r\n self.y = new_y",
"def setY(self, val):\n if not isinstance(val, (int, fl... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Regenerate key for a topic. Regenerate a shared access key for a topic. | def regenerate_key(
self, resource_group_name, topic_name, key_name, custom_headers=None, raw=False, **operation_config):
regenerate_key_request = models.TopicRegenerateKeyRequest(key_name=key_name)
# Construct URL
url = self.regenerate_key.metadata['url']
path_format_argume... | [
"async def regenerate_key(\n self,\n resource_group_name: str,\n account_name: str,\n regenerate_key: IO,\n *,\n content_type: str = \"application/json\",\n **kwargs: Any\n ) -> JSON:",
"async def regenerate_key(\n self,\n resource_group_name: str,... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Определяем что в разделе Carparks | def Carparks(package_name):
Back_text=poco(package_name+":id/back").get_text()
Hidenearbycarparks_text=poco(package_name+":id/carparks_nearby_hide_button").get_text()
# Whereismycar_text=poco(package_name+":id/last_park_place").get_text()
# Detectorenabled_text=poco(package_name+":id/carparks_detector_c... | [
"def is_explored(self):\n return bool(self.parent)",
"def Visible(self) -> bool:",
"def isChild(self):\n \n pass",
"def _candyOnEmptyScreen(self,candy):\n create = True\n for part in self._snake:\n if self._whetherCollides(part,candy):\n create = False\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Place a sell limit order with given quantity and price. | def limit_sell(self, order_id, quantity, price):
Library.functions.limit_sell(self._book, order_id, quantity, price) | [
"def limit_sell(self,quantity, price):\n\n self.is_tickSize_valid(price)\n\n side=\"Sell\"\n order = self.make_order(\n quantity=quantity, \n price=price,\n side=side, \n )\n return order",
"def create_sell_order(self, pair, quantity, price):\n\t... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return true if the order with given ID is in the book, false otherwise. | def has(self, order_id):
return Library.functions.has(self._book, order_id) | [
"def is_book_id_in_repository(self, book_id):\n books_list = self._book_repository.get_all_books()\n for book in books_list:\n if book.id == book_id:\n return True\n return False",
"def validate_bookid(self,book_id):\r\n if int(book_id) in [i.book_id for i in ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the best sell price in the book. | def best_sell(self):
return Library.functions.best_sell(self._book) | [
"def best_buy(self):\n return Library.functions.best_buy(self._book)",
"def get_best_offer(self,way):\n if way==\"BUY\":\n return self.book[Trade.WAY_BUY][0].get_price()\n elif way==\"SELL\":\n return self.book[Trade.WAY_SELL][len(self.book[Trade.WAY_SELL])-1].get_price(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the best buy price in the book. | def best_buy(self):
return Library.functions.best_buy(self._book) | [
"def best_sell(self):\n return Library.functions.best_sell(self._book)",
"def get_best_offer(self,way):\n if way==\"BUY\":\n return self.book[Trade.WAY_BUY][0].get_price()\n elif way==\"SELL\":\n return self.book[Trade.WAY_SELL][len(self.book[Trade.WAY_SELL])-1].get_pric... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the best price for the given side. | def best(self, side):
return Library.functions.best(self._book, side) | [
"def get_best_offer(self,way):\n if way==\"BUY\":\n return self.book[Trade.WAY_BUY][0].get_price()\n elif way==\"SELL\":\n return self.book[Trade.WAY_SELL][len(self.book[Trade.WAY_SELL])-1].get_price()",
"def best_price(self):\n # TODO rename this to \"display_lowest_price... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the volume of the sell side of the book at the given price. | def volume_sell(self, price=None):
if price is None:
return Library.functions.volume_sell(self._book)
return Library.functions.volume_sell_price(self._book, price) | [
"def volume(self, price=None):\n if price is None:\n return Library.functions.volume(self._book)\n return Library.functions.volume_price(self._book, price)",
"def get_own_volume_at(self, price, typ=None):\r\n volume = 0\r\n for order in self.owns:\r\n if order.pri... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the volume of the buy side of the book at the given price. | def volume_buy(self, price=None):
if price is None:
return Library.functions.volume_buy(self._book)
return Library.functions.volume_buy_price(self._book, price) | [
"def volume(self, price=None):\n if price is None:\n return Library.functions.volume(self._book)\n return Library.functions.volume_price(self._book, price)",
"def volume_sell(self, price=None):\n if price is None:\n return Library.functions.volume_sell(self._book)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the volume of the book at the given price. | def volume(self, price=None):
if price is None:
return Library.functions.volume(self._book)
return Library.functions.volume_price(self._book, price) | [
"def get_own_volume_at(self, price, typ=None):\r\n volume = 0\r\n for order in self.owns:\r\n if order.price == price and (not typ or typ == order.typ):\r\n volume += order.volume\r\n return volume",
"def get_volume(self, ticker):\n return self.trading_client.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the count at the given limit price. | def count_at(self, price):
return Library.functions.count_at(self._book, price) | [
"def getLimitPrice(self):\n return self.__limitPrice",
"def count_above(iterable, limit):\n return 0",
"def _determine_limit(self, limit):\n\n # Note: +1 is allowed here because it allows\n # the user to fetch one beyond to see if they\n # are at the end of the list\n if no... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the count of the book on the sell side. | def count_sell(self):
return Library.functions.count_sell(self._book) | [
"def count_buy(self):\n return Library.functions.count_buy(self._book)",
"def count(self):\n return Library.functions.count(self._book)",
"def book_count(self):\n return self.book_set.all().count()",
"def get_number_of_books(self) -> int:\n raise NotImplementedError",
"def count_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the count of the book on the buy side. | def count_buy(self):
return Library.functions.count_buy(self._book) | [
"def count_sell(self):\n return Library.functions.count_sell(self._book)",
"def count(self):\n return Library.functions.count(self._book)",
"def book_count(self):\n return self.book_set.all().count()",
"def get_number_of_books(self) -> int:\n raise NotImplementedError",
"def coun... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def count(self):
    """Return the total number of orders in the book (both sides)."""
    book = self._book
    return Library.functions.count(book)
"def book_count(self):\n return self.book_set.all().count()",
"def books_total():\n books_count = mongo.db.books.count\n return dict(books_count=books_count)",
"def total_orders(self) -> int:\n return self.orders.count()",
"def total_orders(cls) -> int:\n return sa.func.count(cls.or... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def promotion(time, sum_price):
    """Return sum_price plus the promotion price for a call of `time` seconds.

    The call length (rounded up to whole minutes) is decomposed greedily
    into promotion bundles, largest first; whatever remains beyond 3 free
    minutes is billed at 1 per minute.
    """
    minutes = second_to_minute(time)
    bundles = ((24 * 60, 150), (12 * 60, 100), (8 * 60, 80),
               (3 * 60, 40), (60, 15), (20, 10))
    total = sum_price
    for span, cost in bundles:
        total += (minutes // span) * cost
        minutes %= span
    billable = minutes - 3
    return total + billable if billable > 0 else total
"def get_buy_price(self,ticker,time):\n return self.broker.get_buy_price(ticker,time)",
"def compute_time_price(supplier_with_transaction):\n supplier_item = supplier_with_transaction.get('supplier_detail')\n transaction_item = supplier_with_transaction.get('supplier_transaction')\n # ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def second_to_minute(time):
    """Convert a duration in seconds to whole minutes, rounding up.

    Any partial minute counts as a full minute (ceiling division).
    """
    return (time + 59) // 60
"def int_convert_to_minute(value):\n min = int(int(value) / 60)\n sec = int(int(value) % 60)\n return \"%02d\" % min + \":\" + \"%02d\" % sec",
"def MINUTE(time):\n return _make_datetime(time).minute",
"def get_minute(time):\n m = time[4] + (time[3]*60) + (time[2]*60*24) * time[1] * time[0]\n re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def default_path():
    """Return the default directory for auth files.

    Honors the OVERRIDE_ETC environment variable (falling back to /etc).
    Duplicates common.get_etc() because common imports this module, so not
    all of common's functions are available at import time.
    """
    etc_root = os.environ.get('OVERRIDE_ETC', '/etc')
    return os.path.join(etc_root, 'auth')
"def get_auth_path():\n base_path = click.get_app_dir('clion')\n auth_file_path = path.join(base_path, 'auth.json')\n\n return auth_file_path",
"def API_DEFAULTDIR() -> str:\n return user_cache_dir(appname=__name__.split('.')[0])",
"def getuserbase():\n\tpass",
"def get_default_config_path(cls) ->... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def constant_time_equals(a, b):
    """Return True iff a == b, comparing every byte regardless of mismatches.

    Unlike ==, this does not short-circuit at the first differing byte, so
    timing does not reveal where two equal-length inputs diverge.  (A length
    mismatch still returns early; only the content comparison is constant
    time.)
    """
    left = bytearray(a)
    right = bytearray(b)
    if len(left) != len(right):
        return False
    diff = 0
    for i in range(len(left)):
        diff |= left[i] ^ right[i]
    return diff == 0
"def _eq(a, b):\n return (a - b) % 2 == 0",
"def insecure_equals(s1, s2):\n for b1, b2 in zip(s1, s2):\n if b1 != b2:\n return False\n\n sleep(delay)\n\n return True",
"def is_equal(x, y):\n return (x == y)",
"def equal (a, b):\n assert is_iterable(a)\n assert is_i... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def check_sane(self):
    """Check that the auth data directory is owned by the current user.

    Raises Exception when the directory's uid differs from ours.
    NOTE(review): the original description also promises a safe-permissions
    check, but only ownership is verified in this snippet — confirm
    against upstream whether a mode check should follow.
    """
    info = os.stat(self.path)
    owner = os.getuid()
    if info.st_uid != owner:
        raise Exception('Auth dir %s not owned by user %d.' % (
            self.path, owner))
    # Mode 16832 is equal to (stat.S_IFDIR | stat.S_IRWXU): a directory
    # readable, writable and executable by its owner only.
"def _check_creds_file_perms():\n credentials_file = faculty.config.resolve_credentials_path()\n if oct(os.stat(credentials_file).st_mode & 0o777)[-2:] != \"00\":\n msg = textwrap.dedent(\n \"\"\"\\\n Permissions for {0} are too open.\n Your credentials file must not be accessi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def write(self, filename, data):
    """Save data into filename with owner-only read/write permissions.

    The file is created if missing with mode rw------- (0o600); if it
    already existed, its mode is forced back to 0o600 so stale permissive
    bits cannot linger.  Note: no O_TRUNC — an existing longer file keeps
    its tail beyond len(data), matching the original behavior.
    """
    # 0o600, not the Python-2-only literal 0600 (a SyntaxError on Python 3).
    owner_rw = 0o600
    fd = os.open(filename, os.O_WRONLY | os.O_CREAT, owner_rw)
    try:
        # In case the file existed already with wrong permissions, fix them.
        os.chmod(filename, owner_rw)
        # os.write may perform a partial write; loop until all bytes are out.
        while data:
            written = os.write(fd, data)
            data = data[written:]
    finally:
        # Close even when chmod/write raises, so the descriptor never leaks.
        os.close(fd)
"def write_data_in_file(self, data, fichier, mode=\"w\"):",
"def write(self, filename, data):\n raise NotImplementedError",
"def write(self, data, mode=\"w\", ensure=False):\n if ensure:\n self.dirpath().ensure(dir=1)\n if \"b\" in mode:\n if not isinstance(data, bytes... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the number of failed passwords the can be entered before logins attempts are disabled for a day. The rate limit information is stored as a count of failed attempts so far. If there have been no failed attempts, or they were more than a day ago, treat that as zero failed attempts. | def rate_limit_remaining(self):
    # NOTE(review): this snippet is truncated — the `with open(...)` below is
    # cut off mid-statement, so the branch that reads the stored failure count
    # (and any no-file fallback) is missing.  TODO: recover the full body.
    if os.path.isfile(self.rate_limit_filename):
        st = os.stat(self.rate_limit_filename)
        # Failures older than RATE_LIMIT_DURATION are treated as expired:
        # the full RATE_LIMIT_COUNT budget is available again.
        if time.time() - st.st_ctime > self.RATE_LIMIT_DURATION:
            return self.RATE_LIMIT_COUNT
        else:
            with open(self.rate_limit_filename, | [
"def allowed_failed_attempts(self) -> int:\n return pulumi.get(self, \"allowed_failed_attempts\")",
"def max_attempts(self):\n return 1",
"def get_retry_count(self):\r\n return self.retried_nomax + self.retried_withmax",
"def attempts(self):\n return self._attempts",
"def maxlogi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
def password_exists(self):
    """Return True if a password file is present on disk."""
    path = self.password_filename
    return os.path.isfile(path)
"def secrets_file_path_exists(self):\n return self.get_secrets_file_path().exists()",
"def client_secret_file_exists(self, file_path):\n return os.path.isfile(file_path)",
"def has_credentials(credentials_file=CREDENTIALS_FILE):\n return os.path.exists(credentials_file)",
"def file_exist() ->... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |