Columns:
  query: string (length 9 to 9.05k)
  document: string (length 10 to 222k)
  negatives: list (19 to 20 items)
  metadata: dict
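Each row pairs a natural-language query with one matching code document and a list of 19-20 non-matching negatives; the metadata column marks the (query, document, negatives) triplet objective. Below is a minimal sketch of how a single row could be expanded into (anchor, positive, negative) training triplets, assuming rows are available as plain Python dicts; the build_triplets helper and example_row are hypothetical illustrations, not part of the dataset.

from typing import Dict, List, Tuple

def build_triplets(row: Dict) -> List[Tuple[str, str, str]]:
    # Expand one row into (anchor, positive, negative) triplets, one per negative.
    query = row["query"]          # natural-language description
    positive = row["document"]    # the matching code snippet
    negatives = row["negatives"]  # the 19-20 non-matching code snippets
    return [(query, positive, negative) for negative in negatives]

# Hypothetical row shaped like the records below.
example_row = {
    "query": "Click form save button",
    "document": "def click_save_button(self): ...",
    "negatives": ["def select_save_btn(self): ...", "def save(self): ..."],
    "metadata": {"objective": {"paired": [], "self": [],
                               "triplet": [["query", "document", "negatives"]]}},
}

for anchor, positive, negative in build_triplets(example_row):
    print(anchor, "->", negative[:24])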
Set billable unit field
def set_billable_unit(self, option): (Select(self.driver.find_element(*ProjectFormLoc.FIELD_BILLABLE_UNIT)). select_by_visible_text(option))
[ "def set_bunit(self,bunit):\n self.bunit = bunit", "def setBookableUnit(self, account, acl, equipment, unit):\n acl.assertIsAdministrator(account)\n\n unit = BookingConstraint.bookableUnitIDFromName(to_string(unit))\n\n if unit != self.booking_unit:\n item = equipment._getFr...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set contract amount field
def set_contract_amount(self, value): (self.driver.find_element(*ProjectFormLoc.FIELD_CONTRACT_AMOUNT). send_keys(value))
[ "def setCash(self, amt):\n self.cash = amt", "def setDebt(self, amt):\n self.debt = amt", "def contract(self, contract):\n if contract is None:\n raise ValueError(\"Invalid value for `contract`, must not be `None`\")\n\n self._contract = contract", "def credit(self, amount):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set payment schedule dropdown
def set_payment_schedule(self, option): (Select(self.driver.find_element (*ProjectFormLoc.FIELD_PAYMENT_SCHEDULE)). select_by_visible_text(option))
[ "def __schedule_unit_selected (self, event):\n self.selected_schedule_unit = self.schedule_unit_selector.get_selection ( )\n self.__set_permissions ( )", "def select_schedule():\n global current_schedule\n global schedules\n global current_run\n global current_line\n if len(schedules) < 1:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set payment comment field
def set_payment_comment(self, value): (self.driver.find_element(*ProjectFormLoc.FIELD_PAYMENT_COMMENT). send_keys(value))
[ "def comment(self, comment):\n self._comment = comment", "def set_Comment(self, value):\n super(UpdateTicketInputSet, self)._set_input('Comment', value)", "def addCommentField(self, field, comment, attrs, tip):\n\n widget = custom_widgets.PlainTextWidget\n comment_field = CharField(label='Co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Click form save button
def click_save_button(self): self.driver.find_element(*ProjectFormLoc.BUTTON_SAVE).click()
[ "def select_save_btn(self):\n self.driver.click(\"save_btn\")", "def _save_button_clicked(self):\n current_widget = self._get_selected_widget()\n current_widget.save_changes()", "def save(self):\r\n name = self.saveNameField.text() # to get the name given in the text feild\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Click project delete button
def click_delete_button(self): self.driver.find_element(*ProjectFormLoc.BUTTON_DELETE).click() self.driver.find_element(*ProjectFormLoc.BUTTON_CONFIRM_DELETE).click()
[ "def click_delete_button(self):\n self.visible_element_click(locators.SuiteManagerPageLocators.DELETE_CASE_FROM_SUITE_BTN)", "def delete(self,id):\n adm = ElectionSystemAdministration()\n single_pj = adm.get_project_by_id(id)\n adm.delete_project(single_pj)\n return '', 200", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check region field for an error
def is_region_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_REGION) assert('Region is required' in element.text), 'Region error missing'
[ "def region_check(self, field) -> None:\n if field.region() != (None, None) and field.region() != self._region:\n raise FieldOperationError('Cant operate on ' + str(type(self)) + ' and ' + str(type(field)) + '!')", "def regionError(df, C, R):\r\n if C == None:\r\n C = ['USA']\r\n av...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check country field for an error
def is_country_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_COUNTRY) assert('Country is required' in element.text), 'Country error missing'
[ "def verify_country(rec, orig):\n pass", "def test_invalid_country_code(self) -> None:\n try:\n address_with_invalid_country()\n except ValidationError as err:\n assert err.request_id is None\n assert err.source is ErrorSource.SHIPENGINE.value\n assert ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check project name field for an error
def is_project_name_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_PROJECT_NAME) assert('Project Name is required' in element.text), \ 'Project name error missing'
[ "def check_project_name(parser, project_name):\n if iskeyword(project_name):\n parser.error(\"'{project_name}' can not be a reserved Python keyword.\".format(project_name=project_name))\n\n try:\n __import__(project_name)\n except ImportError:\n pass\n else:\n parser.error(\"...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check pt status field for an error
def is_pt_status_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_PT_STATUS) assert('PT Status is required' in element.text), \ 'PT status error missing'
[ "def get_status(self):\n return self.error", "def _check_status(sdp_state):\n try:\n errval = \"error\"\n errdict = dict(state=\"unknown\", reason=\"unknown\")\n if sdp_state.current_state == \"unknown\":\n errdict['reason'] = 'database not initialised.'\n LOG.debu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check salesperson field for an error
def is_salesperson_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_SALESPERSON) assert('If the Scope is not INT, the Sales Person must be specified.' in element.text), 'Salesperson error missing'
[ "def test_person_valueerror_not_mutate():\n person = Person()\n data = {\n 'is_organization': True\n }\n assert_raises(ValueError, person.format_data_set, data)", "def validate(self, field):", "def testFieldErrorIsLookupError(self):\n self.assertTrue(issubclass(sqlresult.FieldError, Lookup...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check solution architect field for an error
def is_solution_architect_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_SA) assert('If the Scope is not INT, the ' 'Solution Architect must be specified.' in element.text), 'SA error missing'
[ "def _check_custom_build(self):\n pass", "def any_build_failures(self):", "def _sanity_check(self) -> None:\n if self.archive_action:\n script_path, _ = self.archive_action\n if not script_path.exists() and script_path.is_file():\n self.errors += [f\"Unable to loca...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check contract close date field for an error
def is_contract_close_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_CONTRACT_CLOSE) assert('If the Scope is not INT, the Contract ' 'Closure date must be specified.' in element.text), 'Contract close error missing'
[ "def test_close_facility_invalid_closing_date(self):\n facility = mommy.make(Facility)\n now = timezone.now()\n tomorrow = now + timedelta(days=1)\n facility.closed = True\n facility.closed_date = tomorrow\n with self.assertRaises(ValidationError):\n facility.sav...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check contract currency field for an error
def is_contract_currency_flagged(self): element = self.driver.find_element(*ProjectFormLoc.ERROR_CONTRACT_CUR) assert('If the Scope is not INT, the Contract ' 'Currency must be specified.' in element.text), 'Contract currency error missing'
[ "def check_currency_format(value):\n\n if not re.match('^\\$[0-9]+?.[0-9]+$', value):\n raise ValidationError(\"Amount Entered does not match format ($amount)\")", "def testC():\n assert currency_response('USD','EUR',2.5)=='{ \"from\" : \"2.5 United States Dollars\", \"to\" : \"2.0952375 Euros\", \"s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check if a project has been successfully created
def is_creation_successful(self): assert 'Project Created' in self.driver.title assert '<h1 class="page-title">Project Created</h1>' \ in self.driver.page_source, 'Project Created title missing' assert '<h2>Financial Quarters</h2>' \ in self.driver.page_source, 'Financi...
[ "def test_create_project(self):\n self.assertIsNotNone(self.pid)", "def _validate_project_exists(self):\n odooclient = odoo_client.get_odoo_client()\n try:\n search = [['tenant_id', '=', self.project_id]]\n project = odooclient.projects.list(search)[0]\n self....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Builds a string of the expected class name, plus the actual one being used if it's not the same.
def get_class_name(self): class_name_str = "'%s'" % self._expected_override_name if self._expected_override_name != self.__class__.__name__: class_name_str += ' (using %s)' % self.__class__.__name__ return class_name_str
[ "def get_class_name(self):\n return self.name[:-6]", "def safe_classname(name, default_str='_'):\n classname = ''.join(word.title() for word in safe_name(name).split('_')\n if word)\n if not classname:\n raise ValueError('cannot convert {!r} to a safe class name'\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds data of type 'data_type' to the database. Descendant classes should implement a function 'add_actual_data' that has the
def add_data(self, data_type, host, *args, **kwargs): self._perform_data_action(self.FN_ADD, data_type.name, host, *args, **kwargs)
[ "def addDataType(self, dataType):\r\n \r\n self._dataTypes[dataType.name] = deepcopy(dataType)", "def define_data_field(self, data_type, header, **kwargs):\n f = DataField(data_type, header, **kwargs)\n self.row_definition.add_data_field(f)", "def create_data_type():\n logger.info...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Removes all data associated with 'data_type'. See instructions for 'add_data' for implementation guidance.
def remove_data(self, data_type, host, *args, **kwargs): self._perform_data_action(self.FN_REMOVE, data_type.name, host, *args, **kwargs)
[ "def removeDataType(self, name):\r\n \r\n if name in self._dataTypes:\r\n del self._dataTypes[name]\r\n for relation in self._relations.values():\r\n relation.removeSourceDataTypeName(name)\r\n relation.removeTargetDataTypeName(name)", "def remove_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Build the base data functions specified by FN_TYPE_ for each of the types defined in the DataType class. For example, 'add_small_data' and 'verify_large_data'. These functions are set to call '_actual_data' and will pass in sane values for label, start and size. The '_actual_data' methods should be overwritten by a des...
def _build_data_fns(self): for fn_type in self.FN_TYPES: fn_dict = self._data_fns[fn_type] for data_type in DataType: self._data_fn_builder(fn_type, data_type.name, fn_dict) self._data_fn_builder(fn_type, self.DT_ACTUAL, fn_dict) self._override_data_fn...
[ "def data_func(dtype, data_dir=None):\n # MKWC seeing (mass, dimm, masspro) or weather (cfht) files\n if dtype in ['cfht']+expand['seeing']:\n return lambda files,mjds: mkwc.from_nirc2(mjds, dtype, data_dir), True\n # Temperature files (k2AO, k2L4, or k2ENV)\n if dtype in expand['temp']:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of valid database JSON definitions. These definitions will be used by tests that create databases. Return an empty list if the datastore does not support databases.
def get_valid_database_definitions(self): return list()
[ "def databases(self):\n _log.debug('get database list')\n result = self._requestJSON('dbs', '')\n return self._getKey(result, 'name')", "def listDB(self):\n # Responses: list of db names\n return self.get(\"/_all_dbs\", descr='listDB').addCallback(\n self.parseResult)...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of valid user JSON definitions. These definitions will be used by tests that create users. Return an empty list if the datastore does not support users.
def get_valid_user_definitions(self): return list()
[ "def user_fields(self):\n return set(\n id for id, value in self.field_map.items()\n if value.get(\"schema\", {}).get(\"type\", \"\") == \"user\"\n )", "def get_users(self):\n res = self.getuserslist()\n # convert to user object\n return [WithingsUser.creat...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a valid JSON definition for a non-existing database. This definition will be used by negative database tests. The database will not be created by any of the tests. Return None if the datastore does not support databases.
def get_non_existing_database_definition(self): valid_defs = self.get_valid_database_definitions() return self._get_non_existing_definition(valid_defs)
[ "def test_get_database_fail(self):\n self.backend._database = None\n\n list_db = [{'name': 'database_1'}]\n self.backend._client.get_list_database.return_value = list_db\n\n result = self.backend._get_database()\n\n self.assertEqual(result, False)", "def load_database( filename:...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a valid JSON definition for a non-existing user. This definition will be used by negative user tests. The user will not be created by any of the tests. Return None if the datastore does not support users.
def get_non_existing_user_definition(self): valid_defs = self.get_valid_user_definitions() return self._get_non_existing_definition(valid_defs)
[ "def generate_user(self, name=None, data=None, metadata=None,\n json_string=None, uge_version=None,\n add_required_data=True):\n return self.user_manager.generate_object(\n name=name, data=data, metadata=metadata,\n json_string=json_string, uge_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a definition of a dynamic configuration group. A dynamic group should contain only properties that do not require database restart. Return an empty dict if the datastore does not have any.
def get_dynamic_group(self): return dict()
[ "def get_configuration(self, scaling_group):\r\n return self._manager.get_configuration(scaling_group)", "def get_definition(self) -> LabwareDefinitionDict:\n return cast(LabwareDefinitionDict, self._definition.dict(exclude_none=True))", "def refresh_group_config_cache(dbcon):\n\n # Subset grou...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a definition of a non-dynamic configuration group. A non-dynamic group has to include at least one property that requires database restart. Return an empty dict if the datastore does not have any.
def get_non_dynamic_group(self): return dict()
[ "def get_definition(self) -> LabwareDefinitionDict:\n return cast(LabwareDefinitionDict, self._definition.dict(exclude_none=True))", "def refresh_group_config_cache(dbcon):\n\n # Subset group item's default icon and order\n default_group_icon = qtawesome.icon(\"fa.object-group\",\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of configuration groups with invalid values. An empty list indicates that no 'invalid' tests should be run.
def get_invalid_groups(self): return []
[ "def test_service_groups_missing_group(self):\n self.assertNotIn(\"not_a_service_group\", EFConfig.SERVICE_GROUPS)", "def get_invalid_fields(self):\n return sorted(set(self.invalid_fields))", "def get_invalid_users():\n \n samba_section_invalid_users_dict = {}\n for section in SAMBA_CONFIG_PARSE...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the list of exposed logs for the datastore. This method shouldn't need to be overridden.
def get_exposed_log_list(self): logs = [] try: logs.extend(self.get_exposed_user_log_names()) except SkipTest: pass try: logs.extend(self.get_exposed_sys_log_names()) except SkipTest: pass return logs
[ "def get_logging_list(self):\n return self.__logging_list", "def get_full_log_list(self):\n logs = self.get_exposed_log_list()\n try:\n logs.extend(self.get_unexposed_user_log_names())\n except SkipTest:\n pass\n try:\n logs.extend(self.get_unexp...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the full list of all logs for the datastore. This method shouldn't need to be overridden.
def get_full_log_list(self): logs = self.get_exposed_log_list() try: logs.extend(self.get_unexposed_user_log_names()) except SkipTest: pass try: logs.extend(self.get_unexposed_sys_log_names()) except SkipTest: pass return l...
[ "def get_logging_list(self):\n return self.__logging_list", "def get_root_logs(self):\n return # osid.logging.LogList", "def allLogs(self):\n\t\tres = \"\"\n\t\tcases = self.logs.keys()\n\t\tif \"by_date\" in cases: cases.remove(\"by_date\")\n\t\tfor case in cases:\n\t\t\tres += self.prettify.cas...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the names of the user logs that are visible to all users. The first log name will be used for tests.
def get_exposed_user_log_names(self): raise SkipTest("No exposed user log names defined.")
[ "def get_full_log_list(self):\n logs = self.get_exposed_log_list()\n try:\n logs.extend(self.get_unexposed_user_log_names())\n except SkipTest:\n pass\n try:\n logs.extend(self.get_unexposed_sys_log_names())\n except SkipTest:\n pass\n\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the names of the user logs that are not visible to all users. The first log name will be used for tests.
def get_unexposed_user_log_names(self): raise SkipTest("No unexposed user log names defined.")
[ "def get_exposed_user_log_names(self):\n raise SkipTest(\"No exposed user log names defined.\")", "def get_full_log_list(self):\n logs = self.get_exposed_log_list()\n try:\n logs.extend(self.get_unexposed_user_log_names())\n except SkipTest:\n pass\n try:\n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the names of SYS logs that are visible to all users. The first log name will be used for tests.
def get_exposed_sys_log_names(self): raise SkipTest("No exposed sys log names defined.")
[ "def get_unexposed_sys_log_names(self):\n return ['guest']", "def get_exposed_log_list(self):\n logs = []\n try:\n logs.extend(self.get_exposed_user_log_names())\n except SkipTest:\n pass\n try:\n logs.extend(self.get_exposed_sys_log_names())\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the names of the sys logs that are not visible to all users. The first log name will be used for tests.
def get_unexposed_sys_log_names(self): return ['guest']
[ "def get_exposed_sys_log_names(self):\n raise SkipTest(\"No exposed sys log names defined.\")", "def get_unexposed_user_log_names(self):\n raise SkipTest(\"No unexposed user log names defined.\")", "def get_full_log_list(self):\n logs = self.get_exposed_log_list()\n try:\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns whether enabling or disabling a USER log requires a restart of the datastore.
def log_enable_requires_restart(self): return False
[ "def log_enabled(self):\n ret = self._get_attr(\"logEnabled\")\n return ret", "def can_manage_smart_log(self):\n return # boolean", "def can_log(self):\n return # boolean", "def has_been_enabled(self):\n return self.engine.get('audit.has_been_armed', False)", "def can_up...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a valid module type.
def get_valid_module_type(self): return "Ping"
[ "def type(self) -> ModuleType:\n\n ptr = ffi.wasmtime_module_type(self._ptr)\n return ModuleType._from_ptr(ptr, None)", "def _is_module_allowed(module_name: str, type_name: str):\n\n if module_name not in [\"BTrees\", \"builtins\", \"datetime\", \"persistent\", \"renku\", \"zc\", \"zope\", \"deal...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a list of cluster type lists to use when creating instances. The list should be the same size as the number of cluster instances that will be created. If not specified, no types are sent to clustercreate. Cluster grow uses the first type in the list for the first instance, and doesn't use anything for the secon...
def get_cluster_types(self): return None
[ "def gen_random_clusters(num_clusters):\r\n cluster_list = []\r\n \r\n for dummy_idx in range(num_clusters):\r\n cluster_list.append(alg_cluster.Cluster(set([]), random.uniform(-1, 1), \\\r\n random.uniform(-1, 1), 0, 0))\r\n\r\n return cluster_list"...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the 4x4 transformation matrix that maps the reference xy-plane at the origin to the TV/TC plane defined by landmarks.
def extract_tform(landmarks, plane_name): if plane_name=='tv': # Fit plane and project landmarks onto plane z_vec, p_plane = fit_plane(landmarks) landmarks_proj = project_on_plane(landmarks, z_vec, p_plane) # Fit mid line landmarks_line = landmarks_proj[[0,1,2], :] x...
[ "def calculateTransform(self):\n dist = np.array([0, 0, 0, 0, 0]) # The distortion matrix\n if self.corners:\n self.rvec, self.tvec, _ = aruco.estimatePoseSingleMarkers(self.corners, 0.08, self.cameraMat, dist)", "def transform_landmarks(landmarks, rotate):\n ones = np.ones(shape=(len...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Extract a 2D plane image from the 3D volume given the mesh coordinates of the plane.
def extract_plane_from_mesh(image, mesh, mesh_siz, order): # Set image matrix corner as origin img_siz = np.array(image.shape) img_c = (img_siz-1)/2.0 mesh_new = mesh[:3, :] + np.expand_dims(img_c, axis=1) # Reshape coordinates x_coords = mesh_new[0, :].reshape(mesh_siz) y_coords = mesh_new...
[ "def extract_plane_from_pose(image, mat, plane_siz, order):\n # Initialise identity plane\n xyz_coords = init_mesh(plane_siz)\n\n # Rotate and translate plane\n xyz_coords = np.dot(mat, xyz_coords)\n\n # Extract image plane\n slice, xyz_coords_new = extract_plane_from_mesh(image, xyz_coords, plane...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Extract a 2D plane image from the 3D volume given the pose (transformation matrix) wrt the identity plane.
def extract_plane_from_pose(image, mat, plane_siz, order): # Initialise identity plane xyz_coords = init_mesh(plane_siz) # Rotate and translate plane xyz_coords = np.dot(mat, xyz_coords) # Extract image plane slice, xyz_coords_new = extract_plane_from_mesh(image, xyz_coords, plane_siz, order) ...
[ "def get_inverse_pespective(perspective_matrix: np.array)-> np.array:\n #Take 5 homogenous points on the floor(Unit is in Meters)\n pts_dst = np.array([[0,0,0,1],\n [0,1,0,1],\n [1,0,0,1],\n [1,1,0,1],\n [0,0,0,1]\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the length of a number. Negative and positive numbers have the same length, i.e. the sign is not taken into account. Only works with integers, not floats.
def num_length(number: int) -> int: return floor(log10(abs(number))) + 1
[ "def get_len(x):\n if x == 0:\n return 1\n else:\n l = 0\n while x != 0:\n x /= 10\n l += 1\n return l", "def __num_digits(num: int):\n return len(str(num))", "def get_number_of_digits(number):\n return int(math.log10(nu...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a nested dictionary that represents the folder structure of rootdir
def get_directory_structure(rootdir: str): dir = {} rootdir = rootdir.rstrip(os.sep) start = rootdir.rfind(os.sep) + 1 for path, dirs, files in os.walk(rootdir): folders = path[start:].split(os.sep) subdir = dict.fromkeys(files) parent = reduce(dict.get, folders[:-1], dir) ...
[ "def get_directory_structure(rootdir):\r\n dir = {}\r\n rootdir = rootdir.rstrip(os.sep)\r\n start = rootdir.rfind(os.sep) + 1\r\n for path, dirs, files in os.walk(rootdir):\r\n folders = path[start:].split(os.sep)\r\n subdir = dict.fromkeys(files)\r\n parent = reduce(dict.get, fold...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Prints the header of the students table
def print_header(): header = "| {:<18} | {:<18} | {:<21} | {:<21} |".format("ROLL_NUMBER", "NAME", "DATE-OF-BIRTH", "REG...
[ "def print_headers():\n print(\"symbol\\t count\\t price\\t\\t total\")\n print(\"-\" * 71)", "def _Header(numCols):\n return \"\\\\begin{center}\\n\\\\begin{tabular}{\" + \"|c\" * numCols + \"|}\\n\"", "def print_header():\n print(\"STEM Center Temperature Project\")\n print(\"Shaoto...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
In order to remove everything from the server, destroy the camera, then the vehicle.
def destroy(self): if(self.camera_manager is not None): self.camera_manager.sensor.destroy() if(self.vehicle is not None): self.vehicle.destroy()
[ "def cleanup(self):\n os.system(\"rm -rf /dev/shm/images/kinect_rgb\")\n os.system(\"rm -rf /dev/shm/images/kinect_depth\")", "def clean_up(self):\n self.mesh_client.close()", "def destroy_actors(self):\n # Remove vehicles\n print(f'Destroying {len(self.vehicles)} vehicles...'...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Parses a line of GFF into a dictionary.
def _gff_line_map(line): gff3_kw_pat = re.compile("\w+=") def _split_keyvals(keyval_str): """Split key-value pairs in a GFF2, GTF and GFF3 compatible way. GFF3 has key value pairs like: count=9;gene=amx-2;sequence=SAGE:aacggagccg GFF2 and GTF have: Sequence...
[ "def from_gff3_line_to_dict( line ):\n fields = line.strip().split( \"\\t\" )\n assert len( fields ) == 9 # sanity check\n result = {\n \"seqid\": fields[0],\n \"source\": fields[1],\n \"type\": fields[2],\n \"start\": None if fields[3] == \".\" else int( fields[3] ),\n \...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Split key-value pairs in a GFF2, GTF and GFF3 compatible way.
def _split_keyvals(keyval_str): quals = collections.defaultdict(list) if keyval_str is None: return quals # ensembl GTF has a stray semi-colon at the end if keyval_str[-1] == ';': keyval_str = keyval_str[:-1] # GFF2/GTF has a semi-colon with at least one s...
[ "def _gff_line_map(line):\n gff3_kw_pat = re.compile(\"\\w+=\")\n def _split_keyvals(keyval_str):\n \"\"\"Split key-value pairs in a GFF2, GTF and GFF3 compatible way.\n GFF3 has key value pairs like:\n count=9;gene=amx-2;sequence=SAGE:aacggagccg\n GFF2 and GTF have: \n...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Provide nesting of GFF2 transcript parts with transcript IDs. Exons and coding sequences are mapped to a parent with a transcript_id in GFF2. This is implemented differently at different genome centers, and this function attempts to resolve that and map things to the GFF3 way of doing them.
def _nest_gff2_features(gff_parts): # map protein or transcript ids to a parent for transcript_id in ["transcript_id", "transcriptId", "proteinId"]: try: gff_parts["quals"]["Parent"] = \ gff_parts["quals"][transcript_id] break ...
[ "def build_transcripts(input_fasta, output_fasta):\n\n # read the input file\n names, seqs = gen.read_fasta(input_fasta)\n exon_list = collections.defaultdict(lambda: collections.defaultdict())\n for i, name in enumerate(names):\n exon_list[name.split(\".\")[0]][int(name.split(\".\")[-1].split(\"...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Provide a mapping of parent to child relationships in the file.
def parent_child_id_map(gff_handle): # collect all of the parent and child types mapped to IDs parent_sts = dict() child_sts = collections.defaultdict(list) for line in gff_handle: line_type, line_info = _gff_line_map(line)[0] if (line_type == 'parent' or (line_type == 'child' and line_i...
[ "def parent_links(self):\n child_parents = defaultdict(list)\n for parent, children in self.items():\n for child in children:\n child_parents[child].append(parent)\n return dict(child_parents)", "def relMap(self):\n relmap = RelationshipMap...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compare two JSON files (or dicts) and yield the result.
def compare_gen(location1, location2, print_all=False): json1 = get_content(location1) json2 = get_content(location2) diff1 = Diff(json1, json2, True).difference diff2 = Diff(json2, json1, False).difference for type_, path, before, after in diff1: if before and after: action = "T...
[ "def compare_jsons(json1, json2):\n return json.loads(json1) == json.loads(json2)", "def compare_files(origin_file, new_file, file_suffix='.json'):\n origin_content = helpers.load_file(origin_file, file_suffix)\n new_content = helpers.load_file(new_file, file_suffix)\n\n if origin_content['md5'] == ne...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes two bytes objects, XORs them together, and returns the result as a bytes object. If one of the arrays is longer than the other, this function repeats the shorter one until the longer one is fully XORed.
def xor(a, b): if (type(a) != bytes and type(a) != bytearray): raise TypeError("a is not a bytes object") if (type(b) != bytes and type(b) != bytearray): raise TypeError("b is not a bytes object") if len(a) < 1 or len(b) < 1: raise ValueError("Length of byte arrays must be greater th...
[ "def xor_byte_arrays(array1, array2):\r\n if (len(array2) < len(array1)):\r\n array2 = array2.rjust(len(array1),bytes([0]))\r\n\r\n result = bytearray(len(array1))\r\n\r\n for i in range(len(array1)):\r\n result[i] = (array1[i] ^ array2[i])\r\n\r\n return bytes(result)", "def xor(a, b):\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculates the Hamming distance between two bytes objects. They must be of equal length.
def calcHammDist(a, b): if (type(a) != bytes and type(a) != bytearray): raise TypeError("a is not a bytes object") if (type(b) != bytes and type(b) != bytearray): raise TypeError("b is not a bytes object") if len(a) != len(b): raise ValueError("Bytes objects must be of equal length."...
[ "def hamming_distance(bits1: str, bits2: str) -> int:\n bits1 = [int(b) for b in bits1]\n bits2 = [int(b) for b in bits2]\n return hamming(bits1, bits2) * len(bits1)", "def hamming_d(a: bytes, b: bytes) -> int:\n assert len(a) == len(b)\n return sum(\n count_set_bits(a[i] ^ b[i])\n fo...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert a general-purpose register to its corresponding ABI name
def gpr_to_abi(gpr): switcher = { "x0" : "zero", "x1" : "ra", "x2" : "sp", "x3" : "gp", "x4" : "tp", "x5" : "t0", "x6" : "t1", "x7" : "t2", "x8" : "s0", "x9" : "s1", "x10": "a0", "x11": "a1", "x12": "a2", ...
[ "def describe_reg_name(regnum, machine_arch=None):\r\n if machine_arch is None:\r\n machine_arch = _MACHINE_ARCH\r\n\r\n if machine_arch == 'x86':\r\n return _REG_NAMES_x86[regnum]\r\n elif machine_arch == 'x64':\r\n return _REG_NAMES_x64[regnum]\r\n else:\r\n return '<none>'...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert pseudo instruction to regular instruction
def convert_pseudo_instr(instr_name, operands, binary): if instr_name == "nop": instr_name = "addi" operands = "zero,zero,0" elif instr_name == "mv": instr_name = "addi" operands = operands + ",0" elif instr_name == "not": instr_name = "xori" operands = operan...
[ "def performSymbolicInstructionSubstitution (self, mne, op1, op2):\n\n return None", "def CrossMnemonic():\n\n global Asm\n\n if dec.Asm.Mnemonic in dec.Asm.Instructions:\n func = dec.Asm.Instructions[dec.Asm.Mnemonic][0]\n func()\n else:\n errors.DoError('badopco', False)", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Bind an environment variable to an argument action. The env value will traditionally be something uppercase like `MYAPP_FOO_ARG`. Note that the env value is assigned using `set_defaults()`, and as such it will be overridden if the argument is set via `parse_args()`.
def bind_env(self, action, env): if env in self._env_actions: raise ValueError('Duplicate ENV variable: %s' % env) self._env_actions[env] = action action.env = env
[ "def _argparse_check_env(cls, env_prefix, prefix, k, args):\n if env_prefix is None:\n return\n\n argname = f'{prefix}{k}'\n name = f'{env_prefix}_{prefix}{k}'.upper().replace('-', '_')\n v = os.environ.get(name)\n if v is not None:\n args.setdefault(argname,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Unbind an environment variable from an argument action. Only used when the subcommand hierarchy changes.
def unbind_env(self, action): del self._env_actions[action.env] delattr(action, 'env')
[ "def _unset_environment_variable(self, env_var):\n raise NotImplementedError(\"_unset_environment_variable should be \"\n \"implemented if the shell is capable for \"\n \"managing env vars.\")", "def lambda_unset(argv: typing.List[str], args...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Add pager support to help output.
def print_help(self, *args, **kwargs): if self._command.session.allow_pager: desc = 'Help\: %s' % '-'.join(self.prog.split()) pager_kwargs = self._command.get_pager_spec() with paging.pager_redirect(desc, **pager_kwargs): return super().print_help(*args, **kwa...
[ "def pager(text):\r\n global pager\r\n pager = getpager()\r\n pager(text)", "def start_pager(self) -> None:\n if sys.stdout.isatty():\n if subprocess.call([\"which\", PAGER], stdout=subprocess.DEVNULL) == 0:\n # https://chase-seibert.github.io/blog/2012/10/31/python-fork-...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Computes an approximation of the Mandelbrot set via the distance estimation method
def M_DEM(M, nx, ny, x_min, x_max, y_min, y_max, max_it, R, threshold): """Inputs: M: an output array of size nx*ny nx, ny: the image resolution in the x- and y direction x_min, x_max: the limits of the x-axis in the region y_min, y_max: the limits of the y-axis in the region max_it: the m...
[ "def distances(self):\n\n\n # Distances between atoms and ESP points\n self.dist = np.zeros((self.natoms, self.npoints))\n self.dist_3 = np.zeros((self.natoms, self.npoints))\n self.dist_x = np.zeros((self.natoms, self.npoints))\n self.dist_y = np.zeros((self.natoms, self.npoints)...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialize the signal processor.
def __init__(self, *args, **kwargs): self.is_setup = False self._can_process_signals = False self._handlers = {} self._pending_user_changes = threading.local() super(SignalProcessor, self).__init__(*args, **kwargs)
[ "def __start_signal_processor(self):\n from core.signal_processor import OutputSignalQueueProcessor\n signal_processor = OutputSignalQueueProcessor(self.__signals_to_process_queue)\n signal_processor.start()", "def __init__(self):\n ThreadManager.__init__(self)\n\n # register si...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Whether the signal processor can currently process signals.
def can_process_signals(self): if not self._can_process_signals: try: SiteConfiguration.objects.get_current() self._can_process_signals = True except ObjectDoesNotExist: pass return self._can_process_signals
[ "def can_register_for_schedule_slot_notifications(self):\n return # boolean", "async def can_process(self, incoming: Incoming) -> bool:\n raise NotImplementedError() # pragma: no cover", "def can_register_for_event_notifications(self):\n return # boolean", "def can_register_for_schedul...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Update the search index when an object is deleted. If there's any error writing to the search backend, the error will be caught and logged.
def handle_delete(self, **kwargs): try: super(SignalProcessor, self).handle_delete(**kwargs) except Exception as e: logger.error('Error updating the search index. Check to ' 'make sure the search backend is running and ' 'configur...
[ "def update_object(self, instance, **kwargs):\n\n if self.should_index(instance):\n logging.info('Updating search index %r' % get_identifier(instance))\n super(CommonSearchIndex, self).update_object(instance, **kwargs)\n return True\n else:\n self.remove_obj...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Handle a Group.users relation changing.
def _handle_group_m2m_changed(self, instance, action, pk_set, reverse, **kwargs): backend = search_backend_registry.current_backend if not (backend and search_backend_registry.on_the_fly_indexing_enabled): return if not hasattr(self...
[ "def test_update_users_of_a_group(self):\n profile = self.env['res.users'].create(\n {'name': 'P', 'login': 'p_login', 'is_user_profile': True})\n user = self.env['res.users'].create(\n {'name': 'U', 'login': 'u_login', 'user_profile_id': profile.id})\n self.group1.with_co...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
`context` can either be a Tensor of rank 0 indicating no context, a Tensor of rank 1 indicating that the same context should be used for all samples, or a Tensor of rank 2 where the first dimension is equal to `n`, indicating a different context for each sample.
def sample(self, n, context=tf.constant(0.)): # Duplicate context for each sample if tf.rank(context) == 1: context = tf.tile(context[None, :], [n, 1]) # Start with random noise height, width, channels = self.image_shape n_pixels = height * width samples = tf...
[ "def tile_to_match_context(net, context):\n with tf.name_scope('tile_to_context'):\n num_samples = tf.shape(context)[1]\n net_examples = tf.expand_dims(net, 1) # [batch_size, 1, ...]\n\n net_ndim = len(net_examples.get_shape().as_list())\n # Tile net by num_samples in axis=1.\n multiples = [1]*net_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
recv([port[, addr[, buf_size]]]) waits for a datagram and returns the data.
def recv(port=50000, addr="239.192.1.100", buf_size=1024): # Create the socket s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Set some options to make it multicast-friendly s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) try: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) except Attri...
[ "def recvfrom(self, num_bytes, flags=None):\r\n try:\r\n # This is the old 2.1 behaviour\r\n #assert self.sock_impl\r\n # This is amak's preferred interpretation\r\n #raise error(errno.ENOTCONN, \"Recvfrom on unbound udp socket meaningless operation\")\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Resource reference to the secret, i.e. subs/rg/profile/secret
def secret(self) -> Optional[pulumi.Input['ResourceReferenceArgs']]: return pulumi.get(self, "secret")
[ "def get_secret(self):\r\n return self.secret", "def _get_secret(\n self,\n cred: Dict,\n name: str | None = None,\n secret_field: str | None = None,\n ) -> str | None:\n # from literal 'secret' property\n secret = cred.get('secret')\n if ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines how Frontdoor caches requests that include query strings. You can ignore any query strings when caching, ignore specific query strings, cache every request with a unique URL, or cache specific query strings.
def query_string_caching_behavior(self) -> Optional[pulumi.Input[Union[str, 'AfdQueryStringCachingBehavior']]]: return pulumi.get(self, "query_string_caching_behavior")
[ "def dynCache():\n pass", "def get_cached_urls(self):\n if not self._cached:\n self._cached = self.get_urls()\n for appcache in self.registry:\n self._cached.update(appcache.get_assets(self.request))\n self._cached.update(self._external_appcaches['cached']...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates whether content compression is enabled. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on AzureFrontDoor when requested content is smaller than 1 byte or larger than 1 MB.
def is_compression_enabled(self) -> Optional[pulumi.Input[Union[str, 'RuleIsCompressionEnabled']]]: return pulumi.get(self, "is_compression_enabled")
[ "def enableHTTPCompression(self, REQUEST={}, force=0, disable=0, query=0):\n if query:\n return self.use_HTTP_content_compression\n\n elif disable:\n # in the future, a gzip cache manager will need to ensure that\n # compression is off\n self.use_HTTP_conten...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
List of content types on which compression applies. The value should be a valid MIME type.
def content_types_to_compress(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: return pulumi.get(self, "content_types_to_compress")
[ "def valid_content_types() -> List[str]:", "def get_accepted_content_types(request):\n def qualify(raw_content_type):\n parts = raw_content_type.split(';', 1)\n if len(parts) == 2:\n match = re.match(\n r'(^|;)q=(0(\\.\\d{,3})?|1(\\.0{,3})?)(;|$)',\n parts...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicates whether content compression is enabled on AzureFrontDoor. Default value is false. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on AzureFrontDoor when requested content is smaller than 1 byte or larger than 1 MB.
def is_compression_enabled(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "is_compression_enabled")
[ "def is_compression_enabled(self) -> Optional[pulumi.Input[Union[str, 'RuleIsCompressionEnabled']]]:\n return pulumi.get(self, \"is_compression_enabled\")", "def enableHTTPCompression(self, REQUEST={}, force=0, disable=0, query=0):\n if query:\n return self.use_HTTP_content_compression\n\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Describes if the custom rule is in enabled or disabled state. Defaults to Enabled if not specified.
def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'CustomRuleEnabledState']]]: return pulumi.get(self, "enabled_state")
[ "def GetEnabled(self):\n return self._is_enabled", "def is_custom_mode_enabled(base_mode,custom_mode):\n if base_mode & mavlink.MAV_MODE_FLAG_CUSTOM_MODE_ENABLED:\n return True\n else:\n return False", "def get_enabled(self):\n\n return self.proxied.enabled", "def is_enabled(...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Version of the secret to be used
def secret_version(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "secret_version")
[ "def get_secret(self):\r\n return self.secret", "def get_secret_key(self) -> str:\n if self.secret_key is None:\n self.secret_key = secrets.token_urlsafe(32)\n self.save_ini_file()\n return self.secret_key", "def getSecretKey(self) -> bytes:\r\n return self.secr...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Health probe settings to the origin that is used to determine the health of the origin.
def health_probe_settings(self) -> Optional[pulumi.Input['HealthProbeParametersArgs']]: return pulumi.get(self, "health_probe_settings")
[ "def check_health(self):\n pass", "def __setHealth(self,health):\n\t\tself.health = health", "def createHealth(self, gridSize: list):\n healthChart = list(reversed([gridSize[0] * i for i in range(1, 6)]))\n self.externHealth = healthChart[self.severity]\n self.trueHealth = np.random....
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Time in minutes to shift the traffic to the endpoint gradually when an unhealthy endpoint comes healthy or a new endpoint is added. Default is 10 mins. This property is currently not supported.
def traffic_restoration_time_to_healed_or_new_endpoints_in_minutes(self) -> Optional[pulumi.Input[int]]: return pulumi.get(self, "traffic_restoration_time_to_healed_or_new_endpoints_in_minutes")
[ "def extend_refresh_timer(self):\n # factor order is(chill out to the next access):\n # 1(5min) 2(10min) 3(15min) 6(1hour) 12(1hour) 24(2hour) 48(4hour) 96(8hour) then i make reset in 'has_error()'\n # the last 2 hours of the day will be: 1(5min) 2(10min) 3(15min) 6(1hour) 12(1hour)\n if...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The Alias of the Private Link resource. Populating this optional field indicates that this origin is 'Private'
def private_link_alias(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "private_link_alias")
[ "def private_link(self) -> Optional[pulumi.Input['FrontdoorOriginPrivateLinkArgs']]:\n return pulumi.get(self, \"private_link\")", "def private_link_state(self) -> Optional[str]:\n return pulumi.get(self, \"private_link_state\")", "def alias(self):\n return self.sys_info['alias']", "def a...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A custom message to be included in the approval request to connect to the Private Link.
def private_link_approval_message(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "private_link_approval_message")
[ "def get_request_msg(self) -> str:", "def privateMessage(con, nick, message):", "def send_approval_notification(self):\n if self.channel:\n link = \"\".join([\"http://\", Site.objects.get_current().domain, self.approval_link()])\n message = render_to_string('email/approval_notificat...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The location of the Private Link resource. Required only if 'privateLinkResourceId' is populated
def private_link_location(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "private_link_location")
[ "def private_link(self) -> Optional[pulumi.Input['FrontdoorOriginPrivateLinkArgs']]:\n return pulumi.get(self, \"private_link\")", "def private_link_state(self) -> Optional[str]:\n return pulumi.get(self, \"private_link_state\")", "def update_private_link_resource_policies(\n self,\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the cache expiration action for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['CacheExpirationActionParametersArgs']): pulumi.set(__self__, "name", 'CacheExpiration') pulumi.set(__self__, "parameters", parameters)
[ "def set_expiration(self,e):\n _ldns.ldns_key_set_expiration(self,e)\n #parameters: ldns_key *,uint32_t,\n #retvals: ", "def _expiration(self, k: K) -> None:\n self.telemetry[\"expiration\"] += 1", "def update_expiration_for_hit(HITId=None, ExpireAt=None):\n pass", "def schedule...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the cache key query string action for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['CacheKeyQueryStringActionParametersArgs']): pulumi.set(__self__, "name", 'CacheKeyQueryString') pulumi.set(__self__, "parameters", parameters)
[ "def _build_cache_key(self, *args):\n return self.key if not self.key_mod else self.key % tuple(args)", "def cache_key(self, url):\n\n return f\"IXF-CACHE-{url}\"", "def render_cachekey(fun, self):\n context = aq_inner(self.context)\n return \"\".join((\n api.portal.get().absolute_url...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the ClientPort condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['ClientPortMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'ClientPort') pulumi.set(__self__, "parameters", parameters)
[ "def requires_port(self):\n return self in {self.__class__.UDP, self.__class__.TCP}", "def __init__(__self__, *,\n name: pulumi.Input[str],\n parameters: pulumi.Input['ServerPortMatchConditionParametersArgs']):\n pulumi.set(__self__, \"name\", 'ServerPort')\n p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the Cookies condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['CookiesMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'Cookies') pulumi.set(__self__, "parameters", parameters)
[ "def require_set_cookie_string(self, strict=False):\n if strict:\n return self._require_set_cookie_string_strict()\n try:\n name, value = self.require_name_value_pair()\n except ValueError:\n name, value = None, None\n if self.parse(b';'):\n at...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the HostName condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['HostNameMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'HostName') pulumi.set(__self__, "parameters", parameters)
[ "def get_host_name(self):\n return self.__get_value(\"agentLevelParams/hostname\")", "def host_name(self):\n return self.__host_name", "def host(name):\n return socket.gethostname() == name", "def route_host_name(self) -> Optional[str]:\n return pulumi.get(self, \"route_host_name\")", ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the HttpVersion condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['HttpVersionMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'HttpVersion') pulumi.set(__self__, "parameters", parameters)
[ "def _match_version_string(self, subject, req):\r\n match = self.version_uri_regex.match(subject)\r\n if match:\r\n major_version, minor_version = match.groups(0)\r\n major_version = int(major_version)\r\n minor_version = int(minor_version)\r\n req.environ['...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the IsDevice condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['IsDeviceMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'IsDevice') pulumi.set(__self__, "parameters", parameters)
[ "def condition(self, device, log):\n path = device.deviceClass().getPrimaryUrlPath()\n\n if path.startswith(\"/zport/dmd/Devices/Server/Cmd\"):\n result = device.os.uname == \"Linux\"\n else:\n result = True\n\n return result", "def IsDevice(self):\n if self._s...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the PostArgs condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['PostArgsMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'PostArgs') pulumi.set(__self__, "parameters", parameters)
[ "def set_post_conditions(self, target_host, capability_id):\n\n logger.debug(\n \"Setting post-conditions for capability: %s\", capability_id\n )\n\n postconditions = self.capabilities[capability_id].postconditions\n\n self.update_status(capability_id)\n\n for condition...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the QueryString condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['QueryStringMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'QueryString') pulumi.set(__self__, "parameters", parameters)
[ "def parse_query(self, query_string):\n \n query_string = query_string.strip()\n if not query_string[:4] == \"GET \":\n raise ValueError('Invalid Query Format, must start with \"GET\"')\n\n query_string = query_string[4:]\n components = query_string.split('WHERE')\n\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RemoteAddress condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['RemoteAddressMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'RemoteAddress') pulumi.set(__self__, "parameters", parameters)
[ "def _default_allow_remote(self):\n try:\n addr = ipaddress.ip_address(self.ip)\n except ValueError:\n # Address is a hostname\n for info in socket.getaddrinfo(self.ip, self.port, 0, socket.SOCK_STREAM):\n addr = info[4][0]\n if not py3com...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RequestBody condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['RequestBodyMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'RequestBody') pulumi.set(__self__, "parameters", parameters)
[ "async def validation_of_body(\n self,\n resource_group_name: str,\n id: int,\n body: Optional[JSON] = None,\n *,\n content_type: str = \"application/json\",\n **kwargs: Any\n ) -> JSON:", "def _should_accept_regulated_payload(payload, condition):\n return no...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RequestHeader condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['RequestHeaderMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'RequestHeader') pulumi.set(__self__, "parameters", parameters)
[ "def __make_request_headers(self, teststep_dict, entry_json):\n teststep_headers = {}\n for header in entry_json[\"request\"].get(\"headers\", []):\n if header[\"name\"].lower() in IGNORE_REQUEST_HEADERS:\n continue\n\n teststep_headers[header[\"name\"]] = header[\...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RequestMethod condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['RequestMethodMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'RequestMethod') pulumi.set(__self__, "parameters", parameters)
[ "def toggleRequestMethod(self, request):\n # type: (bytearray) -> bytearray", "def _get_method(payload):\n if payload and \"RequestMethod\" in payload and payload[\"RequestMethod\"]:\n return payload[\"RequestMethod\"]\n\n raise InvalidRequestException(\"Payload is missing RequestM...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RequestScheme condition for the delivery rule.
def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input['RequestSchemeMatchConditionParametersArgs']): pulumi.set(__self__, "name", 'RequestScheme') pulumi.set(__self__, "parameters", parameters)
[ "def is_request(cls):\n\n if cls[TCP].sport != 502 and cls[TCP].dport == 502:\n return True\n elif cls[TCP].sport == 502 and cls[TCP].dport != 502:\n return False\n elif cls[TCP].sport == cls[TCP].dport == 502:\n return cls.guess_request_response # Define anoth...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the RequestUri condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['RequestUriMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'RequestUri')
    pulumi.set(__self__, "parameters", parameters)
[ "def _get_uri(payload):\n if payload and \"RequestUri\" in payload and payload[\"RequestUri\"]:\n return payload[\"RequestUri\"]\n\n return None", "def create_url_rules(self):\n\n def p(route):\n \"\"\"Prefix a route with the URL prefix.\"\"\"\n return f\"{sel...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the route configuration override action for the delivery rule. Only applicable to Frontdoor Standard/Premium Profiles.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['RouteConfigurationOverrideActionParametersArgs']):
    pulumi.set(__self__, "name", 'RouteConfigurationOverride')
    pulumi.set(__self__, "parameters", parameters)
[ "def rule_action_overrides(self) -> Optional[Sequence['outputs.WebAclRuleActionOverride']]:\n return pulumi.get(self, \"rule_action_overrides\")", "def create_url_rules(self):\n\n def p(route):\n \"\"\"Prefix a route with the URL prefix.\"\"\"\n return f\"{self.config.url_prefi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the ServerPort condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['ServerPortMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'ServerPort')
    pulumi.set(__self__, "parameters", parameters)
[ "def requires_port(self):\n return self in {self.__class__.UDP, self.__class__.TCP}", "def __init__(__self__, *,\n name: pulumi.Input[str],\n parameters: pulumi.Input['ClientPortMatchConditionParametersArgs']):\n pulumi.set(__self__, \"name\", 'ClientPort')\n p...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the SocketAddress condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['SocketAddrMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'SocketAddr')
    pulumi.set(__self__, "parameters", parameters)
[ "def broadcast(self):\n if self._module.version == 4 and (self._module.width - self._prefixlen) <= 1:\n return None\n else:\n return IPAddress(self._value | self._hostmask_int, self._module.version)", "def _make_broadcast_socket(self):\n self.broadcast_socket = socket.so...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the SslProtocol condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['SslProtocolMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'SslProtocol')
    pulumi.set(__self__, "parameters", parameters)
[ "def get_ssl_protocol(self):\n return self._ssl_protocol", "def get_force_https_protocol_value(self):\n return getattr(ssl, self.get_force_https_protocol_name())", "def test_protocol_sslv3(self):\n if support.verbose:\n sys.stdout.write(\"\\n\")\n try_protocol_comb...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the UrlFileExtension condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['UrlFileExtensionMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'UrlFileExtension')
    pulumi.set(__self__, "parameters", parameters)
[ "def get_url_extension(self):", "def _is_with_extension(self, filename):\n return self._extension in filename", "def test_extension_file(self):\n\n def test_cmp(value, expected_value):\n return value in expected_value\n\n # we named our named file: named_file.jpg\n file_fi...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the UrlFileName condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['UrlFileNameMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'UrlFileName')
    pulumi.set(__self__, "parameters", parameters)
[ "def __init__(__self__, *,\n name: pulumi.Input[str],\n parameters: pulumi.Input['UrlFileExtensionMatchConditionParametersArgs']):\n pulumi.set(__self__, \"name\", 'UrlFileExtension')\n pulumi.set(__self__, \"parameters\", parameters)", "def hasfilename(self):\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Defines the UrlPath condition for the delivery rule.
def __init__(__self__, *,
             name: pulumi.Input[str],
             parameters: pulumi.Input['UrlPathMatchConditionParametersArgs']):
    pulumi.set(__self__, "name", 'UrlPath')
    pulumi.set(__self__, "parameters", parameters)
[ "def create_url_rules(self):\n\n def p(route):\n \"\"\"Prefix a route with the URL prefix.\"\"\"\n return f\"{self.config.url_prefix}{route}\"\n\n routes = self.config.routes\n url_rules = super(RDMRecordResource, self).create_url_rules()\n url_rules += [\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A list of the delivery rules.
def rules(self) -> pulumi.Input[Sequence[pulumi.Input['DeliveryRuleArgs']]]:
    return pulumi.get(self, "rules")
[ "def get_rules():\n # Removed years due to 1970 bug\n # Removed nationality due to discussiona at\n # https://www.wikidata.org/wiki/User_talk:Andr%C3%A9_Costa_%28WMSE%29#AndreCostaWMSE-bot_adding_nationality\n rules = {\n # u'deathDate': None,\n u'deathPlace': None,...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Action of the geo filter, i.e. allow or block access.
def action(self) -> pulumi.Input['GeoFilterActions']:
    return pulumi.get(self, "action")
[ "def allow_access(self, share, access, share_server):", "def authorize(self):\n return True", "def esri_access(self, value):\r\n if self._portal.is_arcgisonline:\r\n if value == True:\r\n ret = self._portal.update_user(self._user_id,\r\n ...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Name of the header to modify
def header_name(self) -> pulumi.Input[str]:
    return pulumi.get(self, "header_name")
[ "def header_name(self, header_name):\n\n self._header_name = header_name", "def api_header_name(self, service):\n pass", "def simulator_header_name(self, services):\n pass", "def changeHeader(self):\n col = self.table_widget.currentColumn()\n\n text, ok = QInputDialog.getTex...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The number of seconds between health probes. Default is 240 seconds.
def probe_interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
    return pulumi.get(self, "probe_interval_in_seconds")
[ "def _get_ping_timeout_duration(self):\n if \"PingTimeoutDuration\" in self._config:\n return self._config[\"PingTimeoutDuration\"] / 1000\n else:\n return 5", "def sleep_time(self):\n return self._sleep", "def seconds_to_sleep(self):\n if self.next_request_time...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The type of health probe request that is made.
def probe_request_type(self) -> Optional[pulumi.Input['HealthProbeRequestType']]:
    return pulumi.get(self, "probe_request_type")
[ "def get_type(self) -> str:\n return self.request_type", "def get_request_type(self, ):\n return self._request_type", "def get_metric_type(self):\n search_for_metric = self.raw_metric['type']\n if search_for_metric in self.supported_metrics.keys():\n return self.supported_...
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }